Package madgraph :: Package iolibs :: Module export_v4
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  from madgraph.iolibs.helas_call_writers import HelasCallWriter 
  16  """Methods and classes to export matrix elements to v4 format.""" 
  17   
  18  import copy 
  19  from cStringIO import StringIO 
  20  from distutils import dir_util 
  21  import itertools 
  22  import fractions 
  23  import glob 
  24  import logging 
  25  import math 
  26  import os 
  27  import re 
  28  import shutil 
  29  import subprocess 
  30  import sys 
  31  import time 
  32  import traceback 
  33   
  34  import aloha 
  35   
  36  import madgraph.core.base_objects as base_objects 
  37  import madgraph.core.color_algebra as color 
  38  import madgraph.core.helas_objects as helas_objects 
  39  import madgraph.iolibs.drawing_eps as draw 
  40  import madgraph.iolibs.files as files 
  41  import madgraph.iolibs.group_subprocs as group_subprocs 
  42  import madgraph.iolibs.file_writers as writers 
  43  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  44  import madgraph.iolibs.template_files as template_files 
  45  import madgraph.iolibs.ufo_expression_parsers as parsers 
  46  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  47  import madgraph.interface.common_run_interface as common_run_interface 
  48  import madgraph.various.diagram_symmetry as diagram_symmetry 
  49  import madgraph.various.misc as misc 
  50  import madgraph.various.banner as banner_mod 
  51  import madgraph.various.process_checks as process_checks 
  52  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  53  import aloha.create_aloha as create_aloha 
  54  import models.import_ufo as import_ufo 
  55  import models.write_param_card as param_writer 
  56  import models.check_param_card as check_param_card 
  57   
  58   
  59  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  60  from madgraph.iolibs.files import cp, ln, mv 
  61   
  62  from madgraph import InvalidCmd 
  63   
  64  pjoin = os.path.join 
  65   
  66  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  67  logger = logging.getLogger('madgraph.export_v4') 
  68   
  69  default_compiler= {'fortran': 'gfortran', 
  70                         'f2py': 'f2py', 
  71                         'cpp':'g++'} 
class VirtualExporter(object):
    """Base interface that MadGraph uses to drive any output format.

    Concrete exporters override the hooks below; the class attributes tell
    the framework how to interact with the exporter.
    """

    # Exporter variables that modify the way madgraph interacts with this class.

    grouped_mode = 'madevent'
    # This variable changes the type of object called within
    # 'generate_subprocess_directory' functions.
    #  False to avoid grouping (only identical matrix element are merged)
    #  'madevent' group the massless quark and massless lepton
    #  'madweight' group the gluon with the massless quark

    sa_symmetry = False
    # If grouped_mode=False, uu~ and u~u will be called independently.
    # Putting sa_symmetry generates only one of the two matrix-elements.

    check = True
    # Ask madgraph to check if the directory already exists and propose to
    # the user to remove it first if this is the case.

    output = 'Template'
    # [Template, None, dir]
    # - Template, madgraph will call copy_template
    # - dir, madgraph will just create an empty directory for initialisation
    # - None, madgraph does nothing for initialisation

    exporter = 'v4'
    # language of the output: 'v4' for Fortran output, 'cpp' for C++ output

    def __init__(self, dir_path="", opt=None):
        """Install the helas-call customisation hook.

        opt (cmd_options) is a dictionary with all the optional arguments
        passed at output time.
        """
        # Activate some monkey patching for the helas call writer.
        helas_call_writers.HelasCallWriter.customize_argument_for_all_other_helas_object = \
            self.helas_call_writer_custom

    # helper function for customising the helas writer
    @staticmethod
    def custom_helas_call(call, arg):
        """Static method to customise the way aloha function calls are written.

        call is the default template for the call;
        arg is the dictionary used for the call.
        """
        return call, arg

    def helas_call_writer_custom(self, call, arg):
        """Bound wrapper handed to HelasCallWriter (replaces a lambda
        assignment with an equivalent, properly named method)."""
        return self.custom_helas_call(call, arg)

    def copy_template(self, model):
        """Hook called when output == 'Template'; no-op at this level."""
        return

    def generate_subprocess_directory(self, subproc_group, helicity_model, me=None):
        # generate_subprocess_directory(self, matrix_element, helicity_model,
        #                               me_number)   [for ungrouped]
        # Returns an integer stating the number of calls to helicity routines.
        return 0

    def convert_model(self, model, wanted_lorentz=[], wanted_couplings=[]):
        """Hook to write out the model; no-op at this level."""
        return

    def finalize(self, matrix_element, cmdhistory, MG5options, outputflag):
        """Hook called once all subprocess directories are generated."""
        return

    def pass_information_from_cmd(self, cmd):
        """Pass information from the command interface to the exporter.
        Please do not modify any object of the interface from the exporter.
        """
        return

    def modify_grouping(self, matrix_element):
        """Return (changed?, matrix_element); no regrouping by default."""
        return False, matrix_element

    def export_model_files(self, model_v4_path):
        # raise Exception(...) replaces the Python-2-only "raise E, msg" form;
        # the unreachable return after the raise was dropped.
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")

    def export_helas(self, HELAS_PATH):
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")
151 #=============================================================================== 152 # ProcessExporterFortran 153 #=============================================================================== 154 -class ProcessExporterFortran(VirtualExporter):
155 """Class to take care of exporting a set of matrix elements to 156 Fortran (v4) format.""" 157 158 default_opt = {'clean': False, 'complex_mass':False, 159 'export_format':'madevent', 'mp': False, 160 'v5_model': True, 161 'output_options':{} 162 } 163 grouped_mode = False 164
165 - def __init__(self, dir_path = "", opt=None):
166 """Initiate the ProcessExporterFortran with directory information""" 167 self.mgme_dir = MG5DIR 168 self.dir_path = dir_path 169 self.model = None 170 171 self.opt = dict(self.default_opt) 172 if opt: 173 self.opt.update(opt) 174 175 self.cmd_options = self.opt['output_options'] 176 177 #place holder to pass information to the run_interface 178 self.proc_characteristic = banner_mod.ProcCharacteristic() 179 # call mother class 180 super(ProcessExporterFortran,self).__init__(dir_path, opt)
181 182 183 #=========================================================================== 184 # process exporter fortran switch between group and not grouped 185 #===========================================================================
186 - def export_processes(self, matrix_elements, fortran_model):
187 """Make the switch between grouped and not grouped output""" 188 189 calls = 0 190 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 191 for (group_number, me_group) in enumerate(matrix_elements): 192 calls = calls + self.generate_subprocess_directory(\ 193 me_group, fortran_model, group_number) 194 else: 195 for me_number, me in enumerate(matrix_elements.get_matrix_elements()): 196 calls = calls + self.generate_subprocess_directory(\ 197 me, fortran_model, me_number) 198 199 return calls
200 201 202 #=========================================================================== 203 # create the run_card 204 #===========================================================================
205 - def create_run_card(self, matrix_elements, history):
206 """ """ 207 208 209 # bypass this for the loop-check 210 import madgraph.loop.loop_helas_objects as loop_helas_objects 211 if isinstance(matrix_elements, loop_helas_objects.LoopHelasMatrixElement): 212 matrix_elements = None 213 214 run_card = banner_mod.RunCard() 215 216 217 default=True 218 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 219 processes = [me.get('processes') for megroup in matrix_elements 220 for me in megroup['matrix_elements']] 221 elif matrix_elements: 222 processes = [me.get('processes') 223 for me in matrix_elements['matrix_elements']] 224 else: 225 default =False 226 227 if default: 228 run_card.create_default_for_process(self.proc_characteristic, 229 history, 230 processes) 231 232 233 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat')) 234 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))
235 236 237 #=========================================================================== 238 # copy the Template in a new directory. 239 #===========================================================================
240 - def copy_template(self, model):
241 """create the directory run_name as a copy of the MadEvent 242 Template, and clean the directory 243 """ 244 245 #First copy the full template tree if dir_path doesn't exit 246 if not os.path.isdir(self.dir_path): 247 assert self.mgme_dir, \ 248 "No valid MG_ME path given for MG4 run directory creation." 249 logger.info('initialize a new directory: %s' % \ 250 os.path.basename(self.dir_path)) 251 shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'), 252 self.dir_path, True) 253 # distutils.dir_util.copy_tree since dir_path already exists 254 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 255 self.dir_path) 256 # copy plot_card 257 for card in ['plot_card']: 258 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 259 try: 260 shutil.copy(pjoin(self.dir_path, 'Cards',card + '.dat'), 261 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 262 except IOError: 263 logger.warning("Failed to copy " + card + ".dat to default") 264 elif os.getcwd() == os.path.realpath(self.dir_path): 265 logger.info('working in local directory: %s' % \ 266 os.path.realpath(self.dir_path)) 267 # distutils.dir_util.copy_tree since dir_path already exists 268 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/LO'), 269 self.dir_path) 270 # for name in misc.glob('Template/LO/*', self.mgme_dir): 271 # name = os.path.basename(name) 272 # filname = pjoin(self.mgme_dir, 'Template','LO',name) 273 # if os.path.isfile(filename): 274 # files.cp(filename, pjoin(self.dir_path,name)) 275 # elif os.path.isdir(filename): 276 # shutil.copytree(filename, pjoin(self.dir_path,name), True) 277 # distutils.dir_util.copy_tree since dir_path already exists 278 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 279 self.dir_path) 280 # Copy plot_card 281 for card in ['plot_card']: 282 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 283 try: 284 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'), 285 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 286 
except IOError: 287 logger.warning("Failed to copy " + card + ".dat to default") 288 elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')): 289 assert self.mgme_dir, \ 290 "No valid MG_ME path given for MG4 run directory creation." 291 try: 292 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 293 except IOError: 294 MG5_version = misc.get_pkg_info() 295 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write(MG5_version['version']) 296 297 #Ensure that the Template is clean 298 if self.opt['clean']: 299 logger.info('remove old information in %s' % \ 300 os.path.basename(self.dir_path)) 301 if os.environ.has_key('MADGRAPH_BASE'): 302 misc.call([pjoin('bin', 'internal', 'clean_template'), 303 '--web'], cwd=self.dir_path) 304 else: 305 try: 306 misc.call([pjoin('bin', 'internal', 'clean_template')], \ 307 cwd=self.dir_path) 308 except Exception, why: 309 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 310 % (os.path.basename(self.dir_path),why)) 311 312 #Write version info 313 MG_version = misc.get_pkg_info() 314 open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 315 MG_version['version']) 316 317 # add the makefile in Source directory 318 filename = pjoin(self.dir_path,'Source','makefile') 319 self.write_source_makefile(writers.FileWriter(filename)) 320 321 # add the DiscreteSampler information 322 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'DiscreteSampler.f'), 323 pjoin(self.dir_path, 'Source')) 324 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'StringCast.f'), 325 pjoin(self.dir_path, 'Source')) 326 327 # We need to create the correct open_data for the pdf 328 self.write_pdf_opendata()
329 330 331 #=========================================================================== 332 # Call MadAnalysis5 to generate the default cards for this process 333 #===========================================================================
334 - def create_default_madanalysis5_cards(self, history, proc_defs, processes, 335 ma5_path, output_dir, levels = ['parton','hadron']):
336 """ Call MA5 so that it writes default cards for both parton and 337 post-shower levels, tailored for this particular process.""" 338 339 if len(levels)==0: 340 return 341 start = time.time() 342 logger.info('Generating MadAnalysis5 default cards tailored to this process') 343 try: 344 MA5_interpreter = common_run_interface.CommonRunCmd.\ 345 get_MadAnalysis5_interpreter(MG5DIR,ma5_path,loglevel=100) 346 except (Exception, SystemExit) as e: 347 logger.warning('Fail to create a MadAnalysis5 instance. Therefore the default analysis with MadAnalysis5 will be empty.') 348 return 349 if MA5_interpreter is None: 350 return 351 352 MA5_main = MA5_interpreter.main 353 for lvl in ['parton','hadron']: 354 if lvl in levels: 355 card_to_generate = pjoin(output_dir,'madanalysis5_%s_card_default.dat'%lvl) 356 try: 357 text = MA5_main.madgraph.generate_card(history, proc_defs, processes,lvl) 358 except (Exception, SystemExit) as e: 359 # keep the default card (skip only) 360 logger.warning('MadAnalysis5 failed to write a %s-level'%lvl+ 361 ' default analysis card for this process.') 362 logger.warning('Therefore, %s-level default analysis with MadAnalysis5 will be empty.'%lvl) 363 error=StringIO() 364 traceback.print_exc(file=error) 365 logger.debug('MadAnalysis5 error was:') 366 logger.debug('-'*60) 367 logger.debug(error.getvalue()[:-1]) 368 logger.debug('-'*60) 369 else: 370 open(card_to_generate,'w').write(text) 371 stop = time.time() 372 if stop-start >1: 373 logger.info('Cards created in %.2fs' % (stop-start))
374 375 #=========================================================================== 376 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 377 #===========================================================================
378 - def write_procdef_mg5(self, file_pos, modelname, process_str):
379 """ write an equivalent of the MG4 proc_card in order that all the Madevent 380 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 381 382 proc_card_template = template_files.mg4_proc_card.mg4_template 383 process_template = template_files.mg4_proc_card.process_template 384 process_text = '' 385 coupling = '' 386 new_process_content = [] 387 388 389 # First find the coupling and suppress the coupling from process_str 390 #But first ensure that coupling are define whithout spaces: 391 process_str = process_str.replace(' =', '=') 392 process_str = process_str.replace('= ', '=') 393 process_str = process_str.replace(',',' , ') 394 #now loop on the element and treat all the coupling 395 for info in process_str.split(): 396 if '=' in info: 397 coupling += info + '\n' 398 else: 399 new_process_content.append(info) 400 # Recombine the process_str (which is the input process_str without coupling 401 #info) 402 process_str = ' '.join(new_process_content) 403 404 #format the SubProcess 405 replace_dict = {'process': process_str, 406 'coupling': coupling} 407 process_text += process_template.substitute(replace_dict) 408 409 replace_dict = {'process': process_text, 410 'model': modelname, 411 'multiparticle':''} 412 text = proc_card_template.substitute(replace_dict) 413 414 if file_pos: 415 ff = open(file_pos, 'w') 416 ff.write(text) 417 ff.close() 418 else: 419 return replace_dict
420 421
422 - def pass_information_from_cmd(self, cmd):
423 """Pass information for MA5""" 424 425 self.proc_defs = cmd._curr_proc_defs
426 427 #=========================================================================== 428 # Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz 429 #===========================================================================
430 - def finalize(self, matrix_elements, history='', mg5options={}, flaglist=[]):
431 """Function to finalize v4 directory, for inheritance.""" 432 433 self.create_run_card(matrix_elements, history) 434 self.create_MA5_cards(matrix_elements, history)
435
436 - def create_MA5_cards(self,matrix_elements,history):
437 """ A wrapper around the creation of the MA5 cards so that it can be 438 bypassed by daughter classes (i.e. in standalone).""" 439 if 'madanalysis5_path' in self.opt and not \ 440 self.opt['madanalysis5_path'] is None and not self.proc_defs is None: 441 processes = None 442 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 443 processes = [me.get('processes') for megroup in matrix_elements 444 for me in megroup['matrix_elements']] 445 elif matrix_elements: 446 processes = [me.get('processes') 447 for me in matrix_elements['matrix_elements']] 448 449 self.create_default_madanalysis5_cards( 450 history, self.proc_defs, processes, 451 self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'), 452 levels = ['hadron','parton']) 453 454 for level in ['hadron','parton']: 455 # Copying these cards turn on the use of MadAnalysis5 by default. 456 if os.path.isfile(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level)): 457 shutil.copy(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level), 458 pjoin(self.dir_path,'Cards','madanalysis5_%s_card.dat'%level))
459 460 #=========================================================================== 461 # Create the proc_characteristic file passing information to the run_interface 462 #===========================================================================
463 - def create_proc_charac(self, matrix_elements=None, history="", **opts):
464 465 self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics'))
466 467 #=========================================================================== 468 # write_matrix_element_v4 469 #===========================================================================
470 - def write_matrix_element_v4(self):
471 """Function to write a matrix.f file, for inheritance. 472 """ 473 pass
474 475 #=========================================================================== 476 # write_pdf_opendata 477 #===========================================================================
    def write_pdf_opendata(self):
        """ modify the pdf opendata file, to allow direct access to cluster node
        repository if configure"""
        # Two templates are specialised here: Source/PDF/opendata.f (native
        # pdf tables) and Source/PDF/pdfwrap_lhapdf.f (LHAPDF sets).  When
        # self.opt["cluster_local_path"] is set, extra Fortran fallback
        # lookups rooted at that path are spliced into each template;
        # otherwise the placeholders are left empty.
        # NOTE(review): the exact indentation of the Fortran snippets below
        # could not be fully recovered from the rendered page — confirm
        # against the repository before relying on column positions.

        if not self.opt["cluster_local_path"]:
            changer = {"pdf_systemwide": ""}
        else:
            # Chain of OPEN attempts: each failed open (ERR=n) falls through
            # to the next candidate location under the cluster-local path.
            to_add = """
              tempname='%(path)s'//Tablefile
              open(IU,file=tempname,status='old',ERR=1)
              return
 1            tempname='%(path)s/Pdfdata/'//Tablefile
              open(IU,file=tempname,status='old',ERR=2)
              return
 2            tempname='%(path)s/lhapdf'//Tablefile
              open(IU,file=tempname,status='old',ERR=3)
              return
 3            tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile
              open(IU,file=tempname,status='old',ERR=4)
              return
 4            tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile
              open(IU,file=tempname,status='old',ERR=5)
              return
""" % {"path" : self.opt["cluster_local_path"]}

            changer = {"pdf_systemwide": to_add}

        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "opendata.f"))
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"),"r").read()
        ff.writelines(template % changer)

        # Do the same for lhapdf set
        if not self.opt["cluster_local_path"]:
            changer = {"cluster_specific_path": ""}
        else:
            # Same idea for LHAPDF: probe candidate PDFsets directories under
            # the cluster-local path before falling back to ./PDFsets.
            to_add="""
              LHAPath='%(path)s/PDFsets'
              Inquire(File=LHAPath, exist=exists)
              if(exists)return
              LHAPath='%(path)s/../lhapdf/pdfsets/6.1/'
              Inquire(File=LHAPath, exist=exists)
              if(exists)return
              LHAPath='%(path)s/../lhapdf/pdfsets/'
              Inquire(File=LHAPath, exist=exists)
              if(exists)return
              LHAPath='./PDFsets'
""" % {"path" : self.opt["cluster_local_path"]}
            changer = {"cluster_specific_path": to_add}

        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"))
        #ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w")
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"),"r").read()
        ff.writelines(template % changer)

        return
535 536 537 538 #=========================================================================== 539 # write_maxparticles_file 540 #===========================================================================
541 - def write_maxparticles_file(self, writer, matrix_elements):
542 """Write the maxparticles.inc file for MadEvent""" 543 544 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 545 maxparticles = max([me.get_nexternal_ninitial()[0] for me in \ 546 matrix_elements.get('matrix_elements')]) 547 else: 548 maxparticles = max([me.get_nexternal_ninitial()[0] \ 549 for me in matrix_elements]) 550 551 lines = "integer max_particles\n" 552 lines += "parameter(max_particles=%d)" % maxparticles 553 554 # Write the file 555 writer.writelines(lines) 556 557 return True
558 559 560 #=========================================================================== 561 # export the model 562 #===========================================================================
563 - def export_model_files(self, model_path):
564 """Configure the files/link of the process according to the model""" 565 566 # Import the model 567 for file in os.listdir(model_path): 568 if os.path.isfile(pjoin(model_path, file)): 569 shutil.copy2(pjoin(model_path, file), \ 570 pjoin(self.dir_path, 'Source', 'MODEL'))
571 572 586 594 595 596 #=========================================================================== 597 # export the helas routine 598 #===========================================================================
599 - def export_helas(self, helas_path):
600 """Configure the files/link of the process according to the model""" 601 602 # Import helas routine 603 for filename in os.listdir(helas_path): 604 filepos = pjoin(helas_path, filename) 605 if os.path.isfile(filepos): 606 if filepos.endswith('Makefile.template'): 607 cp(filepos, self.dir_path + '/Source/DHELAS/Makefile') 608 elif filepos.endswith('Makefile'): 609 pass 610 else: 611 cp(filepos, self.dir_path + '/Source/DHELAS')
612 # following lines do the same but whithout symbolic link 613 # 614 #def export_helas(mgme_dir, dir_path): 615 # 616 # # Copy the HELAS directory 617 # helas_dir = pjoin(mgme_dir, 'HELAS') 618 # for filename in os.listdir(helas_dir): 619 # if os.path.isfile(pjoin(helas_dir, filename)): 620 # shutil.copy2(pjoin(helas_dir, filename), 621 # pjoin(dir_path, 'Source', 'DHELAS')) 622 # shutil.move(pjoin(dir_path, 'Source', 'DHELAS', 'Makefile.template'), 623 # pjoin(dir_path, 'Source', 'DHELAS', 'Makefile')) 624 # 625 626 #=========================================================================== 627 # generate_subprocess_directory 628 #===========================================================================
629 - def generate_subprocess_directory(self, matrix_element, 630 fortran_model, 631 me_number):
632 """Routine to generate a subprocess directory (for inheritance)""" 633 634 pass
635 636 #=========================================================================== 637 # get_source_libraries_list 638 #===========================================================================
639 - def get_source_libraries_list(self):
640 """ Returns the list of libraries to be compiling when compiling the 641 SOURCE directory. It is different for loop_induced processes and 642 also depends on the value of the 'output_dependencies' option""" 643 644 return ['$(LIBDIR)libdhelas.$(libext)', 645 '$(LIBDIR)libpdf.$(libext)', 646 '$(LIBDIR)libmodel.$(libext)', 647 '$(LIBDIR)libcernlib.$(libext)', 648 '$(LIBDIR)libbias.$(libext)']
649 650 #=========================================================================== 651 # write_source_makefile 652 #===========================================================================
653 - def write_source_makefile(self, writer):
654 """Write the nexternal.inc file for MG4""" 655 656 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 657 set_of_lib = ' '.join(['$(LIBRARIES)']+self.get_source_libraries_list()) 658 if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'): 659 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 660 MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param 661 param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n''' 662 else: 663 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 664 param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n''' 665 666 replace_dict= {'libraries': set_of_lib, 667 'model':model_line, 668 'additional_dsample': '', 669 'additional_dependencies':''} 670 671 if writer: 672 text = open(path).read() % replace_dict 673 writer.write(text) 674 675 return replace_dict
676 677 #=========================================================================== 678 # write_nexternal_madspin 679 #===========================================================================
680 - def write_nexternal_madspin(self, writer, nexternal, ninitial):
681 """Write the nexternal_prod.inc file for madspin""" 682 683 replace_dict = {} 684 685 replace_dict['nexternal'] = nexternal 686 replace_dict['ninitial'] = ninitial 687 688 file = """ \ 689 integer nexternal_prod 690 parameter (nexternal_prod=%(nexternal)d) 691 integer nincoming_prod 692 parameter (nincoming_prod=%(ninitial)d)""" % replace_dict 693 694 # Write the file 695 if writer: 696 writer.writelines(file) 697 return True 698 else: 699 return replace_dict
700 701 #=========================================================================== 702 # write_helamp_madspin 703 #===========================================================================
704 - def write_helamp_madspin(self, writer, ncomb):
705 """Write the helamp.inc file for madspin""" 706 707 replace_dict = {} 708 709 replace_dict['ncomb'] = ncomb 710 711 file = """ \ 712 integer ncomb1 713 parameter (ncomb1=%(ncomb)d) 714 double precision helamp(ncomb1) 715 common /to_helamp/helamp """ % replace_dict 716 717 # Write the file 718 if writer: 719 writer.writelines(file) 720 return True 721 else: 722 return replace_dict
723 724 725 726 #=========================================================================== 727 # write_nexternal_file 728 #===========================================================================
729 - def write_nexternal_file(self, writer, nexternal, ninitial):
730 """Write the nexternal.inc file for MG4""" 731 732 replace_dict = {} 733 734 replace_dict['nexternal'] = nexternal 735 replace_dict['ninitial'] = ninitial 736 737 file = """ \ 738 integer nexternal 739 parameter (nexternal=%(nexternal)d) 740 integer nincoming 741 parameter (nincoming=%(ninitial)d)""" % replace_dict 742 743 # Write the file 744 if writer: 745 writer.writelines(file) 746 return True 747 else: 748 return replace_dict
749 #=========================================================================== 750 # write_pmass_file 751 #===========================================================================
752 - def write_pmass_file(self, writer, matrix_element):
753 """Write the pmass.inc file for MG4""" 754 755 model = matrix_element.get('processes')[0].get('model') 756 757 lines = [] 758 for wf in matrix_element.get_external_wavefunctions(): 759 mass = model.get('particle_dict')[wf.get('pdg_code')].get('mass') 760 if mass.lower() != "zero": 761 mass = "abs(%s)" % mass 762 763 lines.append("pmass(%d)=%s" % \ 764 (wf.get('number_external'), mass)) 765 766 # Write the file 767 writer.writelines(lines) 768 769 return True
770 771 #=========================================================================== 772 # write_ngraphs_file 773 #===========================================================================
774 - def write_ngraphs_file(self, writer, nconfigs):
775 """Write the ngraphs.inc file for MG4. Needs input from 776 write_configs_file.""" 777 778 file = " integer n_max_cg\n" 779 file = file + "parameter (n_max_cg=%d)" % nconfigs 780 781 # Write the file 782 writer.writelines(file) 783 784 return True
785 786 #=========================================================================== 787 # write_leshouche_file 788 #===========================================================================
789 - def write_leshouche_file(self, writer, matrix_element):
790 """Write the leshouche.inc file for MG4""" 791 792 # Write the file 793 writer.writelines(self.get_leshouche_lines(matrix_element, 0)) 794 795 return True
796 797 #=========================================================================== 798 # get_leshouche_lines 799 #===========================================================================
    def get_leshouche_lines(self, matrix_element, numproc):
        """Build the Fortran DATA statements for leshouche.inc (MG4).

        For each process of the matrix element, emits IDUP (PDG codes of the
        external legs), and for the first process/subprocess only, MOTHUP
        (mother indices) and ICOLUP (colour-flow assignments).
        Note: "%2r"/"%3r" render the repr of an int, i.e. its decimal form
        padded to the given width for Fortran alignment.
        """

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        lines = []
        for iproc, proc in enumerate(matrix_element.get('processes')):
            legs = proc.get_legs_with_decays()
            lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \
                         (iproc + 1, numproc+1, nexternal,
                          ",".join([str(l.get('id')) for l in legs])))
            if iproc == 0 and numproc == 0:
                # MOTHUP: initial-state legs have no mother (0); final-state
                # legs point at beam particle i.
                for i in [1, 2]:
                    lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \
                             (i, nexternal,
                              ",".join([ "%3r" % 0 ] * ninitial + \
                                       [ "%3r" % i ] * (nexternal - ninitial))))

            # Here goes the color connections corresponding to the JAMPs
            # Only one output, for the first subproc!
            if iproc == 0:
                # If no color basis, just output trivial color flow
                if not matrix_element.get('color_basis'):
                    for i in [1, 2]:
                        lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \
                                 (i, numproc+1,nexternal,
                                  ",".join([ "%3r" % 0 ] * nexternal)))
                else:
                    # First build a color representation dictionnary
                    # (colour rep of each leg, sign-flipped for initial state).
                    repr_dict = {}
                    for l in legs:
                        repr_dict[l.get('number')] = \
                            proc.get('model').get_particle(l.get('id')).get_color()\
                            * (-1)**(1+l.get('state'))
                    # Get the list of color flows
                    color_flow_list = \
                        matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
                                                                                   ninitial)
                    # And output them properly
                    for cf_i, color_flow_dict in enumerate(color_flow_list):
                        for i in [0, 1]:
                            lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \
                                     (i + 1, cf_i + 1, numproc+1, nexternal,
                                      ",".join(["%3r" % color_flow_dict[l.get('number')][i] \
                                                for l in legs])))

        return lines
849 850 851 852 853 #=========================================================================== 854 # write_maxamps_file 855 #===========================================================================
856 - def write_maxamps_file(self, writer, maxamps, maxflows, 857 maxproc,maxsproc):
858 """Write the maxamps.inc file for MG4.""" 859 860 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 861 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 862 (maxamps, maxflows) 863 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 864 (maxproc, maxsproc) 865 866 # Write the file 867 writer.writelines(file) 868 869 return True
870 871 872 #=========================================================================== 873 # Routines to output UFO models in MG4 format 874 #=========================================================================== 875
    def convert_model(self, model, wanted_lorentz = [],
                      wanted_couplings = []):
        """ Create a full valid MG4 model from a MG5 model (coming from UFO)

        Writes the MODEL and DHELAS source directories under self.dir_path,
        restricted to wanted_couplings / wanted_lorentz when given.
        NOTE: the default arguments are mutable lists; they are only read
        here, never mutated, so the usual shared-default pitfall does not
        bite.
        """

        # Make sure aloha is in quadruple precision if needed
        old_aloha_mp=aloha.mp_precision
        aloha.mp_precision=self.opt['mp']

        # create the MODEL
        write_dir=pjoin(self.dir_path, 'Source', 'MODEL')
        # NOTE(review): relies on self.opt supporting '+' with
        # self.proc_characteristic (not a plain dict merge) — confirm type.
        model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic)
        model_builder.build(wanted_couplings)

        # Backup the loop mode, because it can be changed in what follows.
        old_loop_mode = aloha.loop_mode

        # Create the aloha model or use the existing one (for loop exporters
        # this is useful as the aloha model will be used again in the
        # LoopHelasMatrixElements generated). We do not save the model generated
        # here if it didn't exist already because it would be a waste of
        # memory for tree level applications since aloha is only needed at the
        # time of creating the aloha fortran subroutines.
        if hasattr(self, 'aloha_model'):
            aloha_model = self.aloha_model
        else:
            aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath')))
        aloha_model.add_Lorentz_object(model.get('lorentz'))

        # Compute the subroutines
        if wanted_lorentz:
            aloha_model.compute_subset(wanted_lorentz)
        else:
            aloha_model.compute_all(save=False)

        # Write them out
        write_dir=pjoin(self.dir_path, 'Source', 'DHELAS')
        aloha_model.write(write_dir, 'Fortran')

        # Revert the original aloha loop mode
        aloha.loop_mode = old_loop_mode

        #copy Helas Template
        cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile')
        # Use the loop-capable helas functions if any wanted Lorentz
        # structure carries an 'L' (loop) tag.
        if any([any(['L' in tag for tag in d[1]]) for d in wanted_lorentz]):
            cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f',
               write_dir+'/aloha_functions.f')
            aloha_model.loop_mode = False
        else:
            cp(MG5DIR + '/aloha/template_files/aloha_functions.f',
               write_dir+'/aloha_functions.f')
        create_aloha.write_aloha_file_inc(write_dir, '.f', '.o')

        # Make final link in the Process
        self.make_model_symbolic_link()

        # Re-establish original aloha mode
        aloha.mp_precision=old_aloha_mp
933 934 935 #=========================================================================== 936 # Helper functions 937 #===========================================================================
938 - def modify_grouping(self, matrix_element):
939 """allow to modify the grouping (if grouping is in place) 940 return two value: 941 - True/False if the matrix_element was modified 942 - the new(or old) matrix element""" 943 944 return False, matrix_element
945 946 #=========================================================================== 947 # Helper functions 948 #===========================================================================
949 - def get_mg5_info_lines(self):
950 """Return info lines for MG5, suitable to place at beginning of 951 Fortran files""" 952 953 info = misc.get_pkg_info() 954 info_lines = "" 955 if info and info.has_key('version') and info.has_key('date'): 956 info_lines = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \ 957 (info['version'], info['date']) 958 info_lines = info_lines + \ 959 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 960 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 961 else: 962 info_lines = "# Generated by MadGraph5_aMC@NLO\n" + \ 963 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 964 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 965 966 return info_lines
967
968 - def get_process_info_lines(self, matrix_element):
969 """Return info lines describing the processes for this matrix element""" 970 971 return"\n".join([ "C " + process.nice_string().replace('\n', '\nC * ') \ 972 for process in matrix_element.get('processes')])
973 974
975 - def get_helicity_lines(self, matrix_element,array_name='NHEL'):
976 """Return the Helicity matrix definition lines for this matrix element""" 977 978 helicity_line_list = [] 979 i = 0 980 for helicities in matrix_element.get_helicity_matrix(): 981 i = i + 1 982 int_list = [i, len(helicities)] 983 int_list.extend(helicities) 984 helicity_line_list.append(\ 985 ("DATA ("+array_name+"(I,%4r),I=1,%d) /" + \ 986 ",".join(['%2r'] * len(helicities)) + "/") % tuple(int_list)) 987 988 return "\n".join(helicity_line_list)
989
990 - def get_ic_line(self, matrix_element):
991 """Return the IC definition line coming after helicities, required by 992 switchmom in madevent""" 993 994 nexternal = matrix_element.get_nexternal_ninitial()[0] 995 int_list = range(1, nexternal + 1) 996 997 return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, 998 ",".join([str(i) for \ 999 i in int_list]))
1000
    def set_chosen_SO_index(self, process, squared_orders):
        """ From the squared order constraints set by the user, this function
        finds what indices of the squared_orders list the user intends to pick.
        It returns this as a string of comma-separated successive '.true.' or
        '.false.' for each index."""

        user_squared_orders = process.get('squared_orders')
        split_orders = process.get('split_orders')

        # No user constraint: every squared-order combination is selected.
        if len(user_squared_orders)==0:
            return ','.join(['.true.']*len(squared_orders))

        res = []
        for sqsos in squared_orders:
            is_a_match = True
            for user_sqso, value in user_squared_orders.items():
                # A combination fails as soon as one user constraint is
                # violated: '==' requires equality; '<='/'=' require the
                # user value to be at least the combination's value; '>'
                # requires it to be strictly below.
                if (process.get_squared_order_type(user_sqso) =='==' and \
                    value!=sqsos[split_orders.index(user_sqso)]) or \
                   (process.get_squared_order_type(user_sqso) in ['<=','='] and \
                    value<sqsos[split_orders.index(user_sqso)]) or \
                   (process.get_squared_order_type(user_sqso) == '>' and \
                    value>=sqsos[split_orders.index(user_sqso)]):
                    is_a_match = False
                    break
            res.append('.true.' if is_a_match else '.false.')

        return ','.join(res)
1028
1029 - def get_split_orders_lines(self, orders, array_name, n=5):
1030 """ Return the split orders definition as defined in the list orders and 1031 for the name of the array 'array_name'. Split rows in chunks of size n.""" 1032 1033 ret_list = [] 1034 for index, order in enumerate(orders): 1035 for k in xrange(0, len(order), n): 1036 ret_list.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" % \ 1037 (array_name,index + 1, k + 1, min(k + n, len(order)), 1038 ','.join(["%5r" % i for i in order[k:k + n]]))) 1039 return ret_list
1040
1041 - def format_integer_list(self, list, name, n=5):
1042 """ Return an initialization of the python list in argument following 1043 the fortran syntax using the data keyword assignment, filling an array 1044 of name 'name'. It splits rows in chunks of size n.""" 1045 1046 ret_list = [] 1047 for k in xrange(0, len(list), n): 1048 ret_list.append("DATA (%s(i),i=%3r,%3r) /%s/" % \ 1049 (name, k + 1, min(k + n, len(list)), 1050 ','.join(["%5r" % i for i in list[k:k + n]]))) 1051 return ret_list
1052
    def get_color_data_lines(self, matrix_element, n=6):
        """Return the color matrix definition lines for this matrix element. Split
        rows in chunks of size n.

        Output: per color-matrix row, one 'DATA Denom(...)' line with the row's
        common denominator, the chunked 'DATA (CF(i,...)...)' numerator lines,
        and a trailing comment showing the corresponding color basis element.
        """

        if not matrix_element.get('color_matrix'):
            # Colorless process: trivial 1x1 color matrix.
            return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"]
        else:
            ret_list = []
            my_cs = color.ColorString()
            for index, denominator in \
                enumerate(matrix_element.get('color_matrix').\
                                                 get_line_denominators()):
                # First write the common denominator for this color matrix line
                ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator))
                # Then write the numerators for the matrix elements
                num_list = matrix_element.get('color_matrix').\
                                            get_line_numerators(index, denominator)

                for k in xrange(0, len(num_list), n):
                    ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \
                                    (index + 1, k + 1, min(k + n, len(num_list)),
                                     ','.join(["%5r" % i for i in num_list[k:k + n]])))
                # Comment line identifying which color basis element this
                # row corresponds to.
                my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index])
                ret_list.append("C %s" % repr(my_cs))
            return ret_list
1078 1079
1080 - def get_den_factor_line(self, matrix_element):
1081 """Return the denominator factor line for this matrix element""" 1082 1083 return "DATA IDEN/%2r/" % \ 1084 matrix_element.get_denominator_factor()
1085
    def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
        """Return the ICOLAMP matrix, showing which JAMPs contribute to
        which configs (diagrams).

        mapconfigs: diagram number for each config (0 entries are skipped).
        Only leading-color flows (maximal Nc power) are marked '.true.'.
        """

        ret_list = []

        booldict = {False: ".false.", True: ".true."}

        if not matrix_element.get('color_basis'):
            # No color, so only one color factor. Simply write a ".true."
            # for each config (i.e., each diagram with only 3 particle
            # vertices
            configs = len(mapconfigs)
            ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \
                            (num_matrix_element, configs,
                             ','.join([".true." for i in range(configs)])))
            return ret_list

        # There is a color basis - create a list showing which JAMPs have
        # contributions to which configs

        # Only want to include leading color flows, so find max_Nc
        color_basis = matrix_element.get('color_basis')

        # We don't want to include the power of Nc's which come from the potential
        # loop color trace (i.e. in the case of a closed fermion loop for example)
        # so we subtract it here when computing max_Nc
        max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in
                          color_basis.values()],[]))

        # Crate dictionary between diagram number and JAMP number
        diag_jamp = {}
        for ijamp, col_basis_elem in \
                enumerate(sorted(matrix_element.get('color_basis').keys())):
            for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]:
                # Only use color flows with Nc == max_Nc. However, notice that
                # we don't want to include the Nc power coming from the loop
                # in this counting.
                if (diag_tuple[4]-diag_tuple[5]) == max_Nc:
                    diag_num = diag_tuple[0] + 1
                    # Add this JAMP number to this diag_num
                    diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \
                                          [ijamp+1]

        # Total number of JAMPs = index of the last color basis element + 1
        # (relies on the loop above having run at least once, which the
        # color_basis guard at the top ensures).
        colamps = ijamp + 1
        for iconfig, num_diag in enumerate(mapconfigs):
            if num_diag == 0:
                continue

            # List of True or False
            bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)]
            # Add line
            ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \
                            (iconfig+1, num_matrix_element, colamps,
                             ','.join(["%s" % booldict[b] for b in \
                                       bool_list])))

        return ret_list
1144
    def get_amp2_lines(self, matrix_element, config_map = []):
        """Return the amp2(i) = sum(amp for diag(i))^2 lines.

        If config_map is given (one topology/config id per diagram, 0 meaning
        'skip'), amplitudes of diagrams sharing a topology are summed into a
        single AMP2. NOTE: the mutable default argument is only read, never
        mutated, so it is safe here.
        """

        nexternal, ninitial = matrix_element.get_nexternal_ninitial()
        # Get minimum legs in a vertex
        vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \
                     matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        ret_lines = []
        if config_map:
            # In this case, we need to sum up all amplitudes that have
            # identical topologies, as given by the config_map (which
            # gives the topology/config for each of the diagrams
            diagrams = matrix_element.get('diagrams')
            # Combine the diagrams with identical topologies
            config_to_diag_dict = {}
            for idiag, diag in enumerate(matrix_element.get('diagrams')):
                if config_map[idiag] == 0:
                    continue
                try:
                    config_to_diag_dict[config_map[idiag]].append(idiag)
                except KeyError:
                    config_to_diag_dict[config_map[idiag]] = [idiag]
            # Write out the AMP2s summing squares of amplitudes belonging
            # to eiher the same diagram or different diagrams with
            # identical propagator properties.  Note that we need to use
            # AMP2 number corresponding to the first diagram number used
            # for that AMP2.
            for config in sorted(config_to_diag_dict.keys()):

                line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \
                       {"num": (config_to_diag_dict[config][0] + 1)}

                amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \
                                sum([diagrams[idiag].get('amplitudes') for \
                                     idiag in config_to_diag_dict[config]], [])])

                # Not using \sum |M|^2 anymore since this creates troubles
                # when ckm is not diagonal due to the JIM mechanism.
                if '+' in amp:
                    line += "(%s)*dconjg(%s)" % (amp, amp)
                else:
                    line += "%s*dconjg(%s)" % (amp, amp)
                ret_lines.append(line)
        else:
            for idiag, diag in enumerate(matrix_element.get('diagrams')):
                # Ignore any diagrams with 4-particle vertices.
                if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert:
                    continue
                # Now write out the expression for AMP2, meaning the sum of
                # squared amplitudes belonging to the same diagram
                line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)}
                line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \
                                  {"num": a.get('number')} for a in \
                                  diag.get('amplitudes')])
                ret_lines.append(line)

        return ret_lines
1204 1205 #=========================================================================== 1206 # Returns the data statements initializing the coeffictients for the JAMP 1207 # decomposition. It is used when the JAMP initialization is decided to be 1208 # done through big arrays containing the projection coefficients. 1209 #===========================================================================
    def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\
                       n=50, Nc_value=3):
        """This functions return the lines defining the DATA statement setting
        the coefficients building the JAMPS out of the AMPS. Split rows in
        bunches of size n.
        One can specify the color_basis from which the color amplitudes originates
        so that there are commentaries telling what color structure each JAMP
        corresponds to.

        Each coefficient entry is ((ff_number, frac, is_imaginary, Nc_power),
        amp_number); amplitude numbers are negated when the coefficient is
        imaginary so the sign carries that flag to the Fortran side.
        """

        if(not isinstance(color_amplitudes,list) or
           not (color_amplitudes and isinstance(color_amplitudes[0],list))):
            raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_coefs"

        res_list = []
        my_cs = color.ColorString()
        for index, coeff_list in enumerate(color_amplitudes):
            # Create the list of the complete numerical coefficient.
            coefs_list=[coefficient[0][0]*coefficient[0][1]*\
                        (fractions.Fraction(Nc_value)**coefficient[0][3]) for \
                        coefficient in coeff_list]
            # Create the list of the numbers of the contributing amplitudes.
            # Mutliply by -1 for those which have an imaginary coefficient.
            ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \
                             for coefficient in coeff_list]
            # Find the common denominator.
            commondenom=abs(reduce(fractions.gcd, coefs_list).denominator)
            # Integer numerators once everything is on the common denominator.
            num_list=[(coefficient*commondenom).numerator \
                      for coefficient in coefs_list]
            res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\
                index+1,len(num_list)))
            res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\
                index+1,commondenom))
            if color_basis:
                # Comment identifying the color structure of this JAMP.
                my_cs.from_immutable(sorted(color_basis.keys())[index])
                res_list.append("C %s" % repr(my_cs))
            for k in xrange(0, len(num_list), n):
                res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \
                    (tag_letter,index + 1, k + 1, min(k + n, len(num_list)),
                     ','.join(["%6r" % i for i in num_list[k:k + n]])))
                res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \
                    (tag_letter,index + 1, k + 1, min(k + n, len(num_list)),
                     ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]])))
                pass
        return res_list
1254 1255
    def get_JAMP_lines_split_order(self, col_amps, split_order_amps,
                                   split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
        """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
        defined as a matrix element or directly as a color_amplitudes dictionary.
        The split_order_amps specifies the group of amplitudes sharing the same
        amplitude orders which should be put in together in a given set of JAMPS.
        The split_order_amps is supposed to have the format of the second output
        of the function get_split_orders_mapping function in helas_objects.py.
        The split_order_names is optional (it should correspond to the process
        'split_orders' attribute) and only present to provide comments in the
        JAMP definitions in the code."""

        # Let the user call get_JAMP_lines_split_order directly from a
        # matrix element or from a raw list of color amplitudes.
        # NOTE: only the trailing '%s' of error_msg is filled in here (with
        # the offending value); the leading '%s' is filled at raise time
        # with the name of the bad argument.
        error_msg="Malformed '%s' argument passed to the "+\
                  "get_JAMP_lines_split_order function: %s"%str(split_order_amps)
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            color_amplitudes=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                color_amplitudes=col_amps
            else:
                raise MadGraph5Error, error_msg%'col_amps'
        else:
            raise MadGraph5Error, error_msg%'col_amps'

        # Verify the sanity of the split_order_amps and split_order_names args
        if isinstance(split_order_amps,list):
            for elem in split_order_amps:
                if len(elem)!=2:
                    raise MadGraph5Error, error_msg%'split_order_amps'
                # Check the first element of the two lists to make sure they are
                # integers, although in principle they should all be integers.
                if not isinstance(elem[0],tuple) or \
                   not isinstance(elem[1],tuple) or \
                   not isinstance(elem[0][0],int) or \
                   not isinstance(elem[1][0],int):
                    raise MadGraph5Error, error_msg%'split_order_amps'
        else:
            raise MadGraph5Error, error_msg%'split_order_amps'

        if not split_order_names is None:
            if isinstance(split_order_names,list):
                # Should specify the same number of names as there are elements
                # in the key of the split_order_amps.
                if len(split_order_names)!=len(split_order_amps[0][0]):
                    raise MadGraph5Error, error_msg%'split_order_names'
                # Check the first element of the list to be a string
                if not isinstance(split_order_names[0],str):
                    raise MadGraph5Error, error_msg%'split_order_names'
            else:
                raise MadGraph5Error, error_msg%'split_order_names'

        # Now scan all contributing orders to be individually computed and
        # construct the list of color_amplitudes for JAMP to be constructed
        # accordingly.
        res_list=[]
        for i, amp_order in enumerate(split_order_amps):
            # Keep, for each JAMP, only the amplitudes listed for this order.
            col_amps_order = []
            for jamp in color_amplitudes:
                col_amps_order.append(filter(lambda col_amp:
                                             col_amp[1] in amp_order[1],jamp))
            if split_order_names:
                res_list.append('C JAMPs contributing to orders '+' '.join(
                    ['%s=%i'%order for order in zip(split_order_names,
                                                    amp_order[0])]))
            if self.opt['export_format'] in ['madloop_matchbox']:
                res_list.extend(self.get_JAMP_lines(col_amps_order,
                                JAMP_format="JAMP(%s,{0})".format(str(i+1)),
                                JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1))))
            else:
                res_list.extend(self.get_JAMP_lines(col_amps_order,
                                JAMP_format="JAMP(%s,{0})".format(str(i+1))))

        return res_list
1330 1331
    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)",
                       split=-1):
        """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
        defined as a matrix element or directly as a color_amplitudes dictionary,
        Jamp_formatLC should be define to allow to add LeadingColor computation
        (usefull for MatchBox)
        The split argument defines how the JAMP lines should be split in order
        not to be too long."""

        # Let the user call get_JAMP_lines directly from a MatrixElement or from
        # the color amplitudes lists.
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            color_amplitudes=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                color_amplitudes=col_amps
            else:
                raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines"
        else:
            raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines"


        res_list = []
        for i, coeff_list in enumerate(color_amplitudes):
            # It might happen that coeff_list is empty if this function was
            # called from get_JAMP_lines_split_order (i.e. if some color flow
            # does not contribute at all for a given order).
            # In this case we simply set it to 0.
            if coeff_list==[]:
                res_list.append(((JAMP_format+"=0D0") % str(i + 1)))
                continue
            # Break the JAMP definition into 'n=split' pieces to avoid having
            # arbitrarly long lines.
            first=True
            n = (len(coeff_list)+1 if split<=0 else split)
            while coeff_list!=[]:
                coefs=coeff_list[:n]
                coeff_list=coeff_list[n:]
                # Continuation chunks re-add the partial JAMP on the RHS.
                res = ((JAMP_format+"=") % str(i + 1)) + \
                      ((JAMP_format % str(i + 1)) if not first and split>0 else '')

                first=False
                # Optimization: if all contributions to that color basis element have
                # the same coefficient (up to a sign), put it in front
                list_fracs = [abs(coefficient[0][1]) for coefficient in coefs]
                common_factor = False
                diff_fracs = list(set(list_fracs))
                if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1:
                    common_factor = True
                    global_factor = diff_fracs[0]
                    res = res + '%s(' % self.coeff(1, global_factor, False, 0)

                # loop for JAMP: append one signed AMP(term) per coefficient.
                for (coefficient, amp_number) in coefs:
                    if not coefficient:
                        continue
                    if common_factor:
                        # Factor pulled out: only the sign of the fraction
                        # remains on each term.
                        res = (res + "%s" + AMP_format) % \
                              (self.coeff(coefficient[0],
                                          coefficient[1] / abs(coefficient[1]),
                                          coefficient[2],
                                          coefficient[3]),
                               str(amp_number))
                    else:
                        res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0],
                                                                      coefficient[1],
                                                                      coefficient[2],
                                                                      coefficient[3]),
                                                           str(amp_number))

                if common_factor:
                    res = res + ')'

                res_list.append(res)

        return res_list
1408
    def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
        """Generate the PDF lines for the auto_dsig.f file.

        Returns a 3-tuple of strings (each without its trailing newline):
          - pdf_definition_lines: DOUBLE PRECISION declarations of the per-beam
            PDF variables,
          - pdf_data_lines: DATA statements initializing them to 1D0,
          - pdf_lines: the executable Fortran filling PD(IPROC).
        For decay processes (ninitial == 1) no PDF convolution is done and
        every subprocess gets weight 1d0.
        """

        processes = matrix_element.get('processes')
        model = processes[0].get('model')

        pdf_definition_lines = ""
        pdf_data_lines = ""
        pdf_lines = ""

        if ninitial == 1:
            pdf_lines = "PD(0) = 0d0\nIPROC = 0\n"
            for i, proc in enumerate(processes):
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n"
                pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n"
        else:
            # Pick out all initial state particles for the two beams
            initial_states = [sorted(list(set([p.get_initial_pdg(1) for \
                                               p in processes]))),
                              sorted(list(set([p.get_initial_pdg(2) for \
                                               p in processes])))]

            # Prepare all variable names, sanitized into valid Fortran
            # identifiers ('~' -> 'x', '+' -> 'p', '-' -> 'm').
            pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \
                              sum(initial_states,[])])
            for key,val in pdf_codes.items():
                pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m')

            # Set conversion from PDG code to number used in PDF calls
            pdgtopdf = {21: 0, 22: 7}

            # Fill in missing entries of pdgtopdf
            for pdg in sum(initial_states,[]):
                if not pdg in pdgtopdf and not pdg in pdgtopdf.values():
                    pdgtopdf[pdg] = pdg
                elif pdg not in pdgtopdf and pdg in pdgtopdf.values():
                    # If any particle has pdg code 7, we need to use something else
                    pdgtopdf[pdg] = 6000000 + pdg

            # Get PDF variable declarations for all initial states
            for i in [0,1]:
                pdf_definition_lines += "DOUBLE PRECISION " + \
                                        ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                                  for pdg in \
                                                  initial_states[i]]) + \
                                        "\n"

            # Get PDF data lines for all initial states
            for i in [0,1]:
                pdf_data_lines += "DATA " + \
                                  ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                            for pdg in initial_states[i]]) + \
                                  "/%d*1D0/" % len(initial_states[i]) + \
                                  "\n"

            # Get PDF lines for all different initial states; in grouped
            # subprocess mode the beam index goes through the IB() mapping.
            for i, init_states in enumerate(initial_states):
                if subproc_group:
                    pdf_lines = pdf_lines + \
                                "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \
                                % (i + 1, i + 1)
                else:
                    pdf_lines = pdf_lines + \
                                "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \
                                % (i + 1, i + 1)

                for nbi,initial_state in enumerate(init_states):
                    if initial_state in pdf_codes.keys():
                        if subproc_group:
                            pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP, 1," + \
                                         "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \
                                        (pdf_codes[initial_state],
                                         i + 1, i + 1, pdgtopdf[initial_state],
                                         i + 1, i + 1)
                        else:
                            pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP, %d," + \
                                         "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \
                                        (pdf_codes[initial_state],
                                         i + 1, i + 1, pdgtopdf[initial_state],
                                         i + 1,
                                         i + 1, i + 1)
                pdf_lines = pdf_lines + "ENDIF\n"

            # Add up PDFs for the different initial state particles
            pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n"
            for proc in processes:
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC)="
                for ibeam in [1, 2]:
                    initial_state = proc.get_initial_pdg(ibeam)
                    if initial_state in pdf_codes.keys():
                        pdf_lines = pdf_lines + "%s%d*" % \
                                    (pdf_codes[initial_state], ibeam)
                    else:
                        pdf_lines = pdf_lines + "1d0*"
                # Remove last "*" from pdf_lines
                pdf_lines = pdf_lines[:-1] + "\n"
                pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n"

        # Remove last line break from the return variables
        return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1515 1516 #=========================================================================== 1517 # write_props_file 1518 #===========================================================================
    def write_props_file(self, writer, matrix_element, s_and_t_channels):
        """Write the props.inc file for MadEvent. Needs input from
        write_configs_file.

        For each config, emits prmass/prwidth/pow lines for every internal
        propagator (all s-channel vertices plus all but the last t-channel
        vertex). Returns True.
        """

        lines = []

        particle_dict = matrix_element.get('processes')[0].get('model').\
                        get('particle_dict')

        for iconf, configs in enumerate(s_and_t_channels):
            # configs[0] holds the s-channel vertices, configs[1] the
            # t-channel ones; the last t-channel entry is not a propagator.
            for vertex in configs[0] + configs[1][:-1]:
                leg = vertex.get('legs')[-1]
                if leg.get('id') not in particle_dict:
                    # Fake propagator used in multiparticle vertices
                    mass = 'zero'
                    width = 'zero'
                    pow_part = 0
                else:
                    particle = particle_dict[leg.get('id')]
                    # Get mass
                    if particle.get('mass').lower() == 'zero':
                        mass = particle.get('mass')
                    else:
                        mass = "abs(%s)" % particle.get('mass')
                    # Get width
                    if particle.get('width').lower() == 'zero':
                        width = particle.get('width')
                    else:
                        width = "abs(%s)" % particle.get('width')

                    # Propagator power: 2 for bosons, 1 for fermions.
                    pow_part = 1 + int(particle.is_boson())

                lines.append("prmass(%d,%d) = %s" % \
                             (leg.get('number'), iconf + 1, mass))
                lines.append("prwidth(%d,%d) = %s" % \
                             (leg.get('number'), iconf + 1, width))
                lines.append("pow(%d,%d) = %d" % \
                             (leg.get('number'), iconf + 1, pow_part))

        # Write the file
        writer.writelines(lines)

        return True
1562 1563 #=========================================================================== 1564 # write_configs_file 1565 #===========================================================================
1566 - def write_configs_file(self, writer, matrix_element):
1567 """Write the configs.inc file for MadEvent""" 1568 1569 # Extract number of external particles 1570 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1571 1572 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 1573 mapconfigs = [c[0] for c in configs] 1574 model = matrix_element.get('processes')[0].get('model') 1575 return mapconfigs, self.write_configs_file_from_diagrams(writer, 1576 [[c[1]] for c in configs], 1577 mapconfigs, 1578 nexternal, ninitial, 1579 model)
1580 1581 #=========================================================================== 1582 # write_configs_file_from_diagrams 1583 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns s_and_t_channels: per written config, a pair
        [s-channel vertices (first contributing subprocess), t-channel
        vertices]."""

        lines = []

        s_and_t_channels = []

        # Smallest maximal vertex size over all configs: configs with larger
        # vertices (multi-particle vertices) are skipped below.
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Fresh fake PDG code used for multiparticle vertex propagators.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any(vert > minvert for vert in [d for d in helas_diags if d]\
                   [0].get_vertex_leg_numbers()) :
                # Only 3-vertices allowed in configs.inc except for vertices
                # which originate from a shrunk loop.
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model, new_pdg))
                else:
                    # Placeholder shared by all non-contributing
                    # subprocesses; resized in place below.
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = zip(*[s for s,t in stchannels])
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                     mapconfigs[iconfig]))

            for verts in allchannels:
                # s-channel entries are tuples over subprocesses (possibly
                # with None placeholders); t-channel entries are vertices.
                if verts in schannels:
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, len(daughters),
                              ",".join([str(d) for d in daughters])))
                if verts in schannels:
                    # One propagator PDG per subprocess (0 when the
                    # subprocess does not contribute to this config).
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join([str(d) for d in pdgs])))
                    lines.append("data tprid(%d,%d)/0/" % \
                                 (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append("data tprid(%d,%d)/%d/" % \
                                 (last_leg.get('number'), nconfigs,
                                  abs(last_leg.get('id'))))
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        lines.append("# Number of configs")
        lines.append("data mapconfig(0)/%d/" % nconfigs)

        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
1702 1703 #=========================================================================== 1704 # Global helper methods 1705 #=========================================================================== 1706
def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
    """Return a nicely formatted coefficient string for a JAMP line.

    The numeric value is ff_number * frac * Nc_value**Nc_power; unit
    coefficients collapse to a bare sign, optionally times imag1.
    """

    value = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power

    # +/-1 need no explicit 'xD0' factor in the Fortran output.
    if value in (1, -1):
        sign = '+' if value == 1 else '-'
        return sign + 'imag1*' if is_imaginary else sign

    # General case: signed numerator, optional denominator, optional imag1,
    # always ending with the '*' that glues the coefficient to the amplitude.
    pieces = ['%+iD0' % value.numerator]
    if value.denominator != 1:
        pieces.append('/%iD0' % value.denominator)
    if is_imaginary:
        pieces.append('*imag1')
    pieces.append('*')
    return ''.join(pieces)
def set_fortran_compiler(self, default_compiler, force=False):
    """Select the Fortran (and f2py) compiler available on this system.

    default_compiler: dict with keys 'fortran' and 'f2py' holding the
        user-preferred executables (possibly empty).
    force: accepted for interface compatibility; not used here.
    Returns the chosen f77 compiler name; also records both choices in
    Source/make_opts (and in the global Template when writable).
    """

    # Check for compiler: user preference first, then common names in
    # decreasing order of desirability.
    if default_compiler['fortran'] and misc.which(default_compiler['fortran']):
        f77_compiler = default_compiler['fortran']
    elif misc.which('gfortran'):
        f77_compiler = 'gfortran'
    elif misc.which('g77'):
        f77_compiler = 'g77'
    elif misc.which('f77'):
        f77_compiler = 'f77'
    elif default_compiler['fortran']:
        logger.warning('No Fortran Compiler detected! Please install one')
        f77_compiler = default_compiler['fortran'] # maybe misc fail so try with it
    else:
        raise MadGraph5Error, 'No Fortran Compiler detected! Please install one'
    logger.info('Use Fortran compiler ' + f77_compiler)


    # Check for f2py compiler. 1. set default.
    if default_compiler['f2py']:
        f2py_compiler = default_compiler['f2py']
    else:
        f2py_compiler = ''
    # Try to find the correct one, falling back on the versioned names
    # that some distributions use (f2py-2.6, f2py2.7, ...).
    if default_compiler['f2py'] and misc.which(default_compiler['f2py']):
        f2py_compiler = default_compiler['f2py']
    elif misc.which('f2py'):
        f2py_compiler = 'f2py'
    elif sys.version_info[1] == 6:
        if misc.which('f2py-2.6'):
            f2py_compiler = 'f2py-2.6'
        elif misc.which('f2py2.6'):
            f2py_compiler = 'f2py2.6'
    elif sys.version_info[1] == 7:
        if misc.which('f2py-2.7'):
            f2py_compiler = 'f2py-2.7'
        elif misc.which('f2py2.7'):
            f2py_compiler = 'f2py2.7'

    to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler}


    self.replace_make_opt_f_compiler(to_replace)
    # Replace also for Template but not for cluster
    # (MADGRAPH_DATA marks a cluster node; ReadWrite marks a writable install)
    if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite:
        self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO'))

    return f77_compiler

# an alias for backward compatibility
set_compiler = set_fortran_compiler
def set_cpp_compiler(self, default_compiler, force=False):
    """Select the C++ compiler available on this system.

    default_compiler: user-preferred executable name (may be empty).
    force: accepted for interface compatibility; not used here.
    Returns the chosen compiler name; also records it in
    Source/make_opts (and in the global Template when writable).
    """

    # Check for compiler
    if default_compiler and misc.which(default_compiler):
        compiler = default_compiler
    elif misc.which('g++'):
        # check if 'g++' is actually clang in disguise (as on macOS);
        # in that case prefer calling clang directly.
        p = misc.Popen(['g++', '--version'], stdout=subprocess.PIPE,
                       stderr=subprocess.PIPE)
        out, _ = p.communicate()
        if 'clang' in out and misc.which('clang'):
            compiler = 'clang'
        else:
            compiler = 'g++'
    elif misc.which('c++'):
        compiler = 'c++'
    elif misc.which('clang'):
        compiler = 'clang'
    elif default_compiler:
        logger.warning('No c++ Compiler detected! Please install one')
        compiler = default_compiler # maybe misc fail so try with it
    else:
        raise MadGraph5Error, 'No c++ Compiler detected! Please install one'
    logger.info('Use c++ compiler ' + compiler)
    self.replace_make_opt_c_compiler(compiler)
    # Replace also for Template but not for cluster; skipped in debug mode
    # and inside a development checkout (create_release.py present).
    if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite and \
       not __debug__ and not os.path.exists(pjoin(MG5DIR,'bin','create_release.py')):
        self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO'))

    return compiler
def replace_make_opt_f_compiler(self, compilers, root_dir=""):
    """Record FC/F2PY compiler choices in Source/make_opts.

    compilers: dict with keys 'fortran' and 'f2py'.
    root_dir: directory holding Source/make_opts; defaults to the
        process directory (self.dir_path).
    """

    assert isinstance(compilers, dict)

    target_dir = root_dir if root_dir else self.dir_path

    updates = {
        'DEFAULT_F_COMPILER': compilers['fortran'],
        # an empty f2py entry falls back on the plain 'f2py' executable
        'DEFAULT_F2PY_COMPILER': compilers['f2py'] or 'f2py',
    }
    make_opts = pjoin(target_dir, 'Source', 'make_opts')

    try:
        common_run_interface.CommonRunCmd.update_make_opts_full(
            make_opts, updates)
    except IOError:
        # Only report failures for the process directory itself; a
        # read-only global Template copy is silently left untouched.
        if target_dir == self.dir_path:
            logger.info('Fail to set compiler. Trying to continue anyway.')
def replace_make_opt_c_compiler(self, compiler, root_dir=""):
    """Set CXX=compiler in Source/make_opts.

    The compiler is also probed (clang? linked against libc++?) in order
    to set the extra MACFLAG/STDLIB flags needed on macOS.
    """

    is_clang = misc.detect_if_cpp_compiler_is_clang(compiler)
    is_lc = misc.detect_cpp_std_lib_dependence(compiler) == '-lc++'


    # list of the variables to set in the make_opts file
    for_update= {'DEFAULT_CPP_COMPILER':compiler,
                 'MACFLAG':'-mmacosx-version-min=10.7' if is_clang and is_lc else '',
                 'STDLIB': '-lc++' if is_lc else '-lstdc++',
                 'STDLIB_FLAG': '-stdlib=libc++' if is_lc and is_clang else ''
                 }

    # From macOS Mojave (10.14) on, the 10.7 deployment target must be
    # relaxed, so detect whether we are on such a system.
    if is_clang:
        import platform
        mac_version, _, _ = platform.mac_ver()
        if not mac_version:
            # mac_ver() is empty on non-mac systems: treat as recent so
            # the old MACFLAG is dropped (same behaviour as before).
            is_recent_mac = True
        else:
            parts = mac_version.split('.')
            major = int(parts[0])
            # a bare '11' (no minor part) must not crash the parsing
            minor = int(parts[1]) if len(parts) > 1 else 0
            # Bug fix: macOS 11+ abandoned the '10.x' numbering, so any
            # major > 10 is newer than Mojave; the old code read only the
            # minor number and misclassified e.g. '11.2' as version 2.
            is_recent_mac = major > 10 or (major == 10 and minor >= 14)
        if is_recent_mac:
            for_update['MACFLAG'] = '-mmacosx-version-min=10.8' if is_lc else ''

    if not root_dir:
        root_dir = self.dir_path
    make_opts = pjoin(root_dir, 'Source', 'make_opts')

    try:
        common_run_interface.CommonRunCmd.update_make_opts_full(
            make_opts, for_update)
    except IOError:
        # a read-only global Template copy is silently left untouched
        if root_dir == self.dir_path:
            logger.info('Fail to set compiler. Trying to continue anyway.')

    return
#===============================================================================
# ProcessExporterFortranSA
#===============================================================================
class ProcessExporterFortranSA(ProcessExporterFortran):
    """Class to take care of exporting a set of matrix elements to
    MadGraph v4 StandAlone format."""

    # Fortran template used by write_matrix_element_v4; daughter classes
    # (e.g. the MatchBox exporter below) override this attribute.
    matrix_template = "matrix_standalone_v4.inc"
def __init__(self, *args, **opts):
    """Extract the standalone-specific 'format' option, then delegate
    the remaining options to the standard Fortran-exporter init."""

    # 'format' is only meaningful for this exporter, so strip it before
    # forwarding; default to plain 'standalone' output.
    self.format = opts.pop('format', 'standalone')

    # maps pdg-id tuples to [proc_prefix, tag]; filled while generating
    # subprocess directories and consumed by the f2py splitter.
    self.prefix_info = {}
    ProcessExporterFortran.__init__(self, *args, **opts)
def copy_template(self, model):
    """Additional actions needed for setup of Template.

    Creates the StandAlone directory skeleton, copies version info,
    the subprocess makefile, check_sa.f and make_opts, and writes the
    Source makefile.  Does nothing when dir_path already exists.
    """

    # First copy the full template tree if dir_path doesn't exist
    if os.path.isdir(self.dir_path):
        return

    logger.info('initialize a new standalone directory: %s' % \
                os.path.basename(self.dir_path))
    temp_dir = pjoin(self.mgme_dir, 'Template/LO')

    # Create the directory structure
    os.mkdir(self.dir_path)
    os.mkdir(pjoin(self.dir_path, 'Source'))
    os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL'))
    os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS'))
    os.mkdir(pjoin(self.dir_path, 'SubProcesses'))
    os.mkdir(pjoin(self.dir_path, 'bin'))
    os.mkdir(pjoin(self.dir_path, 'bin', 'internal'))
    os.mkdir(pjoin(self.dir_path, 'lib'))
    os.mkdir(pjoin(self.dir_path, 'Cards'))

    # Information at top-level
    # Write version info
    shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path)
    try:
        shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path)
    except IOError:
        # no MG4 version file available: synthesize one from the MG5 version
        MG5_version = misc.get_pkg_info()
        open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \
            "5." + MG5_version['version'])


    # Add file in SubProcesses
    shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'),
                pjoin(self.dir_path, 'SubProcesses', 'makefileP'))

    if self.format == 'standalone':
        shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'),
                    pjoin(self.dir_path, 'SubProcesses', 'check_sa.f'))

    # Add file in Source
    shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'),
                pjoin(self.dir_path, 'Source'))
    # add the makefile
    filename = pjoin(self.dir_path,'Source','makefile')
    self.write_source_makefile(writers.FileWriter(filename))

#===========================================================================
# export model files
#===========================================================================
def export_model_files(self, model_path):
    """Export the model dependent files for a V4 model.

    On top of the parent behaviour this appends an update_as_param()
    stub to couplings.f and patches check_sa.f so that setpara is
    called with the explicit read flag.
    """

    super(ProcessExporterFortranSA,self).export_model_files(model_path)
    # Add the routine update_as_param in v4 model
    # This is a function created in the UFO
    text="""
      subroutine update_as_param()
      call setpara('param_card.dat',.false.)
      return
      end
"""
    ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a')
    ff.write(text)
    ff.close()

    # patch check_sa.f so the first setpara call actually reads the card
    text = open(pjoin(self.dir_path,'SubProcesses','check_sa.f')).read()
    text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)')
    fsock = open(pjoin(self.dir_path,'SubProcesses','check_sa.f'), 'w')
    fsock.write(text)
    fsock.close()

    self.make_model_symbolic_link()

#===========================================================================
# write a procdef_mg5 (an equivalent of the MG4 proc_card.dat)
#===========================================================================
def write_procdef_mg5(self, file_pos, modelname, process_str):
    """Write an MG4-style proc_card so the MadEvent4 Perl scripts keep
    working for pure MG5 runs.

    StandAlone output never runs those scripts, so this implementation
    intentionally produces nothing.
    """
    return


#===========================================================================
# Make the Helas and Model directories for Standalone directory
#===========================================================================
def make(self):
    """Run make in the DHELAS and MODEL directories, so everything is
    compiled for running standalone."""

    source_dir = pjoin(self.dir_path, "Source")
    # each entry: (log label, make target relative to Source)
    for label, target in (("Helas", '../lib/libdhelas.a'),
                          ("Model", '../lib/libmodel.a')):
        logger.info("Running make for %s" % label)
        misc.compile(arg=[target], cwd=source_dir, mode='fortran')

#===========================================================================
# Create proc_card_mg5.dat for Standalone directory
#===========================================================================
def finalize(self, matrix_elements, history, mg5options, flaglist):
    """Finalize Standalone MG4 directory:
       - pick compilers and build DHELAS/MODEL,
       - write the command history as proc_card_mg5.dat,
       - run the generic Fortran finalization,
       - set up either the f2py dispatcher or a global makefile.
    """

    compiler = {'fortran': mg5options['fortran_compiler'],
                'cpp': mg5options['cpp_compiler'],
                'f2py': mg5options['f2py_compiler']}

    self.compiler_choice(compiler)
    self.make()

    # Write command history as proc_card_mg5
    if history and os.path.isdir(pjoin(self.dir_path, 'Cards')):
        output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat')
        history.write(output_file)

    ProcessExporterFortran.finalize(self, matrix_elements,
                                    history, mg5options, flaglist)
    # make the output importable as a python package
    open(pjoin(self.dir_path,'__init__.py'),'w')
    open(pjoin(self.dir_path,'SubProcesses','__init__.py'),'w')

    if 'mode' in self.opt and self.opt['mode'] == "reweight":
        # add the module to handle the NLO weight
        files.copytree(pjoin(MG5DIR, 'Template', 'RWGTNLO'),
                       pjoin(self.dir_path, 'Source'))
        files.copytree(pjoin(MG5DIR, 'Template', 'NLO', 'Source', 'PDF'),
                       pjoin(self.dir_path, 'Source', 'PDF'))
        self.write_pdf_opendata()

    # prefix_info is only filled when --prefix was used: in that case the
    # f2py dispatcher (all_matrix.f) and its makefile are generated.
    if self.prefix_info:
        self.write_f2py_splitter()
        self.write_f2py_makefile()
        self.write_f2py_check_sa(matrix_elements,
                                 pjoin(self.dir_path,'SubProcesses','check_sa.py'))
    else:
        # create a single makefile to compile all the subprocesses
        text = '''\n# For python linking (require f2py part of numpy)\nifeq ($(origin MENUM),undefined)\n MENUM=2\nendif\n'''
        deppython = ''
        for Pdir in os.listdir(pjoin(self.dir_path,'SubProcesses')):
            if os.path.isdir(pjoin(self.dir_path, 'SubProcesses', Pdir)):
                text += '%(0)s/matrix$(MENUM)py.so:\n\tcd %(0)s;make matrix$(MENUM)py.so\n'% {'0': Pdir}
                deppython += ' %(0)s/matrix$(MENUM)py.so ' % {'0': Pdir}
        text+='all: %s\n\techo \'done\'' % deppython

        ff = open(pjoin(self.dir_path, 'SubProcesses', 'makefile'),'a')
        ff.write(text)
        ff.close()
def write_f2py_splitter(self):
    """Write SubProcesses/all_matrix.f, the f2py dispatcher that routes
    a (pdgs, momenta) call to the smatrixhel of the matching subprocess.

    Also emits the INITIALISE, GET_PDG_ORDER and GET_PREFIX helper
    routines used from python.  Relies on self.prefix_info, which is
    filled by generate_subprocess_directory.
    """

    # Fortran skeleton; indentation is normalized by FortranWriter on output.
    template = """
%(python_information)s
      subroutine smatrixhel(pdgs, npdg, p, ALPHAS, SCALE2, nhel, ANS)
      IMPLICIT NONE

CF2PY double precision, intent(in), dimension(0:3,npdg) :: p
CF2PY integer, intent(in), dimension(npdg) :: pdgs
CF2PY integer, intent(in) :: npdg
CF2PY double precision, intent(out) :: ANS
CF2PY double precision, intent(in) :: ALPHAS
CF2PY double precision, intent(in) :: SCALE2
      integer pdgs(*)
      integer npdg, nhel
      double precision p(*)
      double precision ANS, ALPHAS, PI,SCALE2
      include 'coupl.inc'

      PI = 3.141592653589793D0
      G = 2* DSQRT(ALPHAS*PI)
      CALL UPDATE_AS_PARAM()
      if (scale2.ne.0d0) stop 1

%(smatrixhel)s

      return
      end

      SUBROUTINE INITIALISE(PATH)
C     ROUTINE FOR F2PY to read the benchmark point.
      IMPLICIT NONE
      CHARACTER*512 PATH
CF2PY INTENT(IN) :: PATH
      CALL SETPARA(PATH)  !first call to setup the paramaters
      RETURN
      END

      subroutine get_pdg_order(PDG)
      IMPLICIT NONE
CF2PY INTEGER, intent(out) :: PDG(%(nb_me)i,%(maxpart)i)
      INTEGER PDG(%(nb_me)i,%(maxpart)i), PDGS(%(nb_me)i,%(maxpart)i)
      DATA PDGS/ %(pdgs)s /
      PDG = PDGS
      RETURN
      END

      subroutine get_prefix(PREFIX)
      IMPLICIT NONE
CF2PY CHARACTER*20, intent(out) :: PREFIX(%(nb_me)i)
      character*20 PREFIX(%(nb_me)i),PREF(%(nb_me)i)
      DATA PREF / '%(prefix)s'/
      PREFIX = PREF
      RETURN
      END


"""

    allids = self.prefix_info.keys()
    allprefix = [self.prefix_info[key][0] for key in allids]
    min_nexternal = min([len(ids) for ids in allids])
    max_nexternal = max([len(ids) for ids in allids])

    # human-readable tag -> pdg-list map, embedded as comments at the top
    info = []
    for key, (prefix, tag) in self.prefix_info.items():
        info.append('#PY %s : %s # %s' % (tag, key, prefix))


    # Build the Fortran dispatch: outer branch on npdg, inner branch on
    # the exact pdg content.
    text = []
    for n_ext in range(min_nexternal, max_nexternal+1):
        current = [ids for ids in allids if len(ids)==n_ext]
        if not current:
            continue
        if min_nexternal != max_nexternal:
            if n_ext == min_nexternal:
                text.append(' if (npdg.eq.%i)then' % n_ext)
            else:
                text.append(' else if (npdg.eq.%i)then' % n_ext)
        for ii,pdgs in enumerate(current):
            condition = '.and.'.join(['%i.eq.pdgs(%i)' %(pdg, i+1) for i, pdg in enumerate(pdgs)])
            # Bug fix: the branch label previously used 'i', the index
            # leaked from the comprehension above (constant in python2,
            # a NameError under python3); 'ii' numbers the process branch
            # as intended.
            if ii==0:
                text.append( ' if(%s) then ! %i' % (condition, ii))
            else:
                text.append( ' else if(%s) then ! %i' % (condition, ii))
            text.append(' call %ssmatrixhel(p, nhel, ans)' % self.prefix_info[pdgs][0])
        text.append(' endif')
    #close the function
    if min_nexternal != max_nexternal:
        text.append('endif')

    formatting = {'python_information':'\n'.join(info),
                  'smatrixhel': '\n'.join(text),
                  'maxpart': max_nexternal,
                  'nb_me': len(allids),
                  # Fortran DATA fills the first index fastest, so iterate
                  # processes inside each particle slot (column-major).
                  'pdgs': ','.join(str(pdg[i]) if i<len(pdg) else '0'
                                   for i in range(max_nexternal) for pdg in allids),
                  'prefix':'\',\''.join(allprefix)
                  }
    formatting['lenprefix'] = len(formatting['prefix'])
    text = template % formatting
    fsock = writers.FortranWriter(pjoin(self.dir_path, 'SubProcesses', 'all_matrix.f'),'w')
    fsock.writelines(text)
    fsock.close()
def write_f2py_check_sa(self, matrix_element, writer):
    """Write the general check_sa.py in SubProcesses that would call all
    processes successively.

    Not implemented: the file is purely illustrative, so nothing is
    produced here.
    """
    return
def write_f2py_makefile(self):
    """Install the f2py makefile template as SubProcesses/makefile."""

    source = pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files',
                   'makefile_sa_f2py')
    destination = pjoin(self.dir_path, 'SubProcesses', 'makefile')
    shutil.copy(source, destination)
def create_MA5_cards(self, *args, **opts):
    """Bypass the mother class: MadAnalysis5 cards are not used for
    StandAlone output, so nothing is created."""
    pass
def compiler_choice(self, compiler):
    """ Different daughter classes might want different compilers.
    So this function is meant to be overloaded if desired.

    compiler: dict with 'fortran', 'cpp' and 'f2py' entries, as built
    by finalize().
    """

    # default: only the Fortran/f2py compilers matter for StandAlone
    self.set_compiler(compiler)

#===========================================================================
# generate_subprocess_directory
#===========================================================================
def generate_subprocess_directory(self, matrix_element,
                                  fortran_model, number):
    """Generate the Pxxxxx directory for a subprocess in MG4 standalone,
    including the necessary matrix.f and nexternal.inc files.

    matrix_element: the HelasMatrixElement to export.
    fortran_model: the helas call writer producing the Fortran calls.
    number: sequential index of this matrix element (used for the
        'int' prefix mode).
    Returns the number of generated helas calls (0 when the directory
    was skipped because a symmetric copy already exists).
    """

    cwd = os.getcwd()
    # Create the directory PN_xx_xxxxx in the specified path
    dirpath = pjoin(self.dir_path, 'SubProcesses', \
                    "P%s" % matrix_element.get('processes')[0].shell_string())

    if self.opt['sa_symmetry']:
        # avoid symmetric output: if a directory for any permutation of
        # the final-state legs (or mirrored initial state) already
        # exists, skip this subprocess entirely.
        for i,proc in enumerate(matrix_element.get('processes')):

            tag = proc.get_tag()
            legs = proc.get('legs')[:]
            leg0 = proc.get('legs')[0]
            leg1 = proc.get('legs')[1]
            if not leg1.get('state'):
                proc.get('legs')[0] = leg1
                proc.get('legs')[1] = leg0
            flegs = proc.get('legs')[2:]
            for perm in itertools.permutations(flegs):
                for i,p in enumerate(perm):
                    proc.get('legs')[i+2] = p
                dirpath2 = pjoin(self.dir_path, 'SubProcesses', \
                                 "P%s" % proc.shell_string())
                # restore original order
                proc.get('legs')[2:] = legs[2:]
                if os.path.exists(dirpath2):
                    proc.get('legs')[:] = legs
                    return 0
            proc.get('legs')[:] = legs

    try:
        os.mkdir(dirpath)
    except os.error as error:
        logger.warning(error.strerror + " " + dirpath)

    #try:
    #    os.chdir(dirpath)
    #except os.error:
    #    logger.error('Could not cd to directory %s' % dirpath)
    #    return 0

    logger.info('Creating files in directory %s' % dirpath)

    # Extract number of external particles
    (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

    # Create the matrix.f file and the nexternal.inc file
    if self.opt['export_format']=='standalone_msP':
        filename = pjoin(dirpath, 'matrix_prod.f')
    else:
        filename = pjoin(dirpath, 'matrix.f')

    # optional routine-name prefix (for f2py multi-process linking)
    proc_prefix = ''
    if 'prefix' in self.cmd_options:
        if self.cmd_options['prefix'] == 'int':
            proc_prefix = 'M%s_' % number
        elif self.cmd_options['prefix'] == 'proc':
            proc_prefix = matrix_element.get('processes')[0].shell_string().split('_',1)[1]
        else:
            raise Exception, '--prefix options supports only \'int\' and \'proc\''
        for proc in matrix_element.get('processes'):
            ids = [l.get('id') for l in proc.get('legs_with_decays')]
            self.prefix_info[tuple(ids)] = [proc_prefix, proc.get_tag()]

    calls = self.write_matrix_element_v4(
        writers.FortranWriter(filename),
        matrix_element,
        fortran_model,
        proc_prefix=proc_prefix)

    # extra include files needed by the MadSpin production variants
    if self.opt['export_format'] == 'standalone_msP':
        filename = pjoin(dirpath,'configs_production.inc')
        mapconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(dirpath,'props_production.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = pjoin(dirpath,'nexternal_prod.inc')
        self.write_nexternal_madspin(writers.FortranWriter(filename),
                                     nexternal, ninitial)

    if self.opt['export_format']=='standalone_msF':
        filename = pjoin(dirpath, 'helamp.inc')
        ncomb=matrix_element.get_helicity_combinations()
        self.write_helamp_madspin(writers.FortranWriter(filename),
                                  ncomb)

    filename = pjoin(dirpath, 'nexternal.inc')
    self.write_nexternal_file(writers.FortranWriter(filename),
                              nexternal, ninitial)

    filename = pjoin(dirpath, 'pmass.inc')
    self.write_pmass_file(writers.FortranWriter(filename),
                          matrix_element)

    filename = pjoin(dirpath, 'ngraphs.inc')
    self.write_ngraphs_file(writers.FortranWriter(filename),
                            len(matrix_element.get_all_amplitudes()))

    # Generate diagrams (unless eps output was explicitly disabled)
    if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
        filename = pjoin(dirpath, "matrix.ps")
        plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                          get('diagrams'),
                                          filename,
                                          model=matrix_element.get('processes')[0].\
                                          get('model'),
                                          amplitude=True)
        logger.info("Generating Feynman diagrams for " + \
                    matrix_element.get('processes')[0].nice_string())
        plot.draw()

    linkfiles = ['check_sa.f', 'coupl.inc']

    # with a prefix, check_sa.f must call the prefixed smatrix routine,
    # so a patched private copy replaces the symbolic link
    if proc_prefix and os.path.exists(pjoin(dirpath, '..', 'check_sa.f')):
        text = open(pjoin(dirpath, '..', 'check_sa.f')).read()
        pat = re.compile('smatrix', re.I)
        new_text, n = re.subn(pat, '%ssmatrix' % proc_prefix, text)
        with open(pjoin(dirpath, 'check_sa.f'),'w') as f:
            f.write(new_text)
        linkfiles.pop(0)

    for file in linkfiles:
        ln('../%s' % file, cwd=dirpath)
    ln('../makefileP', name='makefile', cwd=dirpath)
    # Return to original PWD
    #os.chdir(cwd)

    if not calls:
        calls = 0
    return calls


#===========================================================================
# write_source_makefile
#===========================================================================
def write_source_makefile(self, writer):
    """Write the Source/makefile for the StandAlone directory.

    Fills the madevent makefile template with the DHELAS/MODEL library
    targets; returns the substitution dict (for subclass reuse).
    """

    template_path = pjoin(_file_path, 'iolibs', 'template_files',
                          'madevent_makefile_source')

    replace_dict = {
        'libraries': '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)',
        'model': '''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n''',
        'additional_dsample': '',
        'additional_dependencies': '',
    }

    text = open(template_path).read() % replace_dict

    if writer:
        writer.write(text)

    return replace_dict

#===========================================================================
# write_matrix_element_v4
#===========================================================================
def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                            write=True, proc_prefix=''):
    """Export a matrix element to a matrix.f file in MG4 standalone format.
    If write is False, just return the replace_dict and do not write
    anything.

    writer: a FortranWriter (or None together with write=False).
    matrix_element: the HelasMatrixElement to export.
    fortran_model: helas call writer generating the amplitude calls.
    proc_prefix: optional prefix for all Fortran routine names.
    Returns the number of non-comment helas calls when writing, else the
    filled replace_dict.
    """


    if not matrix_element.get('processes') or \
           not matrix_element.get('diagrams'):
        return 0

    if writer:
        if not isinstance(writer, writers.FortranWriter):
            raise writers.FortranWriter.FortranWriterError(\
                "writer not FortranWriter but %s" % type(writer))
        # Set lowercase/uppercase Fortran code
        writers.FortranWriter.downcase = False


    if not self.opt.has_key('sa_symmetry'):
        self.opt['sa_symmetry']=False


    # The proc_id is for MadEvent grouping which is never used in SA.
    replace_dict = {'global_variable':'', 'amp2_lines':'',
                    'proc_prefix':proc_prefix, 'proc_id':''}

    # Extract helas calls
    helas_calls = fortran_model.get_matrix_element_calls(\
        matrix_element)

    replace_dict['helas_calls'] = "\n".join(helas_calls)

    # Extract version number and date from VERSION file
    info_lines = self.get_mg5_info_lines()
    replace_dict['info_lines'] = info_lines

    # Extract process info lines
    process_lines = self.get_process_info_lines(matrix_element)
    replace_dict['process_lines'] = process_lines

    # Extract number of external particles
    (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
    replace_dict['nexternal'] = nexternal
    replace_dict['nincoming'] = ninitial

    # Extract ncomb (number of helicity combinations)
    ncomb = matrix_element.get_helicity_combinations()
    replace_dict['ncomb'] = ncomb

    # Extract helicity lines
    helicity_lines = self.get_helicity_lines(matrix_element)
    replace_dict['helicity_lines'] = helicity_lines

    # Extract overall denominator
    # Averaging initial state color, spin, and identical FS particles
    replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element)

    # Extract ngraphs
    ngraphs = matrix_element.get_number_of_amplitudes()
    replace_dict['ngraphs'] = ngraphs

    # Extract nwavefuncs
    nwavefuncs = matrix_element.get_number_of_wavefunctions()
    replace_dict['nwavefuncs'] = nwavefuncs

    # Extract ncolor
    ncolor = max(1, len(matrix_element.get('color_basis')))
    replace_dict['ncolor'] = ncolor

    replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor()
    replace_dict['beamone_helavgfactor'], replace_dict['beamtwo_helavgfactor'] =\
        matrix_element.get_beams_hel_avg_factor()

    # Extract color data lines
    color_data_lines = self.get_color_data_lines(matrix_element)
    replace_dict['color_data_lines'] = "\n".join(color_data_lines)

    if self.opt['export_format']=='standalone_msP':
        # For MadSpin need to return the AMP2
        amp2_lines = self.get_amp2_lines(matrix_element, [] )
        replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
        replace_dict['global_variable'] = \
            " Double Precision amp2(NGRAPHS)\n common/to_amps/ amp2\n"

    # JAMP definition, depends on the number of independent split orders
    split_orders=matrix_element.get('processes')[0].get('split_orders')

    if len(split_orders)==0:
        replace_dict['nSplitOrders']=''
        # Extract JAMP lines
        jamp_lines = self.get_JAMP_lines(matrix_element)
        # Consider the output of a dummy order 'ALL_ORDERS' for which we
        # set all amplitude order to weight 1 and only one squared order
        # contribution which is of course ALL_ORDERS=2.
        squared_orders = [(2,),]
        amp_orders = [((1,),tuple(range(1,ngraphs+1)))]
        replace_dict['chosen_so_configs'] = '.TRUE.'
        replace_dict['nSqAmpSplitOrders']=1
        replace_dict['split_order_str_list']=''
    else:
        squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
        replace_dict['nAmpSplitOrders']=len(amp_orders)
        replace_dict['nSqAmpSplitOrders']=len(squared_orders)
        replace_dict['nSplitOrders']=len(split_orders)
        replace_dict['split_order_str_list']=str(split_orders)
        amp_so = self.get_split_orders_lines(
            [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS')
        sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS')
        replace_dict['ampsplitorders']='\n'.join(amp_so)
        replace_dict['sqsplitorders']='\n'.join(sqamp_so)
        jamp_lines = self.get_JAMP_lines_split_order(\
            matrix_element,amp_orders,split_order_names=split_orders)

        # Now setup the array specifying what squared split order is chosen
        replace_dict['chosen_so_configs']=self.set_chosen_SO_index(
            matrix_element.get('processes')[0],squared_orders)

        # For convenience we also write the driver check_sa_splitOrders.f
        # that explicitely writes out the contribution from each squared order.
        # The original driver still works and is compiled with 'make' while
        # the splitOrders one is compiled with 'make check_sa_born_splitOrders'
        check_sa_writer=writers.FortranWriter('check_sa_born_splitOrders.f')
        self.write_check_sa_splitOrders(squared_orders,split_orders,
            nexternal,ninitial,proc_prefix,check_sa_writer)

    if write:
        writers.FortranWriter('nsqso_born.inc').writelines(
            """INTEGER NSQSO_BORN
PARAMETER (NSQSO_BORN=%d)"""%replace_dict['nSqAmpSplitOrders'])

    replace_dict['jamp_lines'] = '\n'.join(jamp_lines)

    # pick the Fortran template matching the export format
    matrix_template = self.matrix_template
    if self.opt['export_format']=='standalone_msP' :
        matrix_template = 'matrix_standalone_msP_v4.inc'
    elif self.opt['export_format']=='standalone_msF':
        matrix_template = 'matrix_standalone_msF_v4.inc'
    elif self.opt['export_format']=='matchbox':
        replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id')
        replace_dict["color_information"] = self.get_color_string_lines(matrix_element)

    if len(split_orders)>0:
        if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']:
            logger.debug("Warning: The export format %s is not "+\
                " available for individual ME evaluation of given coupl. orders."+\
                " Only the total ME will be computed.", self.opt['export_format'])
        elif self.opt['export_format'] in ['madloop_matchbox']:
            replace_dict["color_information"] = self.get_color_string_lines(matrix_element)
            matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc"
        else:
            matrix_template = "matrix_standalone_splitOrders_v4.inc"

    replace_dict['template_file'] = pjoin(_file_path, 'iolibs', 'template_files', matrix_template)
    replace_dict['template_file2'] = pjoin(_file_path, \
        'iolibs/template_files/split_orders_helping_functions.inc')
    if write and writer:
        path = replace_dict['template_file']
        content = open(path).read()
        content = content % replace_dict
        # Write the file
        writer.writelines(content)
        # Add the helper functions.
        if len(split_orders)>0:
            content = '\n' + open(replace_dict['template_file2'])\
                      .read()%replace_dict
            writer.writelines(content)
        # count real calls, skipping lines commented out with '#'
        # (py2: filter returns a list here)
        return len(filter(lambda call: call.find('#') != 0, helas_calls))
    else:
        replace_dict['return_value'] = len(filter(lambda call: call.find('#') != 0, helas_calls))
        return replace_dict # for subclass update
2531 - def write_check_sa_splitOrders(self,squared_orders, split_orders, nexternal, 2532 nincoming, proc_prefix, writer):
2533 """ Write out a more advanced version of the check_sa drivers that 2534 individually returns the matrix element for each contributing squared 2535 order.""" 2536 2537 check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \ 2538 'template_files', 'check_sa_splitOrders.f')).read() 2539 printout_sq_orders=[] 2540 for i, squared_order in enumerate(squared_orders): 2541 sq_orders=[] 2542 for j, sqo in enumerate(squared_order): 2543 sq_orders.append('%s=%d'%(split_orders[j],sqo)) 2544 printout_sq_orders.append(\ 2545 "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\ 2546 %(i+1,' '.join(sq_orders),i+1)) 2547 printout_sq_orders='\n'.join(printout_sq_orders) 2548 replace_dict = {'printout_sqorders':printout_sq_orders, 2549 'nSplitOrders':len(squared_orders), 2550 'nexternal':nexternal, 2551 'nincoming':nincoming, 2552 'proc_prefix':proc_prefix} 2553 2554 if writer: 2555 writer.writelines(check_sa_content % replace_dict) 2556 else: 2557 return replace_dict
2558
class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
    """class to take care of exporting a set of matrix element for the Matchbox
    code in the case of Born only routine"""

    # Default exporter options; 'sa_symmetry' is on for Matchbox output.
    default_opt = {'clean': False, 'complex_mass':False,
                   'export_format':'matchbox', 'mp': False,
                   'sa_symmetry': True}

    #specific template of the born
    matrix_template = "matrix_standalone_matchbox.inc"

    @staticmethod
    def get_color_string_lines(matrix_element):
        """Return the color matrix definition lines for this matrix element. Split
        rows in chunks of size n.

        The returned text is a Fortran if/elseif ladder mapping the pair
        (in1, in2) -- color-flow index and argument slot -- to the color
        indices of the T/Tr structures; -1 marks 'no entry'.
        """

        if not matrix_element.get('color_matrix'):
            # No color matrix at all: trivial color factor.
            return "\n".join(["out = 1"])

        #start the real work
        color_denominators = matrix_element.get('color_matrix').\
                                                         get_line_denominators()
        matrix_strings = []
        my_cs = color.ColorString()
        for i_color in xrange(len(color_denominators)):
            # Then write the numerators for the matrix elements
            my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color])
            t_str=repr(my_cs)
            t_match=re.compile(r"(\w+)\(([\s\d+\,]*)\)")
            # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall:
            # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')]
            all_matches = t_match.findall(t_str)
            # output maps (color-flow index, argument position) -> color index
            output = {}
            arg=[]
            for match in all_matches:
                ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')]
                if ctype in ['ColorOne' ]:
                    # Trivial structure: contributes no indices.
                    continue
                if ctype not in ['T', 'Tr' ]:
                    raise MadGraph5Error, 'Color Structure not handled by Matchbox: %s'  % ctype
                # '0' acts as a separator between successive structures.
                tmparg += ['0']
                arg +=tmparg
            for j, v in enumerate(arg):
                output[(i_color,j)] = v

            # NOTE(review): plain dict iteration here, so the order of the
            # elseif branches is not deterministic in Python 2 -- the mapping
            # itself is unaffected since every (in1, in2) pair is distinct.
            for key in output:
                if matrix_strings == []:
                    #first entry
                    matrix_strings.append("""
          if (in1.eq.%s.and.in2.eq.%s)then
          out = %s
          """ % (key[0], key[1], output[key]))
                else:
                    #not first entry
                    matrix_strings.append("""
          elseif (in1.eq.%s.and.in2.eq.%s)then
          out = %s
          """ % (key[0], key[1], output[key]))
        if len(matrix_strings):
            # Close the ladder: unmatched lookups return -1.
            matrix_strings.append(" else \n out = - 1 \n endif")
        else:
            return "\n out = - 1 \n "
        return "\n".join(matrix_strings)

    def make(self,*args,**opts):
        # Matchbox output is compiled externally: disable the make step.
        pass

    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1,
                       JAMP_formatLC=None):
        """Adding leading color part of the colorflow

        Writes the full-color JAMPs first, then repeats the computation for
        the leading-color subset (coefficients with no 1/NC power) using the
        LN-prefixed format.
        """

        if not JAMP_formatLC:
            JAMP_formatLC= "LN%s" % JAMP_format

        error_msg="Malformed '%s' argument passed to the get_JAMP_lines"
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            col_amps=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                col_amps=col_amps
            else:
                raise MadGraph5Error, error_msg % 'col_amps'
        else:
            raise MadGraph5Error, error_msg % 'col_amps'

        # Full-color JAMP definitions from the parent exporter.
        text = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps,
                                    JAMP_format=JAMP_format,
                                    AMP_format=AMP_format,
                                    split=-1)

        # Filter the col_ampls to generate only those without any 1/NC terms
        # (coefficient[3] is the power of 1/NC -- 0 means leading color).
        LC_col_amps = []
        for coeff_list in col_amps:
            to_add = []
            for (coefficient, amp_number) in coeff_list:
                if coefficient[3]==0:
                    to_add.append( (coefficient, amp_number) )
            LC_col_amps.append(to_add)

        text += super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps,
                                    JAMP_format=JAMP_formatLC,
                                    AMP_format=AMP_format,
                                    split=-1)

        return text
2669
2670 2671 2672 2673 #=============================================================================== 2674 # ProcessExporterFortranMW 2675 #=============================================================================== 2676 -class ProcessExporterFortranMW(ProcessExporterFortran):
2677 """Class to take care of exporting a set of matrix elements to 2678 MadGraph v4 - MadWeight format.""" 2679 2680 matrix_file="matrix_standalone_v4.inc" 2681
    def copy_template(self, model):
        """Additional actions needed for setup of Template

        On top of the generic Fortran template copy, this installs the
        MadWeight-specific sources and scripts and generates the Source
        makefile, run_config.inc and cuts.f.
        """

        super(ProcessExporterFortranMW, self).copy_template(model)

        # Add the MW specific file (symlinks=True for both copytree calls)
        shutil.copytree(pjoin(MG5DIR,'Template','MadWeight'),
                        pjoin(self.dir_path, 'Source','MadWeight'), True)
        shutil.copytree(pjoin(MG5DIR,'madgraph','madweight'),
                        pjoin(self.dir_path, 'bin','internal','madweight'), True)
        # setrun.f / run.inc live in Source for MadWeight, not in its src dir.
        files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','setrun.f'),
                 pjoin(self.dir_path, 'Source','setrun.f'))
        files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','run.inc'),
                 pjoin(self.dir_path, 'Source','run.inc'))
        # File created from Template (Different in some child class)
        filename = os.path.join(self.dir_path,'Source','run_config.inc')
        self.write_run_config_file(writers.FortranWriter(filename))

        # Run the MadWeight conversion script, discarding its output.
        try:
            subprocess.call([os.path.join(self.dir_path, 'Source','MadWeight','bin','internal','pass_to_madweight')],
                            stdout = os.open(os.devnull, os.O_RDWR),
                            stderr = os.open(os.devnull, os.O_RDWR),
                            cwd=self.dir_path)
        except OSError:
            # Probably madweight already called
            pass

        # Copy the different python file in the Template
        self.copy_python_file()
        # create the appropriate cuts.f
        self.get_mw_cuts_version()

        # add the makefile in Source directory
        filename = os.path.join(self.dir_path,'Source','makefile')
        self.write_source_makefile(writers.FortranWriter(filename))
2718 2719 2720 2721 2722 #=========================================================================== 2723 # convert_model 2724 #===========================================================================
2725 - def convert_model(self, model, wanted_lorentz = [], 2726 wanted_couplings = []):
2727 2728 super(ProcessExporterFortranMW,self).convert_model(model, 2729 wanted_lorentz, wanted_couplings) 2730 2731 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 2732 try: 2733 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 2734 except OSError as error: 2735 pass 2736 model_path = model.get('modelpath') 2737 # This is not safe if there is a '##' or '-' in the path. 2738 shutil.copytree(model_path, 2739 pjoin(self.dir_path,'bin','internal','ufomodel'), 2740 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 2741 if hasattr(model, 'restrict_card'): 2742 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 2743 'restrict_default.dat') 2744 if isinstance(model.restrict_card, check_param_card.ParamCard): 2745 model.restrict_card.write(out_path) 2746 else: 2747 files.cp(model.restrict_card, out_path)
2748 2749 #=========================================================================== 2750 # generate_subprocess_directory 2751 #===========================================================================
2752 - def copy_python_file(self):
2753 """copy the python file require for the Template""" 2754 2755 # madevent interface 2756 cp(_file_path+'/interface/madweight_interface.py', 2757 self.dir_path+'/bin/internal/madweight_interface.py') 2758 cp(_file_path+'/interface/extended_cmd.py', 2759 self.dir_path+'/bin/internal/extended_cmd.py') 2760 cp(_file_path+'/interface/common_run_interface.py', 2761 self.dir_path+'/bin/internal/common_run_interface.py') 2762 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 2763 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 2764 cp(_file_path+'/iolibs/save_load_object.py', 2765 self.dir_path+'/bin/internal/save_load_object.py') 2766 cp(_file_path+'/madevent/gen_crossxhtml.py', 2767 self.dir_path+'/bin/internal/gen_crossxhtml.py') 2768 cp(_file_path+'/madevent/sum_html.py', 2769 self.dir_path+'/bin/internal/sum_html.py') 2770 cp(_file_path+'/various/FO_analyse_card.py', 2771 self.dir_path+'/bin/internal/FO_analyse_card.py') 2772 cp(_file_path+'/iolibs/file_writers.py', 2773 self.dir_path+'/bin/internal/file_writers.py') 2774 #model file 2775 cp(_file_path+'../models/check_param_card.py', 2776 self.dir_path+'/bin/internal/check_param_card.py') 2777 2778 #madevent file 2779 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 2780 cp(_file_path+'/various/lhe_parser.py', 2781 self.dir_path+'/bin/internal/lhe_parser.py') 2782 2783 cp(_file_path+'/various/banner.py', 2784 self.dir_path+'/bin/internal/banner.py') 2785 cp(_file_path+'/various/shower_card.py', 2786 self.dir_path+'/bin/internal/shower_card.py') 2787 cp(_file_path+'/various/cluster.py', 2788 self.dir_path+'/bin/internal/cluster.py') 2789 2790 # logging configuration 2791 cp(_file_path+'/interface/.mg5_logging.conf', 2792 self.dir_path+'/bin/internal/me5_logging.conf') 2793 cp(_file_path+'/interface/coloring_logging.py', 2794 self.dir_path+'/bin/internal/coloring_logging.py')
2795 2796 2797 #=========================================================================== 2798 # Change the version of cuts.f to the one compatible with MW 2799 #===========================================================================
    def get_mw_cuts_version(self, outpath=None):
        """create the appropriate cuts.f
        This is based on the one associated to ME output but:
        1) No clustering (=> remove initcluster/setclscales)
        2) Adding the definition of cut_bw at the file.

        outpath: None (write to SubProcesses/cuts.f), a path string, or an
        already-open file-like object.
        """

        # Source template: the standard LO cuts.f shipped with MG5.
        template = open(pjoin(MG5DIR,'Template','LO','SubProcesses','cuts.f'))

        text = StringIO()
        #1) remove all dependencies in ickkw >1:
        # nb_if tracks the if/endif nesting depth of the xqcut block being
        # stripped; lines are copied only while nb_if == 0.
        nb_if = 0
        for line in template:
            if 'if(xqcut.gt.0d0' in line:
                nb_if = 1
            if nb_if == 0:
                text.write(line)
                continue
            # NOTE(review): if the xqcut line itself also matches the
            # if(...)then pattern below, the depth is incremented twice for
            # one block opener -- TODO confirm the counter returns to 0 so
            # the remainder of the file is not dropped.
            if re.search(r'if\(.*\)\s*then', line):
                nb_if += 1
            elif 'endif' in line:
                nb_if -= 1

        #2) add fake cut_bw (have to put the true one later)
        text.write("""
      logical function cut_bw(p)
      include 'madweight_param.inc'
      double precision p(*)
      if (bw_cut) then
        cut_bw = .true.
      else
        stop 1
      endif
      return
      end
        """)

        final = text.getvalue()
        #3) remove the call to initcluster (commented out, not deleted):
        template = final.replace('call initcluster', '! Remove for MW!call initcluster')
        # MadWeight ships maxparticles.inc in place of genps.inc.
        template = template.replace('genps.inc', 'maxparticles.inc')
        #Now we can write it
        if not outpath:
            fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w')
        elif isinstance(outpath, str):
            fsock = open(outpath, 'w')
        else:
            # assume an open file-like object
            fsock = outpath
        fsock.write(template)
2849 2850 2851 2852 #=========================================================================== 2853 # Make the Helas and Model directories for Standalone directory 2854 #===========================================================================
2855 - def make(self):
2856 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 2857 everything for running madweight 2858 """ 2859 2860 source_dir = os.path.join(self.dir_path, "Source") 2861 logger.info("Running make for Helas") 2862 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2863 logger.info("Running make for Model") 2864 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 2865 logger.info("Running make for PDF") 2866 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 2867 logger.info("Running make for CERNLIB") 2868 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 2869 logger.info("Running make for GENERIC") 2870 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 2871 logger.info("Running make for blocks") 2872 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 2873 logger.info("Running make for tools") 2874 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
2875 2876 #=========================================================================== 2877 # Create proc_card_mg5.dat for MadWeight directory 2878 #===========================================================================
2879 - def finalize(self, matrix_elements, history, mg5options, flaglist):
2880 """Finalize Standalone MG4 directory by generation proc_card_mg5.dat""" 2881 2882 compiler = {'fortran': mg5options['fortran_compiler'], 2883 'cpp': mg5options['cpp_compiler'], 2884 'f2py': mg5options['f2py_compiler']} 2885 2886 2887 2888 #proc_charac 2889 self.create_proc_charac() 2890 2891 # Write maxparticles.inc based on max of ME's/subprocess groups 2892 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 2893 self.write_maxparticles_file(writers.FortranWriter(filename), 2894 matrix_elements) 2895 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 2896 pjoin(self.dir_path, 'Source','MadWeight','blocks')) 2897 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 2898 pjoin(self.dir_path, 'Source','MadWeight','tools')) 2899 2900 self.set_compiler(compiler) 2901 self.make() 2902 2903 # Write command history as proc_card_mg5 2904 if os.path.isdir(os.path.join(self.dir_path, 'Cards')): 2905 output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat') 2906 history.write(output_file) 2907 2908 ProcessExporterFortran.finalize(self, matrix_elements, 2909 history, mg5options, flaglist)
2910 2911 2912 2913 #=========================================================================== 2914 # create the run_card for MW 2915 #===========================================================================
2916 - def create_run_card(self, matrix_elements, history):
2917 """ """ 2918 2919 run_card = banner_mod.RunCard() 2920 2921 # pass to default for MW 2922 run_card["run_tag"] = "\'not_use\'" 2923 run_card["fixed_ren_scale"] = "T" 2924 run_card["fixed_fac_scale"] = "T" 2925 run_card.remove_all_cut() 2926 2927 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 2928 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2929 python_template=True) 2930 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'), 2931 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2932 python_template=True)
2933 2934 #=========================================================================== 2935 # export model files 2936 #===========================================================================
2937 - def export_model_files(self, model_path):
2938 """export the model dependent files for V4 model""" 2939 2940 super(ProcessExporterFortranMW,self).export_model_files(model_path) 2941 # Add the routine update_as_param in v4 model 2942 # This is a function created in the UFO 2943 text=""" 2944 subroutine update_as_param() 2945 call setpara('param_card.dat',.false.) 2946 return 2947 end 2948 """ 2949 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 2950 ff.write(text) 2951 ff.close() 2952 2953 # Modify setrun.f 2954 text = open(os.path.join(self.dir_path,'Source','setrun.f')).read() 2955 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 2956 fsock = open(os.path.join(self.dir_path,'Source','setrun.f'), 'w') 2957 fsock.write(text) 2958 fsock.close() 2959 2960 # Modify initialization.f 2961 text = open(os.path.join(self.dir_path,'SubProcesses','initialization.f')).read() 2962 text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)') 2963 fsock = open(os.path.join(self.dir_path,'SubProcesses','initialization.f'), 'w') 2964 fsock.write(text) 2965 fsock.close() 2966 2967 2968 self.make_model_symbolic_link()
2969 2970 #=========================================================================== 2971 # generate_subprocess_directory 2972 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model, number):
        """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format,
        including the necessary matrix.f and nexternal.inc files

        Returns the number of helas calls written for matrix.f (0 if none).
        """

        cwd = os.getcwd()
        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = os.path.join(self.dir_path, 'SubProcesses', \
                       "P%s" % matrix_element.get('processes')[0].shell_string())

        try:
            os.mkdir(dirpath)
        except os.error as error:
            # Directory may already exist; warn and reuse it.
            logger.warning(error.strerror + " " + dirpath)

        #try:
        #    os.chdir(dirpath)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % dirpath)
        #    return 0

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        filename = pjoin(dirpath,'matrix.f')
        calls,ncolor = self.write_matrix_element_v4(
                                    writers.FortranWriter(filename),
                                    matrix_element,
                                    fortran_model)

        filename = pjoin(dirpath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        # configs.inc also yields the s/t-channel info reused by props.inc.
        filename = pjoin(dirpath, 'configs.inc')
        mapconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(dirpath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(dirpath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(matrix_element.get_all_amplitudes()))

        filename = pjoin(dirpath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                ncolor,
                                len(matrix_element.get('processes')),
                                1)

        filename = pjoin(dirpath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                   len(matrix_element.get('diagrams')),
                                   )

        # Generate diagrams (skipped only when output option noeps == 'True')
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = pjoin(dirpath, "matrix.ps")
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                 get('diagrams'),
                                              filename,
                                              model=matrix_element.get('processes')[0].\
                                                 get('model'),
                                              amplitude='')
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        #import genps.inc and maxconfigs.inc into Subprocesses
        ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False)
        #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False)

        # Shared files linked from the SubProcesses level into this P-dir.
        linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f', 'genps.inc']

        for file in linkfiles:
            ln('../%s' % file, starting_dir=cwd)

        # Expose selected includes back to Source (and phasespace one level up).
        ln('nexternal.inc', '../../Source', log=False, cwd=dirpath)
        ln('leshouche.inc', '../../Source', log=False, cwd=dirpath)
        ln('maxamps.inc', '../../Source', log=False, cwd=dirpath)
        ln('phasespace.inc', '../', log=True, cwd=dirpath)
        # Return to original PWD
        #os.chdir(cwd)

        if not calls:
            calls = 0
        return calls
3080 3081 #=========================================================================== 3082 # write_matrix_element_v4 3083 #===========================================================================
3084 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
3085 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 3086 3087 if not matrix_element.get('processes') or \ 3088 not matrix_element.get('diagrams'): 3089 return 0 3090 3091 if writer: 3092 if not isinstance(writer, writers.FortranWriter): 3093 raise writers.FortranWriter.FortranWriterError(\ 3094 "writer not FortranWriter") 3095 3096 # Set lowercase/uppercase Fortran code 3097 writers.FortranWriter.downcase = False 3098 3099 replace_dict = {} 3100 3101 # Extract version number and date from VERSION file 3102 info_lines = self.get_mg5_info_lines() 3103 replace_dict['info_lines'] = info_lines 3104 3105 # Extract process info lines 3106 process_lines = self.get_process_info_lines(matrix_element) 3107 replace_dict['process_lines'] = process_lines 3108 3109 # Set proc_id 3110 replace_dict['proc_id'] = proc_id 3111 3112 # Extract number of external particles 3113 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3114 replace_dict['nexternal'] = nexternal 3115 3116 # Extract ncomb 3117 ncomb = matrix_element.get_helicity_combinations() 3118 replace_dict['ncomb'] = ncomb 3119 3120 # Extract helicity lines 3121 helicity_lines = self.get_helicity_lines(matrix_element) 3122 replace_dict['helicity_lines'] = helicity_lines 3123 3124 # Extract overall denominator 3125 # Averaging initial state color, spin, and identical FS particles 3126 den_factor_line = self.get_den_factor_line(matrix_element) 3127 replace_dict['den_factor_line'] = den_factor_line 3128 3129 # Extract ngraphs 3130 ngraphs = matrix_element.get_number_of_amplitudes() 3131 replace_dict['ngraphs'] = ngraphs 3132 3133 # Extract nwavefuncs 3134 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3135 replace_dict['nwavefuncs'] = nwavefuncs 3136 3137 # Extract ncolor 3138 ncolor = max(1, len(matrix_element.get('color_basis'))) 3139 replace_dict['ncolor'] = ncolor 3140 3141 # Extract color data lines 3142 color_data_lines = self.get_color_data_lines(matrix_element) 3143 
replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3144 3145 # Extract helas calls 3146 helas_calls = fortran_model.get_matrix_element_calls(\ 3147 matrix_element) 3148 3149 replace_dict['helas_calls'] = "\n".join(helas_calls) 3150 3151 # Extract JAMP lines 3152 jamp_lines = self.get_JAMP_lines(matrix_element) 3153 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 3154 3155 replace_dict['template_file'] = os.path.join(_file_path, \ 3156 'iolibs/template_files/%s' % self.matrix_file) 3157 replace_dict['template_file2'] = '' 3158 3159 if writer: 3160 file = open(replace_dict['template_file']).read() 3161 file = file % replace_dict 3162 # Write the file 3163 writer.writelines(file) 3164 return len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor 3165 else: 3166 replace_dict['return_value'] = (len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor)
3167 3168 #=========================================================================== 3169 # write_source_makefile 3170 #===========================================================================
3171 - def write_source_makefile(self, writer):
3172 """Write the nexternal.inc file for madweight""" 3173 3174 3175 path = os.path.join(_file_path,'iolibs','template_files','madweight_makefile_source') 3176 set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)' 3177 text = open(path).read() % {'libraries': set_of_lib} 3178 writer.write(text) 3179 3180 return True
3181
3182 - def write_phasespace_file(self, writer, nb_diag):
3183 """ """ 3184 3185 template = """ include 'maxparticles.inc' 3186 integer max_branches 3187 parameter (max_branches=max_particles-1) 3188 integer max_configs 3189 parameter (max_configs=%(nb_diag)s) 3190 3191 c channel position 3192 integer config_pos,perm_pos 3193 common /to_config/config_pos,perm_pos 3194 3195 """ 3196 3197 writer.write(template % {'nb_diag': nb_diag})
3198 3199 3200 #=========================================================================== 3201 # write_auto_dsig_file 3202 #===========================================================================
    def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
        """Write the auto_dsig.f file for the differential cross section
        calculation, includes pdf call information (MadWeight format)

        Returns 0 for an empty matrix element, and the substitution
        dictionary when no writer is given.
        """

        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        nexternal, ninitial = matrix_element.get_nexternal_ninitial()

        # Only decays (1 initial particle) and collisions (2) are supported.
        if ninitial < 1 or ninitial > 2:
            raise writers.FortranWriter.FortranWriterError, \
                  """Need ninitial = 1 or 2 to write auto_dsig file"""

        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Set proc_id
        replace_dict['proc_id'] = proc_id
        replace_dict['numproc'] = 1

        # Set dsig_line
        if ninitial == 1:
            # No conversion, since result of decay should be given in GeV
            dsig_line = "pd(0)*dsiguu"
        else:
            # Convert result (in GeV) to pb
            dsig_line = "pd(0)*conv*dsiguu"

        replace_dict['dsig_line'] = dsig_line

        # Extract pdf lines
        pdf_vars, pdf_data, pdf_lines = \
                 self.get_pdf_lines(matrix_element, ninitial, proc_id != "")
        replace_dict['pdf_vars'] = pdf_vars
        replace_dict['pdf_data'] = pdf_data
        replace_dict['pdf_lines'] = pdf_lines

        # Lines that differ between subprocess group and regular
        if proc_id:
            replace_dict['numproc'] = int(proc_id)
            replace_dict['passcuts_begin'] = ""
            replace_dict['passcuts_end'] = ""
            # Set lines for subprocess group version
            # Set define_iconfigs_lines
            replace_dict['define_subdiag_lines'] = \
                 """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
                 COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
        else:
            # Regular (non-grouped) version wraps the result in PASSCUTS.
            replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN"
            replace_dict['passcuts_end'] = "ENDIF"
            replace_dict['define_subdiag_lines'] = ""

        if writer:
            file = open(os.path.join(_file_path, \
                              'iolibs/template_files/auto_dsig_mw.inc')).read()

            file = file % replace_dict
            # Write the file
            writer.writelines(file)
        else:
            return replace_dict
3272 #=========================================================================== 3273 # write_configs_file 3274 #===========================================================================
3275 - def write_configs_file(self, writer, matrix_element):
3276 """Write the configs.inc file for MadEvent""" 3277 3278 # Extract number of external particles 3279 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3280 3281 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 3282 mapconfigs = [c[0] for c in configs] 3283 model = matrix_element.get('processes')[0].get('model') 3284 return mapconfigs, self.write_configs_file_from_diagrams(writer, 3285 [[c[1]] for c in configs], 3286 mapconfigs, 3287 nexternal, ninitial,matrix_element, model)
3288 3289 #=========================================================================== 3290 # write_run_configs_file 3291 #===========================================================================
3292 - def write_run_config_file(self, writer):
3293 """Write the run_configs.inc file for MadWeight""" 3294 3295 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 3296 text = open(path).read() % {'chanperjob':'5'} 3297 writer.write(text) 3298 return True
3299 3300 #=========================================================================== 3301 # write_configs_file_from_diagrams 3302 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, matrix_element, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns the list of (s-channels, t-channels) kept for each config,
        reused later by write_props_file.
        """

        lines = []

        particle_dict = matrix_element.get('processes')[0].get('model').\
                        get('particle_dict')

        s_and_t_channels = []

        # Largest vertex size per config; configs containing vertices larger
        # than the overall minimum are skipped (only 3-vertices allowed).
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]

        minvert = min(vert_list) if vert_list!=[] else 0
        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Fictitious PDG code used for multiparticle-vertex propagators.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any([vert > minvert for vert in
                    [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
                # Only 3-vertices allowed in configs.inc
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial,model,new_pdg))
                else:
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = zip(*[s for s,t in stchannels])
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            #lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("*       %d       %d " % (nconfigs,
                                                   mapconfigs[iconfig]))

            for verts in allchannels:
                if verts in schannels:
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                # propagator number, then its two daughters
                line=str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1])
                # lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                #             (last_leg.get('number'), nconfigs, len(daughters),
                #              ",".join([str(d) for d in daughters])))

                if last_leg.get('id') == 21 and 21 not in particle_dict:
                    # Fake propagator used in multiparticle vertices
                    mass = 'zero'
                    width = 'zero'
                    pow_part = 0
                else:
                    if (last_leg.get('id')!=7):
                        particle = particle_dict[last_leg.get('id')]
                        # Get mass
                        mass = particle.get('mass')
                        # Get width
                        width = particle.get('width')
                    else : # fake propagator used in multiparticle vertices
                        mass= 'zero'
                        width= 'zero'

                line=line+" "+mass+" "+width+" "

                if verts in schannels:
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append(line+" S "+str(last_leg.get('id')))
                    # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                    #             (last_leg.get('number'), nconfigs, nsubprocs,
                    #              ",".join([str(d) for d in pdgs])))
                    # lines.append("data tprid(%d,%d)/0/" % \
                    #             (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append(line+" T "+str(last_leg.get('id')))
                    # lines.append("data tprid(%d,%d)/%d/" % \
                    #             (last_leg.get('number'), nconfigs,
                    #              abs(last_leg.get('id'))))
                    # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                    #             (last_leg.get('number'), nconfigs, nsubprocs,
                    #              ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        # lines.append("# Number of configs")
        # lines.append("data mapconfig(0)/%d/" % nconfigs)
        lines.append(" * ") # a line with just a star indicates this is the end of file
        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
3445
3446 3447 3448 #=============================================================================== 3449 # ProcessExporterFortranME 3450 #=============================================================================== 3451 -class ProcessExporterFortranME(ProcessExporterFortran):
3452 """Class to take care of exporting a set of matrix elements to 3453 MadEvent format.""" 3454 3455 matrix_file = "matrix_madevent_v4.inc" 3456 3457 # helper function for customise helas writter 3458 @staticmethod
3459 - def custom_helas_call(call, arg):
3460 if arg['mass'] == '%(M)s,%(W)s,': 3461 arg['mass'] = '%(M)s, fk_%(W)s,' 3462 elif '%(W)s' in arg['mass']: 3463 raise Exception 3464 return call, arg
3465
    def copy_template(self, model):
        """Additional actions needed for setup of Template

        On top of the base-class template copy this writes the
        run_config.inc include, the symmetry.f and addmothers.f helper
        sources, and copies the python files needed by the madevent
        interface into bin/internal.
        """

        super(ProcessExporterFortranME, self).copy_template(model)

        # File created from Template (Different in some child class)
        filename = pjoin(self.dir_path,'Source','run_config.inc')
        self.write_run_config_file(writers.FortranWriter(filename))

        # The next file are model dependant (due to SLAH convention)
        # NOTE(review): "SLAH" presumably means the SLHA convention -- confirm
        self.model_name = model.get('name')
        # Add the symmetry.f
        filename = pjoin(self.dir_path,'SubProcesses','symmetry.f')
        self.write_symmetry(writers.FortranWriter(filename))
        #
        filename = pjoin(self.dir_path,'SubProcesses','addmothers.f')
        self.write_addmothers(writers.FortranWriter(filename))
        # Copy the different python file in the Template
        self.copy_python_file()
3486 3487 3488 3489 3490 3491 3492 #=========================================================================== 3493 # generate_subprocess_directory 3494 #===========================================================================
3495 - def copy_python_file(self):
3496 """copy the python file require for the Template""" 3497 3498 # madevent interface 3499 cp(_file_path+'/interface/madevent_interface.py', 3500 self.dir_path+'/bin/internal/madevent_interface.py') 3501 cp(_file_path+'/interface/extended_cmd.py', 3502 self.dir_path+'/bin/internal/extended_cmd.py') 3503 cp(_file_path+'/interface/common_run_interface.py', 3504 self.dir_path+'/bin/internal/common_run_interface.py') 3505 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3506 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3507 cp(_file_path+'/iolibs/save_load_object.py', 3508 self.dir_path+'/bin/internal/save_load_object.py') 3509 cp(_file_path+'/iolibs/file_writers.py', 3510 self.dir_path+'/bin/internal/file_writers.py') 3511 #model file 3512 cp(_file_path+'../models/check_param_card.py', 3513 self.dir_path+'/bin/internal/check_param_card.py') 3514 3515 #copy all the file present in madevent directory 3516 for name in os.listdir(pjoin(_file_path, 'madevent')): 3517 if name not in ['__init__.py'] and name.endswith('.py'): 3518 cp(_file_path+'/madevent/'+name, self.dir_path+'/bin/internal/') 3519 3520 #madevent file 3521 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3522 cp(_file_path+'/various/lhe_parser.py', 3523 self.dir_path+'/bin/internal/lhe_parser.py') 3524 cp(_file_path+'/various/banner.py', 3525 self.dir_path+'/bin/internal/banner.py') 3526 cp(_file_path+'/various/histograms.py', 3527 self.dir_path+'/bin/internal/histograms.py') 3528 cp(_file_path+'/various/plot_djrs.py', 3529 self.dir_path+'/bin/internal/plot_djrs.py') 3530 cp(_file_path+'/various/systematics.py', self.dir_path+'/bin/internal/systematics.py') 3531 3532 cp(_file_path+'/various/cluster.py', 3533 self.dir_path+'/bin/internal/cluster.py') 3534 cp(_file_path+'/madevent/combine_runs.py', 3535 self.dir_path+'/bin/internal/combine_runs.py') 3536 # logging configuration 3537 cp(_file_path+'/interface/.mg5_logging.conf', 3538 
self.dir_path+'/bin/internal/me5_logging.conf') 3539 cp(_file_path+'/interface/coloring_logging.py', 3540 self.dir_path+'/bin/internal/coloring_logging.py') 3541 # shower card and FO_analyse_card. 3542 # Although not needed, it is imported by banner.py 3543 cp(_file_path+'/various/shower_card.py', 3544 self.dir_path+'/bin/internal/shower_card.py') 3545 cp(_file_path+'/various/FO_analyse_card.py', 3546 self.dir_path+'/bin/internal/FO_analyse_card.py')
3547 3548
3549 - def convert_model(self, model, wanted_lorentz = [], 3550 wanted_couplings = []):
3551 3552 super(ProcessExporterFortranME,self).convert_model(model, 3553 wanted_lorentz, wanted_couplings) 3554 3555 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 3556 try: 3557 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 3558 except OSError as error: 3559 pass 3560 model_path = model.get('modelpath') 3561 # This is not safe if there is a '##' or '-' in the path. 3562 shutil.copytree(model_path, 3563 pjoin(self.dir_path,'bin','internal','ufomodel'), 3564 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 3565 if hasattr(model, 'restrict_card'): 3566 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 3567 'restrict_default.dat') 3568 if isinstance(model.restrict_card, check_param_card.ParamCard): 3569 model.restrict_card.write(out_path) 3570 else: 3571 files.cp(model.restrict_card, out_path)
3572 3573 #=========================================================================== 3574 # export model files 3575 #===========================================================================
3576 - def export_model_files(self, model_path):
3577 """export the model dependent files""" 3578 3579 super(ProcessExporterFortranME,self).export_model_files(model_path) 3580 3581 # Add the routine update_as_param in v4 model 3582 # This is a function created in the UFO 3583 text=""" 3584 subroutine update_as_param() 3585 call setpara('param_card.dat',.false.) 3586 return 3587 end 3588 """ 3589 ff = open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3590 ff.write(text) 3591 ff.close() 3592 3593 # Add the symmetry.f 3594 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3595 self.write_symmetry(writers.FortranWriter(filename), v5=False) 3596 3597 # Modify setrun.f 3598 text = open(pjoin(self.dir_path,'Source','setrun.f')).read() 3599 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3600 fsock = open(pjoin(self.dir_path,'Source','setrun.f'), 'w') 3601 fsock.write(text) 3602 fsock.close() 3603 3604 self.make_model_symbolic_link()
3605 3606 #=========================================================================== 3607 # generate_subprocess_directory 3608 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model,
                                      me_number):
        """Generate the Pxxxxx directory for a subprocess in MG4 madevent,
        including the necessary matrix.f and various helper files

        Creates SubProcesses/P<shell_string>, writes the driver, the matrix
        element, all .inc helper files, the diagram postscript, links shared
        files and registers the directory in subproc.mg.  Returns the number
        of helas calls written (0 if none).
        """

        cwd = os.getcwd()
        path = pjoin(self.dir_path, 'SubProcesses')


        if not self.model:
            self.model = matrix_element.get('processes')[0].get('model')



        #os.chdir(path)
        # Create the directory PN_xx_xxxxx in the specified path
        subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string()
        try:
            os.mkdir(pjoin(path,subprocdir))
        except os.error as error:
            # Directory may already exist: warn and keep going
            logger.warning(error.strerror + " " + subprocdir)

        #try:
        #    os.chdir(subprocdir)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % subprocdir)
        #    return 0

        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(path, subprocdir)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Add the driver.f
        ncomb = matrix_element.get_helicity_combinations()
        filename = pjoin(Ppath,'driver.f')
        self.write_driver(writers.FortranWriter(filename),ncomb,n_grouped_proc=1,
                          v5=self.opt['v5_model'])

        # Create the matrix.f file, auto_dsig.f file and all inc files
        filename = pjoin(Ppath, 'matrix.f')
        calls, ncolor = \
            self.write_matrix_element_v4(writers.FortranWriter(filename),
                         matrix_element, fortran_model, subproc_number = me_number)

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        # configs.inc also yields the channel information reused below
        filename = pjoin(Ppath, 'configs.inc')
        mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(Ppath, 'config_nqcd.inc')
        self.write_config_nqcd_file(writers.FortranWriter(filename),
                                    nqcd_list)

        filename = pjoin(Ppath, 'config_subproc_map.inc')
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           s_and_t_channels)

        filename = pjoin(Ppath, 'coloramps.inc')
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  mapconfigs,
                                  matrix_element)

        filename = pjoin(Ppath, 'get_color.f')
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'decayBW.inc')
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        filename = pjoin(Ppath, 'dname.mg')
        self.write_dname_file(writers.FileWriter(filename),
                              "P"+matrix_element.get('processes')[0].shell_string())

        filename = pjoin(Ppath, 'iproc.dat')
        self.write_iproc_file(writers.FortranWriter(filename),
                              me_number)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                ncolor,
                                len(matrix_element.get('processes')),
                                1)

        filename = pjoin(Ppath, 'mg.sym')
        self.write_mg_sym_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'ncombs.inc')
        self.write_ncombs_file(writers.FortranWriter(filename),
                               nexternal)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(Ppath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(mapconfigs))


        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # Find config symmetries and permutations
        symmetry, perms, ident_perms = \
                  diagram_symmetry.find_symmetry(matrix_element)

        filename = pjoin(Ppath, 'symswap.inc')
        self.write_symswap_file(writers.FortranWriter(filename),
                                ident_perms)

        filename = pjoin(Ppath, 'symfact_orig.dat')
        self.write_symfact_file(open(filename, 'w'), symmetry)

        # Generate diagrams
        # NOTE(review): drawing is skipped only when output_options has
        # noeps == 'True' (the string); any other value still draws.
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = pjoin(Ppath, "matrix.ps")
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                 get('diagrams'),
                                              filename,
                                              model=matrix_element.get('processes')[0].\
                                                 get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        self.link_files_in_SubProcess(Ppath)

        #import nexternal/leshouche in Source
        ln(pjoin(Ppath,'nexternal.inc'), pjoin(self.dir_path,'Source'), log=False)
        ln(pjoin(Ppath,'leshouche.inc'), pjoin(self.dir_path,'Source'), log=False)
        ln(pjoin(Ppath,'maxamps.inc'), pjoin(self.dir_path,'Source'), log=False)
        # Return to SubProcesses dir
        #os.chdir(os.path.pardir)

        # Add subprocess to subproc.mg
        filename = pjoin(path, 'subproc.mg')
        files.append_to_file(filename,
                             self.write_subproc,
                             subprocdir)

        # Return to original dir
        #os.chdir(cwd)

        # Generate info page
        gen_infohtml.make_info_html(self.dir_path)


        if not calls:
            calls = 0
        return calls
3780 3781 link_Sub_files = ['addmothers.f', 3782 'cluster.f', 3783 'cluster.inc', 3784 'coupl.inc', 3785 'cuts.f', 3786 'cuts.inc', 3787 'genps.f', 3788 'genps.inc', 3789 'idenparts.f', 3790 'initcluster.f', 3791 'makefile', 3792 'message.inc', 3793 'myamp.f', 3794 'reweight.f', 3795 'run.inc', 3796 'maxconfigs.inc', 3797 'maxparticles.inc', 3798 'run_config.inc', 3799 'lhe_event_infos.inc', 3800 'setcuts.f', 3801 'setscales.f', 3802 'sudakov.inc', 3803 'symmetry.f', 3804 'unwgt.f', 3805 'dummy_fct.f' 3806 ] 3807 3821 3822
3823 - def finalize(self, matrix_elements, history, mg5options, flaglist):
3824 """Finalize ME v4 directory by creating jpeg diagrams, html 3825 pages,proc_card_mg5.dat and madevent.tar.gz.""" 3826 3827 if 'nojpeg' in flaglist: 3828 makejpg = False 3829 else: 3830 makejpg = True 3831 if 'online' in flaglist: 3832 online = True 3833 else: 3834 online = False 3835 3836 compiler = {'fortran': mg5options['fortran_compiler'], 3837 'cpp': mg5options['cpp_compiler'], 3838 'f2py': mg5options['f2py_compiler']} 3839 3840 # indicate that the output type is not grouped 3841 if not isinstance(self, ProcessExporterFortranMEGroup): 3842 self.proc_characteristic['grouped_matrix'] = False 3843 3844 self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme'] 3845 3846 # set limitation linked to the model 3847 3848 3849 # indicate the PDG of all initial particle 3850 try: 3851 pdgs1 = [p.get_initial_pdg(1) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)] 3852 pdgs2 = [p.get_initial_pdg(2) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)] 3853 except AttributeError: 3854 pdgs1 = [p.get_initial_pdg(1) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)] 3855 pdgs2 = [p.get_initial_pdg(2) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)] 3856 self.proc_characteristic['pdg_initial1'] = pdgs1 3857 self.proc_characteristic['pdg_initial2'] = pdgs2 3858 3859 3860 modelname = self.opt['model'] 3861 if modelname == 'mssm' or modelname.startswith('mssm-'): 3862 param_card = pjoin(self.dir_path, 'Cards','param_card.dat') 3863 mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 3864 check_param_card.convert_to_mg5card(param_card, mg5_param) 3865 check_param_card.check_valid_param_card(mg5_param) 3866 3867 # Add the combine_events.f modify param_card path/number of @X 3868 filename = 
pjoin(self.dir_path,'Source','combine_events.f') 3869 try: 3870 nb_proc =[p.get('id') for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')] 3871 except AttributeError: 3872 nb_proc =[p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')] 3873 nb_proc = len(set(nb_proc)) 3874 self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted 3875 # Write maxconfigs.inc based on max of ME's/subprocess groups 3876 filename = pjoin(self.dir_path,'Source','maxconfigs.inc') 3877 self.write_maxconfigs_file(writers.FortranWriter(filename), 3878 matrix_elements) 3879 3880 # Write maxparticles.inc based on max of ME's/subprocess groups 3881 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 3882 self.write_maxparticles_file(writers.FortranWriter(filename), 3883 matrix_elements) 3884 3885 # Touch "done" file 3886 os.system('touch %s/done' % pjoin(self.dir_path,'SubProcesses')) 3887 3888 # Check for compiler 3889 self.set_compiler(compiler) 3890 self.set_cpp_compiler(compiler['cpp']) 3891 3892 3893 old_pos = os.getcwd() 3894 subpath = pjoin(self.dir_path, 'SubProcesses') 3895 3896 P_dir_list = [proc for proc in os.listdir(subpath) 3897 if os.path.isdir(pjoin(subpath,proc)) and proc[0] == 'P'] 3898 3899 devnull = os.open(os.devnull, os.O_RDWR) 3900 # Convert the poscript in jpg files (if authorize) 3901 if makejpg: 3902 try: 3903 os.remove(pjoin(self.dir_path,'HTML','card.jpg')) 3904 except Exception, error: 3905 pass 3906 3907 if misc.which('gs'): 3908 logger.info("Generate jpeg diagrams") 3909 for Pdir in P_dir_list: 3910 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 3911 stdout = devnull, cwd=pjoin(subpath, Pdir)) 3912 3913 logger.info("Generate web pages") 3914 # Create the WebPage using perl script 3915 3916 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \ 3917 stdout = devnull,cwd=pjoin(self.dir_path)) 3918 3919 
#os.chdir(os.path.pardir) 3920 3921 obj = gen_infohtml.make_info_html(self.dir_path) 3922 3923 if online: 3924 nb_channel = obj.rep_rule['nb_gen_diag'] 3925 open(pjoin(self.dir_path, 'Online'),'w').write(str(nb_channel)) 3926 #add the information to proc_charac 3927 self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag'] 3928 3929 # Write command history as proc_card_mg5 3930 if os.path.isdir(pjoin(self.dir_path,'Cards')): 3931 output_file = pjoin(self.dir_path,'Cards', 'proc_card_mg5.dat') 3932 history.write(output_file) 3933 3934 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3935 stdout = devnull) 3936 3937 #crate the proc_characteristic file 3938 self.create_proc_charac(matrix_elements, history) 3939 3940 # create the run_card 3941 ProcessExporterFortran.finalize(self, matrix_elements, history, mg5options, flaglist) 3942 3943 # Run "make" to generate madevent.tar.gz file 3944 if os.path.exists(pjoin(self.dir_path,'SubProcesses', 'subproc.mg')): 3945 if os.path.exists(pjoin(self.dir_path,'madevent.tar.gz')): 3946 os.remove(pjoin(self.dir_path,'madevent.tar.gz')) 3947 misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')], 3948 stdout = devnull, cwd=self.dir_path) 3949 3950 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3951 stdout = devnull, cwd=self.dir_path)
3952 3953 3954 3955 3956 3957 3958 #return to the initial dir 3959 #os.chdir(old_pos) 3960 3961 #=========================================================================== 3962 # write_matrix_element_v4 3963 #===========================================================================
3964 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 3965 proc_id = "", config_map = [], subproc_number = ""):
3966 """Export a matrix element to a matrix.f file in MG4 madevent format""" 3967 3968 if not matrix_element.get('processes') or \ 3969 not matrix_element.get('diagrams'): 3970 return 0 3971 3972 if writer: 3973 if not isinstance(writer, writers.FortranWriter): 3974 raise writers.FortranWriter.FortranWriterError(\ 3975 "writer not FortranWriter") 3976 # Set lowercase/uppercase Fortran code 3977 writers.FortranWriter.downcase = False 3978 3979 # check if MLM/.../ is supported for this matrix-element and update associate flag 3980 if self.model and 'MLM' in self.model["limitations"]: 3981 if 'MLM' not in self.proc_characteristic["limitations"]: 3982 used_couplings = matrix_element.get_used_couplings(output="set") 3983 for vertex in self.model.get('interactions'): 3984 particles = [p for p in vertex.get('particles')] 3985 if 21 in [p.get('pdg_code') for p in particles]: 3986 colors = [par.get('color') for par in particles] 3987 if 1 in colors: 3988 continue 3989 elif 'QCD' not in vertex.get('orders'): 3990 for bad_coup in vertex.get('couplings').values(): 3991 if bad_coup in used_couplings: 3992 self.proc_characteristic["limitations"].append('MLM') 3993 break 3994 3995 # The proc prefix is not used for MadEvent output so it can safely be set 3996 # to an empty string. 
3997 replace_dict = {'proc_prefix':''} 3998 3999 # Extract helas calls 4000 helas_calls = fortran_model.get_matrix_element_calls(\ 4001 matrix_element) 4002 4003 4004 replace_dict['helas_calls'] = "\n".join(helas_calls) 4005 4006 4007 #adding the support for the fake width (forbidding too small width) 4008 mass_width = matrix_element.get_all_mass_widths() 4009 width_list = set([e[1] for e in mass_width]) 4010 4011 replace_dict['fake_width_declaration'] = \ 4012 (' double precision fk_%s \n' * len(width_list)) % tuple(width_list) 4013 replace_dict['fake_width_declaration'] += \ 4014 (' save fk_%s \n' * len(width_list)) % tuple(width_list) 4015 fk_w_defs = [] 4016 one_def = ' fk_%(w)s = SIGN(MAX(ABS(%(w)s), ABS(%(m)s*small_width_treatment)), %(w)s)' 4017 for m, w in mass_width: 4018 if w == 'zero': 4019 if ' fk_zero = 0d0' not in fk_w_defs: 4020 fk_w_defs.append(' fk_zero = 0d0') 4021 continue 4022 fk_w_defs.append(one_def %{'m':m, 'w':w}) 4023 replace_dict['fake_width_definitions'] = '\n'.join(fk_w_defs) 4024 4025 # Extract version number and date from VERSION file 4026 info_lines = self.get_mg5_info_lines() 4027 replace_dict['info_lines'] = info_lines 4028 4029 # Extract process info lines 4030 process_lines = self.get_process_info_lines(matrix_element) 4031 replace_dict['process_lines'] = process_lines 4032 4033 # Set proc_id 4034 replace_dict['proc_id'] = proc_id 4035 4036 # Extract ncomb 4037 ncomb = matrix_element.get_helicity_combinations() 4038 replace_dict['ncomb'] = ncomb 4039 4040 # Extract helicity lines 4041 helicity_lines = self.get_helicity_lines(matrix_element) 4042 replace_dict['helicity_lines'] = helicity_lines 4043 4044 # Extract IC line 4045 ic_line = self.get_ic_line(matrix_element) 4046 replace_dict['ic_line'] = ic_line 4047 4048 # Extract overall denominator 4049 # Averaging initial state color, spin, and identical FS particles 4050 den_factor_line = self.get_den_factor_line(matrix_element) 4051 replace_dict['den_factor_line'] = den_factor_line 
4052 4053 # Extract ngraphs 4054 ngraphs = matrix_element.get_number_of_amplitudes() 4055 replace_dict['ngraphs'] = ngraphs 4056 4057 # Extract ndiags 4058 ndiags = len(matrix_element.get('diagrams')) 4059 replace_dict['ndiags'] = ndiags 4060 4061 # Set define_iconfigs_lines 4062 replace_dict['define_iconfigs_lines'] = \ 4063 """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG 4064 COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG""" 4065 4066 if proc_id: 4067 # Set lines for subprocess group version 4068 # Set define_iconfigs_lines 4069 replace_dict['define_iconfigs_lines'] += \ 4070 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 4071 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 4072 # Set set_amp2_line 4073 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \ 4074 proc_id 4075 else: 4076 # Standard running 4077 # Set set_amp2_line 4078 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT" 4079 4080 # Extract nwavefuncs 4081 nwavefuncs = matrix_element.get_number_of_wavefunctions() 4082 replace_dict['nwavefuncs'] = nwavefuncs 4083 4084 # Extract ncolor 4085 ncolor = max(1, len(matrix_element.get('color_basis'))) 4086 replace_dict['ncolor'] = ncolor 4087 4088 # Extract color data lines 4089 color_data_lines = self.get_color_data_lines(matrix_element) 4090 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 4091 4092 4093 # Set the size of Wavefunction 4094 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 4095 replace_dict['wavefunctionsize'] = 18 4096 else: 4097 replace_dict['wavefunctionsize'] = 6 4098 4099 # Extract amp2 lines 4100 amp2_lines = self.get_amp2_lines(matrix_element, config_map) 4101 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 4102 4103 # The JAMP definition depends on the splitting order 4104 split_orders=matrix_element.get('processes')[0].get('split_orders') 4105 if len(split_orders)>0: 4106 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 4107 
replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 4108 matrix_element.get('processes')[0],squared_orders) 4109 else: 4110 # Consider the output of a dummy order 'ALL_ORDERS' for which we 4111 # set all amplitude order to weight 1 and only one squared order 4112 # contribution which is of course ALL_ORDERS=2. 4113 squared_orders = [(2,),] 4114 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 4115 replace_dict['chosen_so_configs'] = '.TRUE.' 4116 4117 replace_dict['nAmpSplitOrders']=len(amp_orders) 4118 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 4119 replace_dict['split_order_str_list']=str(split_orders) 4120 replace_dict['nSplitOrders']=max(len(split_orders),1) 4121 amp_so = self.get_split_orders_lines( 4122 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 4123 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 4124 replace_dict['ampsplitorders']='\n'.join(amp_so) 4125 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 4126 4127 4128 # Extract JAMP lines 4129 # If no split_orders then artificiall add one entry called 'ALL_ORDERS' 4130 jamp_lines = self.get_JAMP_lines_split_order(\ 4131 matrix_element,amp_orders,split_order_names= 4132 split_orders if len(split_orders)>0 else ['ALL_ORDERS']) 4133 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 4134 4135 replace_dict['template_file'] = pjoin(_file_path, \ 4136 'iolibs/template_files/%s' % self.matrix_file) 4137 replace_dict['template_file2'] = pjoin(_file_path, \ 4138 'iolibs/template_files/split_orders_helping_functions.inc') 4139 if writer: 4140 file = open(replace_dict['template_file']).read() 4141 file = file % replace_dict 4142 # Add the split orders helper functions. 
4143 file = file + '\n' + open(replace_dict['template_file2'])\ 4144 .read()%replace_dict 4145 # Write the file 4146 writer.writelines(file) 4147 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor 4148 else: 4149 replace_dict['return_value'] = (len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor) 4150 return replace_dict
4151 4152 #=========================================================================== 4153 # write_auto_dsig_file 4154 #===========================================================================
4155 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
4156 """Write the auto_dsig.f file for the differential cross section 4157 calculation, includes pdf call information""" 4158 4159 if not matrix_element.get('processes') or \ 4160 not matrix_element.get('diagrams'): 4161 return 0 4162 4163 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 4164 self.proc_characteristic['ninitial'] = ninitial 4165 self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal) 4166 4167 # Add information relevant for MLM matching: 4168 # Maximum QCD power in all the contributions 4169 max_qcd_order = 0 4170 for diag in matrix_element.get('diagrams'): 4171 orders = diag.calculate_orders() 4172 if 'QCD' in orders: 4173 max_qcd_order = max(max_qcd_order,orders['QCD']) 4174 max_n_light_final_partons = max(len([1 for id in proc.get_final_ids() 4175 if proc.get('model').get_particle(id).get('mass')=='ZERO' and 4176 proc.get('model').get_particle(id).get('color')>1]) 4177 for proc in matrix_element.get('processes')) 4178 # Maximum number of final state light jets to be matched 4179 self.proc_characteristic['max_n_matched_jets'] = max( 4180 self.proc_characteristic['max_n_matched_jets'], 4181 min(max_qcd_order,max_n_light_final_partons)) 4182 4183 # List of default pdgs to be considered for the CKKWl merging cut 4184 self.proc_characteristic['colored_pdgs'] = \ 4185 sorted(list(set([abs(p.get('pdg_code')) for p in 4186 matrix_element.get('processes')[0].get('model').get('particles') if 4187 p.get('color')>1]))) 4188 4189 if ninitial < 1 or ninitial > 2: 4190 raise writers.FortranWriter.FortranWriterError, \ 4191 """Need ninitial = 1 or 2 to write auto_dsig file""" 4192 4193 replace_dict = {} 4194 4195 # Extract version number and date from VERSION file 4196 info_lines = self.get_mg5_info_lines() 4197 replace_dict['info_lines'] = info_lines 4198 4199 # Extract process info lines 4200 process_lines = self.get_process_info_lines(matrix_element) 4201 replace_dict['process_lines'] = process_lines 4202 
4203 # Set proc_id 4204 replace_dict['proc_id'] = proc_id 4205 replace_dict['numproc'] = 1 4206 4207 # Set dsig_line 4208 if ninitial == 1: 4209 # No conversion, since result of decay should be given in GeV 4210 dsig_line = "pd(0)*dsiguu" 4211 else: 4212 # Convert result (in GeV) to pb 4213 dsig_line = "pd(0)*conv*dsiguu" 4214 4215 replace_dict['dsig_line'] = dsig_line 4216 4217 # Extract pdf lines 4218 pdf_vars, pdf_data, pdf_lines = \ 4219 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 4220 replace_dict['pdf_vars'] = pdf_vars 4221 replace_dict['pdf_data'] = pdf_data 4222 replace_dict['pdf_lines'] = pdf_lines 4223 4224 # Lines that differ between subprocess group and regular 4225 if proc_id: 4226 replace_dict['numproc'] = int(proc_id) 4227 replace_dict['passcuts_begin'] = "" 4228 replace_dict['passcuts_end'] = "" 4229 # Set lines for subprocess group version 4230 # Set define_iconfigs_lines 4231 replace_dict['define_subdiag_lines'] = \ 4232 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 4233 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 4234 replace_dict['cutsdone'] = "" 4235 else: 4236 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 4237 replace_dict['passcuts_end'] = "ENDIF" 4238 replace_dict['define_subdiag_lines'] = "" 4239 replace_dict['cutsdone'] = " cutsdone=.false.\n cutspassed=.false." 4240 4241 if not isinstance(self, ProcessExporterFortranMEGroup): 4242 ncomb=matrix_element.get_helicity_combinations() 4243 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 4244 else: 4245 replace_dict['read_write_good_hel'] = "" 4246 4247 context = {'read_write_good_hel':True} 4248 4249 if writer: 4250 file = open(pjoin(_file_path, \ 4251 'iolibs/template_files/auto_dsig_v4.inc')).read() 4252 file = file % replace_dict 4253 4254 # Write the file 4255 writer.writelines(file, context=context) 4256 else: 4257 return replace_dict, context
4258 #=========================================================================== 4259 # write_coloramps_file 4260 #===========================================================================
4261 - def write_coloramps_file(self, writer, mapconfigs, matrix_element):
4262 """Write the coloramps.inc file for MadEvent""" 4263 4264 lines = self.get_icolamp_lines(mapconfigs, matrix_element, 1) 4265 lines.insert(0, "logical icolamp(%d,%d,1)" % \ 4266 (max(len(matrix_element.get('color_basis').keys()), 1), 4267 len(mapconfigs))) 4268 4269 4270 # Write the file 4271 writer.writelines(lines) 4272 4273 return True
4274 4275 #=========================================================================== 4276 # write_colors_file 4277 #===========================================================================
4278 - def write_colors_file(self, writer, matrix_elements):
4279 """Write the get_color.f file for MadEvent, which returns color 4280 for all particles used in the matrix element.""" 4281 4282 if isinstance(matrix_elements, helas_objects.HelasMatrixElement): 4283 matrix_elements = [matrix_elements] 4284 4285 model = matrix_elements[0].get('processes')[0].get('model') 4286 4287 # We need the both particle and antiparticle wf_ids, since the identity 4288 # depends on the direction of the wf. 4289 wf_ids = set(sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 4290 for wf in d.get('wavefunctions')],[]) \ 4291 for d in me.get('diagrams')], []) \ 4292 for me in matrix_elements], [])) 4293 4294 leg_ids = set(sum([sum([sum([[l.get('id'), 4295 model.get_particle(l.get('id')).get_anti_pdg_code()] \ 4296 for l in p.get_legs_with_decays()], []) \ 4297 for p in me.get('processes')], []) \ 4298 for me in matrix_elements], [])) 4299 particle_ids = sorted(list(wf_ids.union(leg_ids))) 4300 4301 lines = """function get_color(ipdg) 4302 implicit none 4303 integer get_color, ipdg 4304 4305 if(ipdg.eq.%d)then 4306 get_color=%d 4307 return 4308 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color()) 4309 4310 for part_id in particle_ids[1:]: 4311 lines += """else if(ipdg.eq.%d)then 4312 get_color=%d 4313 return 4314 """ % (part_id, model.get_particle(part_id).get_color()) 4315 # Dummy particle for multiparticle vertices with pdg given by 4316 # first code not in the model 4317 lines += """else if(ipdg.eq.%d)then 4318 c This is dummy particle used in multiparticle vertices 4319 get_color=2 4320 return 4321 """ % model.get_first_non_pdg() 4322 lines += """else 4323 write(*,*)'Error: No color given for pdg ',ipdg 4324 get_color=0 4325 return 4326 endif 4327 end 4328 """ 4329 4330 # Write the file 4331 writer.writelines(lines) 4332 4333 return True
4334 4335 #=========================================================================== 4336 # write_config_nqcd_file 4337 #===========================================================================
4338 - def write_config_nqcd_file(self, writer, nqcd_list):
4339 """Write the config_nqcd.inc with the number of QCD couplings 4340 for each config""" 4341 4342 lines = [] 4343 for iconf, n in enumerate(nqcd_list): 4344 lines.append("data nqcd(%d)/%d/" % (iconf+1, n)) 4345 4346 # Write the file 4347 writer.writelines(lines) 4348 4349 return True
4350 4351 #=========================================================================== 4352 # write_maxconfigs_file 4353 #===========================================================================
4354 - def write_maxconfigs_file(self, writer, matrix_elements):
4355 """Write the maxconfigs.inc file for MadEvent""" 4356 4357 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 4358 maxconfigs = max([me.get_num_configs() for me in \ 4359 matrix_elements.get('matrix_elements')]) 4360 else: 4361 maxconfigs = max([me.get_num_configs() for me in matrix_elements]) 4362 4363 lines = "integer lmaxconfigs\n" 4364 lines += "parameter(lmaxconfigs=%d)" % maxconfigs 4365 4366 # Write the file 4367 writer.writelines(lines) 4368 4369 return True
4370 4371 #=========================================================================== 4372 # read_write_good_hel 4373 #===========================================================================
    def read_write_good_hel(self, ncomb):
        """Return Fortran source for the routines that persist and restore
        the helicity filter: write_good_hel, read_good_hel, init_good_hel
        and get_maxsproc, all sharing the BLOCK_GOODHEL common block.

        ncomb: number of helicity combinations (fixes the NCOMB parameter).
        Returns the generated code as a single string.
        """

        convert = {'ncomb' : ncomb}
        # NOTE(review): read_good_hel sets NTRY = MAXTRIES + 1 (MAXTRIES
        # from genps.inc) — presumably so a restored run skips the
        # helicity-filtering phase; confirm against the driver.
        output = """
        subroutine write_good_hel(stream_id)
        implicit none
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB)
        INTEGER NTRY
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        write(stream_id,*) GOODHEL
        return
        end


        subroutine read_good_hel(stream_id)
        implicit none
        include 'genps.inc'
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB)
        INTEGER NTRY
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        read(stream_id,*) GOODHEL
        NTRY = MAXTRIES + 1
        return
        end

        subroutine init_good_hel()
        implicit none
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB)
        INTEGER NTRY
        INTEGER I

        do i=1,NCOMB
        GOODHEL(I) = .false.
        enddo
        NTRY = 0
        end

        integer function get_maxsproc()
        implicit none
        get_maxsproc = 1
        return
        end

        """ % convert

        return output
4429 4430 #=========================================================================== 4431 # write_config_subproc_map_file 4432 #===========================================================================
4433 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
4434 """Write a dummy config_subproc.inc file for MadEvent""" 4435 4436 lines = [] 4437 4438 for iconfig in range(len(s_and_t_channels)): 4439 lines.append("DATA CONFSUB(1,%d)/1/" % \ 4440 (iconfig + 1)) 4441 4442 # Write the file 4443 writer.writelines(lines) 4444 4445 return True
4446 4447 #=========================================================================== 4448 # write_configs_file 4449 #===========================================================================
4450 - def write_configs_file(self, writer, matrix_element):
4451 """Write the configs.inc file for MadEvent""" 4452 4453 # Extract number of external particles 4454 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 4455 4456 model = matrix_element.get('processes')[0].get('model') 4457 configs = [(i+1, d) for (i, d) in \ 4458 enumerate(matrix_element.get('diagrams'))] 4459 mapconfigs = [c[0] for c in configs] 4460 return mapconfigs, self.write_configs_file_from_diagrams(writer, 4461 [[c[1]] for c in configs], 4462 mapconfigs, 4463 nexternal, ninitial, 4464 model)
4465 4466 #=========================================================================== 4467 # write_run_configs_file 4468 #===========================================================================
4469 - def write_run_config_file(self, writer):
4470 """Write the run_configs.inc file for MadEvent""" 4471 4472 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 4473 4474 if self.proc_characteristic['loop_induced']: 4475 job_per_chan = 1 4476 else: 4477 job_per_chan = 5 4478 4479 if writer: 4480 text = open(path).read() % {'chanperjob': job_per_chan} 4481 writer.write(text) 4482 return True 4483 else: 4484 return {'chanperjob': job_per_chan}
4485 4486 #=========================================================================== 4487 # write_configs_file_from_diagrams 4488 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns (s_and_t_channels, nqcd_list): the per-config channel
        information kept for later use, and the number of QCD couplings
        of each written config.
        """

        lines = []

        s_and_t_channels = []

        nqcd_list = []

        # Smallest maximal vertex size over all configs that have vertex
        # information; configs with larger vertices are skipped below.
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Dummy pdg code used for multiparticle vertices: first code
        # not present in the model.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any([vert > minvert for vert in
                    [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
                # Only 3-vertices allowed in configs.inc
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model,
                                                           new_pdg))
                else:
                    # Placeholder shared by all non-contributing
                    # subprocesses; resized in place below.
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = zip(*[s for s,t in stchannels])
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                     mapconfigs[iconfig]))
            # Number of QCD couplings in this diagram, taken from the
            # first contributing subprocess (0 if no QCD order present)
            nqcd = 0
            for h in helas_diags:
                if h:
                    try:
                        nqcd = h.calculate_orders()['QCD']
                    except KeyError:
                        pass
                    break
                else:
                    continue

            nqcd_list.append(nqcd)

            for verts in allchannels:
                # s-channel entries are tuples over subprocesses; pick the
                # first contributing vertex. t-channel entries are vertices.
                if verts in schannels:
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, len(daughters),
                              ",".join([str(d) for d in daughters])))
                if verts in schannels:
                    # One propagator pdg per subprocess (0 where the
                    # subprocess does not contribute to this config).
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join([str(d) for d in pdgs])))
                    lines.append("data tprid(%d,%d)/0/" % \
                                 (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append("data tprid(%d,%d)/%d/" % \
                                 (last_leg.get('number'), nconfigs,
                                  abs(last_leg.get('id'))))
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        lines.append("# Number of configs")
        lines.append("data mapconfig(0)/%d/" % nconfigs)

        # Write the file
        writer.writelines(lines)

        return s_and_t_channels, nqcd_list
4622 4623 #=========================================================================== 4624 # write_decayBW_file 4625 #===========================================================================
4626 - def write_decayBW_file(self, writer, s_and_t_channels):
4627 """Write the decayBW.inc file for MadEvent""" 4628 4629 lines = [] 4630 4631 booldict = {None: "0", True: "1", False: "2"} 4632 4633 for iconf, config in enumerate(s_and_t_channels): 4634 schannels = config[0] 4635 for vertex in schannels: 4636 # For the resulting leg, pick out whether it comes from 4637 # decay or not, as given by the onshell flag 4638 leg = vertex.get('legs')[-1] 4639 lines.append("data gForceBW(%d,%d)/%s/" % \ 4640 (leg.get('number'), iconf + 1, 4641 booldict[leg.get('onshell')])) 4642 4643 # Write the file 4644 writer.writelines(lines) 4645 4646 return True
4647 4648 #=========================================================================== 4649 # write_dname_file 4650 #===========================================================================
4651 - def write_dname_file(self, writer, dir_name):
4652 """Write the dname.mg file for MG4""" 4653 4654 line = "DIRNAME=%s" % dir_name 4655 4656 # Write the file 4657 writer.write(line + "\n") 4658 4659 return True
4660 4661 #=========================================================================== 4662 # write_driver 4663 #===========================================================================
4664 - def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
4665 """Write the SubProcess/driver.f file for MG4""" 4666 4667 path = pjoin(_file_path,'iolibs','template_files','madevent_driver.f') 4668 4669 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4670 card = 'Source/MODEL/MG5_param.dat' 4671 else: 4672 card = 'param_card.dat' 4673 # Requiring each helicity configuration to be probed by 10 points for 4674 # matrix element before using the resulting grid for MC over helicity 4675 # sampling. 4676 # We multiply this by 2 because each grouped subprocess is called at most 4677 # twice for each IMIRROR. 4678 replace_dict = {'param_card_name':card, 4679 'ncomb':ncomb, 4680 'hel_init_points':n_grouped_proc*10*2} 4681 if not v5: 4682 replace_dict['secondparam']=',.true.' 4683 else: 4684 replace_dict['secondparam']='' 4685 4686 if writer: 4687 text = open(path).read() % replace_dict 4688 writer.write(text) 4689 return True 4690 else: 4691 return replace_dict
4692 4693 #=========================================================================== 4694 # write_addmothers 4695 #===========================================================================
4696 - def write_addmothers(self, writer):
4697 """Write the SubProcess/addmothers.f""" 4698 4699 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 4700 4701 text = open(path).read() % {'iconfig': 'diag_number'} 4702 writer.write(text) 4703 4704 return True
4705 4706 4707 #=========================================================================== 4708 # write_combine_events 4709 #===========================================================================
4710 - def write_combine_events(self, writer, nb_proc=100):
4711 """Write the SubProcess/driver.f file for MG4""" 4712 4713 path = pjoin(_file_path,'iolibs','template_files','madevent_combine_events.f') 4714 4715 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4716 card = 'Source/MODEL/MG5_param.dat' 4717 else: 4718 card = 'param_card.dat' 4719 4720 #set maxpup (number of @X in the process card) 4721 4722 text = open(path).read() % {'param_card_name':card, 'maxpup':nb_proc+1} 4723 #the +1 is just a security. This is not needed but I feel(OM) safer with it. 4724 writer.write(text) 4725 4726 return True
4727 4728 4729 #=========================================================================== 4730 # write_symmetry 4731 #===========================================================================
4732 - def write_symmetry(self, writer, v5=True):
4733 """Write the SubProcess/driver.f file for ME""" 4734 4735 path = pjoin(_file_path,'iolibs','template_files','madevent_symmetry.f') 4736 4737 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4738 card = 'Source/MODEL/MG5_param.dat' 4739 else: 4740 card = 'param_card.dat' 4741 4742 if v5: 4743 replace_dict = {'param_card_name':card, 'setparasecondarg':''} 4744 else: 4745 replace_dict= {'param_card_name':card, 'setparasecondarg':',.true.'} 4746 4747 if writer: 4748 text = open(path).read() 4749 text = text % replace_dict 4750 writer.write(text) 4751 return True 4752 else: 4753 return replace_dict
4754 4755 4756 4757 #=========================================================================== 4758 # write_iproc_file 4759 #===========================================================================
4760 - def write_iproc_file(self, writer, me_number):
4761 """Write the iproc.dat file for MG4""" 4762 line = "%d" % (me_number + 1) 4763 4764 # Write the file 4765 for line_to_write in writer.write_line(line): 4766 writer.write(line_to_write) 4767 return True
4768 4769 #=========================================================================== 4770 # write_mg_sym_file 4771 #===========================================================================
4772 - def write_mg_sym_file(self, writer, matrix_element):
4773 """Write the mg.sym file for MadEvent.""" 4774 4775 lines = [] 4776 4777 # Extract process with all decays included 4778 final_legs = filter(lambda leg: leg.get('state') == True, 4779 matrix_element.get('processes')[0].get_legs_with_decays()) 4780 4781 ninitial = len(filter(lambda leg: leg.get('state') == False, 4782 matrix_element.get('processes')[0].get('legs'))) 4783 4784 identical_indices = {} 4785 4786 # Extract identical particle info 4787 for i, leg in enumerate(final_legs): 4788 if leg.get('id') in identical_indices: 4789 identical_indices[leg.get('id')].append(\ 4790 i + ninitial + 1) 4791 else: 4792 identical_indices[leg.get('id')] = [i + ninitial + 1] 4793 4794 # Remove keys which have only one particle 4795 for key in identical_indices.keys(): 4796 if len(identical_indices[key]) < 2: 4797 del identical_indices[key] 4798 4799 # Write mg.sym file 4800 lines.append(str(len(identical_indices.keys()))) 4801 for key in identical_indices.keys(): 4802 lines.append(str(len(identical_indices[key]))) 4803 for number in identical_indices[key]: 4804 lines.append(str(number)) 4805 4806 # Write the file 4807 writer.writelines(lines) 4808 4809 return True
4810 4811 #=========================================================================== 4812 # write_mg_sym_file 4813 #===========================================================================
4814 - def write_default_mg_sym_file(self, writer):
4815 """Write the mg.sym file for MadEvent.""" 4816 4817 lines = "0" 4818 4819 # Write the file 4820 writer.writelines(lines) 4821 4822 return True
4823 4824 #=========================================================================== 4825 # write_ncombs_file 4826 #===========================================================================
4827 - def write_ncombs_file(self, writer, nexternal):
4828 """Write the ncombs.inc file for MadEvent.""" 4829 4830 # ncomb (used for clustering) is 2^nexternal 4831 file = " integer n_max_cl\n" 4832 file = file + "parameter (n_max_cl=%d)" % (2 ** nexternal) 4833 4834 # Write the file 4835 writer.writelines(file) 4836 4837 return True
4838 4839 #=========================================================================== 4840 # write_processes_file 4841 #===========================================================================
4842 - def write_processes_file(self, writer, subproc_group):
4843 """Write the processes.dat file with info about the subprocesses 4844 in this group.""" 4845 4846 lines = [] 4847 4848 for ime, me in \ 4849 enumerate(subproc_group.get('matrix_elements')): 4850 lines.append("%s %s" % (str(ime+1) + " " * (7-len(str(ime+1))), 4851 ",".join(p.base_string() for p in \ 4852 me.get('processes')))) 4853 if me.get('has_mirror_process'): 4854 mirror_procs = [copy.copy(p) for p in me.get('processes')] 4855 for proc in mirror_procs: 4856 legs = copy.copy(proc.get('legs_with_decays')) 4857 legs.insert(0, legs.pop(1)) 4858 proc.set("legs_with_decays", legs) 4859 lines.append("mirror %s" % ",".join(p.base_string() for p in \ 4860 mirror_procs)) 4861 else: 4862 lines.append("mirror none") 4863 4864 # Write the file 4865 writer.write("\n".join(lines)) 4866 4867 return True
4868 4869 #=========================================================================== 4870 # write_symswap_file 4871 #===========================================================================
4872 - def write_symswap_file(self, writer, ident_perms):
4873 """Write the file symswap.inc for MG4 by comparing diagrams using 4874 the internal matrix element value functionality.""" 4875 4876 lines = [] 4877 4878 # Write out lines for symswap.inc file (used to permute the 4879 # external leg momenta 4880 for iperm, perm in enumerate(ident_perms): 4881 lines.append("data (isym(i,%d),i=1,nexternal)/%s/" % \ 4882 (iperm+1, ",".join([str(i+1) for i in perm]))) 4883 lines.append("data nsym/%d/" % len(ident_perms)) 4884 4885 # Write the file 4886 writer.writelines(lines) 4887 4888 return True
4889 4890 #=========================================================================== 4891 # write_symfact_file 4892 #===========================================================================
4893 - def write_symfact_file(self, writer, symmetry):
4894 """Write the files symfact.dat for MG4 by comparing diagrams using 4895 the internal matrix element value functionality.""" 4896 4897 pos = max(2, int(math.ceil(math.log10(len(symmetry))))) 4898 form = "%"+str(pos)+"r %"+str(pos+1)+"r" 4899 # Write out lines for symswap.inc file (used to permute the 4900 # external leg momenta 4901 lines = [ form %(i+1, s) for i,s in enumerate(symmetry) if s != 0] 4902 # Write the file 4903 writer.write('\n'.join(lines)) 4904 writer.write('\n') 4905 4906 return True
4907 4908 #=========================================================================== 4909 # write_symperms_file 4910 #===========================================================================
4911 - def write_symperms_file(self, writer, perms):
4912 """Write the symperms.inc file for subprocess group, used for 4913 symmetric configurations""" 4914 4915 lines = [] 4916 for iperm, perm in enumerate(perms): 4917 lines.append("data (perms(i,%d),i=1,nexternal)/%s/" % \ 4918 (iperm+1, ",".join([str(i+1) for i in perm]))) 4919 4920 # Write the file 4921 writer.writelines(lines) 4922 4923 return True
4924 4925 #=========================================================================== 4926 # write_subproc 4927 #===========================================================================
4928 - def write_subproc(self, writer, subprocdir):
4929 """Append this subprocess to the subproc.mg file for MG4""" 4930 4931 # Write line to file 4932 writer.write(subprocdir + "\n") 4933 4934 return True
4935
4936 #=============================================================================== 4937 # ProcessExporterFortranMEGroup 4938 #=============================================================================== 4939 -class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
4940 """Class to take care of exporting a set of matrix elements to 4941 MadEvent subprocess group format.""" 4942 4943 matrix_file = "matrix_madevent_group_v4.inc" 4944 grouped_mode = 'madevent' 4945 #=========================================================================== 4946 # generate_subprocess_directory 4947 #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                         fortran_model,
                                         group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files.

        Returns the total number of helas calls written (0 on failure to
        enter the directory).
        """

        assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \
                                      "subproc_group object not SubProcessGroup"

        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        # Remember where we started; this method chdirs around and
        # restores cwd at the end.
        cwd = os.getcwd()
        path = pjoin(self.dir_path, 'SubProcesses')

        os.chdir(path)
        pathdir = os.getcwd()

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(subprocdir)
        except os.error as error:
            # Directory may already exist; warn and keep going.
            logger.warning(error.strerror + " " + subprocdir)

        try:
            os.chdir(subprocdir)
        except os.error:
            logger.error('Could not cd to directory %s' % subprocdir)
            return 0

        logger.info('Creating files in directory %s' % subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0
        maxflows = 0
        tot_calls = 0

        matrix_elements = subproc_group.get('matrix_elements')

        # Add the driver.f, all grouped ME's must share the same number of
        # helicity configuration
        # NOTE(review): Python 2-only raise syntax below.
        ncomb = matrix_elements[0].get_helicity_combinations()
        for me in matrix_elements[1:]:
            if ncomb!=me.get_helicity_combinations():
                raise MadGraph5Error, "All grouped processes must share the "+\
                                       "same number of helicity configurations."

        filename = 'driver.f'
        self.write_driver(writers.FortranWriter(filename),ncomb,
                          n_grouped_proc=len(matrix_elements), v5=self.opt['v5_model'])

        # Per-matrix-element files: matrixN.f, auto_dsigN.f, diagrams.
        for ime, matrix_element in \
                enumerate(matrix_elements):
            filename = 'matrix%d.f' % (ime+1)
            calls, ncolor = \
                   self.write_matrix_element_v4(writers.FortranWriter(filename),
                                                matrix_element,
                                                fortran_model,
                                                proc_id=str(ime+1),
                                                config_map=subproc_group.get('diagram_maps')[ime],
                                                subproc_number=group_number)

            filename = 'auto_dsig%d.f' % (ime+1)
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime+1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams
            if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
                filename = "matrix%d.ps" % (ime+1)
                plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                  get('diagrams'),
                                                  filename,
                                                  model = \
                                                  matrix_element.get('processes')[0].\
                                                  get('model'),
                                                  amplitude=True)
                logger.info("Generating Feynman diagrams for " + \
                            matrix_element.get('processes')[0].nice_string())
                plot.draw()

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
        # If a subprocess has no diagrams for this config, the number is 0
        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = 'auto_dsig.f'
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = 'coloramps.inc'
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  subproc_diagrams_for_config,
                                  maxflows,
                                  matrix_elements)

        filename = 'get_color.f'
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_elements)

        filename = 'config_subproc_map.inc'
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           subproc_diagrams_for_config)

        filename = 'configs.inc'
        nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = 'config_nqcd.inc'
        self.write_config_nqcd_file(writers.FortranWriter(filename),
                                    nqcd_list)

        filename = 'decayBW.inc'
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        filename = 'dname.mg'
        self.write_dname_file(writers.FortranWriter(filename),
                              subprocdir)

        filename = 'iproc.dat'
        self.write_iproc_file(writers.FortranWriter(filename),
                              group_number)

        filename = 'leshouche.inc'
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = 'maxamps.inc'
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        # Note that mg.sym is not relevant for this case
        filename = 'mg.sym'
        self.write_default_mg_sym_file(writers.FortranWriter(filename))

        filename = 'mirrorprocs.inc'
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = 'ncombs.inc'
        self.write_ncombs_file(writers.FortranWriter(filename),
                               nexternal)

        filename = 'nexternal.inc'
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = 'ngraphs.inc'
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                nconfigs)

        filename = 'pmass.inc'
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = 'props.inc'
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = 'processes.dat'
        files.write_to_file(filename,
                            self.write_processes_file,
                            subproc_group)

        # Find config symmetries and permutations
        symmetry, perms, ident_perms = \
                  diagram_symmetry.find_symmetry(subproc_group)

        filename = 'symswap.inc'
        self.write_symswap_file(writers.FortranWriter(filename),
                                ident_perms)

        filename = 'symfact_orig.dat'
        self.write_symfact_file(open(filename, 'w'), symmetry)

        # check consistency: symmetric (mapped) diagrams must carry the
        # same number of QCD couplings
        for i, sym_fact in enumerate(symmetry):
            if sym_fact > 0:
                continue
            if nqcd_list[i] != nqcd_list[abs(sym_fact)-1]:
                misc.sprint(i, sym_fact, nqcd_list[i], nqcd_list[abs(sym_fact)])
                raise Exception, "identical diagram with different QCD powwer"



        filename = 'symperms.inc'
        self.write_symperms_file(writers.FortranWriter(filename),
                                 perms)

        # Generate jpgs -> pass in make_html
        #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl'))

        self.link_files_in_SubProcess(pjoin(pathdir,subprocdir))

        #import nexternal/leshouch in Source
        ln('nexternal.inc', '../../Source', log=False)
        ln('leshouche.inc', '../../Source', log=False)
        ln('maxamps.inc', '../../Source', log=False)

        # Return to SubProcesses dir)
        os.chdir(pathdir)

        # Add subprocess to subproc.mg
        filename = 'subproc.mg'
        files.append_to_file(filename,
                             self.write_subproc,
                             subprocdir)

        # Return to original dir
        os.chdir(cwd)

        if not tot_calls:
            tot_calls = 0
        return tot_calls
5184 5185 #=========================================================================== 5186 # write_super_auto_dsig_file 5187 #===========================================================================
5188 - def write_super_auto_dsig_file(self, writer, subproc_group):
5189 """Write the auto_dsig.f file selecting between the subprocesses 5190 in subprocess group mode""" 5191 5192 replace_dict = {} 5193 5194 # Extract version number and date from VERSION file 5195 info_lines = self.get_mg5_info_lines() 5196 replace_dict['info_lines'] = info_lines 5197 5198 matrix_elements = subproc_group.get('matrix_elements') 5199 5200 # Extract process info lines 5201 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 5202 matrix_elements]) 5203 replace_dict['process_lines'] = process_lines 5204 5205 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 5206 replace_dict['nexternal'] = nexternal 5207 5208 replace_dict['nsprocs'] = 2*len(matrix_elements) 5209 5210 # Generate dsig definition line 5211 dsig_def_line = "DOUBLE PRECISION " + \ 5212 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 5213 range(len(matrix_elements))]) 5214 replace_dict["dsig_def_line"] = dsig_def_line 5215 5216 # Generate dsig process lines 5217 call_dsig_proc_lines = [] 5218 for iproc in range(len(matrix_elements)): 5219 call_dsig_proc_lines.append(\ 5220 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 5221 {"num": iproc + 1, 5222 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 5223 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 5224 5225 ncomb=matrix_elements[0].get_helicity_combinations() 5226 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 5227 5228 if writer: 5229 file = open(pjoin(_file_path, \ 5230 'iolibs/template_files/super_auto_dsig_group_v4.inc')).read() 5231 file = file % replace_dict 5232 5233 # Write the file 5234 writer.writelines(file) 5235 else: 5236 return replace_dict
5237 5238 #=========================================================================== 5239 # write_mirrorprocs 5240 #===========================================================================
5241 - def write_mirrorprocs(self, writer, subproc_group):
5242 """Write the mirrorprocs.inc file determining which processes have 5243 IS mirror process in subprocess group mode.""" 5244 5245 lines = [] 5246 bool_dict = {True: '.true.', False: '.false.'} 5247 matrix_elements = subproc_group.get('matrix_elements') 5248 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 5249 (len(matrix_elements), 5250 ",".join([bool_dict[me.get('has_mirror_process')] for \ 5251 me in matrix_elements]))) 5252 # Write the file 5253 writer.writelines(lines)
5254 5255 #=========================================================================== 5256 # write_addmothers 5257 #===========================================================================
5258 - def write_addmothers(self, writer):
5259 """Write the SubProcess/addmothers.f""" 5260 5261 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 5262 5263 text = open(path).read() % {'iconfig': 'lconfig'} 5264 writer.write(text) 5265 5266 return True
5267 5268 5269 #=========================================================================== 5270 # write_coloramps_file 5271 #===========================================================================
def write_coloramps_file(self, writer, diagrams_for_config, maxflows,
                         matrix_elements):
    """Write the coloramps.inc file for MadEvent in Subprocess group mode"""

    # Map each subprocess (matrix element) index to the list of diagram
    # numbers it contributes, one entry per configuration.
    confdiag_per_subproc = {}
    for config in diagrams_for_config:
        for isub, diag in enumerate(config):
            confdiag_per_subproc.setdefault(isub, []).append(diag)

    # The logical declaration comes first, then the icolamp DATA lines of
    # every subprocess in index order (subprocess numbers are 1-based).
    lines = ["logical icolamp(%d,%d,%d)" % (maxflows,
                                            len(diagrams_for_config),
                                            len(matrix_elements))]
    for isub in sorted(confdiag_per_subproc):
        lines.extend(self.get_icolamp_lines(confdiag_per_subproc[isub],
                                            matrix_elements[isub],
                                            isub + 1))

    # Write the file
    writer.writelines(lines)

    return True
5303 5304 #=========================================================================== 5305 # write_config_subproc_map_file 5306 #===========================================================================
def write_config_subproc_map_file(self, writer, config_subproc_map):
    """Write the config_subproc_map.inc file for subprocess groups"""

    out = []
    # Output only configs that have at least one contributing diagram in
    # some subprocess; kept configs are renumbered consecutively.
    nkept = 0
    for config in config_subproc_map:
        if set(config) == set([0]):
            continue
        nkept += 1
        values = ",".join(str(entry) for entry in config)
        out.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" % (nkept, len(config),
                                                        values))
    # Write the file
    writer.writelines(out)

    return True
5324 5325 #=========================================================================== 5326 # read_write_good_hel 5327 #===========================================================================
def read_write_good_hel(self, ncomb):
    """return the code to read/write the good_hel common_block"""

    # ncomb: number of helicity combinations of the matrix element; it
    # sizes the GOODHEL arrays of the generated Fortran common block.
    convert = {'ncomb' : ncomb}

    # Generated Fortran routines:
    #   write_good_hel(stream_id): dump GOODHEL to an already-open stream.
    #   read_good_hel(stream_id):  restore GOODHEL and freeze the helicity
    #       filter by setting NTRY above MAXTRIES (from genps.inc).
    #   init_good_hel():           reset the filter to its initial state.
    #   get_maxsproc():            accessor for maxsproc from maxamps.inc.
    output = """
        subroutine write_good_hel(stream_id)
        implicit none
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        write(stream_id,*) GOODHEL
        return
        end


        subroutine read_good_hel(stream_id)
        implicit none
        include 'genps.inc'
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        read(stream_id,*) GOODHEL
        NTRY(1) = MAXTRIES + 1
        NTRY(2) = MAXTRIES + 1
        return
        end

        subroutine init_good_hel()
        implicit none
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        INTEGER I

        do i=1,NCOMB
            GOODHEL(I,1) = .false.
            GOODHEL(I,2) = .false.
        enddo
        NTRY(1) = 0
        NTRY(2) = 0
        end

        integer function get_maxsproc()
        implicit none
        include 'maxamps.inc'

        get_maxsproc = maxsproc
        return
        end

        """ % convert

    return output
5389 5390 5391 5392 #=========================================================================== 5393 # write_configs_file 5394 #===========================================================================
def write_configs_file(self, writer, subproc_group, diagrams_for_config):
    """Write the configs.inc file with topology information for a
    subprocess group. Use the first subprocess with a diagram for each
    configuration."""

    matrix_elements = subproc_group.get('matrix_elements')
    model = matrix_elements[0].get('processes')[0].get('model')

    kept_diagrams = []
    kept_numbers = []
    for iconfig, config in enumerate(diagrams_for_config):
        # Drop configurations to which no subprocess contributes a diagram.
        if set(config) == set([0]):
            continue
        # For every subprocess, pick its diagram for this configuration
        # (diagram numbers are 1-based; 0 means "does not contribute").
        row = [matrix_elements[isub].get('diagrams')[idiag - 1]
               if idiag else None
               for isub, idiag in enumerate(config)]
        kept_diagrams.append(row)
        kept_numbers.append(iconfig + 1)

    # Extract number of external particles
    (nexternal, ninitial) = subproc_group.get_nexternal_ninitial()

    return len(kept_diagrams), \
           self.write_configs_file_from_diagrams(writer, kept_diagrams,
                                                 kept_numbers,
                                                 nexternal, ninitial,
                                                 model)
5427 5428 #=========================================================================== 5429 # write_run_configs_file 5430 #===========================================================================
def write_run_config_file(self, writer):
    """Write the run_configs.inc file for MadEvent.

    Fills the madevent_run_config.inc template with the number of
    channels handled per job and writes it through *writer*.
    """

    path = pjoin(_file_path, 'iolibs', 'template_files',
                 'madevent_run_config.inc')

    # Loop-induced processes are heavier per channel, so give each job a
    # single channel; all other processes get two channels per job.
    if self.proc_characteristic['loop_induced']:
        job_per_chan = 1
    else:
        job_per_chan = 2

    # Fix: close the template handle deterministically instead of leaving
    # it to garbage collection.
    template = open(path)
    try:
        text = template.read() % {'chanperjob': job_per_chan}
    finally:
        template.close()

    writer.write(text)
    return True
5442 5443 5444 #=========================================================================== 5445 # write_leshouche_file 5446 #===========================================================================
def write_leshouche_file(self, writer, subproc_group):
    """Write the leshouche.inc file for MG4"""

    # Concatenate the leshouche blocks of every matrix element of the
    # group, numbering the subprocesses from zero.
    all_lines = []
    iproc = 0
    for matrix_element in subproc_group.get('matrix_elements'):
        all_lines += self.get_leshouche_lines(matrix_element, iproc)
        iproc += 1

    # Write the file
    writer.writelines(all_lines)
    return True
5459 5460
def finalize(self,*args, **opts):
    """Finalize the grouped-subprocess export.

    Delegates to the parent class finalize, then forces the
    'grouped_matrix' flag of the process characteristics to True, since
    this exporter always produces grouped matrix elements.
    """

    super(ProcessExporterFortranMEGroup, self).finalize(*args, **opts)
    #ensure that the grouping information is on the correct value
    self.proc_characteristic['grouped_matrix'] = True
5466 5467 5468 #=============================================================================== 5469 # UFO_model_to_mg4 5470 #=============================================================================== 5471 5472 python_to_fortran = lambda x: parsers.UFOExpressionParserFortran().parse(x)
class UFO_model_to_mg4(object):
    """ A converter of the UFO-MG5 Model to the MG4 format """

    # The list below shows the only variables the user is allowed to change by
    # himself for each PS point. If he changes any other, then calling
    # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not
    # correctly account for the change.
    PS_dependent_key = ['aS','MU_R']
    # Fortran type declarations used for the multiple-precision (quadruple)
    # versions of the generated model files.
    mp_complex_format = 'complex*32'
    mp_real_format = 'real*16'
    # Warning, it is crucial none of the couplings/parameters of the model
    # starts with this prefix. I should add a check for this.
    # You can change it as the global variable to check_param_card.ParamCard
    mp_prefix = check_param_card.ParamCard.mp_prefix
def __init__(self, model, output_path, opt=None):
    """ initialization of the objects """

    self.model = model
    self.model_name = model['name']
    self.dir_path = output_path

    # Default options, possibly overridden by the caller.
    self.opt = {'complex_mass': False, 'export_format': 'madevent',
                'mp': True, 'loop_induced': False}
    if opt:
        self.opt.update(opt)

    # Containers filled later by refactorize(); each holds
    # (name, expression, type) objects, external parameters separately.
    for container in ('coups_dep', 'coups_indep', 'params_dep',
                      'params_indep', 'params_ext'):
        setattr(self, container, [])

    # Expression translators: python/UFO -> fortran, double and quad prec.
    self.p_to_f = parsers.UFOExpressionParserFortran(self.model)
    self.mp_p_to_f = parsers.UFOExpressionParserMPFortran(self.model)
5508
def pass_parameter_to_case_insensitive(self):
    """Rename model parameters that are identical up to the case.

    Fortran is case-insensitive, so two parameters whose names differ
    only by case would clash in the generated code. Every clashing name
    is rewritten to a unique lower-case form (name, name__2, ...) and all
    expressions, couplings and particle mass/width attributes referring
    to the old names are updated accordingly.
    """

    # Group all parameters by their lower-cased name to detect clashes.
    lower_dict = {}
    duplicate = set()
    keys = list(self.model['parameters'].keys())
    for key in keys:
        for param in self.model['parameters'][key]:
            lower_name = param.name.lower()
            if not lower_name:
                continue
            if lower_name in lower_dict:
                lower_dict[lower_name].append(param)
                duplicate.add(lower_name)
                logger.debug('%s is define both as lower case and upper case.'
                             % lower_name)
            else:
                lower_dict[lower_name] = [param]
    if not duplicate:
        return

    # Build the old-name -> new-name map; the first clashing parameter
    # keeps the plain lower-case name, later ones get a __N suffix.
    re_expr = r'''\b(%s)\b'''
    to_change = []
    change = {}
    for value in duplicate:
        for i, var in enumerate(lower_dict[value]):
            to_change.append(var.name)
            new_name = '%s%s' % (var.name.lower(),
                                 ('__%d' % (i + 1) if i > 0 else ''))
            change[var.name] = new_name
            var.name = new_name

    # Apply the modification to the map_CTcoup_CTparam of the model
    # if it has one (giving for each coupling the CT parameters which
    # are necessary and which should be exported to the model).
    if hasattr(self.model, 'map_CTcoup_CTparam'):
        # Bug fix: iterate the (coupling -> CT parameter list) pairs; the
        # original iterated the dictionary directly, which yields only the
        # keys and cannot be unpacked into (coup, ctparams).
        for coup, ctparams in self.model.map_CTcoup_CTparam.items():
            for i, ctparam in enumerate(ctparams):
                try:
                    self.model.map_CTcoup_CTparam[coup][i] = change[ctparam]
                except KeyError:
                    pass

    replace = lambda match_pattern: change[match_pattern.groups()[0]]
    rep_pattern = re.compile(re_expr % '|'.join(to_change))

    # change parameters
    for key in keys:
        if key == ('external',):
            continue
        for param in self.model['parameters'][key]:
            param.expr = rep_pattern.sub(replace, param.expr)

    # change couplings
    for key in self.model['couplings'].keys():
        for coup in self.model['couplings'][key]:
            coup.expr = rep_pattern.sub(replace, coup.expr)

    # change mass/width
    for part in self.model['particles']:
        if str(part.get('mass')) in to_change:
            part.set('mass', rep_pattern.sub(replace, str(part.get('mass'))))
        if str(part.get('width')) in to_change:
            part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
5574
def refactorize(self, wanted_couplings = []):
    """Split parameters and couplings into alphaS-dependent and
    independent sets, following the MG4 convention.

    Parameters are dispatched to params_ext (external), params_dep
    (depending on a PS_dependent_key such as aS/MU_R) or params_indep;
    couplings likewise go to coups_dep or coups_indep, optionally
    filtered by wanted_couplings.
    """

    # Process the parameter classes from shortest to longest key so the
    # output ordering is deterministic.
    for key in sorted(self.model['parameters'].keys(), key=len):
        to_add = [o for o in self.model['parameters'][key] if o.name]

        if key == ('external',):
            self.params_ext += to_add
        elif any([(k in key) for k in self.PS_dependent_key]):
            self.params_dep += to_add
        else:
            self.params_indep += to_add

    # Same for couplings. Fix: the original sorted the keys but then
    # iterated the dictionary in arbitrary order, making the coupling
    # ordering nondeterministic; iterate the sorted keys as clearly
    # intended (mirrors the parameters loop above).
    for key in sorted(self.model['couplings'].keys(), key=len):
        coup_list = self.model['couplings'][key]
        if any([(k in key) for k in self.PS_dependent_key]):
            self.coups_dep += [c for c in coup_list if
                               (not wanted_couplings or
                                c.name in wanted_couplings)]
        else:
            self.coups_indep += [c for c in coup_list if
                                 (not wanted_couplings or
                                  c.name in wanted_couplings)]

    # MG4 uses G (not aS) as its basic object for alphaS-related
    # computations: drop G from the dependent parameters.
    if 'G' in self.params_dep:
        self.params_dep.pop(self.params_dep.index('G'))

    # aS must exist as an external parameter; otherwise provide fallback
    # numerical values so the generated code still compiles.
    if 'aS' not in self.params_ext:
        logger.critical('aS not define as external parameter adding it!')
        self.params_indep.append( base_objects.ModelVariable('aS', '0.138','real'))
        self.params_indep.append( base_objects.ModelVariable('G', '4.1643','real'))
def build(self, wanted_couplings = [], full=True):
    """Adapt the model to the MG4 conventions and, when *full* is True,
    generate all the model files."""

    self.pass_parameter_to_case_insensitive()
    self.refactorize(wanted_couplings)

    if not full:
        return
    if wanted_couplings:
        # extract the wanted ct parameters
        self.extract_needed_CTparam(wanted_couplings=wanted_couplings)
    self.write_all()
5630 5631
def open(self, name, comment='c', format='default'):
    """ Open the file name in the correct directory and with a valid
    header.

    format='fortran' returns a writers.FortranWriter, anything else a
    plain file handle. A three-line comment banner is written first.
    """

    file_path = pjoin(self.dir_path, name)

    if format == 'fortran':
        fsock = writers.FortranWriter(file_path, 'w')
    else:
        fsock = open(file_path, 'w')

    # The unbound builtin ``file.writelines`` is called explicitly,
    # presumably so the banner is written verbatim, bypassing whatever
    # writelines override FortranWriter defines — TODO confirm.
    # NOTE(review): Python 2 only — the ``file`` builtin does not exist in
    # Python 3.
    file.writelines(fsock, comment * 77 + '\n')
    file.writelines(fsock,'%(comment)s written by the UFO converter\n' % \
                           {'comment': comment + (6 - len(comment)) *  ' '})
    file.writelines(fsock, comment * 77 + '\n\n')
    return fsock
5648 5649
def write_all(self):
    """ write all the files """

    # External parameters: card reading machinery first.
    self.create_ident_card()
    self.create_param_read()

    # Internal parameter definitions: double precision, then quadruple
    # precision when multiple precision output is requested.
    self.create_input()
    self.create_intparam_def(dp=True, mp=False)
    if self.opt['mp']:
        self.create_intparam_def(dp=False, mp=True)

    # Couplings, makefile, model functions, the default param_card and
    # finally the static support files — in the historical order.
    for build_step in (self.create_actualize_mp_ext_param_inc,
                       self.create_coupl_inc,
                       self.create_write_couplings,
                       self.create_couplings,
                       self.create_makeinc,
                       self.create_param_write,
                       self.create_model_functions_inc,
                       self.create_model_functions_def,
                       self.create_param_card,
                       self.copy_standard_file):
        build_step()
5682 5683 ############################################################################ 5684 ## ROUTINE CREATING THE FILES ############################################ 5685 ############################################################################ 5686
def copy_standard_file(self):
    """Copy the static fortran support files of the model directory.

    Links the generic helper files, specializes rw_para.f (include list
    and param_card loading call) for the current export format, installs
    the matching lha_read implementation and makefile.

    Raises MadGraph5Error for an unknown export format.
    """

    # Plain copies of the generic library files.
    file_to_link = ['formats.inc','printout.f', \
                    'rw_para.f', 'testprog.f']
    for filename in file_to_link:
        cp( MG5DIR + '/models/template_files/fortran/' + filename, \
                                                            self.dir_path)

    # rw_para.f is a template: read it, then fill in the include list and
    # the card-loading call below. Fix: close the handle deterministically.
    rw_para = open(os.path.join(MG5DIR,
                                'models/template_files/fortran/rw_para.f'))
    try:
        text = rw_para.read()
    finally:
        rw_para.close()

    includes=["include \'coupl.inc\'","include \'input.inc\'",
              "include \'model_functions.inc\'"]
    if self.opt['mp']:
        includes.extend(["include \'mp_coupl.inc\'","include \'mp_input.inc\'"])
    # In standalone and madloop we do no use the compiled param card but
    # still parse the .dat one so we must load it.
    if self.opt['loop_induced']:
        #loop induced follow MadEvent way to handle the card.
        load_card = ''
        lha_read_filename='lha_read.f'
    elif self.opt['export_format'] in ['madloop','madloop_optimized', 'madloop_matchbox']:
        load_card = 'call LHA_loadcard(param_name,npara,param,value)'
        lha_read_filename='lha_read_mp.f'
    elif self.opt['export_format'].startswith('standalone') \
        or self.opt['export_format'] in ['madweight', 'plugin']\
        or self.opt['export_format'].startswith('matchbox'):
        load_card = 'call LHA_loadcard(param_name,npara,param,value)'
        lha_read_filename='lha_read.f'
    else:
        load_card = ''
        lha_read_filename='lha_read.f'
    cp( MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \
                                  os.path.join(self.dir_path,'lha_read.f'))

    text = text % {'includes':'\n '.join(includes),
                   'load_card':load_card}
    # Fix: use write() (the original called writelines() on a plain string,
    # writing it character by character) and close deterministically.
    writer = open(os.path.join(self.dir_path,'rw_para.f'),'w')
    try:
        writer.write(text)
    finally:
        writer.close()

    # Install the makefile matching the export format.
    if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \
        or self.opt['loop_induced']:
        cp( MG5DIR + '/models/template_files/fortran/makefile_madevent',
            self.dir_path + '/makefile')
        if self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']:
            path = pjoin(self.dir_path, 'makefile')
            makefile = open(path)
            try:
                text = makefile.read()
            finally:
                makefile.close()
            text = text.replace('madevent','aMCatNLO')
            out = open(path, 'w')
            try:
                out.write(text)
            finally:
                out.close()
    elif self.opt['export_format'] in ['standalone', 'standalone_msP','standalone_msF',
                              'madloop','madloop_optimized', 'standalone_rw',
                              'madweight','matchbox','madloop_matchbox', 'plugin']:
        cp( MG5DIR + '/models/template_files/fortran/makefile_standalone',
            self.dir_path + '/makefile')
    else:
        raise MadGraph5Error('Unknown format')
5749
def create_coupl_inc(self):
    """ write coupling.inc

    Declares G, gal, MU_R, Nf, all non-zero masses/widths, all exported
    couplings and (optionally) the complex-mass-scheme masses, each with
    its Fortran common block. When multiple precision is on, two quad
    twins are written: mp_coupl.inc (names with the mp prefix) and
    mp_coupl_same_name.inc (same names as the double-precision file).
    """

    fsock = self.open('coupl.inc', format='fortran')
    if self.opt['mp']:
        mp_fsock = self.open('mp_coupl.inc', format='fortran')
        mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\
                                        format='fortran')

    # Write header
    header = """double precision G
                common/strong/ G

                double complex gal(2)
                common/weak/ gal

                double precision MU_R
                common/rscale/ MU_R

                double precision Nf
                parameter(Nf=%d)
                """ % self.model.get_nflav()

    fsock.writelines(header)

    if self.opt['mp']:
        header = """%(real_mp_format)s %(mp_prefix)sG
                common/MP_strong/ %(mp_prefix)sG

                %(complex_mp_format)s %(mp_prefix)sgal(2)
                common/MP_weak/ %(mp_prefix)sgal

                %(complex_mp_format)s %(mp_prefix)sMU_R
                common/MP_rscale/ %(mp_prefix)sMU_R

                """

        mp_fsock.writelines(header%{'real_mp_format':self.mp_real_format,
                                  'complex_mp_format':self.mp_complex_format,
                                  'mp_prefix':self.mp_prefix})
        mp_fsock_same_name.writelines(header%{'real_mp_format':self.mp_real_format,
                                  'complex_mp_format':self.mp_complex_format,
                                  'mp_prefix':''})

    # Write the Mass definition/ common block
    masses = set()
    widths = set()
    if self.opt['complex_mass']:
        complex_mass = set()

    for particle in self.model.get('particles'):
        #find masses
        one_mass = particle.get('mass')
        if one_mass.lower() != 'zero':
            masses.add(one_mass)

        # find width
        one_width = particle.get('width')
        if one_width.lower() != 'zero':
            widths.add(one_width)
            # complex mass CMASS_<m> only for massive particles, in the
            # complex mass scheme
            if self.opt['complex_mass'] and one_mass.lower() != 'zero':
                complex_mass.add('CMASS_%s' % one_mass)

    if masses:
        fsock.writelines('double precision '+','.join(masses)+'\n')
        fsock.writelines('common/masses/ '+','.join(masses)+'\n\n')
        if self.opt['mp']:
            mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                          ','.join(masses)+'\n')
            mp_fsock_same_name.writelines('common/MP_masses/ '+\
                                          ','.join(masses)+'\n\n')
            mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                self.mp_prefix+m for m in masses])+'\n')
            mp_fsock.writelines('common/MP_masses/ '+\
                        ','.join([self.mp_prefix+m for m in masses])+'\n\n')

    if widths:
        fsock.writelines('double precision '+','.join(widths)+'\n')
        fsock.writelines('common/widths/ '+','.join(widths)+'\n\n')
        if self.opt['mp']:
            mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                          ','.join(widths)+'\n')
            mp_fsock_same_name.writelines('common/MP_widths/ '+\
                                          ','.join(widths)+'\n\n')
            mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                self.mp_prefix+w for w in widths])+'\n')
            mp_fsock.writelines('common/MP_widths/ '+\
                        ','.join([self.mp_prefix+w for w in widths])+'\n\n')

    # Write the Couplings
    coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep]
    fsock.writelines('double complex '+', '.join(coupling_list)+'\n')
    fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n')
    if self.opt['mp']:
        mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                      ','.join(coupling_list)+'\n')
        mp_fsock_same_name.writelines('common/MP_couplings/ '+\
                                      ','.join(coupling_list)+'\n\n')
        mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                            self.mp_prefix+c for c in coupling_list])+'\n')
        mp_fsock.writelines('common/MP_couplings/ '+\
                  ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n')

    # Write complex mass for complex mass scheme (if activated)
    if self.opt['complex_mass'] and complex_mass:
        fsock.writelines('double complex '+', '.join(complex_mass)+'\n')
        fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n')
        if self.opt['mp']:
            mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                          ','.join(complex_mass)+'\n')
            mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\
                                          ','.join(complex_mass)+'\n\n')
            mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                            self.mp_prefix+cm for cm in complex_mass])+'\n')
            mp_fsock.writelines('common/MP_complex_mass/ '+\
                ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
5869
def create_write_couplings(self):
    """Write coupl_write.inc: Fortran statements printing all couplings.

    Emits a banner followed by one write(*,2) line per coupling, in the
    order dependent couplings then independent ones.
    """

    fsock = self.open('coupl_write.inc', format='fortran')

    fsock.writelines("""write(*,*)  ' Couplings of %s'
                        write(*,*)  ' ---------------------------------'
                        write(*,*)  ' '""" % self.model_name)

    # Renamed from ``format`` to avoid shadowing the builtin of that name.
    def write_line(coupl):
        """Return the Fortran write statement printing one coupling."""
        return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}

    # Write the Couplings
    lines = [write_line(coupl) for coupl in self.coups_dep + self.coups_indep]
    fsock.writelines('\n'.join(lines))
def create_input(self):
    """create input.inc containing the definition of the parameters

    Declares the real and complex model parameters and their common
    blocks, skipping masses/widths (already declared in coupl.inc), the
    reserved names g/mu_r/zero, and unneeded CT parameters. A quad
    precision twin mp_input.inc is produced when self.opt['mp'] is set.
    """

    fsock = self.open('input.inc', format='fortran')
    if self.opt['mp']:
        mp_fsock = self.open('mp_input.inc', format='fortran')

    #find mass/ width since they are already define
    already_def = set()
    for particle in self.model.get('particles'):
        already_def.add(particle.get('mass').lower())
        already_def.add(particle.get('width').lower())
        if self.opt['complex_mass']:
            already_def.add('cmass_%s' % particle.get('mass').lower())

    # Valid = not a reserved Fortran-side name and not already declared.
    is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \
                            name.lower() not in already_def

    real_parameters = [param.name for param in self.params_dep +
                       self.params_indep if param.type == 'real'
                       and is_valid(param.name)]

    real_parameters += [param.name for param in self.params_ext
                        if param.type == 'real' and
                        is_valid(param.name)]

    # check the parameter is a CT parameter or not
    # if yes, just use the needed ones
    real_parameters = [param for param in real_parameters \
                       if self.check_needed_param(param)]

    fsock.writelines('double precision '+','.join(real_parameters)+'\n')
    fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n')
    if self.opt['mp']:
        mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                            self.mp_prefix+p for p in real_parameters])+'\n')
        mp_fsock.writelines('common/MP_params_R/ '+','.join([\
                            self.mp_prefix+p for p in real_parameters])+'\n\n')

    # NOTE: external complex parameters are not collected here — only
    # internal (dep/indep) ones, as in the original implementation.
    complex_parameters = [param.name for param in self.params_dep +
                          self.params_indep if param.type == 'complex' and
                          is_valid(param.name)]

    # check the parameter is a CT parameter or not
    # if yes, just use the needed ones
    complex_parameters = [param for param in complex_parameters \
                          if self.check_needed_param(param)]

    if complex_parameters:
        fsock.writelines('double complex '+','.join(complex_parameters)+'\n')
        fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n')
        if self.opt['mp']:
            mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                        self.mp_prefix+p for p in complex_parameters])+'\n')
            mp_fsock.writelines('common/MP_params_C/ '+','.join([\
                        self.mp_prefix+p for p in complex_parameters])+'\n\n')
5942
def check_needed_param(self, param):
    """ Returns whether the parameter in argument is needed for this
    specific computation or not."""

    # Without CT-parameter bookkeeping (leading-order model, or no CT
    # parameter employed in this NLO model) every parameter is needed,
    # since only CTParameters are ever filtered.
    no_filter = (not hasattr(self, 'allCTparameters') or
                 self.allCTparameters is None or
                 self.usedCTparameters is None or
                 len(self.allCTparameters) == 0)
    if no_filter:
        return True

    # Accept the conjugate shorthand of a complex parameter as well: test
    # both the name itself and the name with the first 'conjg__' removed.
    # This is acceptable even if some parameter had an original name
    # including 'conjg__' in it, because at worst we export a parameter
    # that was not needed.
    name = param.lower()
    bare_name = name.replace('conjg__', '', 1)

    # Non-CT parameters are always needed; only CT parameters get filtered.
    if name not in self.allCTparameters and \
       bare_name not in self.allCTparameters:
        return True

    # CT parameter: keep it only when it is actually used.
    return name in self.usedCTparameters or \
           bare_name in self.usedCTparameters
5972
def extract_needed_CTparam(self, wanted_couplings=[]):
    """ Extract what are the needed CT parameters given the wanted_couplings"""

    if not hasattr(self.model, 'map_CTcoup_CTparam') or not wanted_couplings:
        # Setting these lists to None disables the filtering performed in
        # check_needed_param.
        self.allCTparameters = None
        self.usedCTparameters = None
        return

    coup_map = self.model.map_CTcoup_CTparam

    # Every CT parameter appearing in any CT coupling.
    all_params = set()
    for ctparams in coup_map.values():
        all_params.update(ctparams)
    self.allCTparameters = list(all_params)

    # CT couplings actually wanted (case-insensitive match), and the CT
    # parameters those couplings rely on.
    wanted_lower = [coupl.lower() for coupl in wanted_couplings]
    used_params = set()
    for coupl in coup_map.keys():
        if coupl.lower() in wanted_lower:
            used_params.update(coup_map[coupl])
    self.usedCTparameters = list(used_params)

    # Now at last, make these lists case insensitive.
    self.allCTparameters = [ct.lower() for ct in self.allCTparameters]
    self.usedCTparameters = [ct.lower() for ct in self.usedCTparameters]
6003
def create_intparam_def(self, dp=True, mp=False):
    """ create intparam_definition.inc setting the internal parameters.
    Output the double precision and/or the multiple precision parameters
    depending on the parameters dp and mp. If mp only, then the file names
    get the 'mp_' prefix.
    """

    fsock = self.open('%sintparam_definition.inc'%
                      ('mp_' if mp and not dp else ''), format='fortran')

    # Parameters independent of the PS point: computed only when the
    # param_card is (re)read (readlha guard).
    fsock.write_comments(\
            "Parameters that should not be recomputed event by event.\n")
    fsock.writelines("if(readlha) then\n")
    if dp:
        fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n")
    if mp:
        fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n")

    for param in self.params_indep:
        if param.name == 'ZERO':
            continue
        # check whether the parameter is a CT parameter
        # if yes,just used the needed ones
        if not self.check_needed_param(param.name):
            continue
        if dp:
            fsock.writelines("%s = %s\n" % (param.name,
                                            self.p_to_f.parse(param.expr)))
        if mp:
            fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                        self.mp_p_to_f.parse(param.expr)))

    fsock.writelines('endif')

    # PS-dependent parameters: recomputed for every event.
    fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n')
    if dp:
        fsock.writelines("aS = G**2/4/pi\n")
    if mp:
        fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n")
    for param in self.params_dep:
        # check whether the parameter is a CT parameter
        # if yes,just used the needed ones
        if not self.check_needed_param(param.name):
            continue
        if dp:
            fsock.writelines("%s = %s\n" % (param.name,
                                            self.p_to_f.parse(param.expr)))
        elif mp:
            # NOTE(review): 'elif' here (vs 'if mp' in the loop above) means
            # the mp lines would be skipped when dp and mp are both True;
            # callers apparently only use (dp=True,mp=False) or
            # (dp=False,mp=True) — confirm before calling with both True.
            fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                        self.mp_p_to_f.parse(param.expr)))

    # gal(1) definition depends on which EW input scheme the model uses:
    # alpha(EW) scheme (aEWM1 external), Gmu scheme (Gf external), or a
    # fallback of 1d0 when neither is present.
    fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n")
    if ('aEWM1',) in self.model['parameters']:
        if dp:
            fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1)
                                 gal(2) = 1d0
                             """)
        elif mp:
            fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1)
                                 %(mp_prefix)sgal(2) = 1d0
                             """ %{'mp_prefix':self.mp_prefix})
            pass
    # in Gmu scheme, aEWM1 is not external but Gf is an exteranl variable
    elif ('Gf',) in self.model['parameters']:
        if dp:
            fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*DSQRT(1D0-MDL_MW**2/MDL_MZ**2)*DSQRT(MDL_Gf)
                                 gal(2) = 1d0
                             """)
        elif mp:
            fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*SQRT(1e0_16-MP__MDL_MW**2/MP__MDL_MZ**2)*SQRT(SQRT(2e0_16)*MP__MDL_Gf)
                                 %(mp_prefix)sgal(2) = 1d0
                             """ %{'mp_prefix':self.mp_prefix})
            pass
    else:
        if dp:
            logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE')
            fsock.writelines(""" gal(1) = 1d0
                                 gal(2) = 1d0
                             """)
        elif mp:
            fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16
                                 %(mp_prefix)sgal(2) = 1e0_16
                             """%{'mp_prefix':self.mp_prefix})
6087 6088
def create_couplings(self):
    """ create couplings.f and all couplingsX.f """

    nb_def_by_file = 25

    self.create_couplings_main(nb_def_by_file)
    n_indep_files = 1 + len(self.coups_indep) // nb_def_by_file
    n_dep_files = 1 + len(self.coups_dep) // nb_def_by_file

    # Independent couplings: double and multiple precision are computed
    # together in the same subroutine. (Slices clamp automatically, so no
    # explicit min() with the list length is needed.)
    for ifile in range(n_indep_files):
        chunk = self.coups_indep[nb_def_by_file * ifile:
                                 nb_def_by_file * (ifile + 1)]
        self.create_couplings_part(ifile + 1, chunk, dp=True,
                                   mp=self.opt['mp'])

    # Dependent couplings: double and multiple precision are computed in
    # separate subroutines sharing the same file index.
    for ifile in range(n_dep_files):
        chunk = self.coups_dep[nb_def_by_file * ifile:
                               nb_def_by_file * (ifile + 1)]
        self.create_couplings_part(ifile + 1 + n_indep_files, chunk,
                                   dp=True, mp=False)
        if self.opt['mp']:
            self.create_couplings_part(ifile + 1 + n_indep_files, chunk,
                                       dp=False, mp=True)
6115 6116
def create_couplings_main(self, nb_def_by_file=25):
    """ create couplings.f

    Generates the driver subroutines coup(), update_as_param(),
    update_as_param2(mu_r2,as2) and, when multiple precision is on,
    mp_update_as_param(), each dispatching to the per-file coupN()
    subroutines produced by create_couplings_part.
    """

    fsock = self.open('couplings.f', format='fortran')

    # coup(): full initialisation, run when the param_card is (re)read.
    fsock.writelines("""subroutine coup()

                        implicit none
                        double precision PI, ZERO
                        logical READLHA
                        parameter  (PI=3.141592653589793d0)
                        parameter  (ZERO=0d0)
                        include \'model_functions.inc\'""")
    if self.opt['mp']:
        fsock.writelines("""%s MP__PI, MP__ZERO
                            parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                            parameter (MP__ZERO=0e0_16)
                            include \'mp_input.inc\'
                            include \'mp_coupl.inc\'
                        """ % self.mp_real_format)
    fsock.writelines("""include \'input.inc\'
                        include \'coupl.inc\'
                        READLHA = .true.
                        include \'intparam_definition.inc\'""")
    if self.opt['mp']:
        fsock.writelines("""include \'mp_intparam_definition.inc\'\n""")

    nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
    nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

    fsock.writelines('\n'.join(\
                ['call coup%s()' % (i + 1) for i in range(nb_coup_indep)]))

    fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

    fsock.writelines('\n'.join(\
                ['call coup%s()' % (nb_coup_indep + i + 1) \
                 for i in range(nb_coup_dep)]))
    if self.opt['mp']:
        fsock.writelines('\n'.join(\
                ['call mp_coup%s()' % (nb_coup_indep + i + 1) \
                 for i in range(nb_coup_dep)]))
    fsock.writelines('''\n return \n end\n''')

    # update_as_param(): recompute only the aS-dependent quantities
    # (READLHA = .false.), calling only the dependent coupN() routines.
    fsock.writelines("""subroutine update_as_param()

                        implicit none
                        double precision PI, ZERO
                        logical READLHA
                        parameter  (PI=3.141592653589793d0)
                        parameter  (ZERO=0d0)
                        include \'model_functions.inc\'""")
    fsock.writelines("""include \'input.inc\'
                        include \'coupl.inc\'
                        READLHA = .false.""")
    fsock.writelines("""
                        include \'intparam_definition.inc\'\n
                     """)

    nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
    nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

    fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

    fsock.writelines('\n'.join(\
                ['call coup%s()' % (nb_coup_indep + i + 1) \
                 for i in range(nb_coup_dep)]))
    fsock.writelines('''\n return \n end\n''')

    # update_as_param2(mu_r2, as2): externally driven update of MU_R, G
    # and aS followed by a standard update_as_param() call.
    fsock.writelines("""subroutine update_as_param2(mu_r2,as2)

                        implicit none
                        double precision PI
                        parameter  (PI=3.141592653589793d0)
                        double precision mu_r2, as2
                        include \'model_functions.inc\'""")
    fsock.writelines("""include \'input.inc\'
                        include \'coupl.inc\'""")
    fsock.writelines("""
                        if (mu_r2.gt.0d0) MU_R = mu_r2
                        G = SQRT(4.0d0*PI*AS2)
                        AS = as2

                        CALL UPDATE_AS_PARAM()
                     """)
    fsock.writelines('''\n return \n end\n''')

    if self.opt['mp']:
        # mp_update_as_param(): quadruple-precision variant of the above.
        fsock.writelines("""subroutine mp_update_as_param()

                            implicit none
                            logical READLHA
                            include \'model_functions.inc\'""")
        fsock.writelines("""%s MP__PI, MP__ZERO
                            parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                            parameter (MP__ZERO=0e0_16)
                            include \'mp_input.inc\'
                            include \'mp_coupl.inc\'
                        """ % self.mp_real_format)
        fsock.writelines("""include \'input.inc\'
                            include \'coupl.inc\'
                            include \'actualize_mp_ext_params.inc\'
                            READLHA = .false.
                            include \'mp_intparam_definition.inc\'\n
                        """)

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        fsock.writelines('\n'.join(\
                ['call mp_coup%s()' % (nb_coup_indep + i + 1) \
                 for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')
6232
6233 - def create_couplings_part(self, nb_file, data, dp=True, mp=False):
6234 """ create couplings[nb_file].f containing information coming from data. 6235 Outputs the computation of the double precision and/or the multiple 6236 precision couplings depending on the parameters dp and mp. 6237 If mp is True and dp is False, then the prefix 'MP_' is appended to the 6238 filename and subroutine name. 6239 """ 6240 6241 fsock = self.open('%scouplings%s.f' %('mp_' if mp and not dp else '', 6242 nb_file), format='fortran') 6243 fsock.writelines("""subroutine %scoup%s() 6244 6245 implicit none 6246 include \'model_functions.inc\'"""%('mp_' if mp and not dp else '',nb_file)) 6247 if dp: 6248 fsock.writelines(""" 6249 double precision PI, ZERO 6250 parameter (PI=3.141592653589793d0) 6251 parameter (ZERO=0d0) 6252 include 'input.inc' 6253 include 'coupl.inc'""") 6254 if mp: 6255 fsock.writelines("""%s MP__PI, MP__ZERO 6256 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 6257 parameter (MP__ZERO=0e0_16) 6258 include \'mp_input.inc\' 6259 include \'mp_coupl.inc\' 6260 """%self.mp_real_format) 6261 6262 for coupling in data: 6263 if dp: 6264 fsock.writelines('%s = %s' % (coupling.name, 6265 self.p_to_f.parse(coupling.expr))) 6266 if mp: 6267 fsock.writelines('%s%s = %s' % (self.mp_prefix,coupling.name, 6268 self.mp_p_to_f.parse(coupling.expr))) 6269 fsock.writelines('end')
6270
6271 - def create_model_functions_inc(self):
6272 """ Create model_functions.inc which contains the various declarations 6273 of auxiliary functions which might be used in the couplings expressions 6274 """ 6275 6276 additional_fct = [] 6277 # check for functions define in the UFO model 6278 ufo_fct = self.model.get('functions') 6279 if ufo_fct: 6280 for fct in ufo_fct: 6281 # already handle by default 6282 if fct.name not in ["complexconjugate", "re", "im", "sec", 6283 "csc", "asec", "acsc", "theta_function", "cond", 6284 "condif", "reglogp", "reglogm", "reglog", "recms", "arg", "cot", 6285 "grreglog","regsqrt"]: 6286 additional_fct.append(fct.name) 6287 6288 6289 fsock = self.open('model_functions.inc', format='fortran') 6290 fsock.writelines("""double complex cond 6291 double complex condif 6292 double complex reglog 6293 double complex reglogp 6294 double complex reglogm 6295 double complex recms 6296 double complex arg 6297 double complex grreglog 6298 double complex regsqrt 6299 %s 6300 """ % "\n".join([" double complex %s" % i for i in additional_fct])) 6301 6302 6303 if self.opt['mp']: 6304 fsock.writelines("""%(complex_mp_format)s mp_cond 6305 %(complex_mp_format)s mp_condif 6306 %(complex_mp_format)s mp_reglog 6307 %(complex_mp_format)s mp_reglogp 6308 %(complex_mp_format)s mp_reglogm 6309 %(complex_mp_format)s mp_recms 6310 %(complex_mp_format)s mp_arg 6311 %(complex_mp_format)s mp_grreglog 6312 %(complex_mp_format)s mp_regsqrt 6313 %(additional)s 6314 """ %\ 6315 {"additional": "\n".join([" %s mp_%s" % (self.mp_complex_format, i) for i in additional_fct]), 6316 'complex_mp_format':self.mp_complex_format 6317 })
6318
6319 - def create_model_functions_def(self):
6320 """ Create model_functions.f which contains the various definitions 6321 of auxiliary functions which might be used in the couplings expressions 6322 Add the functions.f functions for formfactors support 6323 """ 6324 6325 fsock = self.open('model_functions.f', format='fortran') 6326 fsock.writelines("""double complex function cond(condition,truecase,falsecase) 6327 implicit none 6328 double complex condition,truecase,falsecase 6329 if(condition.eq.(0.0d0,0.0d0)) then 6330 cond=truecase 6331 else 6332 cond=falsecase 6333 endif 6334 end 6335 6336 double complex function condif(condition,truecase,falsecase) 6337 implicit none 6338 logical condition 6339 double complex truecase,falsecase 6340 if(condition) then 6341 condif=truecase 6342 else 6343 condif=falsecase 6344 endif 6345 end 6346 6347 double complex function recms(condition,expr) 6348 implicit none 6349 logical condition 6350 double complex expr 6351 if(condition)then 6352 recms=expr 6353 else 6354 recms=dcmplx(dble(expr)) 6355 endif 6356 end 6357 6358 double complex function reglog(arg) 6359 implicit none 6360 double complex TWOPII 6361 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6362 double complex arg 6363 if(arg.eq.(0.0d0,0.0d0)) then 6364 reglog=(0.0d0,0.0d0) 6365 else 6366 reglog=log(arg) 6367 endif 6368 end 6369 6370 double complex function reglogp(arg) 6371 implicit none 6372 double complex TWOPII 6373 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6374 double complex arg 6375 if(arg.eq.(0.0d0,0.0d0))then 6376 reglogp=(0.0d0,0.0d0) 6377 else 6378 if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then 6379 reglogp=log(arg) + TWOPII 6380 else 6381 reglogp=log(arg) 6382 endif 6383 endif 6384 end 6385 6386 double complex function reglogm(arg) 6387 implicit none 6388 double complex TWOPII 6389 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6390 double complex arg 6391 if(arg.eq.(0.0d0,0.0d0))then 6392 reglogm=(0.0d0,0.0d0) 6393 else 6394 
if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then 6395 reglogm=log(arg) - TWOPII 6396 else 6397 reglogm=log(arg) 6398 endif 6399 endif 6400 end 6401 6402 double complex function regsqrt(arg_in) 6403 implicit none 6404 double complex arg_in 6405 double complex arg 6406 arg=arg_in 6407 if(dabs(dimag(arg)).eq.0.0d0)then 6408 arg=dcmplx(dble(arg),0.0d0) 6409 endif 6410 if(dabs(dble(arg)).eq.0.0d0)then 6411 arg=dcmplx(0.0d0,dimag(arg)) 6412 endif 6413 regsqrt=sqrt(arg) 6414 end 6415 6416 double complex function grreglog(logsw,expr1_in,expr2_in) 6417 implicit none 6418 double complex TWOPII 6419 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6420 double complex expr1_in,expr2_in 6421 double complex expr1,expr2 6422 double precision logsw 6423 double precision imagexpr 6424 logical firstsheet 6425 expr1=expr1_in 6426 expr2=expr2_in 6427 if(dabs(dimag(expr1)).eq.0.0d0)then 6428 expr1=dcmplx(dble(expr1),0.0d0) 6429 endif 6430 if(dabs(dble(expr1)).eq.0.0d0)then 6431 expr1=dcmplx(0.0d0,dimag(expr1)) 6432 endif 6433 if(dabs(dimag(expr2)).eq.0.0d0)then 6434 expr2=dcmplx(dble(expr2),0.0d0) 6435 endif 6436 if(dabs(dble(expr2)).eq.0.0d0)then 6437 expr2=dcmplx(0.0d0,dimag(expr2)) 6438 endif 6439 if(expr1.eq.(0.0d0,0.0d0))then 6440 grreglog=(0.0d0,0.0d0) 6441 else 6442 imagexpr=dimag(expr1)*dimag(expr2) 6443 firstsheet=imagexpr.ge.0.0d0 6444 firstsheet=firstsheet.or.dble(expr1).ge.0.0d0 6445 firstsheet=firstsheet.or.dble(expr2).ge.0.0d0 6446 if(firstsheet)then 6447 grreglog=log(expr1) 6448 else 6449 if(dimag(expr1).gt.0.0d0)then 6450 grreglog=log(expr1) - logsw*TWOPII 6451 else 6452 grreglog=log(expr1) + logsw*TWOPII 6453 endif 6454 endif 6455 endif 6456 end 6457 6458 double complex function arg(comnum) 6459 implicit none 6460 double complex comnum 6461 double complex iim 6462 iim = (0.0d0,1.0d0) 6463 if(comnum.eq.(0.0d0,0.0d0)) then 6464 arg=(0.0d0,0.0d0) 6465 else 6466 arg=log(comnum/abs(comnum))/iim 6467 endif 6468 end""") 6469 if self.opt['mp']: 6470 
fsock.writelines(""" 6471 6472 %(complex_mp_format)s function mp_cond(condition,truecase,falsecase) 6473 implicit none 6474 %(complex_mp_format)s condition,truecase,falsecase 6475 if(condition.eq.(0.0e0_16,0.0e0_16)) then 6476 mp_cond=truecase 6477 else 6478 mp_cond=falsecase 6479 endif 6480 end 6481 6482 %(complex_mp_format)s function mp_condif(condition,truecase,falsecase) 6483 implicit none 6484 logical condition 6485 %(complex_mp_format)s truecase,falsecase 6486 if(condition) then 6487 mp_condif=truecase 6488 else 6489 mp_condif=falsecase 6490 endif 6491 end 6492 6493 %(complex_mp_format)s function mp_recms(condition,expr) 6494 implicit none 6495 logical condition 6496 %(complex_mp_format)s expr 6497 if(condition)then 6498 mp_recms=expr 6499 else 6500 mp_recms=cmplx(real(expr),kind=16) 6501 endif 6502 end 6503 6504 %(complex_mp_format)s function mp_reglog(arg) 6505 implicit none 6506 %(complex_mp_format)s TWOPII 6507 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6508 %(complex_mp_format)s arg 6509 if(arg.eq.(0.0e0_16,0.0e0_16)) then 6510 mp_reglog=(0.0e0_16,0.0e0_16) 6511 else 6512 mp_reglog=log(arg) 6513 endif 6514 end 6515 6516 %(complex_mp_format)s function mp_reglogp(arg) 6517 implicit none 6518 %(complex_mp_format)s TWOPII 6519 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6520 %(complex_mp_format)s arg 6521 if(arg.eq.(0.0e0_16,0.0e0_16))then 6522 mp_reglogp=(0.0e0_16,0.0e0_16) 6523 else 6524 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then 6525 mp_reglogp=log(arg) + TWOPII 6526 else 6527 mp_reglogp=log(arg) 6528 endif 6529 endif 6530 end 6531 6532 %(complex_mp_format)s function mp_reglogm(arg) 6533 implicit none 6534 %(complex_mp_format)s TWOPII 6535 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6536 %(complex_mp_format)s arg 6537 if(arg.eq.(0.0e0_16,0.0e0_16))then 6538 mp_reglogm=(0.0e0_16,0.0e0_16) 
6539 else 6540 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then 6541 mp_reglogm=log(arg) - TWOPII 6542 else 6543 mp_reglogm=log(arg) 6544 endif 6545 endif 6546 end 6547 6548 %(complex_mp_format)s function mp_regsqrt(arg_in) 6549 implicit none 6550 %(complex_mp_format)s arg_in 6551 %(complex_mp_format)s arg 6552 arg=arg_in 6553 if(abs(imagpart(arg)).eq.0.0e0_16)then 6554 arg=cmplx(real(arg,kind=16),0.0e0_16) 6555 endif 6556 if(abs(real(arg,kind=16)).eq.0.0e0_16)then 6557 arg=cmplx(0.0e0_16,imagpart(arg)) 6558 endif 6559 mp_regsqrt=sqrt(arg) 6560 end 6561 6562 6563 %(complex_mp_format)s function mp_grreglog(logsw,expr1_in,expr2_in) 6564 implicit none 6565 %(complex_mp_format)s TWOPII 6566 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6567 %(complex_mp_format)s expr1_in,expr2_in 6568 %(complex_mp_format)s expr1,expr2 6569 %(real_mp_format)s logsw 6570 %(real_mp_format)s imagexpr 6571 logical firstsheet 6572 expr1=expr1_in 6573 expr2=expr2_in 6574 if(abs(imagpart(expr1)).eq.0.0e0_16)then 6575 expr1=cmplx(real(expr1,kind=16),0.0e0_16) 6576 endif 6577 if(abs(real(expr1,kind=16)).eq.0.0e0_16)then 6578 expr1=cmplx(0.0e0_16,imagpart(expr1)) 6579 endif 6580 if(abs(imagpart(expr2)).eq.0.0e0_16)then 6581 expr2=cmplx(real(expr2,kind=16),0.0e0_16) 6582 endif 6583 if(abs(real(expr2,kind=16)).eq.0.0e0_16)then 6584 expr2=cmplx(0.0e0_16,imagpart(expr2)) 6585 endif 6586 if(expr1.eq.(0.0e0_16,0.0e0_16))then 6587 mp_grreglog=(0.0e0_16,0.0e0_16) 6588 else 6589 imagexpr=imagpart(expr1)*imagpart(expr2) 6590 firstsheet=imagexpr.ge.0.0e0_16 6591 firstsheet=firstsheet.or.real(expr1,kind=16).ge.0.0e0_16 6592 firstsheet=firstsheet.or.real(expr2,kind=16).ge.0.0e0_16 6593 if(firstsheet)then 6594 mp_grreglog=log(expr1) 6595 else 6596 if(imagpart(expr1).gt.0.0e0_16)then 6597 mp_grreglog=log(expr1) - logsw*TWOPII 6598 else 6599 mp_grreglog=log(expr1) + logsw*TWOPII 6600 endif 6601 endif 6602 endif 6603 end 6604 6605 
%(complex_mp_format)s function mp_arg(comnum) 6606 implicit none 6607 %(complex_mp_format)s comnum 6608 %(complex_mp_format)s imm 6609 imm = (0.0e0_16,1.0e0_16) 6610 if(comnum.eq.(0.0e0_16,0.0e0_16)) then 6611 mp_arg=(0.0e0_16,0.0e0_16) 6612 else 6613 mp_arg=log(comnum/abs(comnum))/imm 6614 endif 6615 end"""%{'complex_mp_format':self.mp_complex_format,'real_mp_format':self.mp_real_format}) 6616 6617 6618 #check for the file functions.f 6619 model_path = self.model.get('modelpath') 6620 if os.path.exists(pjoin(model_path,'Fortran','functions.f')): 6621 fsock.write_comment_line(' USER DEFINE FUNCTIONS ') 6622 input = pjoin(model_path,'Fortran','functions.f') 6623 file.writelines(fsock, open(input).read()) 6624 fsock.write_comment_line(' END USER DEFINE FUNCTIONS ') 6625 6626 # check for functions define in the UFO model 6627 ufo_fct = self.model.get('functions') 6628 if ufo_fct: 6629 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ') 6630 for fct in ufo_fct: 6631 # already handle by default 6632 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif", 6633 "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms","arg", 6634 "grreglog","regsqrt"]: 6635 ufo_fct_template = """ 6636 double complex function %(name)s(%(args)s) 6637 implicit none 6638 double complex %(args)s 6639 %(definitions)s 6640 %(name)s = %(fct)s 6641 6642 return 6643 end 6644 """ 6645 str_fct = self.p_to_f.parse(fct.expr) 6646 if not self.p_to_f.to_define: 6647 definitions = [] 6648 else: 6649 definitions=[] 6650 for d in self.p_to_f.to_define: 6651 if d == 'pi': 6652 definitions.append(' double precision pi') 6653 definitions.append(' data pi /3.1415926535897932d0/') 6654 else: 6655 definitions.append(' double complex %s' % d) 6656 6657 text = ufo_fct_template % { 6658 'name': fct.name, 6659 'args': ", ".join(fct.arguments), 6660 'fct': str_fct, 6661 'definitions': '\n'.join(definitions) 6662 } 6663 6664 fsock.writelines(text) 6665 if self.opt['mp']: 
6666 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP') 6667 for fct in ufo_fct: 6668 # already handle by default 6669 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc","condif", 6670 "theta_function", "cond", "reglog", "reglogp","reglogm", "recms","arg", 6671 "grreglog","regsqrt"]: 6672 ufo_fct_template = """ 6673 %(complex_mp_format)s function mp_%(name)s(mp__%(args)s) 6674 implicit none 6675 %(complex_mp_format)s mp__%(args)s 6676 %(definitions)s 6677 mp_%(name)s = %(fct)s 6678 6679 return 6680 end 6681 """ 6682 str_fct = self.mp_p_to_f.parse(fct.expr) 6683 if not self.mp_p_to_f.to_define: 6684 definitions = [] 6685 else: 6686 definitions=[] 6687 for d in self.mp_p_to_f.to_define: 6688 if d == 'pi': 6689 definitions.append(' %s mp__pi' % self.mp_real_format) 6690 definitions.append(' data mp__pi /3.141592653589793238462643383279502884197e+00_16/') 6691 else: 6692 definitions.append(' %s mp_%s' % (self.mp_complex_format,d)) 6693 text = ufo_fct_template % { 6694 'name': fct.name, 6695 'args': ", mp__".join(fct.arguments), 6696 'fct': str_fct, 6697 'definitions': '\n'.join(definitions), 6698 'complex_mp_format': self.mp_complex_format 6699 } 6700 fsock.writelines(text) 6701 6702 6703 6704 fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
6705 6706 6707
6708 - def create_makeinc(self):
6709 """create makeinc.inc containing the file to compile """ 6710 6711 fsock = self.open('makeinc.inc', comment='#') 6712 text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o' 6713 text += ' model_functions.o ' 6714 6715 nb_coup_indep = 1 + len(self.coups_dep) // 25 6716 nb_coup_dep = 1 + len(self.coups_indep) // 25 6717 couplings_files=['couplings%s.o' % (i+1) \ 6718 for i in range(nb_coup_dep + nb_coup_indep) ] 6719 if self.opt['mp']: 6720 couplings_files+=['mp_couplings%s.o' % (i+1) for i in \ 6721 range(nb_coup_dep,nb_coup_dep + nb_coup_indep) ] 6722 text += ' '.join(couplings_files) 6723 fsock.writelines(text)
6724
6725 - def create_param_write(self):
6726 """ create param_write """ 6727 6728 fsock = self.open('param_write.inc', format='fortran') 6729 6730 fsock.writelines("""write(*,*) ' External Params' 6731 write(*,*) ' ---------------------------------' 6732 write(*,*) ' '""") 6733 def format(name): 6734 return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
6735 6736 # Write the external parameter 6737 lines = [format(param.name) for param in self.params_ext] 6738 fsock.writelines('\n'.join(lines)) 6739 6740 fsock.writelines("""write(*,*) ' Internal Params' 6741 write(*,*) ' ---------------------------------' 6742 write(*,*) ' '""") 6743 lines = [format(data.name) for data in self.params_indep 6744 if data.name != 'ZERO' and self.check_needed_param(data.name)] 6745 fsock.writelines('\n'.join(lines)) 6746 fsock.writelines("""write(*,*) ' Internal Params evaluated point by point' 6747 write(*,*) ' ----------------------------------------' 6748 write(*,*) ' '""") 6749 lines = [format(data.name) for data in self.params_dep \ 6750 if self.check_needed_param(data.name)] 6751 6752 fsock.writelines('\n'.join(lines)) 6753 6754 6755
6756 - def create_ident_card(self):
6757 """ create the ident_card.dat """ 6758 6759 def format(parameter): 6760 """return the line for the ident_card corresponding to this parameter""" 6761 colum = [parameter.lhablock.lower()] + \ 6762 [str(value) for value in parameter.lhacode] + \ 6763 [parameter.name] 6764 if not parameter.name: 6765 return '' 6766 return ' '.join(colum)+'\n'
6767 6768 fsock = self.open('ident_card.dat') 6769 6770 external_param = [format(param) for param in self.params_ext] 6771 fsock.writelines('\n'.join(external_param)) 6772
6773 - def create_actualize_mp_ext_param_inc(self):
6774 """ create the actualize_mp_ext_params.inc code """ 6775 6776 # In principle one should actualize all external, but for now, it is 6777 # hardcoded that only AS and MU_R can by dynamically changed by the user 6778 # so that we only update those ones. 6779 # Of course, to be on the safe side, one could decide to update all 6780 # external parameters. 6781 update_params_list=[p for p in self.params_ext if p.name in 6782 self.PS_dependent_key] 6783 6784 res_strings = ["%(mp_prefix)s%(name)s=%(name)s"\ 6785 %{'mp_prefix':self.mp_prefix,'name':param.name}\ 6786 for param in update_params_list] 6787 # When read_lha is false, it is G which is taken in input and not AS, so 6788 # this is what should be reset here too. 6789 if 'aS' in [param.name for param in update_params_list]: 6790 res_strings.append("%(mp_prefix)sG=G"%{'mp_prefix':self.mp_prefix}) 6791 6792 fsock = self.open('actualize_mp_ext_params.inc', format='fortran') 6793 fsock.writelines('\n'.join(res_strings))
6794
6795 - def create_param_read(self):
6796 """create param_read""" 6797 6798 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 6799 or self.opt['loop_induced']: 6800 fsock = self.open('param_read.inc', format='fortran') 6801 fsock.writelines(' include \'../param_card.inc\'') 6802 return 6803 6804 def format_line(parameter): 6805 """return the line for the ident_card corresponding to this 6806 parameter""" 6807 template = \ 6808 """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \ 6809 % {'name': parameter.name, 6810 'value': self.p_to_f.parse(str(parameter.value.real))} 6811 if self.opt['mp']: 6812 template = template+ \ 6813 ("\n call MP_LHA_get_real(npara,param,value,'%(name)s',"+ 6814 "%(mp_prefix)s%(name)s,%(value)s)") \ 6815 % {'name': parameter.name,'mp_prefix': self.mp_prefix, 6816 'value': self.mp_p_to_f.parse(str(parameter.value.real))} 6817 return template 6818 6819 fsock = self.open('param_read.inc', format='fortran') 6820 res_strings = [format_line(param) \ 6821 for param in self.params_ext] 6822 6823 # Correct width sign for Majorana particles (where the width 6824 # and mass need to have the same sign) 6825 for particle in self.model.get('particles'): 6826 if particle.is_fermion() and particle.get('self_antipart') and \ 6827 particle.get('width').lower() != 'zero': 6828 6829 res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \ 6830 {'width': particle.get('width'), 'mass': particle.get('mass')}) 6831 if self.opt['mp']: 6832 res_strings.append(\ 6833 ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,'+\ 6834 '%(mp_pref)s%(mass)s)')%{'width': particle.get('width'),\ 6835 'mass': particle.get('mass'),'mp_pref':self.mp_prefix}) 6836 6837 fsock.writelines('\n'.join(res_strings)) 6838 6839 6840 @staticmethod
6841 - def create_param_card_static(model, output_path, rule_card_path=False, 6842 mssm_convert=True):
6843 """ create the param_card.dat for a givent model --static method-- """ 6844 #1. Check if a default param_card is present: 6845 done = False 6846 if hasattr(model, 'restrict_card') and isinstance(model.restrict_card, str): 6847 restrict_name = os.path.basename(model.restrict_card)[9:-4] 6848 model_path = model.get('modelpath') 6849 if os.path.exists(pjoin(model_path,'paramcard_%s.dat' % restrict_name)): 6850 done = True 6851 files.cp(pjoin(model_path,'paramcard_%s.dat' % restrict_name), 6852 output_path) 6853 if not done: 6854 param_writer.ParamCardWriter(model, output_path) 6855 6856 if rule_card_path: 6857 if hasattr(model, 'rule_card'): 6858 model.rule_card.write_file(rule_card_path) 6859 6860 if mssm_convert: 6861 model_name = model.get('name') 6862 # IF MSSM convert the card to SLAH1 6863 if model_name == 'mssm' or model_name.startswith('mssm-'): 6864 import models.check_param_card as translator 6865 # Check the format of the param_card for Pythia and make it correct 6866 if rule_card_path: 6867 translator.make_valid_param_card(output_path, rule_card_path) 6868 translator.convert_to_slha1(output_path)
6869
6870 - def create_param_card(self):
6871 """ create the param_card.dat """ 6872 6873 rule_card = pjoin(self.dir_path, 'param_card_rule.dat') 6874 if not hasattr(self.model, 'rule_card'): 6875 rule_card=False 6876 self.create_param_card_static(self.model, 6877 output_path=pjoin(self.dir_path, 'param_card.dat'), 6878 rule_card_path=rule_card, 6879 mssm_convert=True)
6880
def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True, cmd_options={}):
    """ Determine which Export_v4 class is required. cmd is the command
        interface containing all potential usefull information.
        The output_type argument specifies from which context the output
        is called. It is 'madloop' for MadLoop5, 'amcatnlo' for FKS5 output
        and 'default' for tree-level outputs.

        Returns an instantiated exporter object bound to cmd._export_dir.
        NOTE(review): cmd_options={} is a mutable default; it appears to be
        only stored, never mutated, here — confirm callers rely on that.
    """

    opt = dict(cmd.options)
    opt['output_options'] = cmd_options

    # ==========================================================================
    # First check whether Ninja must be installed.
    # Ninja would only be required if:
    #  a) Loop optimized output is selected
    #  b) the process gathered from the amplitude generated use loops

    # Pick a representative process to probe for loop content: either the
    # first generated amplitude or the first FKS process definition.
    if len(cmd._curr_amps) > 0:
        try:
            curr_proc = cmd._curr_amps[0].get('process')
        except base_objects.PhysicsObject.PhysicsObjectError:
            curr_proc = None
    elif hasattr(cmd, '_fks_multi_proc') and \
                     len(cmd._fks_multi_proc.get('process_definitions')) > 0:
        curr_proc = cmd._fks_multi_proc.get('process_definitions')[0]
    else:
        curr_proc = None

    # A reduction tool is needed only for optimized loop output of a genuine
    # virtual-correction process (not real/tree/LO-only modes).
    requires_reduction_tool = opt['loop_optimized_output'] and \
        (not curr_proc is None) and \
        (curr_proc.get('perturbation_couplings') != [] and \
         not curr_proc.get('NLO_mode') in [None, 'real', 'tree', 'LO', 'LOonly'])

    # An installation is required then, but only if the specified path is the
    # default local one and that the Ninja library appears missing.
    if requires_reduction_tool:
        cmd.install_reduction_library()

    # ==========================================================================
    # First treat the MadLoop5 standalone case
    # Common option set handed to every MadLoop-based exporter below.
    MadLoop_SA_options = {'clean': not noclean,
      'complex_mass': cmd.options['complex_mass_scheme'],
      'export_format': 'madloop',
      'mp': True,
      'loop_dir': os.path.join(cmd._mgme_dir, 'Template', 'loop_material'),
      'cuttools_dir': cmd._cuttools_dir,
      'iregi_dir': cmd._iregi_dir,
      'pjfry_dir': cmd.options['pjfry'],
      'golem_dir': cmd.options['golem'],
      'samurai_dir': cmd.options['samurai'],
      'ninja_dir': cmd.options['ninja'],
      'collier_dir': cmd.options['collier'],
      'fortran_compiler': cmd.options['fortran_compiler'],
      'f2py_compiler': cmd.options['f2py_compiler'],
      'output_dependencies': cmd.options['output_dependencies'],
      'SubProc_prefix': 'P',
      'compute_color_flows': cmd.options['loop_color_flows'],
      'mode': 'reweight' if cmd._export_format == "standalone_rw" else '',
      'cluster_local_path': cmd.options['cluster_local_path'],
      'output_options': cmd_options
      }

    if output_type.startswith('madloop'):
        import madgraph.loop.loop_exporters as loop_exporters
        if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')):
            ExporterClass = None
            if not cmd.options['loop_optimized_output']:
                ExporterClass = loop_exporters.LoopProcessExporterFortranSA
            else:
                # Optimized loop output: plain MadLoop or MatchBox flavour.
                if output_type == "madloop":
                    ExporterClass = loop_exporters.LoopProcessOptimizedExporterFortranSA
                    MadLoop_SA_options['export_format'] = 'madloop_optimized'
                elif output_type == "madloop_matchbox":
                    ExporterClass = loop_exporters.LoopProcessExporterFortranMatchBox
                    MadLoop_SA_options['export_format'] = 'madloop_matchbox'
                else:
                    raise Exception, "output_type not recognize %s" % output_type
            return ExporterClass(cmd._export_dir, MadLoop_SA_options)
        else:
            raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory'+\
                                 ' in %s' % str(cmd._mgme_dir))

    # Then treat the aMC@NLO output
    elif output_type == 'amcatnlo':
        import madgraph.iolibs.export_fks as export_fks
        ExporterClass = None
        amcatnlo_options = dict(opt)
        amcatnlo_options.update(MadLoop_SA_options)
        # mp support only needed when virtual amplitudes are present.
        amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0
        if not cmd.options['loop_optimized_output']:
            logger.info("Writing out the aMC@NLO code")
            ExporterClass = export_fks.ProcessExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_default'
        else:
            logger.info("Writing out the aMC@NLO code, using optimized Loops")
            ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_optimized'
        return ExporterClass(cmd._export_dir, amcatnlo_options)

    # Then the default tree-level output
    elif output_type == 'default':
        assert group_subprocesses in [True, False]

        opt = dict(opt)
        opt.update({'clean': not noclean,
                    'complex_mass': cmd.options['complex_mass_scheme'],
                    'export_format': cmd._export_format,
                    'mp': False,
                    'sa_symmetry': False,
                    'model': cmd._curr_model.get('name'),
                    'v5_model': False if cmd._model_v4_path else True})

        format = cmd._export_format #shortcut

        if format in ['standalone_msP', 'standalone_msF', 'standalone_rw']:
            opt['sa_symmetry'] = True
        elif format == 'plugin':
            opt['sa_symmetry'] = cmd._export_plugin.sa_symmetry

        # Option set for the loop-induced exporters (tree-level process whose
        # leading order is a loop): MadLoop options layered over opt.
        loop_induced_opt = dict(opt)
        loop_induced_opt.update(MadLoop_SA_options)
        loop_induced_opt['export_format'] = 'madloop_optimized'
        loop_induced_opt['SubProc_prefix'] = 'PV'
        # For loop_induced output with MadEvent, we must have access to the
        # color flows.
        loop_induced_opt['compute_color_flows'] = True
        for key in opt:
            if key not in loop_induced_opt:
                loop_induced_opt[key] = opt[key]

        # Madevent output supports MadAnalysis5
        if format in ['madevent']:
            opt['madanalysis5'] = cmd.options['madanalysis5_path']

        if format == 'matrix' or format.startswith('standalone'):
            return ProcessExporterFortranSA(cmd._export_dir, opt, format=format)

        elif format in ['madevent'] and group_subprocesses:
            # Grouped madevent: loop-induced amplitudes need a MadLoop-backed
            # exporter, otherwise the plain grouped ME exporter is used.
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMEGroup(
                           cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranMEGroup(cmd._export_dir, opt)
        elif format in ['madevent']:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMENoGroup(
                           cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranME(cmd._export_dir, opt)
        elif format in ['matchbox']:
            return ProcessExporterFortranMatchBox(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight'] and group_subprocesses:

            return ProcessExporterFortranMWGroup(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight']:
            return ProcessExporterFortranMW(cmd._export_dir, opt)
        elif format == 'plugin':
            # Plugin-provided exporter; loop-induced processes get the
            # MadLoop-flavoured option set.
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                return cmd._export_plugin(cmd._export_dir, loop_induced_opt)
            else:
                return cmd._export_plugin(cmd._export_dir, opt)

        else:
            raise Exception, 'Wrong export_v4 format'
    else:
        raise MadGraph5Error, 'Output type %s not reckognized in ExportV4Factory.'
7053 7054 7055 7056 #=============================================================================== 7057 # ProcessExporterFortranMWGroup 7058 #=============================================================================== 7059 -class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
7060 """Class to take care of exporting a set of matrix elements to 7061 MadEvent subprocess group format.""" 7062 7063 matrix_file = "matrix_madweight_group_v4.inc" 7064 grouped_mode = 'madweight' 7065 #=========================================================================== 7066 # generate_subprocess_directory 7067 #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                      fortran_model,
                                      group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files.

        Parameters:
          subproc_group -- a group_subprocs.SubProcessGroup holding the
                           matrix elements to export (type-checked below)
          fortran_model -- writer object forwarded to write_matrix_element_v4
          group_number  -- not used directly here; the directory name is
                           built from the group's own 'number' and 'name'

        Returns the total number of helas calls written, summed over all
        matrix elements of the group.
        """

        if not isinstance(subproc_group, group_subprocs.SubProcessGroup):
            raise base_objects.PhysicsObject.PhysicsObjectError,\
                  "subproc_group object not SubProcessGroup"

        # Lazily pick up the model from the first process if not set yet
        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        pathdir = os.path.join(self.dir_path, 'SubProcesses')

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(pjoin(pathdir, subprocdir))
        except os.error as error:
            # Directory probably exists already: warn and keep going,
            # reusing (and overwriting files in) the existing directory
            logger.warning(error.strerror + " " + subprocdir)

        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(pathdir, subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0      # largest number of diagrams over all matrix elements
        maxflows = 0     # largest number of color flows over all matrix elements
        tot_calls = 0    # running total of helas calls written

        matrix_elements = subproc_group.get('matrix_elements')

        for ime, matrix_element in \
                enumerate(matrix_elements):
            # One matrix<N>.f per matrix element, numbered from 1
            filename = pjoin(Ppath, 'matrix%d.f' % (ime+1))
            calls, ncolor = \
                  self.write_matrix_element_v4(writers.FortranWriter(filename),
                                               matrix_element,
                                               fortran_model,
                                               str(ime+1),
                                               subproc_group.get('diagram_maps')[\
                                                                            ime])

            filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime+1))
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime+1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams
            filename = pjoin(Ppath, "matrix%d.ps" % (ime+1))
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                              get('diagrams'),
                                              filename,
                                              model = \
                                              matrix_element.get('processes')[0].\
                                              get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        # Extract number of external particles
        # NOTE(review): this deliberately(?) uses the loop variable left over
        # from the iteration above, i.e. the LAST matrix element of the group
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
        # If a subprocess has no diagrams for this config, the number is 0

        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = pjoin(Ppath, 'configs.inc')
        nconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = pjoin(Ppath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                   nconfigs)

        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        filename = pjoin(Ppath, 'mirrorprocs.inc')
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        # NOTE(review): pmass.inc and props.inc below also use the LAST
        # matrix_element from the loop -- presumably all elements of the
        # group share external masses and propagators; confirm this holds
        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # filename = pjoin(Ppath, 'processes.dat')
        # files.write_to_file(filename,
        #                     self.write_processes_file,
        #                     subproc_group)

        # Generate jpgs -> pass in make_html
        #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl'))

        # Symlink the shared source files from the parent directory
        linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f']

        for file in linkfiles:
            ln('../%s' % file, cwd=Ppath)

        ln('nexternal.inc', '../../Source', cwd=Ppath, log=False)
        ln('leshouche.inc', '../../Source', cwd=Ppath, log=False)
        ln('maxamps.inc', '../../Source', cwd=Ppath, log=False)
        ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath)
        ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath)
        ln('phasespace.inc', '../', log=True, cwd=Ppath)
        # tot_calls is already an int initialised to 0; this guard only
        # normalises a falsy value back to 0 before returning
        if not tot_calls:
            tot_calls = 0
        return tot_calls
7215 7216 7217 #=========================================================================== 7218 # Helper functions 7219 #===========================================================================
7220 - def modify_grouping(self, matrix_element):
7221 """allow to modify the grouping (if grouping is in place) 7222 return two value: 7223 - True/False if the matrix_element was modified 7224 - the new(or old) matrix element""" 7225 7226 return True, matrix_element.split_lepton_grouping()
7227 7228 #=========================================================================== 7229 # write_super_auto_dsig_file 7230 #===========================================================================
7231 - def write_super_auto_dsig_file(self, writer, subproc_group):
7232 """Write the auto_dsig.f file selecting between the subprocesses 7233 in subprocess group mode""" 7234 7235 replace_dict = {} 7236 7237 # Extract version number and date from VERSION file 7238 info_lines = self.get_mg5_info_lines() 7239 replace_dict['info_lines'] = info_lines 7240 7241 matrix_elements = subproc_group.get('matrix_elements') 7242 7243 # Extract process info lines 7244 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 7245 matrix_elements]) 7246 replace_dict['process_lines'] = process_lines 7247 7248 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 7249 replace_dict['nexternal'] = nexternal 7250 7251 replace_dict['nsprocs'] = 2*len(matrix_elements) 7252 7253 # Generate dsig definition line 7254 dsig_def_line = "DOUBLE PRECISION " + \ 7255 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 7256 range(len(matrix_elements))]) 7257 replace_dict["dsig_def_line"] = dsig_def_line 7258 7259 # Generate dsig process lines 7260 call_dsig_proc_lines = [] 7261 for iproc in range(len(matrix_elements)): 7262 call_dsig_proc_lines.append(\ 7263 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 7264 {"num": iproc + 1, 7265 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 7266 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 7267 7268 if writer: 7269 file = open(os.path.join(_file_path, \ 7270 'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')).read() 7271 file = file % replace_dict 7272 # Write the file 7273 writer.writelines(file) 7274 else: 7275 return replace_dict
7276 7277 #=========================================================================== 7278 # write_mirrorprocs 7279 #===========================================================================
7280 - def write_mirrorprocs(self, writer, subproc_group):
7281 """Write the mirrorprocs.inc file determining which processes have 7282 IS mirror process in subprocess group mode.""" 7283 7284 lines = [] 7285 bool_dict = {True: '.true.', False: '.false.'} 7286 matrix_elements = subproc_group.get('matrix_elements') 7287 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 7288 (len(matrix_elements), 7289 ",".join([bool_dict[me.get('has_mirror_process')] for \ 7290 me in matrix_elements]))) 7291 # Write the file 7292 writer.writelines(lines)
7293 7294 #=========================================================================== 7295 # write_configs_file 7296 #===========================================================================
7297 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
7298 """Write the configs.inc file with topology information for a 7299 subprocess group. Use the first subprocess with a diagram for each 7300 configuration.""" 7301 7302 matrix_elements = subproc_group.get('matrix_elements') 7303 model = matrix_elements[0].get('processes')[0].get('model') 7304 7305 diagrams = [] 7306 config_numbers = [] 7307 for iconfig, config in enumerate(diagrams_for_config): 7308 # Check if any diagrams correspond to this config 7309 if set(config) == set([0]): 7310 continue 7311 subproc_diags = [] 7312 for s,d in enumerate(config): 7313 if d: 7314 subproc_diags.append(matrix_elements[s].\ 7315 get('diagrams')[d-1]) 7316 else: 7317 subproc_diags.append(None) 7318 diagrams.append(subproc_diags) 7319 config_numbers.append(iconfig + 1) 7320 7321 # Extract number of external particles 7322 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 7323 7324 return len(diagrams), \ 7325 self.write_configs_file_from_diagrams(writer, diagrams, 7326 config_numbers, 7327 nexternal, ninitial, 7328 matrix_elements[0],model)
7329 7330 #=========================================================================== 7331 # write_run_configs_file 7332 #===========================================================================
7333 - def write_run_config_file(self, writer):
7334 """Write the run_configs.inc file for MadEvent""" 7335 7336 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 7337 text = open(path).read() % {'chanperjob':'2'} 7338 writer.write(text) 7339 return True
7340 7341 7342 #=========================================================================== 7343 # write_leshouche_file 7344 #===========================================================================
7345 - def write_leshouche_file(self, writer, subproc_group):
7346 """Write the leshouche.inc file for MG4""" 7347 7348 all_lines = [] 7349 7350 for iproc, matrix_element in \ 7351 enumerate(subproc_group.get('matrix_elements')): 7352 all_lines.extend(self.get_leshouche_lines(matrix_element, 7353 iproc)) 7354 7355 # Write the file 7356 writer.writelines(all_lines) 7357 7358 return True
7359