Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Methods and classes to export matrix elements to v4 format."""
  16  from madgraph.iolibs.helas_call_writers import HelasCallWriter
  17   
  18  import copy 
  19  from cStringIO import StringIO 
  20  from distutils import dir_util 
  21  import itertools 
  22  import fractions 
  23  import glob 
  24  import logging 
  25  import math 
  26  import os 
  27  import re 
  28  import shutil 
  29  import subprocess 
  30  import sys 
  31  import time 
  32  import traceback 
  33   
  34  import aloha 
  35   
  36  import madgraph.core.base_objects as base_objects 
  37  import madgraph.core.color_algebra as color 
  38  import madgraph.core.helas_objects as helas_objects 
  39  import madgraph.iolibs.drawing_eps as draw 
  40  import madgraph.iolibs.files as files 
  41  import madgraph.iolibs.group_subprocs as group_subprocs 
  42  import madgraph.iolibs.file_writers as writers 
  43  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  44  import madgraph.iolibs.template_files as template_files 
  45  import madgraph.iolibs.ufo_expression_parsers as parsers 
  46  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  47  import madgraph.interface.common_run_interface as common_run_interface 
  48  import madgraph.various.diagram_symmetry as diagram_symmetry 
  49  import madgraph.various.misc as misc 
  50  import madgraph.various.banner as banner_mod 
  51  import madgraph.various.process_checks as process_checks 
  52  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  53  import aloha.create_aloha as create_aloha 
  54  import models.import_ufo as import_ufo 
  55  import models.write_param_card as param_writer 
  56  import models.check_param_card as check_param_card 
  57   
  58   
  59  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  60  from madgraph.iolibs.files import cp, ln, mv 
  61   
  62  from madgraph import InvalidCmd 
  63   
  64  pjoin = os.path.join 
  65   
  66  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  67  logger = logging.getLogger('madgraph.export_v4') 
  68   
  69  default_compiler = {'fortran': 'gfortran',
  70                      'f2py': 'f2py',
  71                      'cpp': 'g++'}
  72  
  73  
  74  class VirtualExporter(object):
  75  
  76      # exporter variables that modify the way madgraph interacts with this class
  77  
  78      grouped_mode = 'madevent'
  79      # This variable changes the type of object passed to the 'generate_subprocess_directory'
  80      # function.
  81      # False to avoid grouping (only identical matrix elements are merged)
  82      # 'madevent' groups the massless quarks and the massless leptons
  83      # 'madweight' groups the gluon with the massless quarks
  84      sa_symmetry = False
  85      # If grouped_mode=False, uu~ and u~u will be called independently.
  86      # Setting sa_symmetry generates only one of the two matrix elements.
  87      check = True
  88      # Ask madgraph to check if the directory already exists and, if so, propose
  89      # to the user to remove it first
  90      output = 'Template'
  91      # [Template, None, dir]
  92      # - Template: madgraph will call copy_template
  93      # - dir: madgraph will just create an empty directory for initialisation
  94      # - None: madgraph does nothing for initialisation
  95      exporter = 'v4'
  96      # language of the output: 'v4' for Fortran output
  97      #                         'cpp' for C++ output
  98  
  99  
 100      def __init__(self, dir_path = "", opt=None):
 101          # cmd_options is a dictionary with all the optional arguments passed at output time
 102  
 103          # Activate some monkey patching for the helas call writer.
 104          helas_call_writers.HelasCallWriter.customize_argument_for_all_other_helas_object = \
 105                                                            self.helas_call_writer_custom
 106  
 107  
 108      # helper function to customise the helas writer
 109      @staticmethod
 110      def custom_helas_call(call, arg):
 111          """static method to customise the way aloha function calls are written.
 112          call is the default template for the call
 113          arg is the dictionary used for the call
 114          """
 115          return call, arg
 116  
 117      helas_call_writer_custom = lambda x, y, z: x.custom_helas_call(y, z)
 118  
 119  
 120      def copy_template(self, model):
 121          return
 122  
 123      def generate_subprocess_directory(self, subproc_group, helicity_model, me=None):
 124          # generate_subprocess_directory(self, matrix_element, helicity_model, me_number) [for ungrouped]
 125          return 0  # return an integer: the number of calls to the helicity routines
 126  
 127      def convert_model(self, model, wanted_lorentz=[], wanted_couplings=[]):
 128          return
 129  
 130      def finalize(self, matrix_element, cmdhistory, MG5options, outputflag):
 131          return
 132  
 133  
 134      def pass_information_from_cmd(self, cmd):
 135          """pass information from the command interface to the exporter.
 136          Please do not modify any object of the interface from the exporter.
 137          """
 138          return
 139  
 140      def modify_grouping(self, matrix_element):
 141          return False, matrix_element
 142  
 143      def export_model_files(self, model_v4_path):
 144          raise Exception, "V4 model not supported by this type of exporter. Please use a UFO model"
 145          return
 146  
 147      def export_helas(self, HELAS_PATH):
 148          raise Exception, "V4 model not supported by this type of exporter. Please use a UFO model"
 149          return
 150  
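 
      # Illustrative sketch (hypothetical, not part of the module): a concrete
      # exporter is expected to subclass VirtualExporter and override only the
      # hooks it needs, for instance
      #
      #     class MyMinimalExporter(VirtualExporter):
      #         grouped_mode = False   # merge only identical matrix elements
      #         output = 'dir'         # only create an empty output directory
      #
      #         def generate_subprocess_directory(self, matrix_element,
      #                                           helicity_model, me_number):
      #             return 0           # number of calls to helicity routines written
 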
 151  #===============================================================================
 152  # ProcessExporterFortran
 153  #===============================================================================
 154  class ProcessExporterFortran(VirtualExporter):
 155      """Class to take care of exporting a set of matrix elements to
 156      Fortran (v4) format."""
 157  
 158      default_opt = {'clean': False, 'complex_mass': False,
 159                     'export_format': 'madevent', 'mp': False,
 160                     'v5_model': True,
 161                     'output_options': {}
 162                     }
 163      grouped_mode = False
 164  
 165      def __init__(self, dir_path = "", opt=None):
 166          """Initialize the ProcessExporterFortran with directory information"""
 167          self.mgme_dir = MG5DIR
 168          self.dir_path = dir_path
 169          self.model = None
 170  
 171          self.opt = dict(self.default_opt)
 172          if opt:
 173              self.opt.update(opt)
 174  
 175          self.cmd_options = self.opt['output_options']
 176  
 177          # placeholder to pass information to the run_interface
 178          self.proc_characteristic = banner_mod.ProcCharacteristic()
 179  
 180          # call mother class
 181          super(ProcessExporterFortran, self).__init__(dir_path, opt)
 182  
 183  
 184      #===========================================================================
 185      # process exporter fortran: switch between grouped and not grouped output
 186      #===========================================================================
 187      def export_processes(self, matrix_elements, fortran_model):
 188          """Make the switch between grouped and not grouped output"""
 189  
 190          calls = 0
 191          if isinstance(matrix_elements, group_subprocs.SubProcessGroupList):
 192              for (group_number, me_group) in enumerate(matrix_elements):
 193                  calls = calls + self.generate_subprocess_directory(\
 194                                      me_group, fortran_model, group_number)
 195          else:
 196              for me_number, me in enumerate(matrix_elements.get_matrix_elements()):
 197                  calls = calls + self.generate_subprocess_directory(\
 198                                      me, fortran_model, me_number)
 199  
 200          return calls
201 202 203 #=========================================================================== 204 # create the run_card 205 #===========================================================================
206 - def create_run_card(self, matrix_elements, history):
207 """ """ 208 209 210 # bypass this for the loop-check 211 import madgraph.loop.loop_helas_objects as loop_helas_objects 212 if isinstance(matrix_elements, loop_helas_objects.LoopHelasMatrixElement): 213 matrix_elements = None 214 215 run_card = banner_mod.RunCard() 216 217 218 default=True 219 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 220 processes = [me.get('processes') for megroup in matrix_elements 221 for me in megroup['matrix_elements']] 222 elif matrix_elements: 223 processes = [me.get('processes') 224 for me in matrix_elements['matrix_elements']] 225 else: 226 default =False 227 228 if default: 229 run_card.create_default_for_process(self.proc_characteristic, 230 history, 231 processes) 232 233 234 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat')) 235 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))
236 237 238 #=========================================================================== 239 # copy the Template in a new directory. 240 #===========================================================================
241 - def copy_template(self, model):
242 """create the directory run_name as a copy of the MadEvent 243 Template, and clean the directory 244 """ 245 246 #First copy the full template tree if dir_path doesn't exit 247 if not os.path.isdir(self.dir_path): 248 assert self.mgme_dir, \ 249 "No valid MG_ME path given for MG4 run directory creation." 250 logger.info('initialize a new directory: %s' % \ 251 os.path.basename(self.dir_path)) 252 shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'), 253 self.dir_path, True) 254 # distutils.dir_util.copy_tree since dir_path already exists 255 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 256 self.dir_path) 257 # copy plot_card 258 for card in ['plot_card']: 259 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 260 try: 261 shutil.copy(pjoin(self.dir_path, 'Cards',card + '.dat'), 262 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 263 except IOError: 264 logger.warning("Failed to copy " + card + ".dat to default") 265 elif os.getcwd() == os.path.realpath(self.dir_path): 266 logger.info('working in local directory: %s' % \ 267 os.path.realpath(self.dir_path)) 268 # distutils.dir_util.copy_tree since dir_path already exists 269 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/LO'), 270 self.dir_path) 271 # for name in misc.glob('Template/LO/*', self.mgme_dir): 272 # name = os.path.basename(name) 273 # filname = pjoin(self.mgme_dir, 'Template','LO',name) 274 # if os.path.isfile(filename): 275 # files.cp(filename, pjoin(self.dir_path,name)) 276 # elif os.path.isdir(filename): 277 # shutil.copytree(filename, pjoin(self.dir_path,name), True) 278 # distutils.dir_util.copy_tree since dir_path already exists 279 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 280 self.dir_path) 281 # Copy plot_card 282 for card in ['plot_card']: 283 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 284 try: 285 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'), 286 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 287 except IOError: 288 logger.warning("Failed to copy " + card + ".dat to default") 289 elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')): 290 assert self.mgme_dir, \ 291 "No valid MG_ME path given for MG4 run directory creation." 
292 try: 293 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 294 except IOError: 295 MG5_version = misc.get_pkg_info() 296 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write(MG5_version['version']) 297 298 #Ensure that the Template is clean 299 if self.opt['clean']: 300 logger.info('remove old information in %s' % \ 301 os.path.basename(self.dir_path)) 302 if os.environ.has_key('MADGRAPH_BASE'): 303 misc.call([pjoin('bin', 'internal', 'clean_template'), 304 '--web'], cwd=self.dir_path) 305 else: 306 try: 307 misc.call([pjoin('bin', 'internal', 'clean_template')], \ 308 cwd=self.dir_path) 309 except Exception, why: 310 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 311 % (os.path.basename(self.dir_path),why)) 312 313 #Write version info 314 MG_version = misc.get_pkg_info() 315 open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 316 MG_version['version']) 317 318 # add the makefile in Source directory 319 filename = pjoin(self.dir_path,'Source','makefile') 320 self.write_source_makefile(writers.FileWriter(filename)) 321 322 # add the DiscreteSampler information 323 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'DiscreteSampler.f'), 324 pjoin(self.dir_path, 'Source')) 325 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'StringCast.f'), 326 pjoin(self.dir_path, 'Source')) 327 328 # We need to create the correct open_data for the pdf 329 self.write_pdf_opendata()
330 331 332 #=========================================================================== 333 # Call MadAnalysis5 to generate the default cards for this process 334 #===========================================================================
335 - def create_default_madanalysis5_cards(self, history, proc_defs, processes, 336 ma5_path, output_dir, levels = ['parton','hadron']):
337 """ Call MA5 so that it writes default cards for both parton and 338 post-shower levels, tailored for this particular process.""" 339 340 if len(levels)==0: 341 return 342 start = time.time() 343 logger.info('Generating MadAnalysis5 default cards tailored to this process') 344 try: 345 MA5_interpreter = common_run_interface.CommonRunCmd.\ 346 get_MadAnalysis5_interpreter(MG5DIR,ma5_path,loglevel=100) 347 except (Exception, SystemExit) as e: 348 logger.warning('Fail to create a MadAnalysis5 instance. Therefore the default analysis with MadAnalysis5 will be empty.') 349 return 350 if MA5_interpreter is None: 351 return 352 353 MA5_main = MA5_interpreter.main 354 for lvl in ['parton','hadron']: 355 if lvl in levels: 356 card_to_generate = pjoin(output_dir,'madanalysis5_%s_card_default.dat'%lvl) 357 try: 358 text = MA5_main.madgraph.generate_card(history, proc_defs, processes,lvl) 359 except (Exception, SystemExit) as e: 360 # keep the default card (skip only) 361 logger.warning('MadAnalysis5 failed to write a %s-level'%lvl+ 362 ' default analysis card for this process.') 363 logger.warning('Therefore, %s-level default analysis with MadAnalysis5 will be empty.'%lvl) 364 error=StringIO() 365 traceback.print_exc(file=error) 366 logger.debug('MadAnalysis5 error was:') 367 logger.debug('-'*60) 368 logger.debug(error.getvalue()[:-1]) 369 logger.debug('-'*60) 370 else: 371 open(card_to_generate,'w').write(text) 372 stop = time.time() 373 if stop-start >1: 374 logger.info('Cards created in %.2fs' % (stop-start))
375 376 #=========================================================================== 377 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 378 #===========================================================================
379 - def write_procdef_mg5(self, file_pos, modelname, process_str):
380 """ write an equivalent of the MG4 proc_card in order that all the Madevent 381 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 382 383 proc_card_template = template_files.mg4_proc_card.mg4_template 384 process_template = template_files.mg4_proc_card.process_template 385 process_text = '' 386 coupling = '' 387 new_process_content = [] 388 389 390 # First find the coupling and suppress the coupling from process_str 391 #But first ensure that coupling are define whithout spaces: 392 process_str = process_str.replace(' =', '=') 393 process_str = process_str.replace('= ', '=') 394 process_str = process_str.replace(',',' , ') 395 #now loop on the element and treat all the coupling 396 for info in process_str.split(): 397 if '=' in info: 398 coupling += info + '\n' 399 else: 400 new_process_content.append(info) 401 # Recombine the process_str (which is the input process_str without coupling 402 #info) 403 process_str = ' '.join(new_process_content) 404 405 #format the SubProcess 406 replace_dict = {'process': process_str, 407 'coupling': coupling} 408 process_text += process_template.substitute(replace_dict) 409 410 replace_dict = {'process': process_text, 411 'model': modelname, 412 'multiparticle':''} 413 text = proc_card_template.substitute(replace_dict) 414 415 if file_pos: 416 ff = open(file_pos, 'w') 417 ff.write(text) 418 ff.close() 419 else: 420 return replace_dict
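 
      # Worked example (hypothetical input, not part of the module): the split
      # performed above is purely textual. For a process string such as
      #     'p p > t t~ QED=0'
      # the loop collects 'QED=0' into `coupling` and rebuilds `process_str` as
      #     'p p > t t~'
      # before both are substituted into the MG4 proc_card templates.
 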
421 422
423 - def pass_information_from_cmd(self, cmd):
424 """Pass information for MA5""" 425 426 self.proc_defs = cmd._curr_proc_defs
427 428 #=========================================================================== 429 # Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz 430 #===========================================================================
431 - def finalize(self, matrix_elements, history='', mg5options={}, flaglist=[]):
432 """Function to finalize v4 directory, for inheritance.""" 433 434 self.create_run_card(matrix_elements, history) 435 self.create_MA5_cards(matrix_elements, history)
436
437 - def create_MA5_cards(self,matrix_elements,history):
438 """ A wrapper around the creation of the MA5 cards so that it can be 439 bypassed by daughter classes (i.e. in standalone).""" 440 if 'madanalysis5_path' in self.opt and not \ 441 self.opt['madanalysis5_path'] is None and not self.proc_defs is None: 442 processes = None 443 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 444 processes = [me.get('processes') for megroup in matrix_elements 445 for me in megroup['matrix_elements']] 446 elif matrix_elements: 447 processes = [me.get('processes') 448 for me in matrix_elements['matrix_elements']] 449 450 self.create_default_madanalysis5_cards( 451 history, self.proc_defs, processes, 452 self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'), 453 levels = ['hadron','parton']) 454 455 for level in ['hadron','parton']: 456 # Copying these cards turn on the use of MadAnalysis5 by default. 457 if os.path.isfile(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level)): 458 shutil.copy(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level), 459 pjoin(self.dir_path,'Cards','madanalysis5_%s_card.dat'%level))
460 461 #=========================================================================== 462 # Create the proc_characteristic file passing information to the run_interface 463 #===========================================================================
464 - def create_proc_charac(self, matrix_elements=None, history="", **opts):
465 466 self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics'))
467 468 #=========================================================================== 469 # write_matrix_element_v4 470 #===========================================================================
471 - def write_matrix_element_v4(self):
472 """Function to write a matrix.f file, for inheritance. 473 """ 474 pass
475 476 #=========================================================================== 477 # write_pdf_opendata 478 #===========================================================================
479 - def write_pdf_opendata(self):
480 """ modify the pdf opendata file, to allow direct access to cluster node 481 repository if configure""" 482 483 if not self.opt["cluster_local_path"]: 484 changer = {"pdf_systemwide": ""} 485 else: 486 to_add = """ 487 tempname='%(path)s'//Tablefile 488 open(IU,file=tempname,status='old',ERR=1) 489 return 490 1 tempname='%(path)s/Pdfdata/'//Tablefile 491 open(IU,file=tempname,status='old',ERR=2) 492 return 493 2 tempname='%(path)s/lhapdf'//Tablefile 494 open(IU,file=tempname,status='old',ERR=3) 495 return 496 3 tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile 497 open(IU,file=tempname,status='old',ERR=4) 498 return 499 4 tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile 500 open(IU,file=tempname,status='old',ERR=5) 501 return 502 """ % {"path" : self.opt["cluster_local_path"]} 503 504 changer = {"pdf_systemwide": to_add} 505 506 507 ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "opendata.f")) 508 template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"),"r").read() 509 ff.writelines(template % changer) 510 511 # Do the same for lhapdf set 512 if not self.opt["cluster_local_path"]: 513 changer = {"cluster_specific_path": ""} 514 else: 515 to_add=""" 516 LHAPath='%(path)s/PDFsets' 517 Inquire(File=LHAPath, exist=exists) 518 if(exists)return 519 LHAPath='%(path)s/../lhapdf/pdfsets/6.1/' 520 Inquire(File=LHAPath, exist=exists) 521 if(exists)return 522 LHAPath='%(path)s/../lhapdf/pdfsets/' 523 Inquire(File=LHAPath, exist=exists) 524 if(exists)return 525 LHAPath='./PDFsets' 526 """ % {"path" : self.opt["cluster_local_path"]} 527 changer = {"cluster_specific_path": to_add} 528 529 ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f")) 530 #ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w") 531 template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"),"r").read() 532 ff.writelines(template % changer) 533 534 535 return
536 537 538 539 #=========================================================================== 540 # write_maxparticles_file 541 #===========================================================================
542 - def write_maxparticles_file(self, writer, matrix_elements):
543 """Write the maxparticles.inc file for MadEvent""" 544 545 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 546 maxparticles = max([me.get_nexternal_ninitial()[0] for me in \ 547 matrix_elements.get('matrix_elements')]) 548 else: 549 maxparticles = max([me.get_nexternal_ninitial()[0] \ 550 for me in matrix_elements]) 551 552 lines = "integer max_particles\n" 553 lines += "parameter(max_particles=%d)" % maxparticles 554 555 # Write the file 556 writer.writelines(lines) 557 558 return True
559 560 561 #=========================================================================== 562 # export the model 563 #===========================================================================
564 - def export_model_files(self, model_path):
565 """Configure the files/link of the process according to the model""" 566 567 # Import the model 568 for file in os.listdir(model_path): 569 if os.path.isfile(pjoin(model_path, file)): 570 shutil.copy2(pjoin(model_path, file), \ 571 pjoin(self.dir_path, 'Source', 'MODEL'))
572 573 587 595 596 597 #=========================================================================== 598 # export the helas routine 599 #===========================================================================
600 - def export_helas(self, helas_path):
601 """Configure the files/link of the process according to the model""" 602 603 # Import helas routine 604 for filename in os.listdir(helas_path): 605 filepos = pjoin(helas_path, filename) 606 if os.path.isfile(filepos): 607 if filepos.endswith('Makefile.template'): 608 cp(filepos, self.dir_path + '/Source/DHELAS/Makefile') 609 elif filepos.endswith('Makefile'): 610 pass 611 else: 612 cp(filepos, self.dir_path + '/Source/DHELAS')
613 # following lines do the same but whithout symbolic link 614 # 615 #def export_helas(mgme_dir, dir_path): 616 # 617 # # Copy the HELAS directory 618 # helas_dir = pjoin(mgme_dir, 'HELAS') 619 # for filename in os.listdir(helas_dir): 620 # if os.path.isfile(pjoin(helas_dir, filename)): 621 # shutil.copy2(pjoin(helas_dir, filename), 622 # pjoin(dir_path, 'Source', 'DHELAS')) 623 # shutil.move(pjoin(dir_path, 'Source', 'DHELAS', 'Makefile.template'), 624 # pjoin(dir_path, 'Source', 'DHELAS', 'Makefile')) 625 # 626 627 #=========================================================================== 628 # generate_subprocess_directory 629 #===========================================================================
630 - def generate_subprocess_directory(self, matrix_element, 631 fortran_model, 632 me_number):
633 """Routine to generate a subprocess directory (for inheritance)""" 634 635 pass
636 637 #=========================================================================== 638 # get_source_libraries_list 639 #===========================================================================
640 - def get_source_libraries_list(self):
641 """ Returns the list of libraries to be compiling when compiling the 642 SOURCE directory. It is different for loop_induced processes and 643 also depends on the value of the 'output_dependencies' option""" 644 645 return ['$(LIBDIR)libdhelas.$(libext)', 646 '$(LIBDIR)libpdf.$(libext)', 647 '$(LIBDIR)libmodel.$(libext)', 648 '$(LIBDIR)libcernlib.$(libext)', 649 '$(LIBDIR)libbias.$(libext)']
650 651 #=========================================================================== 652 # write_source_makefile 653 #===========================================================================
654 - def write_source_makefile(self, writer):
655 """Write the nexternal.inc file for MG4""" 656 657 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 658 set_of_lib = ' '.join(['$(LIBRARIES)']+self.get_source_libraries_list()) 659 if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'): 660 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 661 MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param 662 param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n''' 663 else: 664 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 665 param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n''' 666 667 replace_dict= {'libraries': set_of_lib, 668 'model':model_line, 669 'additional_dsample': '', 670 'additional_dependencies':''} 671 672 if writer: 673 text = open(path).read() % replace_dict 674 writer.write(text) 675 676 return replace_dict
677 678 #=========================================================================== 679 # write_nexternal_madspin 680 #===========================================================================
681 - def write_nexternal_madspin(self, writer, nexternal, ninitial):
682 """Write the nexternal_prod.inc file for madspin""" 683 684 replace_dict = {} 685 686 replace_dict['nexternal'] = nexternal 687 replace_dict['ninitial'] = ninitial 688 689 file = """ \ 690 integer nexternal_prod 691 parameter (nexternal_prod=%(nexternal)d) 692 integer nincoming_prod 693 parameter (nincoming_prod=%(ninitial)d)""" % replace_dict 694 695 # Write the file 696 if writer: 697 writer.writelines(file) 698 return True 699 else: 700 return replace_dict
701 702 #=========================================================================== 703 # write_helamp_madspin 704 #===========================================================================
705 - def write_helamp_madspin(self, writer, ncomb):
706 """Write the helamp.inc file for madspin""" 707 708 replace_dict = {} 709 710 replace_dict['ncomb'] = ncomb 711 712 file = """ \ 713 integer ncomb1 714 parameter (ncomb1=%(ncomb)d) 715 double precision helamp(ncomb1) 716 common /to_helamp/helamp """ % replace_dict 717 718 # Write the file 719 if writer: 720 writer.writelines(file) 721 return True 722 else: 723 return replace_dict
724 725 726 727 #=========================================================================== 728 # write_nexternal_file 729 #===========================================================================
730 - def write_nexternal_file(self, writer, nexternal, ninitial):
731 """Write the nexternal.inc file for MG4""" 732 733 replace_dict = {} 734 735 replace_dict['nexternal'] = nexternal 736 replace_dict['ninitial'] = ninitial 737 738 file = """ \ 739 integer nexternal 740 parameter (nexternal=%(nexternal)d) 741 integer nincoming 742 parameter (nincoming=%(ninitial)d)""" % replace_dict 743 744 # Write the file 745 if writer: 746 writer.writelines(file) 747 return True 748 else: 749 return replace_dict
750 #=========================================================================== 751 # write_pmass_file 752 #===========================================================================
753 - def write_pmass_file(self, writer, matrix_element):
754 """Write the pmass.inc file for MG4""" 755 756 model = matrix_element.get('processes')[0].get('model') 757 758 lines = [] 759 for wf in matrix_element.get_external_wavefunctions(): 760 mass = model.get('particle_dict')[wf.get('pdg_code')].get('mass') 761 if mass.lower() != "zero": 762 mass = "abs(%s)" % mass 763 764 lines.append("pmass(%d)=%s" % \ 765 (wf.get('number_external'), mass)) 766 767 # Write the file 768 writer.writelines(lines) 769 770 return True
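 
      # Worked example (hypothetical, assuming SM-like mass parameter names ZERO
      # and MT): for a g g > t t~ matrix element the resulting pmass.inc would
      # contain one assignment per external leg, e.g.
      #     pmass(1)=ZERO
      #     pmass(2)=ZERO
      #     pmass(3)=abs(MT)
      #     pmass(4)=abs(MT)
 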
771 772 #=========================================================================== 773 # write_ngraphs_file 774 #===========================================================================
775 - def write_ngraphs_file(self, writer, nconfigs):
776 """Write the ngraphs.inc file for MG4. Needs input from 777 write_configs_file.""" 778 779 file = " integer n_max_cg\n" 780 file = file + "parameter (n_max_cg=%d)" % nconfigs 781 782 # Write the file 783 writer.writelines(file) 784 785 return True
786 787 #=========================================================================== 788 # write_leshouche_file 789 #===========================================================================
790 - def write_leshouche_file(self, writer, matrix_element):
791 """Write the leshouche.inc file for MG4""" 792 793 # Write the file 794 writer.writelines(self.get_leshouche_lines(matrix_element, 0)) 795 796 return True
797 798 #=========================================================================== 799 # get_leshouche_lines 800 #===========================================================================
801 - def get_leshouche_lines(self, matrix_element, numproc):
802 """Write the leshouche.inc file for MG4""" 803 804 # Extract number of external particles 805 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 806 807 lines = [] 808 for iproc, proc in enumerate(matrix_element.get('processes')): 809 legs = proc.get_legs_with_decays() 810 lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \ 811 (iproc + 1, numproc+1, nexternal, 812 ",".join([str(l.get('id')) for l in legs]))) 813 if iproc == 0 and numproc == 0: 814 for i in [1, 2]: 815 lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \ 816 (i, nexternal, 817 ",".join([ "%3r" % 0 ] * ninitial + \ 818 [ "%3r" % i ] * (nexternal - ninitial)))) 819 820 # Here goes the color connections corresponding to the JAMPs 821 # Only one output, for the first subproc! 822 if iproc == 0: 823 # If no color basis, just output trivial color flow 824 if not matrix_element.get('color_basis'): 825 for i in [1, 2]: 826 lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \ 827 (i, numproc+1,nexternal, 828 ",".join([ "%3r" % 0 ] * nexternal))) 829 830 else: 831 # First build a color representation dictionnary 832 repr_dict = {} 833 for l in legs: 834 repr_dict[l.get('number')] = \ 835 proc.get('model').get_particle(l.get('id')).get_color()\ 836 * (-1)**(1+l.get('state')) 837 # Get the list of color flows 838 color_flow_list = \ 839 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 840 ninitial) 841 # And output them properly 842 for cf_i, color_flow_dict in enumerate(color_flow_list): 843 for i in [0, 1]: 844 lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \ 845 (i + 1, cf_i + 1, numproc+1, nexternal, 846 ",".join(["%3r" % color_flow_dict[l.get('number')][i] \ 847 for l in legs]))) 848 849 return lines
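 
      # Worked example (hypothetical): for a colorless 2->2 process such as
      # e+ e- > mu+ mu- (empty color basis), the routine above would return
      # DATA statements of the form
      #     DATA (IDUP(i,1,1),i=1,4)/-11,11,-13,13/
      #     DATA (MOTHUP(1,i),i=1, 4)/  0,  0,  1,  1/
      #     DATA (MOTHUP(2,i),i=1, 4)/  0,  0,  2,  2/
      #     DATA (ICOLUP(1,i,1,1),i=1, 4)/  0,  0,  0,  0/
      #     DATA (ICOLUP(2,i,1,1),i=1, 4)/  0,  0,  0,  0/
      # i.e. the PDG codes, mother pointers and (trivial) color flow written to
      # the leshouche.inc include file.
 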
850 851 852 853 854 #=========================================================================== 855 # write_maxamps_file 856 #===========================================================================
857 - def write_maxamps_file(self, writer, maxamps, maxflows, 858 maxproc,maxsproc):
859 """Write the maxamps.inc file for MG4.""" 860 861 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 862 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 863 (maxamps, maxflows) 864 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 865 (maxproc, maxsproc) 866 867 # Write the file 868 writer.writelines(file) 869 870 return True
871 872 873 #=========================================================================== 874 # Routines to output UFO models in MG4 format 875 #=========================================================================== 876
877 - def convert_model(self, model, wanted_lorentz = [], 878 wanted_couplings = []):
879 """ Create a full valid MG4 model from a MG5 model (coming from UFO)""" 880 881 # Make sure aloha is in quadruple precision if needed 882 old_aloha_mp=aloha.mp_precision 883 aloha.mp_precision=self.opt['mp'] 884 885 # create the MODEL 886 write_dir=pjoin(self.dir_path, 'Source', 'MODEL') 887 model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic) 888 model_builder.build(wanted_couplings) 889 890 # Backup the loop mode, because it can be changed in what follows. 891 old_loop_mode = aloha.loop_mode 892 893 # Create the aloha model or use the existing one (for loop exporters 894 # this is useful as the aloha model will be used again in the 895 # LoopHelasMatrixElements generated). We do not save the model generated 896 # here if it didn't exist already because it would be a waste of 897 # memory for tree level applications since aloha is only needed at the 898 # time of creating the aloha fortran subroutines. 899 if hasattr(self, 'aloha_model'): 900 aloha_model = self.aloha_model 901 else: 902 aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath'))) 903 aloha_model.add_Lorentz_object(model.get('lorentz')) 904 905 # Compute the subroutines 906 if wanted_lorentz: 907 aloha_model.compute_subset(wanted_lorentz) 908 else: 909 aloha_model.compute_all(save=False) 910 911 # Write them out 912 write_dir=pjoin(self.dir_path, 'Source', 'DHELAS') 913 aloha_model.write(write_dir, 'Fortran') 914 915 # Revert the original aloha loop mode 916 aloha.loop_mode = old_loop_mode 917 918 #copy Helas Template 919 cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile') 920 if any([any(['L' in tag for tag in d[1]]) for d in wanted_lorentz]): 921 cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f', 922 write_dir+'/aloha_functions.f') 923 aloha_model.loop_mode = False 924 else: 925 cp(MG5DIR + '/aloha/template_files/aloha_functions.f', 926 write_dir+'/aloha_functions.f') 927 create_aloha.write_aloha_file_inc(write_dir, '.f', '.o') 928 929 # Make final link in the Process 930 self.make_model_symbolic_link() 931 932 # Re-establish original aloha mode 933 aloha.mp_precision=old_aloha_mp
934 935 936 #=========================================================================== 937 # Helper functions 938 #===========================================================================
939 - def modify_grouping(self, matrix_element):
940 """allow to modify the grouping (if grouping is in place) 941 return two value: 942 - True/False if the matrix_element was modified 943 - the new(or old) matrix element""" 944 945 return False, matrix_element
946 947 #=========================================================================== 948 # Helper functions 949 #===========================================================================
950 - def get_mg5_info_lines(self):
951 """Return info lines for MG5, suitable to place at beginning of 952 Fortran files""" 953 954 info = misc.get_pkg_info() 955 info_lines = "" 956 if info and info.has_key('version') and info.has_key('date'): 957 info_lines = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \ 958 (info['version'], info['date']) 959 info_lines = info_lines + \ 960 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 961 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 962 else: 963 info_lines = "# Generated by MadGraph5_aMC@NLO\n" + \ 964 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 965 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 966 967 return info_lines
968
969 - def get_process_info_lines(self, matrix_element):
970 """Return info lines describing the processes for this matrix element""" 971 972 return"\n".join([ "C " + process.nice_string().replace('\n', '\nC * ') \ 973 for process in matrix_element.get('processes')])
974 975
976 - def get_helicity_lines(self, matrix_element,array_name='NHEL'):
977 """Return the Helicity matrix definition lines for this matrix element""" 978 979 helicity_line_list = [] 980 i = 0 981 for helicities in matrix_element.get_helicity_matrix(): 982 i = i + 1 983 int_list = [i, len(helicities)] 984 int_list.extend(helicities) 985 helicity_line_list.append(\ 986 ("DATA ("+array_name+"(I,%4r),I=1,%d) /" + \ 987 ",".join(['%2r'] * len(helicities)) + "/") % tuple(int_list)) 988 989 return "\n".join(helicity_line_list)
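 
      # Worked example (hypothetical): for a matrix element with four external
      # legs the first lines returned here would read
      #     DATA (NHEL(I,   1),I=1,4) /-1,-1,-1,-1/
      #     DATA (NHEL(I,   2),I=1,4) /-1,-1,-1, 1/
      # one DATA statement per helicity combination of the external particles.
 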
990
991 - def get_ic_line(self, matrix_element):
992 """Return the IC definition line coming after helicities, required by 993 switchmom in madevent""" 994 995 nexternal = matrix_element.get_nexternal_ninitial()[0] 996 int_list = range(1, nexternal + 1) 997 998 return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, 999 ",".join([str(i) for \ 1000 i in int_list]))
1001
1002 - def set_chosen_SO_index(self, process, squared_orders):
1003 """ From the squared order constraints set by the user, this function 1004 finds what indices of the squared_orders list the user intends to pick. 1005 It returns this as a string of comma-separated successive '.true.' or 1006 '.false.' for each index.""" 1007 1008 user_squared_orders = process.get('squared_orders') 1009 split_orders = process.get('split_orders') 1010 1011 if len(user_squared_orders)==0: 1012 return ','.join(['.true.']*len(squared_orders)) 1013 1014 res = [] 1015 for sqsos in squared_orders: 1016 is_a_match = True 1017 for user_sqso, value in user_squared_orders.items(): 1018 if (process.get_squared_order_type(user_sqso) =='==' and \ 1019 value!=sqsos[split_orders.index(user_sqso)]) or \ 1020 (process.get_squared_order_type(user_sqso) in ['<=','='] and \ 1021 value<sqsos[split_orders.index(user_sqso)]) or \ 1022 (process.get_squared_order_type(user_sqso) == '>' and \ 1023 value>=sqsos[split_orders.index(user_sqso)]): 1024 is_a_match = False 1025 break 1026 res.append('.true.' if is_a_match else '.false.') 1027 1028 return ','.join(res)
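 
      # Worked example (hypothetical): with split_orders ['QCD','QED'], a user
      # constraint QED==2 and squared_orders [(4,0), (2,2), (0,4)], only the
      # second combination matches, so the routine returns
      #     '.false.,.true.,.false.'
 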
1029
1030 - def get_split_orders_lines(self, orders, array_name, n=5):
1031 """ Return the split orders definition as defined in the list orders and 1032 for the name of the array 'array_name'. Split rows in chunks of size n.""" 1033 1034 ret_list = [] 1035 for index, order in enumerate(orders): 1036 for k in xrange(0, len(order), n): 1037 ret_list.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" % \ 1038 (array_name,index + 1, k + 1, min(k + n, len(order)), 1039 ','.join(["%5r" % i for i in order[k:k + n]]))) 1040 return ret_list
1041
1042 - def format_integer_list(self, list, name, n=5):
1043 """ Return an initialization of the python list in argument following 1044 the fortran syntax using the data keyword assignment, filling an array 1045 of name 'name'. It splits rows in chunks of size n.""" 1046 1047 ret_list = [] 1048 for k in xrange(0, len(list), n): 1049 ret_list.append("DATA (%s(i),i=%3r,%3r) /%s/" % \ 1050 (name, k + 1, min(k + n, len(list)), 1051 ','.join(["%5r" % i for i in list[k:k + n]]))) 1052 return ret_list
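 
      # Worked example (hypothetical): format_integer_list(range(1, 8), 'CONF')
      # splits the seven entries into chunks of n=5 and returns two Fortran DATA
      # statements,
      #     DATA (CONF(i),i=  1,  5) /    1,    2,    3,    4,    5/
      #     DATA (CONF(i),i=  6,  7) /    6,    7/
 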
1053
1054 - def get_color_data_lines(self, matrix_element, n=6):
1055 """Return the color matrix definition lines for this matrix element. Split 1056 rows in chunks of size n.""" 1057 1058 if not matrix_element.get('color_matrix'): 1059 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"] 1060 else: 1061 ret_list = [] 1062 my_cs = color.ColorString() 1063 for index, denominator in \ 1064 enumerate(matrix_element.get('color_matrix').\ 1065 get_line_denominators()): 1066 # First write the common denominator for this color matrix line 1067 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator)) 1068 # Then write the numerators for the matrix elements 1069 num_list = matrix_element.get('color_matrix').\ 1070 get_line_numerators(index, denominator) 1071 1072 for k in xrange(0, len(num_list), n): 1073 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \ 1074 (index + 1, k + 1, min(k + n, len(num_list)), 1075 ','.join(["%5r" % i for i in num_list[k:k + n]]))) 1076 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index]) 1077 ret_list.append("C %s" % repr(my_cs)) 1078 return ret_list
1079 1080
1081 - def get_den_factor_line(self, matrix_element):
1082 """Return the denominator factor line for this matrix element""" 1083 1084 return "DATA IDEN/%2r/" % \ 1085 matrix_element.get_denominator_factor()
1086
1087 - def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
1088 """Return the ICOLAMP matrix, showing which JAMPs contribute to 1089 which configs (diagrams).""" 1090 1091 ret_list = [] 1092 1093 booldict = {False: ".false.", True: ".true."} 1094 1095 if not matrix_element.get('color_basis'): 1096 # No color, so only one color factor. Simply write a ".true." 1097 # for each config (i.e., each diagram with only 3 particle 1098 # vertices 1099 configs = len(mapconfigs) 1100 ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \ 1101 (num_matrix_element, configs, 1102 ','.join([".true." for i in range(configs)]))) 1103 return ret_list 1104 1105 # There is a color basis - create a list showing which JAMPs have 1106 # contributions to which configs 1107 1108 # Only want to include leading color flows, so find max_Nc 1109 color_basis = matrix_element.get('color_basis') 1110 1111 # We don't want to include the power of Nc's which come from the potential 1112 # loop color trace (i.e. in the case of a closed fermion loop for example) 1113 # so we subtract it here when computing max_Nc 1114 max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in 1115 color_basis.values()],[])) 1116 1117 # Crate dictionary between diagram number and JAMP number 1118 diag_jamp = {} 1119 for ijamp, col_basis_elem in \ 1120 enumerate(sorted(matrix_element.get('color_basis').keys())): 1121 for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]: 1122 # Only use color flows with Nc == max_Nc. However, notice that 1123 # we don't want to include the Nc power coming from the loop 1124 # in this counting. 1125 if (diag_tuple[4]-diag_tuple[5]) == max_Nc: 1126 diag_num = diag_tuple[0] + 1 1127 # Add this JAMP number to this diag_num 1128 diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \ 1129 [ijamp+1] 1130 1131 colamps = ijamp + 1 1132 for iconfig, num_diag in enumerate(mapconfigs): 1133 if num_diag == 0: 1134 continue 1135 1136 # List of True or False 1137 bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)] 1138 # Add line 1139 ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \ 1140 (iconfig+1, num_matrix_element, colamps, 1141 ','.join(["%s" % booldict[b] for b in \ 1142 bool_list]))) 1143 1144 return ret_list
1145
1146 - def get_amp2_lines(self, matrix_element, config_map = []):
1147 """Return the amp2(i) = sum(amp for diag(i))^2 lines""" 1148 1149 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 1150 # Get minimum legs in a vertex 1151 vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \ 1152 matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]] 1153 minvert = min(vert_list) if vert_list!=[] else 0 1154 1155 ret_lines = [] 1156 if config_map: 1157 # In this case, we need to sum up all amplitudes that have 1158 # identical topologies, as given by the config_map (which 1159 # gives the topology/config for each of the diagrams 1160 diagrams = matrix_element.get('diagrams') 1161 # Combine the diagrams with identical topologies 1162 config_to_diag_dict = {} 1163 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1164 if config_map[idiag] == 0: 1165 continue 1166 try: 1167 config_to_diag_dict[config_map[idiag]].append(idiag) 1168 except KeyError: 1169 config_to_diag_dict[config_map[idiag]] = [idiag] 1170 # Write out the AMP2s summing squares of amplitudes belonging 1171 # to eiher the same diagram or different diagrams with 1172 # identical propagator properties. Note that we need to use 1173 # AMP2 number corresponding to the first diagram number used 1174 # for that AMP2. 1175 for config in sorted(config_to_diag_dict.keys()): 1176 1177 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \ 1178 {"num": (config_to_diag_dict[config][0] + 1)} 1179 1180 amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \ 1181 sum([diagrams[idiag].get('amplitudes') for \ 1182 idiag in config_to_diag_dict[config]], [])]) 1183 1184 # Not using \sum |M|^2 anymore since this creates troubles 1185 # when ckm is not diagonal due to the JIM mechanism. 1186 if '+' in amp: 1187 line += "(%s)*dconjg(%s)" % (amp, amp) 1188 else: 1189 line += "%s*dconjg(%s)" % (amp, amp) 1190 ret_lines.append(line) 1191 else: 1192 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1193 # Ignore any diagrams with 4-particle vertices. 1194 if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert: 1195 continue 1196 # Now write out the expression for AMP2, meaning the sum of 1197 # squared amplitudes belonging to the same diagram 1198 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)} 1199 line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \ 1200 {"num": a.get('number')} for a in \ 1201 diag.get('amplitudes')]) 1202 ret_lines.append(line) 1203 1204 return ret_lines
1205 1206 #=========================================================================== 1207 # Returns the data statements initializing the coeffictients for the JAMP 1208 # decomposition. It is used when the JAMP initialization is decided to be 1209 # done through big arrays containing the projection coefficients. 1210 #===========================================================================
1211 - def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\ 1212 n=50, Nc_value=3):
1213 """This functions return the lines defining the DATA statement setting 1214 the coefficients building the JAMPS out of the AMPS. Split rows in 1215 bunches of size n. 1216 One can specify the color_basis from which the color amplitudes originates 1217 so that there are commentaries telling what color structure each JAMP 1218 corresponds to.""" 1219 1220 if(not isinstance(color_amplitudes,list) or 1221 not (color_amplitudes and isinstance(color_amplitudes[0],list))): 1222 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_coefs" 1223 1224 res_list = [] 1225 my_cs = color.ColorString() 1226 for index, coeff_list in enumerate(color_amplitudes): 1227 # Create the list of the complete numerical coefficient. 1228 coefs_list=[coefficient[0][0]*coefficient[0][1]*\ 1229 (fractions.Fraction(Nc_value)**coefficient[0][3]) for \ 1230 coefficient in coeff_list] 1231 # Create the list of the numbers of the contributing amplitudes. 1232 # Mutliply by -1 for those which have an imaginary coefficient. 1233 ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \ 1234 for coefficient in coeff_list] 1235 # Find the common denominator. 1236 commondenom=abs(reduce(fractions.gcd, coefs_list).denominator) 1237 num_list=[(coefficient*commondenom).numerator \ 1238 for coefficient in coefs_list] 1239 res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\ 1240 index+1,len(num_list))) 1241 res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\ 1242 index+1,commondenom)) 1243 if color_basis: 1244 my_cs.from_immutable(sorted(color_basis.keys())[index]) 1245 res_list.append("C %s" % repr(my_cs)) 1246 for k in xrange(0, len(num_list), n): 1247 res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \ 1248 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1249 ','.join(["%6r" % i for i in num_list[k:k + n]]))) 1250 res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \ 1251 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1252 ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]]))) 1253 pass 1254 return res_list
1255 1256
1257 - def get_JAMP_lines_split_order(self, col_amps, split_order_amps, 1258 split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
1259 """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps 1260 defined as a matrix element or directly as a color_amplitudes dictionary. 1261 The split_order_amps specifies the group of amplitudes sharing the same 1262 amplitude orders which should be put in together in a given set of JAMPS. 1263 The split_order_amps is supposed to have the format of the second output 1264 of the function get_split_orders_mapping function in helas_objects.py. 1265 The split_order_names is optional (it should correspond to the process 1266 'split_orders' attribute) and only present to provide comments in the 1267 JAMP definitions in the code.""" 1268 1269 # Let the user call get_JAMP_lines_split_order directly from a 1270 error_msg="Malformed '%s' argument passed to the "+\ 1271 "get_JAMP_lines_split_order function: %s"%str(split_order_amps) 1272 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 1273 color_amplitudes=col_amps.get_color_amplitudes() 1274 elif(isinstance(col_amps,list)): 1275 if(col_amps and isinstance(col_amps[0],list)): 1276 color_amplitudes=col_amps 1277 else: 1278 raise MadGraph5Error, error_msg%'col_amps' 1279 else: 1280 raise MadGraph5Error, error_msg%'col_amps' 1281 1282 # Verify the sanity of the split_order_amps and split_order_names args 1283 if isinstance(split_order_amps,list): 1284 for elem in split_order_amps: 1285 if len(elem)!=2: 1286 raise MadGraph5Error, error_msg%'split_order_amps' 1287 # Check the first element of the two lists to make sure they are 1288 # integers, although in principle they should all be integers. 1289 if not isinstance(elem[0],tuple) or \ 1290 not isinstance(elem[1],tuple) or \ 1291 not isinstance(elem[0][0],int) or \ 1292 not isinstance(elem[1][0],int): 1293 raise MadGraph5Error, error_msg%'split_order_amps' 1294 else: 1295 raise MadGraph5Error, error_msg%'split_order_amps' 1296 1297 if not split_order_names is None: 1298 if isinstance(split_order_names,list): 1299 # Should specify the same number of names as there are elements 1300 # in the key of the split_order_amps. 1301 if len(split_order_names)!=len(split_order_amps[0][0]): 1302 raise MadGraph5Error, error_msg%'split_order_names' 1303 # Check the first element of the list to be a string 1304 if not isinstance(split_order_names[0],str): 1305 raise MadGraph5Error, error_msg%'split_order_names' 1306 else: 1307 raise MadGraph5Error, error_msg%'split_order_names' 1308 1309 # Now scan all contributing orders to be individually computed and 1310 # construct the list of color_amplitudes for JAMP to be constructed 1311 # accordingly. 1312 res_list=[] 1313 for i, amp_order in enumerate(split_order_amps): 1314 col_amps_order = [] 1315 for jamp in color_amplitudes: 1316 col_amps_order.append(filter(lambda col_amp: 1317 col_amp[1] in amp_order[1],jamp)) 1318 if split_order_names: 1319 res_list.append('C JAMPs contributing to orders '+' '.join( 1320 ['%s=%i'%order for order in zip(split_order_names, 1321 amp_order[0])])) 1322 if self.opt['export_format'] in ['madloop_matchbox']: 1323 res_list.extend(self.get_JAMP_lines(col_amps_order, 1324 JAMP_format="JAMP(%s,{0})".format(str(i+1)), 1325 JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1)))) 1326 else: 1327 res_list.extend(self.get_JAMP_lines(col_amps_order, 1328 JAMP_format="JAMP(%s,{0})".format(str(i+1)))) 1329 1330 return res_list
1331 1332
1333 - def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", 1334 split=-1):
1335 """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps 1336 defined as a matrix element or directly as a color_amplitudes dictionary, 1337 Jamp_formatLC should be define to allow to add LeadingColor computation 1338 (usefull for MatchBox) 1339 The split argument defines how the JAMP lines should be split in order 1340 not to be too long.""" 1341 1342 # Let the user call get_JAMP_lines directly from a MatrixElement or from 1343 # the color amplitudes lists. 1344 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 1345 color_amplitudes=col_amps.get_color_amplitudes() 1346 elif(isinstance(col_amps,list)): 1347 if(col_amps and isinstance(col_amps[0],list)): 1348 color_amplitudes=col_amps 1349 else: 1350 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines" 1351 else: 1352 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines" 1353 1354 1355 res_list = [] 1356 for i, coeff_list in enumerate(color_amplitudes): 1357 # It might happen that coeff_list is empty if this function was 1358 # called from get_JAMP_lines_split_order (i.e. if some color flow 1359 # does not contribute at all for a given order). 1360 # In this case we simply set it to 0. 1361 if coeff_list==[]: 1362 res_list.append(((JAMP_format+"=0D0") % str(i + 1))) 1363 continue 1364 # Break the JAMP definition into 'n=split' pieces to avoid having 1365 # arbitrarly long lines. 1366 first=True 1367 n = (len(coeff_list)+1 if split<=0 else split) 1368 while coeff_list!=[]: 1369 coefs=coeff_list[:n] 1370 coeff_list=coeff_list[n:] 1371 res = ((JAMP_format+"=") % str(i + 1)) + \ 1372 ((JAMP_format % str(i + 1)) if not first and split>0 else '') 1373 1374 first=False 1375 # Optimization: if all contributions to that color basis element have 1376 # the same coefficient (up to a sign), put it in front 1377 list_fracs = [abs(coefficient[0][1]) for coefficient in coefs] 1378 common_factor = False 1379 diff_fracs = list(set(list_fracs)) 1380 if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1: 1381 common_factor = True 1382 global_factor = diff_fracs[0] 1383 res = res + '%s(' % self.coeff(1, global_factor, False, 0) 1384 1385 # loop for JAMP 1386 for (coefficient, amp_number) in coefs: 1387 if not coefficient: 1388 continue 1389 if common_factor: 1390 res = (res + "%s" + AMP_format) % \ 1391 (self.coeff(coefficient[0], 1392 coefficient[1] / abs(coefficient[1]), 1393 coefficient[2], 1394 coefficient[3]), 1395 str(amp_number)) 1396 else: 1397 res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0], 1398 coefficient[1], 1399 coefficient[2], 1400 coefficient[3]), 1401 str(amp_number)) 1402 1403 if common_factor: 1404 res = res + ')' 1405 1406 res_list.append(res) 1407 1408 return res_list
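 
      # Worked example (hypothetical): in the generated matrix.f the lines
      # returned here look, for a two-flow amplitude, something like
      #     JAMP(1)=-AMP(1)+AMP(2)
      #     JAMP(2)=+AMP(1)-AMP(3)
      # i.e. each color-flow amplitude JAMP(i) is a signed (possibly rational-
      # coefficient) combination of the helicity amplitudes AMP(j).
 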
1409
1410 - def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
1411 """Generate the PDF lines for the auto_dsig.f file""" 1412 1413 processes = matrix_element.get('processes') 1414 model = processes[0].get('model') 1415 1416 pdf_definition_lines = "" 1417 pdf_data_lines = "" 1418 pdf_lines = "" 1419 1420 if ninitial == 1: 1421 pdf_lines = "PD(0) = 0d0\nIPROC = 0\n" 1422 for i, proc in enumerate(processes): 1423 process_line = proc.base_string() 1424 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 1425 pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n" 1426 pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n" 1427 else: 1428 # Pick out all initial state particles for the two beams 1429 initial_states = [sorted(list(set([p.get_initial_pdg(1) for \ 1430 p in processes]))), 1431 sorted(list(set([p.get_initial_pdg(2) for \ 1432 p in processes])))] 1433 1434 # Prepare all variable names 1435 pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \ 1436 sum(initial_states,[])]) 1437 for key,val in pdf_codes.items(): 1438 pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m') 1439 1440 # Set conversion from PDG code to number used in PDF calls 1441 pdgtopdf = {21: 0, 22: 7} 1442 1443 # Fill in missing entries of pdgtopdf 1444 for pdg in sum(initial_states,[]): 1445 if not pdg in pdgtopdf and not pdg in pdgtopdf.values(): 1446 pdgtopdf[pdg] = pdg 1447 elif pdg not in pdgtopdf and pdg in pdgtopdf.values(): 1448 # If any particle has pdg code 7, we need to use something else 1449 pdgtopdf[pdg] = 6000000 + pdg 1450 1451 # Get PDF variable declarations for all initial states 1452 for i in [0,1]: 1453 pdf_definition_lines += "DOUBLE PRECISION " + \ 1454 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 1455 for pdg in \ 1456 initial_states[i]]) + \ 1457 "\n" 1458 1459 # Get PDF data lines for all initial states 1460 for i in [0,1]: 1461 pdf_data_lines += "DATA " + \ 1462 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 1463 for pdg in initial_states[i]]) + \ 1464 "/%d*1D0/" % len(initial_states[i]) + \ 1465 "\n" 1466 1467 # Get PDF lines for all different initial states 1468 for i, init_states in enumerate(initial_states): 1469 if subproc_group: 1470 pdf_lines = pdf_lines + \ 1471 "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \ 1472 % (i + 1, i + 1) 1473 else: 1474 pdf_lines = pdf_lines + \ 1475 "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \ 1476 % (i + 1, i + 1) 1477 1478 for nbi,initial_state in enumerate(init_states): 1479 if initial_state in pdf_codes.keys(): 1480 if subproc_group: 1481 pdf_lines = pdf_lines + \ 1482 ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP, 1," + \ 1483 "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \ 1484 (pdf_codes[initial_state], 1485 i + 1, i + 1, pdgtopdf[initial_state], 1486 i + 1, i + 1) 1487 else: 1488 pdf_lines = pdf_lines + \ 1489 ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP, %d," + \ 1490 "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \ 1491 (pdf_codes[initial_state], 1492 i + 1, i + 1, pdgtopdf[initial_state], 1493 i + 1, 1494 i + 1, i + 1) 1495 pdf_lines = pdf_lines + "ENDIF\n" 1496 1497 # Add up PDFs for the different initial state particles 1498 pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n" 1499 for proc in processes: 1500 process_line = proc.base_string() 1501 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! 
" + process_line 1502 pdf_lines = pdf_lines + "\nPD(IPROC)=" 1503 for ibeam in [1, 2]: 1504 initial_state = proc.get_initial_pdg(ibeam) 1505 if initial_state in pdf_codes.keys(): 1506 pdf_lines = pdf_lines + "%s%d*" % \ 1507 (pdf_codes[initial_state], ibeam) 1508 else: 1509 pdf_lines = pdf_lines + "1d0*" 1510 # Remove last "*" from pdf_lines 1511 pdf_lines = pdf_lines[:-1] + "\n" 1512 pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n" 1513 1514 # Remove last line break from the return variables 1515 return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1516 1517 #=========================================================================== 1518 # write_props_file 1519 #===========================================================================
1520 - def write_props_file(self, writer, matrix_element, s_and_t_channels):
1521 """Write the props.inc file for MadEvent. Needs input from 1522 write_configs_file.""" 1523 1524 lines = [] 1525 1526 particle_dict = matrix_element.get('processes')[0].get('model').\ 1527 get('particle_dict') 1528 1529 for iconf, configs in enumerate(s_and_t_channels): 1530 for vertex in configs[0] + configs[1][:-1]: 1531 leg = vertex.get('legs')[-1] 1532 if leg.get('id') not in particle_dict: 1533 # Fake propagator used in multiparticle vertices 1534 mass = 'zero' 1535 width = 'zero' 1536 pow_part = 0 1537 else: 1538 particle = particle_dict[leg.get('id')] 1539 # Get mass 1540 if particle.get('mass').lower() == 'zero': 1541 mass = particle.get('mass') 1542 else: 1543 mass = "abs(%s)" % particle.get('mass') 1544 # Get width 1545 if particle.get('width').lower() == 'zero': 1546 width = particle.get('width') 1547 else: 1548 width = "abs(%s)" % particle.get('width') 1549 1550 pow_part = 1 + int(particle.is_boson()) 1551 1552 lines.append("prmass(%d,%d) = %s" % \ 1553 (leg.get('number'), iconf + 1, mass)) 1554 lines.append("prwidth(%d,%d) = %s" % \ 1555 (leg.get('number'), iconf + 1, width)) 1556 lines.append("pow(%d,%d) = %d" % \ 1557 (leg.get('number'), iconf + 1, pow_part)) 1558 1559 # Write the file 1560 writer.writelines(lines) 1561 1562 return True
1563 1564 #=========================================================================== 1565 # write_configs_file 1566 #===========================================================================
1567 - def write_configs_file(self, writer, matrix_element):
1568 """Write the configs.inc file for MadEvent""" 1569 1570 # Extract number of external particles 1571 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1572 1573 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 1574 mapconfigs = [c[0] for c in configs] 1575 model = matrix_element.get('processes')[0].get('model') 1576 return mapconfigs, self.write_configs_file_from_diagrams(writer, 1577 [[c[1]] for c in configs], 1578 mapconfigs, 1579 nexternal, ninitial, 1580 model)
1581 1582 #=========================================================================== 1583 # write_configs_file_from_diagrams 1584 #===========================================================================
1585 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 1586 nexternal, ninitial, model):
1587 """Write the actual configs.inc file. 1588 1589 configs is the diagrams corresponding to configs (each 1590 diagrams is a list of corresponding diagrams for all 1591 subprocesses, with None if there is no corresponding diagrams 1592 for a given process). 1593 mapconfigs gives the diagram number for each config. 1594 1595 For s-channels, we need to output one PDG for each subprocess in 1596 the subprocess group, in order to be able to pick the right 1597 one for multiprocesses.""" 1598 1599 lines = [] 1600 1601 s_and_t_channels = [] 1602 1603 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 1604 for config in configs if [d for d in config if d][0].\ 1605 get_vertex_leg_numbers()!=[]] 1606 minvert = min(vert_list) if vert_list!=[] else 0 1607 1608 # Number of subprocesses 1609 nsubprocs = len(configs[0]) 1610 1611 nconfigs = 0 1612 1613 new_pdg = model.get_first_non_pdg() 1614 1615 for iconfig, helas_diags in enumerate(configs): 1616 if any(vert > minvert for vert in [d for d in helas_diags if d]\ 1617 [0].get_vertex_leg_numbers()) : 1618 # Only 3-vertices allowed in configs.inc except for vertices 1619 # which originate from a shrunk loop. 1620 continue 1621 nconfigs += 1 1622 1623 # Need s- and t-channels for all subprocesses, including 1624 # those that don't contribute to this config 1625 empty_verts = [] 1626 stchannels = [] 1627 for h in helas_diags: 1628 if h: 1629 # get_s_and_t_channels gives vertices starting from 1630 # final state external particles and working inwards 1631 stchannels.append(h.get('amplitudes')[0].\ 1632 get_s_and_t_channels(ninitial, model, new_pdg)) 1633 else: 1634 stchannels.append((empty_verts, None)) 1635 1636 # For t-channels, just need the first non-empty one 1637 tchannels = [t for s,t in stchannels if t != None][0] 1638 1639 # For s_and_t_channels (to be used later) use only first config 1640 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 1641 tchannels]) 1642 1643 # Make sure empty_verts is same length as real vertices 1644 if any([s for s,t in stchannels]): 1645 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 1646 1647 # Reorganize s-channel vertices to get a list of all 1648 # subprocesses for each vertex 1649 schannels = zip(*[s for s,t in stchannels]) 1650 else: 1651 schannels = [] 1652 1653 allchannels = schannels 1654 if len(tchannels) > 1: 1655 # Write out tchannels only if there are any non-trivial ones 1656 allchannels = schannels + tchannels 1657 1658 # Write out propagators for s-channel and t-channel vertices 1659 1660 lines.append("# Diagram %d" % (mapconfigs[iconfig])) 1661 # Correspondance between the config and the diagram = amp2 1662 lines.append("data mapconfig(%d)/%d/" % (nconfigs, 1663 mapconfigs[iconfig])) 1664 1665 for verts in allchannels: 1666 if verts in schannels: 1667 vert = [v for v in verts if v][0] 1668 else: 1669 vert = verts 1670 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 1671 last_leg = vert.get('legs')[-1] 1672 lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 1673 (last_leg.get('number'), nconfigs, len(daughters), 1674 ",".join([str(d) for d in daughters]))) 1675 if verts in schannels: 1676 pdgs = [] 1677 for v in verts: 1678 if v: 1679 pdgs.append(v.get('legs')[-1].get('id')) 1680 else: 1681 pdgs.append(0) 1682 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 1683 (last_leg.get('number'), nconfigs, nsubprocs, 1684 ",".join([str(d) for d in pdgs]))) 1685 lines.append("data tprid(%d,%d)/0/" % \ 1686 (last_leg.get('number'), 
nconfigs)) 1687 elif verts in tchannels[:-1]: 1688 lines.append("data tprid(%d,%d)/%d/" % \ 1689 (last_leg.get('number'), nconfigs, 1690 abs(last_leg.get('id')))) 1691 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 1692 (last_leg.get('number'), nconfigs, nsubprocs, 1693 ",".join(['0'] * nsubprocs))) 1694 1695 # Write out number of configs 1696 lines.append("# Number of configs") 1697 lines.append("data mapconfig(0)/%d/" % nconfigs) 1698 1699 # Write the file 1700 writer.writelines(lines) 1701 1702 return s_and_t_channels
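A toy rendering of the data statements written per s-channel vertex (all numbers are made up: legs 3 and 4 merge into internal leg -1 of config 1, with two subprocesses of which only the first has a propagator, a Z):

# Illustration of the configs.inc data statements for one s-channel vertex:
nconfigs, nsubprocs = 1, 2
daughters, last_leg_number, pdgs = [3, 4], -1, [23, 0]
lines = ["data mapconfig(%d)/%d/" % (nconfigs, 1),
         "data (iforest(i,%d,%d),i=1,%d)/%s/" % (last_leg_number, nconfigs,
             len(daughters), ",".join(str(d) for d in daughters)),
         "data (sprop(i,%d,%d),i=1,%d)/%s/" % (last_leg_number, nconfigs,
             nsubprocs, ",".join(str(p) for p in pdgs)),
         "data tprid(%d,%d)/0/" % (last_leg_number, nconfigs)]
print("\n".join(lines))
# data mapconfig(1)/1/
# data (iforest(i,-1,1),i=1,2)/3,4/
# data (sprop(i,-1,1),i=1,2)/23,0/
# data tprid(-1,1)/0/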
1703 1704 #=========================================================================== 1705 # Global helper methods 1706 #=========================================================================== 1707
1708 - def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1709 """Returns a nicely formatted string for the coefficients in JAMP lines""" 1710 1711 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power 1712 1713 if total_coeff == 1: 1714 if is_imaginary: 1715 return '+imag1*' 1716 else: 1717 return '+' 1718 elif total_coeff == -1: 1719 if is_imaginary: 1720 return '-imag1*' 1721 else: 1722 return '-' 1723 1724 res_str = '%+iD0' % total_coeff.numerator 1725 1726 if total_coeff.denominator != 1: 1727 # Check if total_coeff is an integer 1728 res_str = res_str + '/%iD0' % total_coeff.denominator 1729 1730 if is_imaginary: 1731 res_str = res_str + '*imag1' 1732 1733 return res_str + '*'
1734 1735
1736 - def set_fortran_compiler(self, default_compiler, force=False):
1737 """Set compiler based on what's available on the system""" 1738 1739 # Check for compiler 1740 if default_compiler['fortran'] and misc.which(default_compiler['fortran']): 1741 f77_compiler = default_compiler['fortran'] 1742 elif misc.which('gfortran'): 1743 f77_compiler = 'gfortran' 1744 elif misc.which('g77'): 1745 f77_compiler = 'g77' 1746 elif misc.which('f77'): 1747 f77_compiler = 'f77' 1748 elif default_compiler['fortran']: 1749 logger.warning('No Fortran Compiler detected! Please install one') 1750 f77_compiler = default_compiler['fortran'] # maybe misc fail so try with it 1751 else: 1752 raise MadGraph5Error, 'No Fortran Compiler detected! Please install one' 1753 logger.info('Use Fortran compiler ' + f77_compiler) 1754 1755 1756 # Check for compiler. 1. set default. 1757 if default_compiler['f2py']: 1758 f2py_compiler = default_compiler['f2py'] 1759 else: 1760 f2py_compiler = '' 1761 # Try to find the correct one. 1762 if default_compiler['f2py'] and misc.which(default_compiler['f2py']): 1763 f2py_compiler = default_compiler['f2py'] 1764 elif misc.which('f2py'): 1765 f2py_compiler = 'f2py' 1766 elif sys.version_info[1] == 6: 1767 if misc.which('f2py-2.6'): 1768 f2py_compiler = 'f2py-2.6' 1769 elif misc.which('f2py2.6'): 1770 f2py_compiler = 'f2py2.6' 1771 elif sys.version_info[1] == 7: 1772 if misc.which('f2py-2.7'): 1773 f2py_compiler = 'f2py-2.7' 1774 elif misc.which('f2py2.7'): 1775 f2py_compiler = 'f2py2.7' 1776 1777 to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler} 1778 1779 1780 self.replace_make_opt_f_compiler(to_replace) 1781 # Replace also for Template but not for cluster 1782 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite: 1783 self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO')) 1784 1785 return f77_compiler
1786 1787 # an alias for backward compatibility 1788 set_compiler = set_fortran_compiler 1789 1790
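A minimal sketch of the same fallback order, using distutils.spawn.find_executable as a stand-in for misc.which (illustration only):

# Sketch of the Fortran compiler fallback: preferred, then gfortran/g77/f77.
from distutils.spawn import find_executable

def pick_fortran_compiler(preferred=None, candidates=('gfortran', 'g77', 'f77')):
    if preferred and find_executable(preferred):
        return preferred
    for name in candidates:
        if find_executable(name):
            return name
    # the real method warns and falls back to `preferred`, or raises if none given
    return preferred

print(pick_fortran_compiler('gfortran'))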
1791 - def set_cpp_compiler(self, default_compiler, force=False):
1792 """Set compiler based on what's available on the system""" 1793 1794 # Check for compiler 1795 if default_compiler and misc.which(default_compiler): 1796 compiler = default_compiler 1797 elif misc.which('g++'): 1798 #check if clang version 1799 p = misc.Popen(['g++', '--version'], stdout=subprocess.PIPE, 1800 stderr=subprocess.PIPE) 1801 out, _ = p.communicate() 1802 if 'clang' in out and misc.which('clang'): 1803 compiler = 'clang' 1804 else: 1805 compiler = 'g++' 1806 elif misc.which('c++'): 1807 compiler = 'c++' 1808 elif misc.which('clang'): 1809 compiler = 'clang' 1810 elif default_compiler: 1811 logger.warning('No c++ Compiler detected! Please install one') 1812 compiler = default_compiler # maybe misc fail so try with it 1813 else: 1814 raise MadGraph5Error, 'No c++ Compiler detected! Please install one' 1815 logger.info('Use c++ compiler ' + compiler) 1816 self.replace_make_opt_c_compiler(compiler) 1817 # Replace also for Template but not for cluster 1818 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite and \ 1819 not __debug__ and not os.path.exists(pjoin(MG5DIR,'bin','create_release.py')): 1820 self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO')) 1821 1822 return compiler
1823 1824
1825 - def replace_make_opt_f_compiler(self, compilers, root_dir = ""):
1826 """Set FC=compiler in Source/make_opts""" 1827 1828 assert isinstance(compilers, dict) 1829 1830 mod = False #avoid to rewrite the file if not needed 1831 if not root_dir: 1832 root_dir = self.dir_path 1833 1834 compiler= compilers['fortran'] 1835 f2py_compiler = compilers['f2py'] 1836 if not f2py_compiler: 1837 f2py_compiler = 'f2py' 1838 for_update= {'DEFAULT_F_COMPILER':compiler, 1839 'DEFAULT_F2PY_COMPILER':f2py_compiler} 1840 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1841 1842 try: 1843 common_run_interface.CommonRunCmd.update_make_opts_full( 1844 make_opts, for_update) 1845 except IOError: 1846 if root_dir == self.dir_path: 1847 logger.info('Fail to set compiler. Trying to continue anyway.')
1848
1849 - def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
1850 """Set CXX=compiler in Source/make_opts. 1851 The version is also checked, in order to set some extra flags 1852 if the compiler is clang (on MACOS)""" 1853 1854 is_clang = misc.detect_if_cpp_compiler_is_clang(compiler) 1855 is_lc = misc.detect_cpp_std_lib_dependence(compiler) == '-lc++' 1856 1857 1858 # list of the variable to set in the make_opts file 1859 for_update= {'DEFAULT_CPP_COMPILER':compiler, 1860 'MACFLAG':'-mmacosx-version-min=10.7' if is_clang and is_lc else '', 1861 'STDLIB': '-lc++' if is_lc else '-lstdc++', 1862 'STDLIB_FLAG': '-stdlib=libc++' if is_lc and is_clang else '' 1863 } 1864 1865 # for MOJAVE remove the MACFLAG: 1866 if is_clang: 1867 import platform 1868 version, _, _ = platform.mac_ver() 1869 if not version:# not linux 1870 version = 14 # set version to remove MACFLAG 1871 else: 1872 version = int(version.split('.')[1]) 1873 if version >= 14: 1874 for_update['MACFLAG'] = '-mmacosx-version-min=10.8' if is_lc else '' 1875 1876 if not root_dir: 1877 root_dir = self.dir_path 1878 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1879 1880 try: 1881 common_run_interface.CommonRunCmd.update_make_opts_full( 1882 make_opts, for_update) 1883 except IOError: 1884 if root_dir == self.dir_path: 1885 logger.info('Fail to set compiler. Trying to continue anyway.') 1886 1887 return
1888
1889 #=============================================================================== 1890 # ProcessExporterFortranSA 1891 #=============================================================================== 1892 -class ProcessExporterFortranSA(ProcessExporterFortran):
1893 """Class to take care of exporting a set of matrix elements to 1894 MadGraph v4 StandAlone format.""" 1895 1896 matrix_template = "matrix_standalone_v4.inc" 1897
1898 - def __init__(self, *args,**opts):
1899 """add the format information compare to standard init""" 1900 1901 if 'format' in opts: 1902 self.format = opts['format'] 1903 del opts['format'] 1904 else: 1905 self.format = 'standalone' 1906 1907 self.prefix_info = {} 1908 ProcessExporterFortran.__init__(self, *args, **opts)
1909
1910 - def copy_template(self, model):
1911 """Additional actions needed for setup of Template 1912 """ 1913 1914 #First copy the full template tree if dir_path doesn't exit 1915 if os.path.isdir(self.dir_path): 1916 return 1917 1918 logger.info('initialize a new standalone directory: %s' % \ 1919 os.path.basename(self.dir_path)) 1920 temp_dir = pjoin(self.mgme_dir, 'Template/LO') 1921 1922 # Create the directory structure 1923 os.mkdir(self.dir_path) 1924 os.mkdir(pjoin(self.dir_path, 'Source')) 1925 os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL')) 1926 os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS')) 1927 os.mkdir(pjoin(self.dir_path, 'SubProcesses')) 1928 os.mkdir(pjoin(self.dir_path, 'bin')) 1929 os.mkdir(pjoin(self.dir_path, 'bin', 'internal')) 1930 os.mkdir(pjoin(self.dir_path, 'lib')) 1931 os.mkdir(pjoin(self.dir_path, 'Cards')) 1932 1933 # Information at top-level 1934 #Write version info 1935 shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path) 1936 try: 1937 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 1938 except IOError: 1939 MG5_version = misc.get_pkg_info() 1940 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \ 1941 "5." + MG5_version['version']) 1942 1943 1944 # Add file in SubProcesses 1945 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'), 1946 pjoin(self.dir_path, 'SubProcesses', 'makefileP')) 1947 1948 if self.format == 'standalone': 1949 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'), 1950 pjoin(self.dir_path, 'SubProcesses', 'check_sa.f')) 1951 1952 # Add file in Source 1953 shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'), 1954 pjoin(self.dir_path, 'Source')) 1955 # add the makefile 1956 filename = pjoin(self.dir_path,'Source','makefile') 1957 self.write_source_makefile(writers.FileWriter(filename))
1958 1959 #=========================================================================== 1960 # export model files 1961 #===========================================================================
1962 - def export_model_files(self, model_path):
1963 """export the model dependent files for V4 model""" 1964 1965 super(ProcessExporterFortranSA,self).export_model_files(model_path) 1966 # Add the routine update_as_param in v4 model 1967 # This is a function created in the UFO 1968 text=""" 1969 subroutine update_as_param() 1970 call setpara('param_card.dat',.false.) 1971 return 1972 end 1973 """ 1974 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 1975 ff.write(text) 1976 ff.close() 1977 1978 text = open(pjoin(self.dir_path,'SubProcesses','check_sa.f')).read() 1979 text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)') 1980 fsock = open(pjoin(self.dir_path,'SubProcesses','check_sa.f'), 'w') 1981 fsock.write(text) 1982 fsock.close() 1983 1984 self.make_model_symbolic_link()
1985 1986 #=========================================================================== 1987 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 1988 #===========================================================================
1989 - def write_procdef_mg5(self, file_pos, modelname, process_str):
1990 """ write an equivalent of the MG4 proc_card in order that all the Madevent 1991 Perl script of MadEvent4 are still working properly for pure MG5 run. 1992 Not needed for StandAlone so just return 1993 """ 1994 1995 return
1996 1997 1998 #=========================================================================== 1999 # Make the Helas and Model directories for Standalone directory 2000 #===========================================================================
2001 - def make(self):
2002 """Run make in the DHELAS and MODEL directories, to set up 2003 everything for running standalone 2004 """ 2005 2006 source_dir = pjoin(self.dir_path, "Source") 2007 logger.info("Running make for Helas") 2008 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2009 logger.info("Running make for Model") 2010 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran')
2011 2012 #=========================================================================== 2013 # Create proc_card_mg5.dat for Standalone directory 2014 #===========================================================================
2015 - def finalize(self, matrix_elements, history, mg5options, flaglist):
2016 """Finalize Standalone MG4 directory by 2017 generation proc_card_mg5.dat 2018 generate a global makefile 2019 """ 2020 2021 compiler = {'fortran': mg5options['fortran_compiler'], 2022 'cpp': mg5options['cpp_compiler'], 2023 'f2py': mg5options['f2py_compiler']} 2024 2025 self.compiler_choice(compiler) 2026 self.make() 2027 2028 # Write command history as proc_card_mg5 2029 if history and os.path.isdir(pjoin(self.dir_path, 'Cards')): 2030 output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat') 2031 history.write(output_file) 2032 2033 ProcessExporterFortran.finalize(self, matrix_elements, 2034 history, mg5options, flaglist) 2035 open(pjoin(self.dir_path,'__init__.py'),'w') 2036 open(pjoin(self.dir_path,'SubProcesses','__init__.py'),'w') 2037 2038 if 'mode' in self.opt and self.opt['mode'] == "reweight": 2039 #add the module to hande the NLO weight 2040 files.copytree(pjoin(MG5DIR, 'Template', 'RWGTNLO'), 2041 pjoin(self.dir_path, 'Source')) 2042 files.copytree(pjoin(MG5DIR, 'Template', 'NLO', 'Source', 'PDF'), 2043 pjoin(self.dir_path, 'Source', 'PDF')) 2044 self.write_pdf_opendata() 2045 2046 if self.prefix_info: 2047 self.write_f2py_splitter() 2048 self.write_f2py_makefile() 2049 self.write_f2py_check_sa(matrix_elements, 2050 pjoin(self.dir_path,'SubProcesses','check_sa.py')) 2051 else: 2052 # create a single makefile to compile all the subprocesses 2053 text = '''\n# For python linking (require f2py part of numpy)\nifeq ($(origin MENUM),undefined)\n MENUM=2\nendif\n''' 2054 deppython = '' 2055 for Pdir in os.listdir(pjoin(self.dir_path,'SubProcesses')): 2056 if os.path.isdir(pjoin(self.dir_path, 'SubProcesses', Pdir)): 2057 text += '%(0)s/matrix$(MENUM)py.so:\n\tcd %(0)s;make matrix$(MENUM)py.so\n'% {'0': Pdir} 2058 deppython += ' %(0)s/matrix$(MENUM)py.so ' % {'0': Pdir} 2059 text+='all: %s\n\techo \'done\'' % deppython 2060 2061 ff = open(pjoin(self.dir_path, 'SubProcesses', 'makefile'),'a') 2062 ff.write(text) 2063 ff.close()
2064
2065 - def write_f2py_splitter(self):
2066 """write a function to call the correct matrix element""" 2067 2068 template = """ 2069 %(python_information)s 2070 subroutine smatrixhel(pdgs, npdg, p, ALPHAS, SCALE2, nhel, ANS) 2071 IMPLICIT NONE 2072 2073 CF2PY double precision, intent(in), dimension(0:3,npdg) :: p 2074 CF2PY integer, intent(in), dimension(npdg) :: pdgs 2075 CF2PY integer, intent(in) :: npdg 2076 CF2PY double precision, intent(out) :: ANS 2077 CF2PY double precision, intent(in) :: ALPHAS 2078 CF2PY double precision, intent(in) :: SCALE2 2079 integer pdgs(*) 2080 integer npdg, nhel 2081 double precision p(*) 2082 double precision ANS, ALPHAS, PI,SCALE2 2083 include 'coupl.inc' 2084 2085 PI = 3.141592653589793D0 2086 G = 2* DSQRT(ALPHAS*PI) 2087 CALL UPDATE_AS_PARAM() 2088 if (scale2.ne.0d0) stop 1 2089 2090 %(smatrixhel)s 2091 2092 return 2093 end 2094 2095 SUBROUTINE INITIALISE(PATH) 2096 C ROUTINE FOR F2PY to read the benchmark point. 2097 IMPLICIT NONE 2098 CHARACTER*512 PATH 2099 CF2PY INTENT(IN) :: PATH 2100 CALL SETPARA(PATH) !first call to setup the paramaters 2101 RETURN 2102 END 2103 2104 subroutine get_pdg_order(PDG) 2105 IMPLICIT NONE 2106 CF2PY INTEGER, intent(out) :: PDG(%(nb_me)i,%(maxpart)i) 2107 INTEGER PDG(%(nb_me)i,%(maxpart)i), PDGS(%(nb_me)i,%(maxpart)i) 2108 DATA PDGS/ %(pdgs)s / 2109 PDG = PDGS 2110 RETURN 2111 END 2112 2113 subroutine get_prefix(PREFIX) 2114 IMPLICIT NONE 2115 CF2PY CHARACTER*20, intent(out) :: PREFIX(%(nb_me)i) 2116 character*20 PREFIX(%(nb_me)i),PREF(%(nb_me)i) 2117 DATA PREF / '%(prefix)s'/ 2118 PREFIX = PREF 2119 RETURN 2120 END 2121 2122 2123 """ 2124 2125 allids = self.prefix_info.keys() 2126 allprefix = [self.prefix_info[key][0] for key in allids] 2127 min_nexternal = min([len(ids) for ids in allids]) 2128 max_nexternal = max([len(ids) for ids in allids]) 2129 2130 info = [] 2131 for key, (prefix, tag) in self.prefix_info.items(): 2132 info.append('#PY %s : %s # %s' % (tag, key, prefix)) 2133 2134 2135 text = [] 2136 for n_ext in range(min_nexternal, max_nexternal+1): 2137 current = [ids for ids in allids if len(ids)==n_ext] 2138 if not current: 2139 continue 2140 if min_nexternal != max_nexternal: 2141 if n_ext == min_nexternal: 2142 text.append(' if (npdg.eq.%i)then' % n_ext) 2143 else: 2144 text.append(' else if (npdg.eq.%i)then' % n_ext) 2145 for ii,pdgs in enumerate(current): 2146 condition = '.and.'.join(['%i.eq.pdgs(%i)' %(pdg, i+1) for i, pdg in enumerate(pdgs)]) 2147 if ii==0: 2148 text.append( ' if(%s) then ! %i' % (condition, i)) 2149 else: 2150 text.append( ' else if(%s) then ! %i' % (condition,i)) 2151 text.append(' call %ssmatrixhel(p, nhel, ans)' % self.prefix_info[pdgs][0]) 2152 text.append(' endif') 2153 #close the function 2154 if min_nexternal != max_nexternal: 2155 text.append('endif') 2156 2157 formatting = {'python_information':'\n'.join(info), 2158 'smatrixhel': '\n'.join(text), 2159 'maxpart': max_nexternal, 2160 'nb_me': len(allids), 2161 'pdgs': ','.join(str(pdg[i]) if i<len(pdg) else '0' 2162 for i in range(max_nexternal) for pdg in allids), 2163 'prefix':'\',\''.join(allprefix) 2164 } 2165 formatting['lenprefix'] = len(formatting['prefix']) 2166 text = template % formatting 2167 fsock = writers.FortranWriter(pjoin(self.dir_path, 'SubProcesses', 'all_matrix.f'),'w') 2168 fsock.writelines(text) 2169 fsock.close()
2170
2171 - def write_f2py_check_sa(self, matrix_element, writer):
2172 """ Write the general check_sa.py in SubProcesses that calls all processes successively.""" 2173 # To be implemented. It is just an example file, i.e. not crucial. 2174 return
2175
2176 - def write_f2py_makefile(self):
2177 """ """ 2178 # Add file in SubProcesses 2179 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f2py'), 2180 pjoin(self.dir_path, 'SubProcesses', 'makefile'))
2181
2182 - def create_MA5_cards(self,*args,**opts):
2183 """ Overload the function of the mother so as to bypass this in StandAlone.""" 2184 pass
2185
2186 - def compiler_choice(self, compiler):
2187 """ Different daughter classes might want different compilers. 2188 So this function is meant to be overloaded if desired.""" 2189 2190 self.set_compiler(compiler)
2191 2192 #=========================================================================== 2193 # generate_subprocess_directory 2194 #===========================================================================
2195 - def generate_subprocess_directory(self, matrix_element, 2196 fortran_model, number):
2197 """Generate the Pxxxxx directory for a subprocess in MG4 standalone, 2198 including the necessary matrix.f and nexternal.inc files""" 2199 2200 cwd = os.getcwd() 2201 # Create the directory PN_xx_xxxxx in the specified path 2202 dirpath = pjoin(self.dir_path, 'SubProcesses', \ 2203 "P%s" % matrix_element.get('processes')[0].shell_string()) 2204 2205 if self.opt['sa_symmetry']: 2206 # avoid symmetric output 2207 for i,proc in enumerate(matrix_element.get('processes')): 2208 2209 tag = proc.get_tag() 2210 legs = proc.get('legs')[:] 2211 leg0 = proc.get('legs')[0] 2212 leg1 = proc.get('legs')[1] 2213 if not leg1.get('state'): 2214 proc.get('legs')[0] = leg1 2215 proc.get('legs')[1] = leg0 2216 flegs = proc.get('legs')[2:] 2217 for perm in itertools.permutations(flegs): 2218 for i,p in enumerate(perm): 2219 proc.get('legs')[i+2] = p 2220 dirpath2 = pjoin(self.dir_path, 'SubProcesses', \ 2221 "P%s" % proc.shell_string()) 2222 #restore original order 2223 proc.get('legs')[2:] = legs[2:] 2224 if os.path.exists(dirpath2): 2225 proc.get('legs')[:] = legs 2226 return 0 2227 proc.get('legs')[:] = legs 2228 2229 try: 2230 os.mkdir(dirpath) 2231 except os.error as error: 2232 logger.warning(error.strerror + " " + dirpath) 2233 2234 #try: 2235 # os.chdir(dirpath) 2236 #except os.error: 2237 # logger.error('Could not cd to directory %s' % dirpath) 2238 # return 0 2239 2240 logger.info('Creating files in directory %s' % dirpath) 2241 2242 # Extract number of external particles 2243 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2244 2245 # Create the matrix.f file and the nexternal.inc file 2246 if self.opt['export_format']=='standalone_msP': 2247 filename = pjoin(dirpath, 'matrix_prod.f') 2248 else: 2249 filename = pjoin(dirpath, 'matrix.f') 2250 2251 proc_prefix = '' 2252 if 'prefix' in self.cmd_options: 2253 if self.cmd_options['prefix'] == 'int': 2254 proc_prefix = 'M%s_' % number 2255 elif self.cmd_options['prefix'] == 'proc': 2256 proc_prefix = matrix_element.get('processes')[0].shell_string().split('_',1)[1] 2257 else: 2258 raise Exception, '--prefix options supports only \'int\' and \'proc\'' 2259 for proc in matrix_element.get('processes'): 2260 ids = [l.get('id') for l in proc.get('legs_with_decays')] 2261 self.prefix_info[tuple(ids)] = [proc_prefix, proc.get_tag()] 2262 2263 calls = self.write_matrix_element_v4( 2264 writers.FortranWriter(filename), 2265 matrix_element, 2266 fortran_model, 2267 proc_prefix=proc_prefix) 2268 2269 if self.opt['export_format'] == 'standalone_msP': 2270 filename = pjoin(dirpath,'configs_production.inc') 2271 mapconfigs, s_and_t_channels = self.write_configs_file(\ 2272 writers.FortranWriter(filename), 2273 matrix_element) 2274 2275 filename = pjoin(dirpath,'props_production.inc') 2276 self.write_props_file(writers.FortranWriter(filename), 2277 matrix_element, 2278 s_and_t_channels) 2279 2280 filename = pjoin(dirpath,'nexternal_prod.inc') 2281 self.write_nexternal_madspin(writers.FortranWriter(filename), 2282 nexternal, ninitial) 2283 2284 if self.opt['export_format']=='standalone_msF': 2285 filename = pjoin(dirpath, 'helamp.inc') 2286 ncomb=matrix_element.get_helicity_combinations() 2287 self.write_helamp_madspin(writers.FortranWriter(filename), 2288 ncomb) 2289 2290 filename = pjoin(dirpath, 'nexternal.inc') 2291 self.write_nexternal_file(writers.FortranWriter(filename), 2292 nexternal, ninitial) 2293 2294 filename = pjoin(dirpath, 'pmass.inc') 2295 self.write_pmass_file(writers.FortranWriter(filename), 2296 matrix_element) 2297 2298 filename 
= pjoin(dirpath, 'ngraphs.inc') 2299 self.write_ngraphs_file(writers.FortranWriter(filename), 2300 len(matrix_element.get_all_amplitudes())) 2301 2302 # Generate diagrams 2303 filename = pjoin(dirpath, "matrix.ps") 2304 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 2305 get('diagrams'), 2306 filename, 2307 model=matrix_element.get('processes')[0].\ 2308 get('model'), 2309 amplitude=True) 2310 logger.info("Generating Feynman diagrams for " + \ 2311 matrix_element.get('processes')[0].nice_string()) 2312 plot.draw() 2313 2314 linkfiles = ['check_sa.f', 'coupl.inc'] 2315 2316 if proc_prefix and os.path.exists(pjoin(dirpath, '..', 'check_sa.f')): 2317 text = open(pjoin(dirpath, '..', 'check_sa.f')).read() 2318 pat = re.compile('smatrix', re.I) 2319 new_text, n = re.subn(pat, '%ssmatrix' % proc_prefix, text) 2320 with open(pjoin(dirpath, 'check_sa.f'),'w') as f: 2321 f.write(new_text) 2322 linkfiles.pop(0) 2323 2324 for file in linkfiles: 2325 ln('../%s' % file, cwd=dirpath) 2326 ln('../makefileP', name='makefile', cwd=dirpath) 2327 # Return to original PWD 2328 #os.chdir(cwd) 2329 2330 if not calls: 2331 calls = 0 2332 return calls
2333 2334 2335 #=========================================================================== 2336 # write_source_makefile 2337 #===========================================================================
2338 - def write_source_makefile(self, writer):
2339 """Write the nexternal.inc file for MG4""" 2340 2341 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 2342 set_of_lib = '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)' 2343 model_line='''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n''' 2344 2345 replace_dict= {'libraries': set_of_lib, 2346 'model':model_line, 2347 'additional_dsample': '', 2348 'additional_dependencies':''} 2349 2350 text = open(path).read() % replace_dict 2351 2352 if writer: 2353 writer.write(text) 2354 2355 return replace_dict
2356 2357 #=========================================================================== 2358 # write_matrix_element_v4 2359 #===========================================================================
2360 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 2361 write=True, proc_prefix=''):
2362 """Export a matrix element to a matrix.f file in MG4 standalone format 2363 if write is on False, just return the replace_dict and not write anything.""" 2364 2365 2366 if not matrix_element.get('processes') or \ 2367 not matrix_element.get('diagrams'): 2368 return 0 2369 2370 if writer: 2371 if not isinstance(writer, writers.FortranWriter): 2372 raise writers.FortranWriter.FortranWriterError(\ 2373 "writer not FortranWriter but %s" % type(writer)) 2374 # Set lowercase/uppercase Fortran code 2375 writers.FortranWriter.downcase = False 2376 2377 2378 if not self.opt.has_key('sa_symmetry'): 2379 self.opt['sa_symmetry']=False 2380 2381 2382 # The proc_id is for MadEvent grouping which is never used in SA. 2383 replace_dict = {'global_variable':'', 'amp2_lines':'', 2384 'proc_prefix':proc_prefix, 'proc_id':''} 2385 2386 # Extract helas calls 2387 helas_calls = fortran_model.get_matrix_element_calls(\ 2388 matrix_element) 2389 2390 replace_dict['helas_calls'] = "\n".join(helas_calls) 2391 2392 # Extract version number and date from VERSION file 2393 info_lines = self.get_mg5_info_lines() 2394 replace_dict['info_lines'] = info_lines 2395 2396 # Extract process info lines 2397 process_lines = self.get_process_info_lines(matrix_element) 2398 replace_dict['process_lines'] = process_lines 2399 2400 # Extract number of external particles 2401 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2402 replace_dict['nexternal'] = nexternal 2403 replace_dict['nincoming'] = ninitial 2404 2405 # Extract ncomb 2406 ncomb = matrix_element.get_helicity_combinations() 2407 replace_dict['ncomb'] = ncomb 2408 2409 # Extract helicity lines 2410 helicity_lines = self.get_helicity_lines(matrix_element) 2411 replace_dict['helicity_lines'] = helicity_lines 2412 2413 # Extract overall denominator 2414 # Averaging initial state color, spin, and identical FS particles 2415 replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element) 2416 2417 # Extract ngraphs 2418 ngraphs = matrix_element.get_number_of_amplitudes() 2419 replace_dict['ngraphs'] = ngraphs 2420 2421 # Extract nwavefuncs 2422 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2423 replace_dict['nwavefuncs'] = nwavefuncs 2424 2425 # Extract ncolor 2426 ncolor = max(1, len(matrix_element.get('color_basis'))) 2427 replace_dict['ncolor'] = ncolor 2428 2429 replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor() 2430 replace_dict['beamone_helavgfactor'], replace_dict['beamtwo_helavgfactor'] =\ 2431 matrix_element.get_beams_hel_avg_factor() 2432 2433 # Extract color data lines 2434 color_data_lines = self.get_color_data_lines(matrix_element) 2435 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2436 2437 if self.opt['export_format']=='standalone_msP': 2438 # For MadSpin need to return the AMP2 2439 amp2_lines = self.get_amp2_lines(matrix_element, [] ) 2440 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2441 replace_dict['global_variable'] = \ 2442 " Double Precision amp2(NGRAPHS)\n common/to_amps/ amp2\n" 2443 2444 # JAMP definition, depends on the number of independent split orders 2445 split_orders=matrix_element.get('processes')[0].get('split_orders') 2446 2447 if len(split_orders)==0: 2448 replace_dict['nSplitOrders']='' 2449 # Extract JAMP lines 2450 jamp_lines = self.get_JAMP_lines(matrix_element) 2451 # Consider the output of a dummy order 'ALL_ORDERS' for which we 2452 # set all amplitude order to weight 1 and only one squared order 2453 # contribution which is of course ALL_ORDERS=2. 
2454 squared_orders = [(2,),] 2455 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 2456 replace_dict['chosen_so_configs'] = '.TRUE.' 2457 replace_dict['nSqAmpSplitOrders']=1 2458 replace_dict['split_order_str_list']='' 2459 else: 2460 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 2461 replace_dict['nAmpSplitOrders']=len(amp_orders) 2462 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 2463 replace_dict['nSplitOrders']=len(split_orders) 2464 replace_dict['split_order_str_list']=str(split_orders) 2465 amp_so = self.get_split_orders_lines( 2466 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 2467 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 2468 replace_dict['ampsplitorders']='\n'.join(amp_so) 2469 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 2470 jamp_lines = self.get_JAMP_lines_split_order(\ 2471 matrix_element,amp_orders,split_order_names=split_orders) 2472 2473 # Now setup the array specifying what squared split order is chosen 2474 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 2475 matrix_element.get('processes')[0],squared_orders) 2476 2477 # For convenience we also write the driver check_sa_splitOrders.f 2478 # that explicitely writes out the contribution from each squared order. 2479 # The original driver still works and is compiled with 'make' while 2480 # the splitOrders one is compiled with 'make check_sa_born_splitOrders' 2481 check_sa_writer=writers.FortranWriter('check_sa_born_splitOrders.f') 2482 self.write_check_sa_splitOrders(squared_orders,split_orders, 2483 nexternal,ninitial,proc_prefix,check_sa_writer) 2484 2485 if write: 2486 writers.FortranWriter('nsqso_born.inc').writelines( 2487 """INTEGER NSQSO_BORN 2488 PARAMETER (NSQSO_BORN=%d)"""%replace_dict['nSqAmpSplitOrders']) 2489 2490 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2491 2492 matrix_template = self.matrix_template 2493 if self.opt['export_format']=='standalone_msP' : 2494 matrix_template = 'matrix_standalone_msP_v4.inc' 2495 elif self.opt['export_format']=='standalone_msF': 2496 matrix_template = 'matrix_standalone_msF_v4.inc' 2497 elif self.opt['export_format']=='matchbox': 2498 replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id') 2499 replace_dict["color_information"] = self.get_color_string_lines(matrix_element) 2500 2501 if len(split_orders)>0: 2502 if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']: 2503 logger.debug("Warning: The export format %s is not "+\ 2504 " available for individual ME evaluation of given coupl. orders."+\ 2505 " Only the total ME will be computed.", self.opt['export_format']) 2506 elif self.opt['export_format'] in ['madloop_matchbox']: 2507 replace_dict["color_information"] = self.get_color_string_lines(matrix_element) 2508 matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc" 2509 else: 2510 matrix_template = "matrix_standalone_splitOrders_v4.inc" 2511 2512 replace_dict['template_file'] = pjoin(_file_path, 'iolibs', 'template_files', matrix_template) 2513 replace_dict['template_file2'] = pjoin(_file_path, \ 2514 'iolibs/template_files/split_orders_helping_functions.inc') 2515 if write and writer: 2516 path = replace_dict['template_file'] 2517 content = open(path).read() 2518 content = content % replace_dict 2519 # Write the file 2520 writer.writelines(content) 2521 # Add the helper functions. 
2522 if len(split_orders)>0: 2523 content = '\n' + open(replace_dict['template_file2'])\ 2524 .read()%replace_dict 2525 writer.writelines(content) 2526 return len(filter(lambda call: call.find('#') != 0, helas_calls)) 2527 else: 2528 replace_dict['return_value'] = len(filter(lambda call: call.find('#') != 0, helas_calls)) 2529 return replace_dict # for subclass update
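A minimal sketch of the template mechanism used above: the .inc templates are plain text with named %(...)s placeholders that are filled in from replace_dict (the fragment below is made up):

# Illustration of the template substitution driven by replace_dict:
template = ("      INTEGER NGRAPHS\n"
            "      PARAMETER (NGRAPHS=%(ngraphs)d)\n"
            "%(helas_calls)s\n")
replace_dict = {'ngraphs': 2,
                'helas_calls': "      CALL IXXXXX(P(0,1),ZERO,NHEL(1),+1*IC(1),W(1,1))"}
print(template % replace_dict)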
2530
2531 - def write_check_sa_splitOrders(self,squared_orders, split_orders, nexternal, 2532 nincoming, proc_prefix, writer):
2533 """ Write out a more advanced version of the check_sa drivers that 2534 individually returns the matrix element for each contributing squared 2535 order.""" 2536 2537 check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \ 2538 'template_files', 'check_sa_splitOrders.f')).read() 2539 printout_sq_orders=[] 2540 for i, squared_order in enumerate(squared_orders): 2541 sq_orders=[] 2542 for j, sqo in enumerate(squared_order): 2543 sq_orders.append('%s=%d'%(split_orders[j],sqo)) 2544 printout_sq_orders.append(\ 2545 "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\ 2546 %(i+1,' '.join(sq_orders),i+1)) 2547 printout_sq_orders='\n'.join(printout_sq_orders) 2548 replace_dict = {'printout_sqorders':printout_sq_orders, 2549 'nSplitOrders':len(squared_orders), 2550 'nexternal':nexternal, 2551 'nincoming':nincoming, 2552 'proc_prefix':proc_prefix} 2553 2554 if writer: 2555 writer.writelines(check_sa_content % replace_dict) 2556 else: 2557 return replace_dict
2558
2559 -class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
2560 """class to take care of exporting a set of matrix element for the Matchbox 2561 code in the case of Born only routine""" 2562 2563 default_opt = {'clean': False, 'complex_mass':False, 2564 'export_format':'matchbox', 'mp': False, 2565 'sa_symmetry': True} 2566 2567 #specific template of the born 2568 2569 2570 matrix_template = "matrix_standalone_matchbox.inc" 2571 2572 @staticmethod
2573 - def get_color_string_lines(matrix_element):
2574 """Return the color matrix definition lines for this matrix element. Split 2575 rows in chunks of size n.""" 2576 2577 if not matrix_element.get('color_matrix'): 2578 return "\n".join(["out = 1"]) 2579 2580 #start the real work 2581 color_denominators = matrix_element.get('color_matrix').\ 2582 get_line_denominators() 2583 matrix_strings = [] 2584 my_cs = color.ColorString() 2585 for i_color in xrange(len(color_denominators)): 2586 # Then write the numerators for the matrix elements 2587 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color]) 2588 t_str=repr(my_cs) 2589 t_match=re.compile(r"(\w+)\(([\s\d+\,]*)\)") 2590 # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall: 2591 # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')] 2592 all_matches = t_match.findall(t_str) 2593 output = {} 2594 arg=[] 2595 for match in all_matches: 2596 ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')] 2597 if ctype in ['ColorOne' ]: 2598 continue 2599 if ctype not in ['T', 'Tr' ]: 2600 raise MadGraph5Error, 'Color Structure not handled by Matchbox: %s' % ctype 2601 tmparg += ['0'] 2602 arg +=tmparg 2603 for j, v in enumerate(arg): 2604 output[(i_color,j)] = v 2605 2606 for key in output: 2607 if matrix_strings == []: 2608 #first entry 2609 matrix_strings.append(""" 2610 if (in1.eq.%s.and.in2.eq.%s)then 2611 out = %s 2612 """ % (key[0], key[1], output[key])) 2613 else: 2614 #not first entry 2615 matrix_strings.append(""" 2616 elseif (in1.eq.%s.and.in2.eq.%s)then 2617 out = %s 2618 """ % (key[0], key[1], output[key])) 2619 if len(matrix_strings): 2620 matrix_strings.append(" else \n out = - 1 \n endif") 2621 else: 2622 return "\n out = - 1 \n " 2623 return "\n".join(matrix_strings)
2624
2625 - def make(self,*args,**opts):
2626 pass
2627
2628 - def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1, 2629 JAMP_formatLC=None):
2630 2631 """Adding leading color part of the colorflow""" 2632 2633 if not JAMP_formatLC: 2634 JAMP_formatLC= "LN%s" % JAMP_format 2635 2636 error_msg="Malformed '%s' argument passed to the get_JAMP_lines" 2637 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 2638 col_amps=col_amps.get_color_amplitudes() 2639 elif(isinstance(col_amps,list)): 2640 if(col_amps and isinstance(col_amps[0],list)): 2641 col_amps=col_amps 2642 else: 2643 raise MadGraph5Error, error_msg % 'col_amps' 2644 else: 2645 raise MadGraph5Error, error_msg % 'col_amps' 2646 2647 text = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps, 2648 JAMP_format=JAMP_format, 2649 AMP_format=AMP_format, 2650 split=-1) 2651 2652 2653 # Filter the col_ampls to generate only those without any 1/NC terms 2654 2655 LC_col_amps = [] 2656 for coeff_list in col_amps: 2657 to_add = [] 2658 for (coefficient, amp_number) in coeff_list: 2659 if coefficient[3]==0: 2660 to_add.append( (coefficient, amp_number) ) 2661 LC_col_amps.append(to_add) 2662 2663 text += super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps, 2664 JAMP_format=JAMP_formatLC, 2665 AMP_format=AMP_format, 2666 split=-1) 2667 2668 return text
2669
2670 2671 2672 2673 #=============================================================================== 2674 # ProcessExporterFortranMW 2675 #=============================================================================== 2676 -class ProcessExporterFortranMW(ProcessExporterFortran):
2677 """Class to take care of exporting a set of matrix elements to 2678 MadGraph v4 - MadWeight format.""" 2679 2680 matrix_file="matrix_standalone_v4.inc" 2681
2682 - def copy_template(self, model):
2683 """Additional actions needed for setup of Template 2684 """ 2685 2686 super(ProcessExporterFortranMW, self).copy_template(model) 2687 2688 # Add the MW specific file 2689 shutil.copytree(pjoin(MG5DIR,'Template','MadWeight'), 2690 pjoin(self.dir_path, 'Source','MadWeight'), True) 2691 shutil.copytree(pjoin(MG5DIR,'madgraph','madweight'), 2692 pjoin(self.dir_path, 'bin','internal','madweight'), True) 2693 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','setrun.f'), 2694 pjoin(self.dir_path, 'Source','setrun.f')) 2695 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','run.inc'), 2696 pjoin(self.dir_path, 'Source','run.inc')) 2697 # File created from Template (Different in some child class) 2698 filename = os.path.join(self.dir_path,'Source','run_config.inc') 2699 self.write_run_config_file(writers.FortranWriter(filename)) 2700 2701 try: 2702 subprocess.call([os.path.join(self.dir_path, 'Source','MadWeight','bin','internal','pass_to_madweight')], 2703 stdout = os.open(os.devnull, os.O_RDWR), 2704 stderr = os.open(os.devnull, os.O_RDWR), 2705 cwd=self.dir_path) 2706 except OSError: 2707 # Probably madweight already called 2708 pass 2709 2710 # Copy the different python file in the Template 2711 self.copy_python_file() 2712 # create the appropriate cuts.f 2713 self.get_mw_cuts_version() 2714 2715 # add the makefile in Source directory 2716 filename = os.path.join(self.dir_path,'Source','makefile') 2717 self.write_source_makefile(writers.FortranWriter(filename))
2718 2719 2720 2721 2722 #=========================================================================== 2723 # convert_model 2724 #===========================================================================
2725 - def convert_model(self, model, wanted_lorentz = [], 2726 wanted_couplings = []):
2727 2728 super(ProcessExporterFortranMW,self).convert_model(model, 2729 wanted_lorentz, wanted_couplings) 2730 2731 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 2732 try: 2733 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 2734 except OSError as error: 2735 pass 2736 model_path = model.get('modelpath') 2737 # This is not safe if there is a '##' or '-' in the path. 2738 shutil.copytree(model_path, 2739 pjoin(self.dir_path,'bin','internal','ufomodel'), 2740 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 2741 if hasattr(model, 'restrict_card'): 2742 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 2743 'restrict_default.dat') 2744 if isinstance(model.restrict_card, check_param_card.ParamCard): 2745 model.restrict_card.write(out_path) 2746 else: 2747 files.cp(model.restrict_card, out_path)
2748 2749 #=========================================================================== 2750 # generate_subprocess_directory 2751 #===========================================================================
2752 - def copy_python_file(self):
2753 """copy the python file require for the Template""" 2754 2755 # madevent interface 2756 cp(_file_path+'/interface/madweight_interface.py', 2757 self.dir_path+'/bin/internal/madweight_interface.py') 2758 cp(_file_path+'/interface/extended_cmd.py', 2759 self.dir_path+'/bin/internal/extended_cmd.py') 2760 cp(_file_path+'/interface/common_run_interface.py', 2761 self.dir_path+'/bin/internal/common_run_interface.py') 2762 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 2763 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 2764 cp(_file_path+'/iolibs/save_load_object.py', 2765 self.dir_path+'/bin/internal/save_load_object.py') 2766 cp(_file_path+'/madevent/gen_crossxhtml.py', 2767 self.dir_path+'/bin/internal/gen_crossxhtml.py') 2768 cp(_file_path+'/madevent/sum_html.py', 2769 self.dir_path+'/bin/internal/sum_html.py') 2770 cp(_file_path+'/various/FO_analyse_card.py', 2771 self.dir_path+'/bin/internal/FO_analyse_card.py') 2772 cp(_file_path+'/iolibs/file_writers.py', 2773 self.dir_path+'/bin/internal/file_writers.py') 2774 #model file 2775 cp(_file_path+'../models/check_param_card.py', 2776 self.dir_path+'/bin/internal/check_param_card.py') 2777 2778 #madevent file 2779 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 2780 cp(_file_path+'/various/lhe_parser.py', 2781 self.dir_path+'/bin/internal/lhe_parser.py') 2782 2783 cp(_file_path+'/various/banner.py', 2784 self.dir_path+'/bin/internal/banner.py') 2785 cp(_file_path+'/various/shower_card.py', 2786 self.dir_path+'/bin/internal/shower_card.py') 2787 cp(_file_path+'/various/cluster.py', 2788 self.dir_path+'/bin/internal/cluster.py') 2789 2790 # logging configuration 2791 cp(_file_path+'/interface/.mg5_logging.conf', 2792 self.dir_path+'/bin/internal/me5_logging.conf') 2793 cp(_file_path+'/interface/coloring_logging.py', 2794 self.dir_path+'/bin/internal/coloring_logging.py')
2795 2796 2797 #=========================================================================== 2798 # Change the version of cuts.f to the one compatible with MW 2799 #===========================================================================
2800 - def get_mw_cuts_version(self, outpath=None):
2801 """create the appropriate cuts.f 2802 This is based on the one associated to ME output but: 2803 1) No clustering (=> remove initcluster/setclscales) 2804 2) Adding the definition of cut_bw at the file. 2805 """ 2806 2807 template = open(pjoin(MG5DIR,'Template','LO','SubProcesses','cuts.f')) 2808 2809 text = StringIO() 2810 #1) remove all dependencies in ickkw >1: 2811 nb_if = 0 2812 for line in template: 2813 if 'if(xqcut.gt.0d0' in line: 2814 nb_if = 1 2815 if nb_if == 0: 2816 text.write(line) 2817 continue 2818 if re.search(r'if\(.*\)\s*then', line): 2819 nb_if += 1 2820 elif 'endif' in line: 2821 nb_if -= 1 2822 2823 #2) add fake cut_bw (have to put the true one later) 2824 text.write(""" 2825 logical function cut_bw(p) 2826 include 'madweight_param.inc' 2827 double precision p(*) 2828 if (bw_cut) then 2829 cut_bw = .true. 2830 else 2831 stop 1 2832 endif 2833 return 2834 end 2835 """) 2836 2837 final = text.getvalue() 2838 #3) remove the call to initcluster: 2839 template = final.replace('call initcluster', '! Remove for MW!call initcluster') 2840 template = template.replace('genps.inc', 'maxparticles.inc') 2841 #Now we can write it 2842 if not outpath: 2843 fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w') 2844 elif isinstance(outpath, str): 2845 fsock = open(outpath, 'w') 2846 else: 2847 fsock = outpath 2848 fsock.write(template)
2849 2850 2851 2852 #=========================================================================== 2853 # Make the Helas and Model directories for Standalone directory 2854 #===========================================================================
2855 - def make(self):
2856 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 2857 everything for running madweight 2858 """ 2859 2860 source_dir = os.path.join(self.dir_path, "Source") 2861 logger.info("Running make for Helas") 2862 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2863 logger.info("Running make for Model") 2864 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 2865 logger.info("Running make for PDF") 2866 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 2867 logger.info("Running make for CERNLIB") 2868 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 2869 logger.info("Running make for GENERIC") 2870 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 2871 logger.info("Running make for blocks") 2872 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 2873 logger.info("Running make for tools") 2874 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
2875 2876 #=========================================================================== 2877 # Create proc_card_mg5.dat for MadWeight directory 2878 #===========================================================================
2879 - def finalize(self, matrix_elements, history, mg5options, flaglist):
2880 """Finalize Standalone MG4 directory by generation proc_card_mg5.dat""" 2881 2882 compiler = {'fortran': mg5options['fortran_compiler'], 2883 'cpp': mg5options['cpp_compiler'], 2884 'f2py': mg5options['f2py_compiler']} 2885 2886 2887 2888 #proc_charac 2889 self.create_proc_charac() 2890 2891 # Write maxparticles.inc based on max of ME's/subprocess groups 2892 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 2893 self.write_maxparticles_file(writers.FortranWriter(filename), 2894 matrix_elements) 2895 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 2896 pjoin(self.dir_path, 'Source','MadWeight','blocks')) 2897 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 2898 pjoin(self.dir_path, 'Source','MadWeight','tools')) 2899 2900 self.set_compiler(compiler) 2901 self.make() 2902 2903 # Write command history as proc_card_mg5 2904 if os.path.isdir(os.path.join(self.dir_path, 'Cards')): 2905 output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat') 2906 history.write(output_file) 2907 2908 ProcessExporterFortran.finalize(self, matrix_elements, 2909 history, mg5options, flaglist)
2910 2911 2912 2913 #=========================================================================== 2914 # create the run_card for MW 2915 #===========================================================================
2916 - def create_run_card(self, matrix_elements, history):
2917 """ """ 2918 2919 run_card = banner_mod.RunCard() 2920 2921 # pass to default for MW 2922 run_card["run_tag"] = "\'not_use\'" 2923 run_card["fixed_ren_scale"] = "T" 2924 run_card["fixed_fac_scale"] = "T" 2925 run_card.remove_all_cut() 2926 2927 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 2928 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2929 python_template=True) 2930 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'), 2931 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2932 python_template=True)
2933 2934 #=========================================================================== 2935 # export model files 2936 #===========================================================================
2937 - def export_model_files(self, model_path):
2938 """export the model dependent files for V4 model""" 2939 2940 super(ProcessExporterFortranMW,self).export_model_files(model_path) 2941 # Add the routine update_as_param in v4 model 2942 # This is a function created in the UFO 2943 text=""" 2944 subroutine update_as_param() 2945 call setpara('param_card.dat',.false.) 2946 return 2947 end 2948 """ 2949 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 2950 ff.write(text) 2951 ff.close() 2952 2953 # Modify setrun.f 2954 text = open(os.path.join(self.dir_path,'Source','setrun.f')).read() 2955 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 2956 fsock = open(os.path.join(self.dir_path,'Source','setrun.f'), 'w') 2957 fsock.write(text) 2958 fsock.close() 2959 2960 # Modify initialization.f 2961 text = open(os.path.join(self.dir_path,'SubProcesses','initialization.f')).read() 2962 text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)') 2963 fsock = open(os.path.join(self.dir_path,'SubProcesses','initialization.f'), 'w') 2964 fsock.write(text) 2965 fsock.close() 2966 2967 2968 self.make_model_symbolic_link()
2969 2970 #=========================================================================== 2971 # generate_subprocess_directory 2972 #===========================================================================
2973 - def generate_subprocess_directory(self, matrix_element, 2974 fortran_model,number):
2975 """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format, 2976 including the necessary matrix.f and nexternal.inc files""" 2977 2978 cwd = os.getcwd() 2979 # Create the directory PN_xx_xxxxx in the specified path 2980 dirpath = os.path.join(self.dir_path, 'SubProcesses', \ 2981 "P%s" % matrix_element.get('processes')[0].shell_string()) 2982 2983 try: 2984 os.mkdir(dirpath) 2985 except os.error as error: 2986 logger.warning(error.strerror + " " + dirpath) 2987 2988 #try: 2989 # os.chdir(dirpath) 2990 #except os.error: 2991 # logger.error('Could not cd to directory %s' % dirpath) 2992 # return 0 2993 2994 logger.info('Creating files in directory %s' % dirpath) 2995 2996 # Extract number of external particles 2997 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2998 2999 # Create the matrix.f file and the nexternal.inc file 3000 filename = pjoin(dirpath,'matrix.f') 3001 calls,ncolor = self.write_matrix_element_v4( 3002 writers.FortranWriter(filename), 3003 matrix_element, 3004 fortran_model) 3005 3006 filename = pjoin(dirpath, 'auto_dsig.f') 3007 self.write_auto_dsig_file(writers.FortranWriter(filename), 3008 matrix_element) 3009 3010 filename = pjoin(dirpath, 'configs.inc') 3011 mapconfigs, s_and_t_channels = self.write_configs_file(\ 3012 writers.FortranWriter(filename), 3013 matrix_element) 3014 3015 filename = pjoin(dirpath, 'nexternal.inc') 3016 self.write_nexternal_file(writers.FortranWriter(filename), 3017 nexternal, ninitial) 3018 3019 filename = pjoin(dirpath, 'leshouche.inc') 3020 self.write_leshouche_file(writers.FortranWriter(filename), 3021 matrix_element) 3022 3023 filename = pjoin(dirpath, 'props.inc') 3024 self.write_props_file(writers.FortranWriter(filename), 3025 matrix_element, 3026 s_and_t_channels) 3027 3028 filename = pjoin(dirpath, 'pmass.inc') 3029 self.write_pmass_file(writers.FortranWriter(filename), 3030 matrix_element) 3031 3032 filename = pjoin(dirpath, 'ngraphs.inc') 3033 self.write_ngraphs_file(writers.FortranWriter(filename), 3034 len(matrix_element.get_all_amplitudes())) 3035 3036 filename = pjoin(dirpath, 'maxamps.inc') 3037 self.write_maxamps_file(writers.FortranWriter(filename), 3038 len(matrix_element.get('diagrams')), 3039 ncolor, 3040 len(matrix_element.get('processes')), 3041 1) 3042 3043 filename = pjoin(dirpath, 'phasespace.inc') 3044 self.write_phasespace_file(writers.FortranWriter(filename), 3045 len(matrix_element.get('diagrams')), 3046 ) 3047 3048 # Generate diagrams 3049 filename = pjoin(dirpath, "matrix.ps") 3050 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 3051 get('diagrams'), 3052 filename, 3053 model=matrix_element.get('processes')[0].\ 3054 get('model'), 3055 amplitude='') 3056 logger.info("Generating Feynman diagrams for " + \ 3057 matrix_element.get('processes')[0].nice_string()) 3058 plot.draw() 3059 3060 #import genps.inc and maxconfigs.inc into Subprocesses 3061 ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False) 3062 #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False) 3063 3064 linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f', 'genps.inc'] 3065 3066 for file in linkfiles: 3067 ln('../%s' % file, starting_dir=cwd) 3068 3069 ln('nexternal.inc', '../../Source', log=False, cwd=dirpath) 3070 ln('leshouche.inc', '../../Source', log=False, cwd=dirpath) 3071 ln('maxamps.inc', '../../Source', log=False, cwd=dirpath) 3072 
ln('phasespace.inc', '../', log=True, cwd=dirpath) 3073 # Return to original PWD 3074 #os.chdir(cwd) 3075 3076 if not calls: 3077 calls = 0 3078 return calls
3079 3080 #=========================================================================== 3081 # write_matrix_element_v4 3082 #===========================================================================
3083 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
3084 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 3085 3086 if not matrix_element.get('processes') or \ 3087 not matrix_element.get('diagrams'): 3088 return 0 3089 3090 if writer: 3091 if not isinstance(writer, writers.FortranWriter): 3092 raise writers.FortranWriter.FortranWriterError(\ 3093 "writer not FortranWriter") 3094 3095 # Set lowercase/uppercase Fortran code 3096 writers.FortranWriter.downcase = False 3097 3098 replace_dict = {} 3099 3100 # Extract version number and date from VERSION file 3101 info_lines = self.get_mg5_info_lines() 3102 replace_dict['info_lines'] = info_lines 3103 3104 # Extract process info lines 3105 process_lines = self.get_process_info_lines(matrix_element) 3106 replace_dict['process_lines'] = process_lines 3107 3108 # Set proc_id 3109 replace_dict['proc_id'] = proc_id 3110 3111 # Extract number of external particles 3112 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3113 replace_dict['nexternal'] = nexternal 3114 3115 # Extract ncomb 3116 ncomb = matrix_element.get_helicity_combinations() 3117 replace_dict['ncomb'] = ncomb 3118 3119 # Extract helicity lines 3120 helicity_lines = self.get_helicity_lines(matrix_element) 3121 replace_dict['helicity_lines'] = helicity_lines 3122 3123 # Extract overall denominator 3124 # Averaging initial state color, spin, and identical FS particles 3125 den_factor_line = self.get_den_factor_line(matrix_element) 3126 replace_dict['den_factor_line'] = den_factor_line 3127 3128 # Extract ngraphs 3129 ngraphs = matrix_element.get_number_of_amplitudes() 3130 replace_dict['ngraphs'] = ngraphs 3131 3132 # Extract nwavefuncs 3133 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3134 replace_dict['nwavefuncs'] = nwavefuncs 3135 3136 # Extract ncolor 3137 ncolor = max(1, len(matrix_element.get('color_basis'))) 3138 replace_dict['ncolor'] = ncolor 3139 3140 # Extract color data lines 3141 color_data_lines = self.get_color_data_lines(matrix_element) 3142 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3143 3144 # Extract helas calls 3145 helas_calls = fortran_model.get_matrix_element_calls(\ 3146 matrix_element) 3147 3148 replace_dict['helas_calls'] = "\n".join(helas_calls) 3149 3150 # Extract JAMP lines 3151 jamp_lines = self.get_JAMP_lines(matrix_element) 3152 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 3153 3154 replace_dict['template_file'] = os.path.join(_file_path, \ 3155 'iolibs/template_files/%s' % self.matrix_file) 3156 replace_dict['template_file2'] = '' 3157 3158 if writer: 3159 file = open(replace_dict['template_file']).read() 3160 file = file % replace_dict 3161 # Write the file 3162 writer.writelines(file) 3163 return len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor 3164 else: 3165 replace_dict['return_value'] = (len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor)
3166 3167 #=========================================================================== 3168 # write_source_makefile 3169 #===========================================================================
3170 - def write_source_makefile(self, writer):
3171 """Write the nexternal.inc file for madweight""" 3172 3173 3174 path = os.path.join(_file_path,'iolibs','template_files','madweight_makefile_source') 3175 set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)' 3176 text = open(path).read() % {'libraries': set_of_lib} 3177 writer.write(text) 3178 3179 return True
3180
3181 - def write_phasespace_file(self, writer, nb_diag):
3182 """ """ 3183 3184 template = """ include 'maxparticles.inc' 3185 integer max_branches 3186 parameter (max_branches=max_particles-1) 3187 integer max_configs 3188 parameter (max_configs=%(nb_diag)s) 3189 3190 c channel position 3191 integer config_pos,perm_pos 3192 common /to_config/config_pos,perm_pos 3193 3194 """ 3195 3196 writer.write(template % {'nb_diag': nb_diag})
3197 3198 3199 #=========================================================================== 3200 # write_auto_dsig_file 3201 #===========================================================================
3202 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
3203 """Write the auto_dsig.f file for the differential cross section 3204 calculation, includes pdf call information (MadWeight format)""" 3205 3206 if not matrix_element.get('processes') or \ 3207 not matrix_element.get('diagrams'): 3208 return 0 3209 3210 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 3211 3212 if ninitial < 1 or ninitial > 2: 3213 raise writers.FortranWriter.FortranWriterError, \ 3214 """Need ninitial = 1 or 2 to write auto_dsig file""" 3215 3216 replace_dict = {} 3217 3218 # Extract version number and date from VERSION file 3219 info_lines = self.get_mg5_info_lines() 3220 replace_dict['info_lines'] = info_lines 3221 3222 # Extract process info lines 3223 process_lines = self.get_process_info_lines(matrix_element) 3224 replace_dict['process_lines'] = process_lines 3225 3226 # Set proc_id 3227 replace_dict['proc_id'] = proc_id 3228 replace_dict['numproc'] = 1 3229 3230 # Set dsig_line 3231 if ninitial == 1: 3232 # No conversion, since result of decay should be given in GeV 3233 dsig_line = "pd(0)*dsiguu" 3234 else: 3235 # Convert result (in GeV) to pb 3236 dsig_line = "pd(0)*conv*dsiguu" 3237 3238 replace_dict['dsig_line'] = dsig_line 3239 3240 # Extract pdf lines 3241 pdf_vars, pdf_data, pdf_lines = \ 3242 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 3243 replace_dict['pdf_vars'] = pdf_vars 3244 replace_dict['pdf_data'] = pdf_data 3245 replace_dict['pdf_lines'] = pdf_lines 3246 3247 # Lines that differ between subprocess group and regular 3248 if proc_id: 3249 replace_dict['numproc'] = int(proc_id) 3250 replace_dict['passcuts_begin'] = "" 3251 replace_dict['passcuts_end'] = "" 3252 # Set lines for subprocess group version 3253 # Set define_iconfigs_lines 3254 replace_dict['define_subdiag_lines'] = \ 3255 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 3256 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 3257 else: 3258 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 3259 replace_dict['passcuts_end'] = "ENDIF" 3260 replace_dict['define_subdiag_lines'] = "" 3261 3262 if writer: 3263 file = open(os.path.join(_file_path, \ 3264 'iolibs/template_files/auto_dsig_mw.inc')).read() 3265 3266 file = file % replace_dict 3267 # Write the file 3268 writer.writelines(file) 3269 else: 3270 return replace_dict
3271 #=========================================================================== 3272 # write_configs_file 3273 #===========================================================================
3274 - def write_configs_file(self, writer, matrix_element):
3275 """Write the configs.inc file for MadEvent""" 3276 3277 # Extract number of external particles 3278 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3279 3280 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 3281 mapconfigs = [c[0] for c in configs] 3282 model = matrix_element.get('processes')[0].get('model') 3283 return mapconfigs, self.write_configs_file_from_diagrams(writer, 3284 [[c[1]] for c in configs], 3285 mapconfigs, 3286 nexternal, ninitial,matrix_element, model)
3287 3288 #=========================================================================== 3289 # write_run_configs_file 3290 #===========================================================================
3291 - def write_run_config_file(self, writer):
3292 """Write the run_configs.inc file for MadWeight""" 3293 3294 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 3295 text = open(path).read() % {'chanperjob':'5'} 3296 writer.write(text) 3297 return True
3298 3299 #=========================================================================== 3300 # write_configs_file_from_diagrams 3301 #===========================================================================
3302 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 3303 nexternal, ninitial, matrix_element, model):
3304 """Write the actual configs.inc file. 3305 3306 configs is the diagrams corresponding to configs (each 3307 diagrams is a list of corresponding diagrams for all 3308 subprocesses, with None if there is no corresponding diagrams 3309 for a given process). 3310 mapconfigs gives the diagram number for each config. 3311 3312 For s-channels, we need to output one PDG for each subprocess in 3313 the subprocess group, in order to be able to pick the right 3314 one for multiprocesses.""" 3315 3316 lines = [] 3317 3318 particle_dict = matrix_element.get('processes')[0].get('model').\ 3319 get('particle_dict') 3320 3321 s_and_t_channels = [] 3322 3323 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 3324 for config in configs if [d for d in config if d][0].\ 3325 get_vertex_leg_numbers()!=[]] 3326 3327 minvert = min(vert_list) if vert_list!=[] else 0 3328 # Number of subprocesses 3329 nsubprocs = len(configs[0]) 3330 3331 nconfigs = 0 3332 3333 new_pdg = model.get_first_non_pdg() 3334 3335 for iconfig, helas_diags in enumerate(configs): 3336 if any([vert > minvert for vert in 3337 [d for d in helas_diags if d][0].get_vertex_leg_numbers()]): 3338 # Only 3-vertices allowed in configs.inc 3339 continue 3340 nconfigs += 1 3341 3342 # Need s- and t-channels for all subprocesses, including 3343 # those that don't contribute to this config 3344 empty_verts = [] 3345 stchannels = [] 3346 for h in helas_diags: 3347 if h: 3348 # get_s_and_t_channels gives vertices starting from 3349 # final state external particles and working inwards 3350 stchannels.append(h.get('amplitudes')[0].\ 3351 get_s_and_t_channels(ninitial,model,new_pdg)) 3352 else: 3353 stchannels.append((empty_verts, None)) 3354 3355 # For t-channels, just need the first non-empty one 3356 tchannels = [t for s,t in stchannels if t != None][0] 3357 3358 # For s_and_t_channels (to be used later) use only first config 3359 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 3360 tchannels]) 3361 3362 # Make sure empty_verts is same length as real vertices 3363 if any([s for s,t in stchannels]): 3364 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 3365 3366 # Reorganize s-channel vertices to get a list of all 3367 # subprocesses for each vertex 3368 schannels = zip(*[s for s,t in stchannels]) 3369 else: 3370 schannels = [] 3371 3372 allchannels = schannels 3373 if len(tchannels) > 1: 3374 # Write out tchannels only if there are any non-trivial ones 3375 allchannels = schannels + tchannels 3376 3377 # Write out propagators for s-channel and t-channel vertices 3378 3379 #lines.append("# Diagram %d" % (mapconfigs[iconfig])) 3380 # Correspondance between the config and the diagram = amp2 3381 lines.append("* %d %d " % (nconfigs, 3382 mapconfigs[iconfig])) 3383 3384 for verts in allchannels: 3385 if verts in schannels: 3386 vert = [v for v in verts if v][0] 3387 else: 3388 vert = verts 3389 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 3390 last_leg = vert.get('legs')[-1] 3391 line=str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1]) 3392 # lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 3393 # (last_leg.get('number'), nconfigs, len(daughters), 3394 # ",".join([str(d) for d in daughters]))) 3395 3396 if last_leg.get('id') == 21 and 21 not in particle_dict: 3397 # Fake propagator used in multiparticle vertices 3398 mass = 'zero' 3399 width = 'zero' 3400 pow_part = 0 3401 else: 3402 if (last_leg.get('id')!=7): 3403 particle = particle_dict[last_leg.get('id')] 
3404 # Get mass 3405 mass = particle.get('mass') 3406 # Get width 3407 width = particle.get('width') 3408 else : # fake propagator used in multiparticle vertices 3409 mass= 'zero' 3410 width= 'zero' 3411 3412 line=line+" "+mass+" "+width+" " 3413 3414 if verts in schannels: 3415 pdgs = [] 3416 for v in verts: 3417 if v: 3418 pdgs.append(v.get('legs')[-1].get('id')) 3419 else: 3420 pdgs.append(0) 3421 lines.append(line+" S "+str(last_leg.get('id'))) 3422 # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 3423 # (last_leg.get('number'), nconfigs, nsubprocs, 3424 # ",".join([str(d) for d in pdgs]))) 3425 # lines.append("data tprid(%d,%d)/0/" % \ 3426 # (last_leg.get('number'), nconfigs)) 3427 elif verts in tchannels[:-1]: 3428 lines.append(line+" T "+str(last_leg.get('id'))) 3429 # lines.append("data tprid(%d,%d)/%d/" % \ 3430 # (last_leg.get('number'), nconfigs, 3431 # abs(last_leg.get('id')))) 3432 # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 3433 # (last_leg.get('number'), nconfigs, nsubprocs, 3434 # ",".join(['0'] * nsubprocs))) 3435 3436 # Write out number of configs 3437 # lines.append("# Number of configs") 3438 # lines.append("data mapconfig(0)/%d/" % nconfigs) 3439 lines.append(" * ") # a line with just a star indicates this is the end of file 3440 # Write the file 3441 writer.writelines(lines) 3442 3443 return s_and_t_channels
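# Illustrative sketch, not part of the module source: the MadWeight configs.inc
# produced above is a whitespace-separated format with one block per config:
# a header "* <config> <diagram>", one line per propagator
# "<prop leg> <daughter1> <daughter2> <mass> <width>  <S|T> <pdg>", and a
# closing line containing a single star.  All numbers below are made up:
lines = ["* %d %d " % (1, 1),          # config 1 corresponds to diagram 1
         "-1 3 4 MT WT  S 6",          # s-channel top quark from legs 3 and 4
         " * "]                        # end-of-file marker
print("\n".join(lines))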
3444
3445 3446 3447 #=============================================================================== 3448 # ProcessExporterFortranME 3449 #=============================================================================== 3450 -class ProcessExporterFortranME(ProcessExporterFortran):
3451 """Class to take care of exporting a set of matrix elements to 3452 MadEvent format.""" 3453 3454 matrix_file = "matrix_madevent_v4.inc" 3455 3456 # helper function for customise helas writter 3457 @staticmethod
3458 - def custom_helas_call(call, arg):
3459 if arg['mass'] == '%(M)s,%(W)s,': 3460 arg['mass'] = '%(M)s, fk_%(W)s,' 3461 elif '%(W)s' in arg['mass']: 3462 raise Exception 3463 return call, arg
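# Illustrative sketch, not part of the module source: custom_helas_call above
# rewrites the width argument of a propagator call so that matrix.f uses the
# 'fake' width fk_<W> (defined through fake_width_definitions in
# write_matrix_element_v4 below) instead of a possibly tiny physical width:
call, arg = 'CALL FFV1_2(...)', {'mass': '%(M)s,%(W)s,'}   # made-up call stub
if arg['mass'] == '%(M)s,%(W)s,':
    arg['mass'] = '%(M)s, fk_%(W)s,'
print(arg['mass'])   # %(M)s, fk_%(W)s,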
3464
3465 - def copy_template(self, model):
3466 """Additional actions needed for setup of Template 3467 """ 3468 3469 super(ProcessExporterFortranME, self).copy_template(model) 3470 3471 # File created from Template (Different in some child class) 3472 filename = pjoin(self.dir_path,'Source','run_config.inc') 3473 self.write_run_config_file(writers.FortranWriter(filename)) 3474 3475 # The next file are model dependant (due to SLAH convention) 3476 self.model_name = model.get('name') 3477 # Add the symmetry.f 3478 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3479 self.write_symmetry(writers.FortranWriter(filename)) 3480 # 3481 filename = pjoin(self.dir_path,'SubProcesses','addmothers.f') 3482 self.write_addmothers(writers.FortranWriter(filename)) 3483 # Copy the different python file in the Template 3484 self.copy_python_file()
3485 3486 3487 3488 3489 3490 3491 #=========================================================================== 3492 # generate_subprocess_directory 3493 #===========================================================================
3494 - def copy_python_file(self):
3495 """copy the python file require for the Template""" 3496 3497 # madevent interface 3498 cp(_file_path+'/interface/madevent_interface.py', 3499 self.dir_path+'/bin/internal/madevent_interface.py') 3500 cp(_file_path+'/interface/extended_cmd.py', 3501 self.dir_path+'/bin/internal/extended_cmd.py') 3502 cp(_file_path+'/interface/common_run_interface.py', 3503 self.dir_path+'/bin/internal/common_run_interface.py') 3504 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3505 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3506 cp(_file_path+'/iolibs/save_load_object.py', 3507 self.dir_path+'/bin/internal/save_load_object.py') 3508 cp(_file_path+'/iolibs/file_writers.py', 3509 self.dir_path+'/bin/internal/file_writers.py') 3510 #model file 3511 cp(_file_path+'../models/check_param_card.py', 3512 self.dir_path+'/bin/internal/check_param_card.py') 3513 3514 #copy all the file present in madevent directory 3515 for name in os.listdir(pjoin(_file_path, 'madevent')): 3516 if name not in ['__init__.py'] and name.endswith('.py'): 3517 cp(_file_path+'/madevent/'+name, self.dir_path+'/bin/internal/') 3518 3519 #madevent file 3520 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3521 cp(_file_path+'/various/lhe_parser.py', 3522 self.dir_path+'/bin/internal/lhe_parser.py') 3523 cp(_file_path+'/various/banner.py', 3524 self.dir_path+'/bin/internal/banner.py') 3525 cp(_file_path+'/various/histograms.py', 3526 self.dir_path+'/bin/internal/histograms.py') 3527 cp(_file_path+'/various/plot_djrs.py', 3528 self.dir_path+'/bin/internal/plot_djrs.py') 3529 cp(_file_path+'/various/systematics.py', self.dir_path+'/bin/internal/systematics.py') 3530 3531 cp(_file_path+'/various/cluster.py', 3532 self.dir_path+'/bin/internal/cluster.py') 3533 cp(_file_path+'/madevent/combine_runs.py', 3534 self.dir_path+'/bin/internal/combine_runs.py') 3535 # logging configuration 3536 cp(_file_path+'/interface/.mg5_logging.conf', 3537 self.dir_path+'/bin/internal/me5_logging.conf') 3538 cp(_file_path+'/interface/coloring_logging.py', 3539 self.dir_path+'/bin/internal/coloring_logging.py') 3540 # shower card and FO_analyse_card. 3541 # Although not needed, it is imported by banner.py 3542 cp(_file_path+'/various/shower_card.py', 3543 self.dir_path+'/bin/internal/shower_card.py') 3544 cp(_file_path+'/various/FO_analyse_card.py', 3545 self.dir_path+'/bin/internal/FO_analyse_card.py')
3546 3547
3548 - def convert_model(self, model, wanted_lorentz = [], 3549 wanted_couplings = []):
3550 3551 super(ProcessExporterFortranME,self).convert_model(model, 3552 wanted_lorentz, wanted_couplings) 3553 3554 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 3555 try: 3556 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 3557 except OSError as error: 3558 pass 3559 model_path = model.get('modelpath') 3560 # This is not safe if there is a '##' or '-' in the path. 3561 shutil.copytree(model_path, 3562 pjoin(self.dir_path,'bin','internal','ufomodel'), 3563 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 3564 if hasattr(model, 'restrict_card'): 3565 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 3566 'restrict_default.dat') 3567 if isinstance(model.restrict_card, check_param_card.ParamCard): 3568 model.restrict_card.write(out_path) 3569 else: 3570 files.cp(model.restrict_card, out_path)
3571 3572 #=========================================================================== 3573 # export model files 3574 #===========================================================================
3575 - def export_model_files(self, model_path):
3576 """export the model dependent files""" 3577 3578 super(ProcessExporterFortranME,self).export_model_files(model_path) 3579 3580 # Add the routine update_as_param in v4 model 3581 # This is a function created in the UFO 3582 text=""" 3583 subroutine update_as_param() 3584 call setpara('param_card.dat',.false.) 3585 return 3586 end 3587 """ 3588 ff = open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3589 ff.write(text) 3590 ff.close() 3591 3592 # Add the symmetry.f 3593 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3594 self.write_symmetry(writers.FortranWriter(filename), v5=False) 3595 3596 # Modify setrun.f 3597 text = open(pjoin(self.dir_path,'Source','setrun.f')).read() 3598 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3599 fsock = open(pjoin(self.dir_path,'Source','setrun.f'), 'w') 3600 fsock.write(text) 3601 fsock.close() 3602 3603 self.make_model_symbolic_link()
3604 3605 #=========================================================================== 3606 # generate_subprocess_directory 3607 #===========================================================================
3608 - def generate_subprocess_directory(self, matrix_element, 3609 fortran_model, 3610 me_number):
3611 """Generate the Pxxxxx directory for a subprocess in MG4 madevent, 3612 including the necessary matrix.f and various helper files""" 3613 3614 cwd = os.getcwd() 3615 path = pjoin(self.dir_path, 'SubProcesses') 3616 3617 3618 if not self.model: 3619 self.model = matrix_element.get('processes')[0].get('model') 3620 3621 3622 3623 #os.chdir(path) 3624 # Create the directory PN_xx_xxxxx in the specified path 3625 subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string() 3626 try: 3627 os.mkdir(pjoin(path,subprocdir)) 3628 except os.error as error: 3629 logger.warning(error.strerror + " " + subprocdir) 3630 3631 #try: 3632 # os.chdir(subprocdir) 3633 #except os.error: 3634 # logger.error('Could not cd to directory %s' % subprocdir) 3635 # return 0 3636 3637 logger.info('Creating files in directory %s' % subprocdir) 3638 Ppath = pjoin(path, subprocdir) 3639 3640 # Extract number of external particles 3641 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3642 3643 # Add the driver.f 3644 ncomb = matrix_element.get_helicity_combinations() 3645 filename = pjoin(Ppath,'driver.f') 3646 self.write_driver(writers.FortranWriter(filename),ncomb,n_grouped_proc=1, 3647 v5=self.opt['v5_model']) 3648 3649 # Create the matrix.f file, auto_dsig.f file and all inc files 3650 filename = pjoin(Ppath, 'matrix.f') 3651 calls, ncolor = \ 3652 self.write_matrix_element_v4(writers.FortranWriter(filename), 3653 matrix_element, fortran_model, subproc_number = me_number) 3654 3655 filename = pjoin(Ppath, 'auto_dsig.f') 3656 self.write_auto_dsig_file(writers.FortranWriter(filename), 3657 matrix_element) 3658 3659 filename = pjoin(Ppath, 'configs.inc') 3660 mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\ 3661 writers.FortranWriter(filename), 3662 matrix_element) 3663 3664 filename = pjoin(Ppath, 'config_nqcd.inc') 3665 self.write_config_nqcd_file(writers.FortranWriter(filename), 3666 nqcd_list) 3667 3668 filename = pjoin(Ppath, 'config_subproc_map.inc') 3669 self.write_config_subproc_map_file(writers.FortranWriter(filename), 3670 s_and_t_channels) 3671 3672 filename = pjoin(Ppath, 'coloramps.inc') 3673 self.write_coloramps_file(writers.FortranWriter(filename), 3674 mapconfigs, 3675 matrix_element) 3676 3677 filename = pjoin(Ppath, 'get_color.f') 3678 self.write_colors_file(writers.FortranWriter(filename), 3679 matrix_element) 3680 3681 filename = pjoin(Ppath, 'decayBW.inc') 3682 self.write_decayBW_file(writers.FortranWriter(filename), 3683 s_and_t_channels) 3684 3685 filename = pjoin(Ppath, 'dname.mg') 3686 self.write_dname_file(writers.FileWriter(filename), 3687 "P"+matrix_element.get('processes')[0].shell_string()) 3688 3689 filename = pjoin(Ppath, 'iproc.dat') 3690 self.write_iproc_file(writers.FortranWriter(filename), 3691 me_number) 3692 3693 filename = pjoin(Ppath, 'leshouche.inc') 3694 self.write_leshouche_file(writers.FortranWriter(filename), 3695 matrix_element) 3696 3697 filename = pjoin(Ppath, 'maxamps.inc') 3698 self.write_maxamps_file(writers.FortranWriter(filename), 3699 len(matrix_element.get('diagrams')), 3700 ncolor, 3701 len(matrix_element.get('processes')), 3702 1) 3703 3704 filename = pjoin(Ppath, 'mg.sym') 3705 self.write_mg_sym_file(writers.FortranWriter(filename), 3706 matrix_element) 3707 3708 filename = pjoin(Ppath, 'ncombs.inc') 3709 self.write_ncombs_file(writers.FortranWriter(filename), 3710 nexternal) 3711 3712 filename = pjoin(Ppath, 'nexternal.inc') 3713 self.write_nexternal_file(writers.FortranWriter(filename), 3714 nexternal, ninitial) 
3715 3716 filename = pjoin(Ppath, 'ngraphs.inc') 3717 self.write_ngraphs_file(writers.FortranWriter(filename), 3718 len(mapconfigs)) 3719 3720 3721 filename = pjoin(Ppath, 'pmass.inc') 3722 self.write_pmass_file(writers.FortranWriter(filename), 3723 matrix_element) 3724 3725 filename = pjoin(Ppath, 'props.inc') 3726 self.write_props_file(writers.FortranWriter(filename), 3727 matrix_element, 3728 s_and_t_channels) 3729 3730 # Find config symmetries and permutations 3731 symmetry, perms, ident_perms = \ 3732 diagram_symmetry.find_symmetry(matrix_element) 3733 3734 filename = pjoin(Ppath, 'symswap.inc') 3735 self.write_symswap_file(writers.FortranWriter(filename), 3736 ident_perms) 3737 3738 filename = pjoin(Ppath, 'symfact_orig.dat') 3739 self.write_symfact_file(open(filename, 'w'), symmetry) 3740 3741 # Generate diagrams 3742 filename = pjoin(Ppath, "matrix.ps") 3743 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 3744 get('diagrams'), 3745 filename, 3746 model=matrix_element.get('processes')[0].\ 3747 get('model'), 3748 amplitude=True) 3749 logger.info("Generating Feynman diagrams for " + \ 3750 matrix_element.get('processes')[0].nice_string()) 3751 plot.draw() 3752 3753 self.link_files_in_SubProcess(Ppath) 3754 3755 #import nexternal/leshouche in Source 3756 ln(pjoin(Ppath,'nexternal.inc'), pjoin(self.dir_path,'Source'), log=False) 3757 ln(pjoin(Ppath,'leshouche.inc'), pjoin(self.dir_path,'Source'), log=False) 3758 ln(pjoin(Ppath,'maxamps.inc'), pjoin(self.dir_path,'Source'), log=False) 3759 # Return to SubProcesses dir 3760 #os.chdir(os.path.pardir) 3761 3762 # Add subprocess to subproc.mg 3763 filename = pjoin(path, 'subproc.mg') 3764 files.append_to_file(filename, 3765 self.write_subproc, 3766 subprocdir) 3767 3768 # Return to original dir 3769 #os.chdir(cwd) 3770 3771 # Generate info page 3772 gen_infohtml.make_info_html(self.dir_path) 3773 3774 3775 if not calls: 3776 calls = 0 3777 return calls
3778 3779 link_Sub_files = ['addmothers.f', 3780 'cluster.f', 3781 'cluster.inc', 3782 'coupl.inc', 3783 'cuts.f', 3784 'cuts.inc', 3785 'genps.f', 3786 'genps.inc', 3787 'idenparts.f', 3788 'initcluster.f', 3789 'makefile', 3790 'message.inc', 3791 'myamp.f', 3792 'reweight.f', 3793 'run.inc', 3794 'maxconfigs.inc', 3795 'maxparticles.inc', 3796 'run_config.inc', 3797 'lhe_event_infos.inc', 3798 'setcuts.f', 3799 'setscales.f', 3800 'sudakov.inc', 3801 'symmetry.f', 3802 'unwgt.f', 3803 'dummy_fct.f' 3804 ] 3805 3819 3820
3821 - def finalize(self, matrix_elements, history, mg5options, flaglist):
3822 """Finalize ME v4 directory by creating jpeg diagrams, html 3823 pages,proc_card_mg5.dat and madevent.tar.gz.""" 3824 3825 if 'nojpeg' in flaglist: 3826 makejpg = False 3827 else: 3828 makejpg = True 3829 if 'online' in flaglist: 3830 online = True 3831 else: 3832 online = False 3833 3834 compiler = {'fortran': mg5options['fortran_compiler'], 3835 'cpp': mg5options['cpp_compiler'], 3836 'f2py': mg5options['f2py_compiler']} 3837 3838 # indicate that the output type is not grouped 3839 if not isinstance(self, ProcessExporterFortranMEGroup): 3840 self.proc_characteristic['grouped_matrix'] = False 3841 self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme'] 3842 # indicate the PDG of all initial particle 3843 try: 3844 pdgs1 = [p.get_initial_pdg(1) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)] 3845 pdgs2 = [p.get_initial_pdg(2) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)] 3846 except AttributeError: 3847 pdgs1 = [p.get_initial_pdg(1) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)] 3848 pdgs2 = [p.get_initial_pdg(2) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)] 3849 self.proc_characteristic['pdg_initial1'] = pdgs1 3850 self.proc_characteristic['pdg_initial2'] = pdgs2 3851 3852 3853 modelname = self.opt['model'] 3854 if modelname == 'mssm' or modelname.startswith('mssm-'): 3855 param_card = pjoin(self.dir_path, 'Cards','param_card.dat') 3856 mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 3857 check_param_card.convert_to_mg5card(param_card, mg5_param) 3858 check_param_card.check_valid_param_card(mg5_param) 3859 3860 # Add the combine_events.f modify param_card path/number of @X 3861 filename = pjoin(self.dir_path,'Source','combine_events.f') 3862 try: 3863 nb_proc =[p.get('id') for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')] 3864 except AttributeError: 3865 nb_proc =[p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')] 3866 nb_proc = len(set(nb_proc)) 3867 self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted 3868 # Write maxconfigs.inc based on max of ME's/subprocess groups 3869 filename = pjoin(self.dir_path,'Source','maxconfigs.inc') 3870 self.write_maxconfigs_file(writers.FortranWriter(filename), 3871 matrix_elements) 3872 3873 # Write maxparticles.inc based on max of ME's/subprocess groups 3874 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 3875 self.write_maxparticles_file(writers.FortranWriter(filename), 3876 matrix_elements) 3877 3878 # Touch "done" file 3879 os.system('touch %s/done' % pjoin(self.dir_path,'SubProcesses')) 3880 3881 # Check for compiler 3882 self.set_compiler(compiler) 3883 self.set_cpp_compiler(compiler['cpp']) 3884 3885 3886 old_pos = os.getcwd() 3887 subpath = pjoin(self.dir_path, 'SubProcesses') 3888 3889 P_dir_list = [proc for proc in os.listdir(subpath) 3890 if os.path.isdir(pjoin(subpath,proc)) and proc[0] == 'P'] 3891 3892 devnull = os.open(os.devnull, os.O_RDWR) 3893 # Convert the poscript in jpg files (if authorize) 3894 if makejpg: 3895 try: 3896 os.remove(pjoin(self.dir_path,'HTML','card.jpg')) 3897 except Exception, error: 3898 pass 3899 3900 if misc.which('gs'): 3901 logger.info("Generate jpeg diagrams") 3902 for Pdir in P_dir_list: 3903 
misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 3904 stdout = devnull, cwd=pjoin(subpath, Pdir)) 3905 3906 logger.info("Generate web pages") 3907 # Create the WebPage using perl script 3908 3909 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \ 3910 stdout = devnull,cwd=pjoin(self.dir_path)) 3911 3912 #os.chdir(os.path.pardir) 3913 3914 obj = gen_infohtml.make_info_html(self.dir_path) 3915 3916 if online: 3917 nb_channel = obj.rep_rule['nb_gen_diag'] 3918 open(pjoin(self.dir_path, 'Online'),'w').write(str(nb_channel)) 3919 #add the information to proc_charac 3920 self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag'] 3921 3922 # Write command history as proc_card_mg5 3923 if os.path.isdir(pjoin(self.dir_path,'Cards')): 3924 output_file = pjoin(self.dir_path,'Cards', 'proc_card_mg5.dat') 3925 history.write(output_file) 3926 3927 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3928 stdout = devnull) 3929 3930 #crate the proc_characteristic file 3931 self.create_proc_charac(matrix_elements, history) 3932 3933 # create the run_card 3934 ProcessExporterFortran.finalize(self, matrix_elements, history, mg5options, flaglist) 3935 3936 # Run "make" to generate madevent.tar.gz file 3937 if os.path.exists(pjoin(self.dir_path,'SubProcesses', 'subproc.mg')): 3938 if os.path.exists(pjoin(self.dir_path,'madevent.tar.gz')): 3939 os.remove(pjoin(self.dir_path,'madevent.tar.gz')) 3940 misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')], 3941 stdout = devnull, cwd=self.dir_path) 3942 3943 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3944 stdout = devnull, cwd=self.dir_path)
3945 3946 3947 3948 3949 3950 3951 #return to the initial dir 3952 #os.chdir(old_pos) 3953 3954 #=========================================================================== 3955 # write_matrix_element_v4 3956 #===========================================================================
3957 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 3958 proc_id = "", config_map = [], subproc_number = ""):
3959 """Export a matrix element to a matrix.f file in MG4 madevent format""" 3960 3961 if not matrix_element.get('processes') or \ 3962 not matrix_element.get('diagrams'): 3963 return 0 3964 3965 if writer: 3966 if not isinstance(writer, writers.FortranWriter): 3967 raise writers.FortranWriter.FortranWriterError(\ 3968 "writer not FortranWriter") 3969 # Set lowercase/uppercase Fortran code 3970 writers.FortranWriter.downcase = False 3971 3972 # The proc prefix is not used for MadEvent output so it can safely be set 3973 # to an empty string. 3974 replace_dict = {'proc_prefix':''} 3975 3976 # Extract helas calls 3977 helas_calls = fortran_model.get_matrix_element_calls(\ 3978 matrix_element) 3979 3980 3981 replace_dict['helas_calls'] = "\n".join(helas_calls) 3982 3983 3984 #adding the support for the fake width (forbidding too small width) 3985 mass_width = matrix_element.get_all_mass_widths() 3986 width_list = set([e[1] for e in mass_width]) 3987 3988 replace_dict['fake_width_declaration'] = \ 3989 (' double precision fk_%s \n' * len(width_list)) % tuple(width_list) 3990 replace_dict['fake_width_declaration'] += \ 3991 (' save fk_%s \n' * len(width_list)) % tuple(width_list) 3992 fk_w_defs = [] 3993 one_def = ' fk_%(w)s = SIGN(MAX(ABS(%(w)s), ABS(%(m)s*small_width_treatment)), %(w)s)' 3994 for m, w in mass_width: 3995 if w == 'zero': 3996 if ' fk_zero = 0d0' not in fk_w_defs: 3997 fk_w_defs.append(' fk_zero = 0d0') 3998 continue 3999 fk_w_defs.append(one_def %{'m':m, 'w':w}) 4000 replace_dict['fake_width_definitions'] = '\n'.join(fk_w_defs) 4001 4002 # Extract version number and date from VERSION file 4003 info_lines = self.get_mg5_info_lines() 4004 replace_dict['info_lines'] = info_lines 4005 4006 # Extract process info lines 4007 process_lines = self.get_process_info_lines(matrix_element) 4008 replace_dict['process_lines'] = process_lines 4009 4010 # Set proc_id 4011 replace_dict['proc_id'] = proc_id 4012 4013 # Extract ncomb 4014 ncomb = matrix_element.get_helicity_combinations() 4015 replace_dict['ncomb'] = ncomb 4016 4017 # Extract helicity lines 4018 helicity_lines = self.get_helicity_lines(matrix_element) 4019 replace_dict['helicity_lines'] = helicity_lines 4020 4021 # Extract IC line 4022 ic_line = self.get_ic_line(matrix_element) 4023 replace_dict['ic_line'] = ic_line 4024 4025 # Extract overall denominator 4026 # Averaging initial state color, spin, and identical FS particles 4027 den_factor_line = self.get_den_factor_line(matrix_element) 4028 replace_dict['den_factor_line'] = den_factor_line 4029 4030 # Extract ngraphs 4031 ngraphs = matrix_element.get_number_of_amplitudes() 4032 replace_dict['ngraphs'] = ngraphs 4033 4034 # Extract ndiags 4035 ndiags = len(matrix_element.get('diagrams')) 4036 replace_dict['ndiags'] = ndiags 4037 4038 # Set define_iconfigs_lines 4039 replace_dict['define_iconfigs_lines'] = \ 4040 """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG 4041 COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG""" 4042 4043 if proc_id: 4044 # Set lines for subprocess group version 4045 # Set define_iconfigs_lines 4046 replace_dict['define_iconfigs_lines'] += \ 4047 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 4048 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 4049 # Set set_amp2_line 4050 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \ 4051 proc_id 4052 else: 4053 # Standard running 4054 # Set set_amp2_line 4055 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT" 4056 4057 # Extract nwavefuncs 4058 nwavefuncs = matrix_element.get_number_of_wavefunctions() 4059 
replace_dict['nwavefuncs'] = nwavefuncs 4060 4061 # Extract ncolor 4062 ncolor = max(1, len(matrix_element.get('color_basis'))) 4063 replace_dict['ncolor'] = ncolor 4064 4065 # Extract color data lines 4066 color_data_lines = self.get_color_data_lines(matrix_element) 4067 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 4068 4069 4070 # Set the size of Wavefunction 4071 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 4072 replace_dict['wavefunctionsize'] = 18 4073 else: 4074 replace_dict['wavefunctionsize'] = 6 4075 4076 # Extract amp2 lines 4077 amp2_lines = self.get_amp2_lines(matrix_element, config_map) 4078 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 4079 4080 # The JAMP definition depends on the splitting order 4081 split_orders=matrix_element.get('processes')[0].get('split_orders') 4082 if len(split_orders)>0: 4083 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 4084 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 4085 matrix_element.get('processes')[0],squared_orders) 4086 else: 4087 # Consider the output of a dummy order 'ALL_ORDERS' for which we 4088 # set all amplitude order to weight 1 and only one squared order 4089 # contribution which is of course ALL_ORDERS=2. 4090 squared_orders = [(2,),] 4091 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 4092 replace_dict['chosen_so_configs'] = '.TRUE.' 4093 4094 replace_dict['nAmpSplitOrders']=len(amp_orders) 4095 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 4096 replace_dict['split_order_str_list']=str(split_orders) 4097 replace_dict['nSplitOrders']=max(len(split_orders),1) 4098 amp_so = self.get_split_orders_lines( 4099 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 4100 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 4101 replace_dict['ampsplitorders']='\n'.join(amp_so) 4102 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 4103 4104 4105 # Extract JAMP lines 4106 # If no split_orders then artificiall add one entry called 'ALL_ORDERS' 4107 jamp_lines = self.get_JAMP_lines_split_order(\ 4108 matrix_element,amp_orders,split_order_names= 4109 split_orders if len(split_orders)>0 else ['ALL_ORDERS']) 4110 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 4111 4112 replace_dict['template_file'] = pjoin(_file_path, \ 4113 'iolibs/template_files/%s' % self.matrix_file) 4114 replace_dict['template_file2'] = pjoin(_file_path, \ 4115 'iolibs/template_files/split_orders_helping_functions.inc') 4116 if writer: 4117 file = open(replace_dict['template_file']).read() 4118 file = file % replace_dict 4119 # Add the split orders helper functions. 4120 file = file + '\n' + open(replace_dict['template_file2'])\ 4121 .read()%replace_dict 4122 # Write the file 4123 writer.writelines(file) 4124 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor 4125 else: 4126 replace_dict['return_value'] = (len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor) 4127 return replace_dict
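# Illustrative sketch, not part of the module source: the Fortran definition
#     fk_W = SIGN(MAX(ABS(W), ABS(M*small_width_treatment)), W)
# generated above clamps |W| from below at |M|*small_width_treatment while
# keeping the sign of W.  The same logic in Python; the default value of
# small_width_treatment used here is a made-up placeholder:
import math
def fake_width(mass, width, small_width_treatment=1e-6):
    return math.copysign(max(abs(width), abs(mass * small_width_treatment)), width)
print(fake_width(mass=173.0, width=1e-12))   # 0.000173
print(fake_width(mass=173.0, width=1.5))     # 1.5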
4128 4129 #=========================================================================== 4130 # write_auto_dsig_file 4131 #===========================================================================
4132 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
4133 """Write the auto_dsig.f file for the differential cross section 4134 calculation, includes pdf call information""" 4135 4136 if not matrix_element.get('processes') or \ 4137 not matrix_element.get('diagrams'): 4138 return 0 4139 4140 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 4141 self.proc_characteristic['ninitial'] = ninitial 4142 self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal) 4143 4144 # Add information relevant for MLM matching: 4145 # Maximum QCD power in all the contributions 4146 max_qcd_order = 0 4147 for diag in matrix_element.get('diagrams'): 4148 orders = diag.calculate_orders() 4149 if 'QCD' in orders: 4150 max_qcd_order = max(max_qcd_order,orders['QCD']) 4151 max_n_light_final_partons = max(len([1 for id in proc.get_final_ids() 4152 if proc.get('model').get_particle(id).get('mass')=='ZERO' and 4153 proc.get('model').get_particle(id).get('color')>1]) 4154 for proc in matrix_element.get('processes')) 4155 # Maximum number of final state light jets to be matched 4156 self.proc_characteristic['max_n_matched_jets'] = max( 4157 self.proc_characteristic['max_n_matched_jets'], 4158 min(max_qcd_order,max_n_light_final_partons)) 4159 4160 # List of default pdgs to be considered for the CKKWl merging cut 4161 self.proc_characteristic['colored_pdgs'] = \ 4162 sorted(list(set([abs(p.get('pdg_code')) for p in 4163 matrix_element.get('processes')[0].get('model').get('particles') if 4164 p.get('color')>1]))) 4165 4166 if ninitial < 1 or ninitial > 2: 4167 raise writers.FortranWriter.FortranWriterError, \ 4168 """Need ninitial = 1 or 2 to write auto_dsig file""" 4169 4170 replace_dict = {} 4171 4172 # Extract version number and date from VERSION file 4173 info_lines = self.get_mg5_info_lines() 4174 replace_dict['info_lines'] = info_lines 4175 4176 # Extract process info lines 4177 process_lines = self.get_process_info_lines(matrix_element) 4178 replace_dict['process_lines'] = process_lines 4179 4180 # Set proc_id 4181 replace_dict['proc_id'] = proc_id 4182 replace_dict['numproc'] = 1 4183 4184 # Set dsig_line 4185 if ninitial == 1: 4186 # No conversion, since result of decay should be given in GeV 4187 dsig_line = "pd(0)*dsiguu" 4188 else: 4189 # Convert result (in GeV) to pb 4190 dsig_line = "pd(0)*conv*dsiguu" 4191 4192 replace_dict['dsig_line'] = dsig_line 4193 4194 # Extract pdf lines 4195 pdf_vars, pdf_data, pdf_lines = \ 4196 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 4197 replace_dict['pdf_vars'] = pdf_vars 4198 replace_dict['pdf_data'] = pdf_data 4199 replace_dict['pdf_lines'] = pdf_lines 4200 4201 # Lines that differ between subprocess group and regular 4202 if proc_id: 4203 replace_dict['numproc'] = int(proc_id) 4204 replace_dict['passcuts_begin'] = "" 4205 replace_dict['passcuts_end'] = "" 4206 # Set lines for subprocess group version 4207 # Set define_iconfigs_lines 4208 replace_dict['define_subdiag_lines'] = \ 4209 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 4210 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 4211 replace_dict['cutsdone'] = "" 4212 else: 4213 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 4214 replace_dict['passcuts_end'] = "ENDIF" 4215 replace_dict['define_subdiag_lines'] = "" 4216 replace_dict['cutsdone'] = " cutsdone=.false.\n cutspassed=.false." 
4217 4218 if not isinstance(self, ProcessExporterFortranMEGroup): 4219 ncomb=matrix_element.get_helicity_combinations() 4220 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 4221 else: 4222 replace_dict['read_write_good_hel'] = "" 4223 4224 context = {'read_write_good_hel':True} 4225 4226 if writer: 4227 file = open(pjoin(_file_path, \ 4228 'iolibs/template_files/auto_dsig_v4.inc')).read() 4229 file = file % replace_dict 4230 4231 # Write the file 4232 writer.writelines(file, context=context) 4233 else: 4234 return replace_dict, context
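# Illustrative sketch, not part of the module source: the number of final-state
# jets available for MLM matching, stored above in
# proc_characteristic['max_n_matched_jets'], is the smaller of the maximal QCD
# coupling order over all diagrams and the number of massless coloured
# final-state particles.  With made-up values:
def max_matched_jets(max_qcd_order, n_light_final_partons):
    return min(max_qcd_order, n_light_final_partons)
print(max_matched_jets(3, 2))   # 2: only two light partons in the final state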
4235 #=========================================================================== 4236 # write_coloramps_file 4237 #===========================================================================
4238 - def write_coloramps_file(self, writer, mapconfigs, matrix_element):
4239 """Write the coloramps.inc file for MadEvent""" 4240 4241 lines = self.get_icolamp_lines(mapconfigs, matrix_element, 1) 4242 lines.insert(0, "logical icolamp(%d,%d,1)" % \ 4243 (max(len(matrix_element.get('color_basis').keys()), 1), 4244 len(mapconfigs))) 4245 4246 4247 # Write the file 4248 writer.writelines(lines) 4249 4250 return True
4251 4252 #=========================================================================== 4253 # write_colors_file 4254 #===========================================================================
4255 - def write_colors_file(self, writer, matrix_elements):
4256 """Write the get_color.f file for MadEvent, which returns color 4257 for all particles used in the matrix element.""" 4258 4259 if isinstance(matrix_elements, helas_objects.HelasMatrixElement): 4260 matrix_elements = [matrix_elements] 4261 4262 model = matrix_elements[0].get('processes')[0].get('model') 4263 4264 # We need the both particle and antiparticle wf_ids, since the identity 4265 # depends on the direction of the wf. 4266 wf_ids = set(sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 4267 for wf in d.get('wavefunctions')],[]) \ 4268 for d in me.get('diagrams')], []) \ 4269 for me in matrix_elements], [])) 4270 4271 leg_ids = set(sum([sum([sum([[l.get('id'), 4272 model.get_particle(l.get('id')).get_anti_pdg_code()] \ 4273 for l in p.get_legs_with_decays()], []) \ 4274 for p in me.get('processes')], []) \ 4275 for me in matrix_elements], [])) 4276 particle_ids = sorted(list(wf_ids.union(leg_ids))) 4277 4278 lines = """function get_color(ipdg) 4279 implicit none 4280 integer get_color, ipdg 4281 4282 if(ipdg.eq.%d)then 4283 get_color=%d 4284 return 4285 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color()) 4286 4287 for part_id in particle_ids[1:]: 4288 lines += """else if(ipdg.eq.%d)then 4289 get_color=%d 4290 return 4291 """ % (part_id, model.get_particle(part_id).get_color()) 4292 # Dummy particle for multiparticle vertices with pdg given by 4293 # first code not in the model 4294 lines += """else if(ipdg.eq.%d)then 4295 c This is dummy particle used in multiparticle vertices 4296 get_color=2 4297 return 4298 """ % model.get_first_non_pdg() 4299 lines += """else 4300 write(*,*)'Error: No color given for pdg ',ipdg 4301 get_color=0 4302 return 4303 endif 4304 end 4305 """ 4306 4307 # Write the file 4308 writer.writelines(lines) 4309 4310 return True
4311 4312 #=========================================================================== 4313 # write_config_nqcd_file 4314 #===========================================================================
4315 - def write_config_nqcd_file(self, writer, nqcd_list):
4316 """Write the config_nqcd.inc with the number of QCD couplings 4317 for each config""" 4318 4319 lines = [] 4320 for iconf, n in enumerate(nqcd_list): 4321 lines.append("data nqcd(%d)/%d/" % (iconf+1, n)) 4322 4323 # Write the file 4324 writer.writelines(lines) 4325 4326 return True
4327 4328 #=========================================================================== 4329 # write_maxconfigs_file 4330 #===========================================================================
4331 - def write_maxconfigs_file(self, writer, matrix_elements):
4332 """Write the maxconfigs.inc file for MadEvent""" 4333 4334 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 4335 maxconfigs = max([me.get_num_configs() for me in \ 4336 matrix_elements.get('matrix_elements')]) 4337 else: 4338 maxconfigs = max([me.get_num_configs() for me in matrix_elements]) 4339 4340 lines = "integer lmaxconfigs\n" 4341 lines += "parameter(lmaxconfigs=%d)" % maxconfigs 4342 4343 # Write the file 4344 writer.writelines(lines) 4345 4346 return True
4347 4348 #=========================================================================== 4349 # read_write_good_hel 4350 #===========================================================================
4351 - def read_write_good_hel(self, ncomb):
4352 """return the code to read/write the good_hel common_block""" 4353 4354 convert = {'ncomb' : ncomb} 4355 output = """ 4356 subroutine write_good_hel(stream_id) 4357 implicit none 4358 integer stream_id 4359 INTEGER NCOMB 4360 PARAMETER ( NCOMB=%(ncomb)d) 4361 LOGICAL GOODHEL(NCOMB) 4362 INTEGER NTRY 4363 common/BLOCK_GOODHEL/NTRY,GOODHEL 4364 write(stream_id,*) GOODHEL 4365 return 4366 end 4367 4368 4369 subroutine read_good_hel(stream_id) 4370 implicit none 4371 include 'genps.inc' 4372 integer stream_id 4373 INTEGER NCOMB 4374 PARAMETER ( NCOMB=%(ncomb)d) 4375 LOGICAL GOODHEL(NCOMB) 4376 INTEGER NTRY 4377 common/BLOCK_GOODHEL/NTRY,GOODHEL 4378 read(stream_id,*) GOODHEL 4379 NTRY = MAXTRIES + 1 4380 return 4381 end 4382 4383 subroutine init_good_hel() 4384 implicit none 4385 INTEGER NCOMB 4386 PARAMETER ( NCOMB=%(ncomb)d) 4387 LOGICAL GOODHEL(NCOMB) 4388 INTEGER NTRY 4389 INTEGER I 4390 4391 do i=1,NCOMB 4392 GOODHEL(I) = .false. 4393 enddo 4394 NTRY = 0 4395 end 4396 4397 integer function get_maxsproc() 4398 implicit none 4399 get_maxsproc = 1 4400 return 4401 end 4402 4403 """ % convert 4404 4405 return output
4406 4407 #=========================================================================== 4408 # write_config_subproc_map_file 4409 #===========================================================================
4410 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
4411 """Write a dummy config_subproc.inc file for MadEvent""" 4412 4413 lines = [] 4414 4415 for iconfig in range(len(s_and_t_channels)): 4416 lines.append("DATA CONFSUB(1,%d)/1/" % \ 4417 (iconfig + 1)) 4418 4419 # Write the file 4420 writer.writelines(lines) 4421 4422 return True
4423 4424 #=========================================================================== 4425 # write_configs_file 4426 #===========================================================================
4427 - def write_configs_file(self, writer, matrix_element):
4428 """Write the configs.inc file for MadEvent""" 4429 4430 # Extract number of external particles 4431 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 4432 4433 model = matrix_element.get('processes')[0].get('model') 4434 configs = [(i+1, d) for (i, d) in \ 4435 enumerate(matrix_element.get('diagrams'))] 4436 mapconfigs = [c[0] for c in configs] 4437 return mapconfigs, self.write_configs_file_from_diagrams(writer, 4438 [[c[1]] for c in configs], 4439 mapconfigs, 4440 nexternal, ninitial, 4441 model)
4442 4443 #=========================================================================== 4444 # write_run_configs_file 4445 #===========================================================================
4446 - def write_run_config_file(self, writer):
4447 """Write the run_configs.inc file for MadEvent""" 4448 4449 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 4450 4451 if self.proc_characteristic['loop_induced']: 4452 job_per_chan = 1 4453 else: 4454 job_per_chan = 5 4455 4456 if writer: 4457 text = open(path).read() % {'chanperjob': job_per_chan} 4458 writer.write(text) 4459 return True 4460 else: 4461 return {'chanperjob': job_per_chan}
4462 4463 #=========================================================================== 4464 # write_configs_file_from_diagrams 4465 #===========================================================================
4466 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 4467 nexternal, ninitial, model):
4468 """Write the actual configs.inc file. 4469 4470 configs is the diagrams corresponding to configs (each 4471 diagrams is a list of corresponding diagrams for all 4472 subprocesses, with None if there is no corresponding diagrams 4473 for a given process). 4474 mapconfigs gives the diagram number for each config. 4475 4476 For s-channels, we need to output one PDG for each subprocess in 4477 the subprocess group, in order to be able to pick the right 4478 one for multiprocesses.""" 4479 4480 lines = [] 4481 4482 s_and_t_channels = [] 4483 4484 nqcd_list = [] 4485 4486 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 4487 for config in configs if [d for d in config if d][0].\ 4488 get_vertex_leg_numbers()!=[]] 4489 minvert = min(vert_list) if vert_list!=[] else 0 4490 4491 # Number of subprocesses 4492 nsubprocs = len(configs[0]) 4493 4494 nconfigs = 0 4495 4496 new_pdg = model.get_first_non_pdg() 4497 4498 for iconfig, helas_diags in enumerate(configs): 4499 if any([vert > minvert for vert in 4500 [d for d in helas_diags if d][0].get_vertex_leg_numbers()]): 4501 # Only 3-vertices allowed in configs.inc 4502 continue 4503 nconfigs += 1 4504 4505 # Need s- and t-channels for all subprocesses, including 4506 # those that don't contribute to this config 4507 empty_verts = [] 4508 stchannels = [] 4509 for h in helas_diags: 4510 if h: 4511 # get_s_and_t_channels gives vertices starting from 4512 # final state external particles and working inwards 4513 stchannels.append(h.get('amplitudes')[0].\ 4514 get_s_and_t_channels(ninitial, model, 4515 new_pdg)) 4516 else: 4517 stchannels.append((empty_verts, None)) 4518 4519 # For t-channels, just need the first non-empty one 4520 tchannels = [t for s,t in stchannels if t != None][0] 4521 4522 # For s_and_t_channels (to be used later) use only first config 4523 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 4524 tchannels]) 4525 4526 # Make sure empty_verts is same length as real vertices 4527 if any([s for s,t in stchannels]): 4528 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 4529 4530 # Reorganize s-channel vertices to get a list of all 4531 # subprocesses for each vertex 4532 schannels = zip(*[s for s,t in stchannels]) 4533 else: 4534 schannels = [] 4535 4536 allchannels = schannels 4537 if len(tchannels) > 1: 4538 # Write out tchannels only if there are any non-trivial ones 4539 allchannels = schannels + tchannels 4540 4541 # Write out propagators for s-channel and t-channel vertices 4542 4543 lines.append("# Diagram %d" % (mapconfigs[iconfig])) 4544 # Correspondance between the config and the diagram = amp2 4545 lines.append("data mapconfig(%d)/%d/" % (nconfigs, 4546 mapconfigs[iconfig])) 4547 # Number of QCD couplings in this diagram 4548 nqcd = 0 4549 for h in helas_diags: 4550 if h: 4551 try: 4552 nqcd = h.calculate_orders()['QCD'] 4553 except KeyError: 4554 pass 4555 break 4556 else: 4557 continue 4558 4559 nqcd_list.append(nqcd) 4560 4561 for verts in allchannels: 4562 if verts in schannels: 4563 vert = [v for v in verts if v][0] 4564 else: 4565 vert = verts 4566 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 4567 last_leg = vert.get('legs')[-1] 4568 lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 4569 (last_leg.get('number'), nconfigs, len(daughters), 4570 ",".join([str(d) for d in daughters]))) 4571 if verts in schannels: 4572 pdgs = [] 4573 for v in verts: 4574 if v: 4575 pdgs.append(v.get('legs')[-1].get('id')) 4576 else: 4577 pdgs.append(0) 4578 
lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 4579 (last_leg.get('number'), nconfigs, nsubprocs, 4580 ",".join([str(d) for d in pdgs]))) 4581 lines.append("data tprid(%d,%d)/0/" % \ 4582 (last_leg.get('number'), nconfigs)) 4583 elif verts in tchannels[:-1]: 4584 lines.append("data tprid(%d,%d)/%d/" % \ 4585 (last_leg.get('number'), nconfigs, 4586 abs(last_leg.get('id')))) 4587 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 4588 (last_leg.get('number'), nconfigs, nsubprocs, 4589 ",".join(['0'] * nsubprocs))) 4590 4591 # Write out number of configs 4592 lines.append("# Number of configs") 4593 lines.append("data mapconfig(0)/%d/" % nconfigs) 4594 4595 # Write the file 4596 writer.writelines(lines) 4597 4598 return s_and_t_channels, nqcd_list
4599 4600 #=========================================================================== 4601 # write_decayBW_file 4602 #===========================================================================
4603 - def write_decayBW_file(self, writer, s_and_t_channels):
4604 """Write the decayBW.inc file for MadEvent""" 4605 4606 lines = [] 4607 4608 booldict = {None: "0", True: "1", False: "2"} 4609 4610 for iconf, config in enumerate(s_and_t_channels): 4611 schannels = config[0] 4612 for vertex in schannels: 4613 # For the resulting leg, pick out whether it comes from 4614 # decay or not, as given by the onshell flag 4615 leg = vertex.get('legs')[-1] 4616 lines.append("data gForceBW(%d,%d)/%s/" % \ 4617 (leg.get('number'), iconf + 1, 4618 booldict[leg.get('onshell')])) 4619 4620 # Write the file 4621 writer.writelines(lines) 4622 4623 return True
4624 4625 #=========================================================================== 4626 # write_dname_file 4627 #===========================================================================
4628 - def write_dname_file(self, writer, dir_name):
4629 """Write the dname.mg file for MG4""" 4630 4631 line = "DIRNAME=%s" % dir_name 4632 4633 # Write the file 4634 writer.write(line + "\n") 4635 4636 return True
4637 4638 #=========================================================================== 4639 # write_driver 4640 #===========================================================================
4641 - def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
4642 """Write the SubProcess/driver.f file for MG4""" 4643 4644 path = pjoin(_file_path,'iolibs','template_files','madevent_driver.f') 4645 4646 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4647 card = 'Source/MODEL/MG5_param.dat' 4648 else: 4649 card = 'param_card.dat' 4650 # Requiring each helicity configuration to be probed by 10 points for 4651 # matrix element before using the resulting grid for MC over helicity 4652 # sampling. 4653 # We multiply this by 2 because each grouped subprocess is called at most 4654 # twice for each IMIRROR. 4655 replace_dict = {'param_card_name':card, 4656 'ncomb':ncomb, 4657 'hel_init_points':n_grouped_proc*10*2} 4658 if not v5: 4659 replace_dict['secondparam']=',.true.' 4660 else: 4661 replace_dict['secondparam']='' 4662 4663 if writer: 4664 text = open(path).read() % replace_dict 4665 writer.write(text) 4666 return True 4667 else: 4668 return replace_dict
4669 4670 #=========================================================================== 4671 # write_addmothers 4672 #===========================================================================
4673 - def write_addmothers(self, writer):
4674 """Write the SubProcess/addmothers.f""" 4675 4676 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 4677 4678 text = open(path).read() % {'iconfig': 'diag_number'} 4679 writer.write(text) 4680 4681 return True
4682 4683 4684 #=========================================================================== 4685 # write_combine_events 4686 #===========================================================================
4687 - def write_combine_events(self, writer, nb_proc=100):
4688 """Write the SubProcess/driver.f file for MG4""" 4689 4690 path = pjoin(_file_path,'iolibs','template_files','madevent_combine_events.f') 4691 4692 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4693 card = 'Source/MODEL/MG5_param.dat' 4694 else: 4695 card = 'param_card.dat' 4696 4697 #set maxpup (number of @X in the process card) 4698 4699 text = open(path).read() % {'param_card_name':card, 'maxpup':nb_proc+1} 4700 #the +1 is just a security. This is not needed but I feel(OM) safer with it. 4701 writer.write(text) 4702 4703 return True
4704 4705 4706 #=========================================================================== 4707 # write_symmetry 4708 #===========================================================================
4709 - def write_symmetry(self, writer, v5=True):
4710 """Write the SubProcess/driver.f file for ME""" 4711 4712 path = pjoin(_file_path,'iolibs','template_files','madevent_symmetry.f') 4713 4714 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4715 card = 'Source/MODEL/MG5_param.dat' 4716 else: 4717 card = 'param_card.dat' 4718 4719 if v5: 4720 replace_dict = {'param_card_name':card, 'setparasecondarg':''} 4721 else: 4722 replace_dict= {'param_card_name':card, 'setparasecondarg':',.true.'} 4723 4724 if writer: 4725 text = open(path).read() 4726 text = text % replace_dict 4727 writer.write(text) 4728 return True 4729 else: 4730 return replace_dict
4731 4732 4733 4734 #=========================================================================== 4735 # write_iproc_file 4736 #===========================================================================
4737 - def write_iproc_file(self, writer, me_number):
4738 """Write the iproc.dat file for MG4""" 4739 line = "%d" % (me_number + 1) 4740 4741 # Write the file 4742 for line_to_write in writer.write_line(line): 4743 writer.write(line_to_write) 4744 return True
4745 4746 #=========================================================================== 4747 # write_mg_sym_file 4748 #===========================================================================
4749 - def write_mg_sym_file(self, writer, matrix_element):
4750 """Write the mg.sym file for MadEvent.""" 4751 4752 lines = [] 4753 4754 # Extract process with all decays included 4755 final_legs = filter(lambda leg: leg.get('state') == True, 4756 matrix_element.get('processes')[0].get_legs_with_decays()) 4757 4758 ninitial = len(filter(lambda leg: leg.get('state') == False, 4759 matrix_element.get('processes')[0].get('legs'))) 4760 4761 identical_indices = {} 4762 4763 # Extract identical particle info 4764 for i, leg in enumerate(final_legs): 4765 if leg.get('id') in identical_indices: 4766 identical_indices[leg.get('id')].append(\ 4767 i + ninitial + 1) 4768 else: 4769 identical_indices[leg.get('id')] = [i + ninitial + 1] 4770 4771 # Remove keys which have only one particle 4772 for key in identical_indices.keys(): 4773 if len(identical_indices[key]) < 2: 4774 del identical_indices[key] 4775 4776 # Write mg.sym file 4777 lines.append(str(len(identical_indices.keys()))) 4778 for key in identical_indices.keys(): 4779 lines.append(str(len(identical_indices[key]))) 4780 for number in identical_indices[key]: 4781 lines.append(str(number)) 4782 4783 # Write the file 4784 writer.writelines(lines) 4785 4786 return True
4787
4788 #===========================================================================
4789 # write_default_mg_sym_file
4790 #===========================================================================
4791 - def write_default_mg_sym_file(self, writer):
4792 """Write the mg.sym file for MadEvent.""" 4793 4794 lines = "0" 4795 4796 # Write the file 4797 writer.writelines(lines) 4798 4799 return True
4800 4801 #=========================================================================== 4802 # write_ncombs_file 4803 #===========================================================================
4804 - def write_ncombs_file(self, writer, nexternal):
4805 """Write the ncombs.inc file for MadEvent.""" 4806 4807 # ncomb (used for clustering) is 2^nexternal 4808 file = " integer n_max_cl\n" 4809 file = file + "parameter (n_max_cl=%d)" % (2 ** nexternal) 4810 4811 # Write the file 4812 writer.writelines(file) 4813 4814 return True
4815 4816 #=========================================================================== 4817 # write_processes_file 4818 #===========================================================================
4819 - def write_processes_file(self, writer, subproc_group):
4820 """Write the processes.dat file with info about the subprocesses 4821 in this group.""" 4822 4823 lines = [] 4824 4825 for ime, me in \ 4826 enumerate(subproc_group.get('matrix_elements')): 4827 lines.append("%s %s" % (str(ime+1) + " " * (7-len(str(ime+1))), 4828 ",".join(p.base_string() for p in \ 4829 me.get('processes')))) 4830 if me.get('has_mirror_process'): 4831 mirror_procs = [copy.copy(p) for p in me.get('processes')] 4832 for proc in mirror_procs: 4833 legs = copy.copy(proc.get('legs_with_decays')) 4834 legs.insert(0, legs.pop(1)) 4835 proc.set("legs_with_decays", legs) 4836 lines.append("mirror %s" % ",".join(p.base_string() for p in \ 4837 mirror_procs)) 4838 else: 4839 lines.append("mirror none") 4840 4841 # Write the file 4842 writer.write("\n".join(lines)) 4843 4844 return True
4845 4846 #=========================================================================== 4847 # write_symswap_file 4848 #===========================================================================
4849 - def write_symswap_file(self, writer, ident_perms):
4850 """Write the file symswap.inc for MG4 by comparing diagrams using 4851 the internal matrix element value functionality.""" 4852 4853 lines = [] 4854 4855 # Write out lines for symswap.inc file (used to permute the 4856 # external leg momenta 4857 for iperm, perm in enumerate(ident_perms): 4858 lines.append("data (isym(i,%d),i=1,nexternal)/%s/" % \ 4859 (iperm+1, ",".join([str(i+1) for i in perm]))) 4860 lines.append("data nsym/%d/" % len(ident_perms)) 4861 4862 # Write the file 4863 writer.writelines(lines) 4864 4865 return True
4866 4867 #=========================================================================== 4868 # write_symfact_file 4869 #===========================================================================
4870 - def write_symfact_file(self, writer, symmetry):
4871 """Write the files symfact.dat for MG4 by comparing diagrams using 4872 the internal matrix element value functionality.""" 4873 4874 pos = max(2, int(math.ceil(math.log10(len(symmetry))))) 4875 form = "%"+str(pos)+"r %"+str(pos+1)+"r" 4876 # Write out lines for symswap.inc file (used to permute the 4877 # external leg momenta 4878 lines = [ form %(i+1, s) for i,s in enumerate(symmetry) if s != 0] 4879 # Write the file 4880 writer.write('\n'.join(lines)) 4881 writer.write('\n') 4882 4883 return True
4884 4885 #=========================================================================== 4886 # write_symperms_file 4887 #===========================================================================
4888 - def write_symperms_file(self, writer, perms):
4889 """Write the symperms.inc file for subprocess group, used for 4890 symmetric configurations""" 4891 4892 lines = [] 4893 for iperm, perm in enumerate(perms): 4894 lines.append("data (perms(i,%d),i=1,nexternal)/%s/" % \ 4895 (iperm+1, ",".join([str(i+1) for i in perm]))) 4896 4897 # Write the file 4898 writer.writelines(lines) 4899 4900 return True
4901 4902 #=========================================================================== 4903 # write_subproc 4904 #===========================================================================
4905 - def write_subproc(self, writer, subprocdir):
4906 """Append this subprocess to the subproc.mg file for MG4""" 4907 4908 # Write line to file 4909 writer.write(subprocdir + "\n") 4910 4911 return True
4912
4913 #=============================================================================== 4914 # ProcessExporterFortranMEGroup 4915 #=============================================================================== 4916 -class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
4917 """Class to take care of exporting a set of matrix elements to 4918 MadEvent subprocess group format.""" 4919 4920 matrix_file = "matrix_madevent_group_v4.inc" 4921 grouped_mode = 'madevent' 4922 #=========================================================================== 4923 # generate_subprocess_directory 4924 #===========================================================================
4925 - def generate_subprocess_directory(self, subproc_group, 4926 fortran_model, 4927 group_number):
4928 """Generate the Pn directory for a subprocess group in MadEvent, 4929 including the necessary matrix_N.f files, configs.inc and various 4930 other helper files.""" 4931 4932 assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \ 4933 "subproc_group object not SubProcessGroup" 4934 4935 if not self.model: 4936 self.model = subproc_group.get('matrix_elements')[0].\ 4937 get('processes')[0].get('model') 4938 4939 cwd = os.getcwd() 4940 path = pjoin(self.dir_path, 'SubProcesses') 4941 4942 os.chdir(path) 4943 pathdir = os.getcwd() 4944 4945 # Create the directory PN in the specified path 4946 subprocdir = "P%d_%s" % (subproc_group.get('number'), 4947 subproc_group.get('name')) 4948 try: 4949 os.mkdir(subprocdir) 4950 except os.error as error: 4951 logger.warning(error.strerror + " " + subprocdir) 4952 4953 try: 4954 os.chdir(subprocdir) 4955 except os.error: 4956 logger.error('Could not cd to directory %s' % subprocdir) 4957 return 0 4958 4959 logger.info('Creating files in directory %s' % subprocdir) 4960 4961 # Create the matrix.f files, auto_dsig.f files and all inc files 4962 # for all subprocesses in the group 4963 4964 maxamps = 0 4965 maxflows = 0 4966 tot_calls = 0 4967 4968 matrix_elements = subproc_group.get('matrix_elements') 4969 4970 # Add the driver.f, all grouped ME's must share the same number of 4971 # helicity configuration 4972 ncomb = matrix_elements[0].get_helicity_combinations() 4973 for me in matrix_elements[1:]: 4974 if ncomb!=me.get_helicity_combinations(): 4975 raise MadGraph5Error, "All grouped processes must share the "+\ 4976 "same number of helicity configurations." 4977 4978 filename = 'driver.f' 4979 self.write_driver(writers.FortranWriter(filename),ncomb, 4980 n_grouped_proc=len(matrix_elements), v5=self.opt['v5_model']) 4981 4982 for ime, matrix_element in \ 4983 enumerate(matrix_elements): 4984 filename = 'matrix%d.f' % (ime+1) 4985 calls, ncolor = \ 4986 self.write_matrix_element_v4(writers.FortranWriter(filename), 4987 matrix_element, 4988 fortran_model, 4989 proc_id=str(ime+1), 4990 config_map=subproc_group.get('diagram_maps')[ime], 4991 subproc_number=group_number) 4992 4993 filename = 'auto_dsig%d.f' % (ime+1) 4994 self.write_auto_dsig_file(writers.FortranWriter(filename), 4995 matrix_element, 4996 str(ime+1)) 4997 4998 # Keep track of needed quantities 4999 tot_calls += int(calls) 5000 maxflows = max(maxflows, ncolor) 5001 maxamps = max(maxamps, len(matrix_element.get('diagrams'))) 5002 5003 # Draw diagrams 5004 filename = "matrix%d.ps" % (ime+1) 5005 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 5006 get('diagrams'), 5007 filename, 5008 model = \ 5009 matrix_element.get('processes')[0].\ 5010 get('model'), 5011 amplitude=True) 5012 logger.info("Generating Feynman diagrams for " + \ 5013 matrix_element.get('processes')[0].nice_string()) 5014 plot.draw() 5015 5016 # Extract number of external particles 5017 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 5018 5019 # Generate a list of diagrams corresponding to each configuration 5020 # [[d1, d2, ...,dn],...] 
where 1,2,...,n is the subprocess number 5021 # If a subprocess has no diagrams for this config, the number is 0 5022 subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs') 5023 5024 filename = 'auto_dsig.f' 5025 self.write_super_auto_dsig_file(writers.FortranWriter(filename), 5026 subproc_group) 5027 5028 filename = 'coloramps.inc' 5029 self.write_coloramps_file(writers.FortranWriter(filename), 5030 subproc_diagrams_for_config, 5031 maxflows, 5032 matrix_elements) 5033 5034 filename = 'get_color.f' 5035 self.write_colors_file(writers.FortranWriter(filename), 5036 matrix_elements) 5037 5038 filename = 'config_subproc_map.inc' 5039 self.write_config_subproc_map_file(writers.FortranWriter(filename), 5040 subproc_diagrams_for_config) 5041 5042 filename = 'configs.inc' 5043 nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\ 5044 writers.FortranWriter(filename), 5045 subproc_group, 5046 subproc_diagrams_for_config) 5047 5048 filename = 'config_nqcd.inc' 5049 self.write_config_nqcd_file(writers.FortranWriter(filename), 5050 nqcd_list) 5051 5052 filename = 'decayBW.inc' 5053 self.write_decayBW_file(writers.FortranWriter(filename), 5054 s_and_t_channels) 5055 5056 filename = 'dname.mg' 5057 self.write_dname_file(writers.FortranWriter(filename), 5058 subprocdir) 5059 5060 filename = 'iproc.dat' 5061 self.write_iproc_file(writers.FortranWriter(filename), 5062 group_number) 5063 5064 filename = 'leshouche.inc' 5065 self.write_leshouche_file(writers.FortranWriter(filename), 5066 subproc_group) 5067 5068 filename = 'maxamps.inc' 5069 self.write_maxamps_file(writers.FortranWriter(filename), 5070 maxamps, 5071 maxflows, 5072 max([len(me.get('processes')) for me in \ 5073 matrix_elements]), 5074 len(matrix_elements)) 5075 5076 # Note that mg.sym is not relevant for this case 5077 filename = 'mg.sym' 5078 self.write_default_mg_sym_file(writers.FortranWriter(filename)) 5079 5080 filename = 'mirrorprocs.inc' 5081 self.write_mirrorprocs(writers.FortranWriter(filename), 5082 subproc_group) 5083 5084 filename = 'ncombs.inc' 5085 self.write_ncombs_file(writers.FortranWriter(filename), 5086 nexternal) 5087 5088 filename = 'nexternal.inc' 5089 self.write_nexternal_file(writers.FortranWriter(filename), 5090 nexternal, ninitial) 5091 5092 filename = 'ngraphs.inc' 5093 self.write_ngraphs_file(writers.FortranWriter(filename), 5094 nconfigs) 5095 5096 filename = 'pmass.inc' 5097 self.write_pmass_file(writers.FortranWriter(filename), 5098 matrix_element) 5099 5100 filename = 'props.inc' 5101 self.write_props_file(writers.FortranWriter(filename), 5102 matrix_element, 5103 s_and_t_channels) 5104 5105 filename = 'processes.dat' 5106 files.write_to_file(filename, 5107 self.write_processes_file, 5108 subproc_group) 5109 5110 # Find config symmetries and permutations 5111 symmetry, perms, ident_perms = \ 5112 diagram_symmetry.find_symmetry(subproc_group) 5113 5114 filename = 'symswap.inc' 5115 self.write_symswap_file(writers.FortranWriter(filename), 5116 ident_perms) 5117 5118 filename = 'symfact_orig.dat' 5119 self.write_symfact_file(open(filename, 'w'), symmetry) 5120 5121 filename = 'symperms.inc' 5122 self.write_symperms_file(writers.FortranWriter(filename), 5123 perms) 5124 5125 # Generate jpgs -> pass in make_html 5126 #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl')) 5127 5128 self.link_files_in_SubProcess(pjoin(pathdir,subprocdir)) 5129 5130 #import nexternal/leshouch in Source 5131 ln('nexternal.inc', '../../Source', log=False) 5132 ln('leshouche.inc', '../../Source', log=False) 
5133 ln('maxamps.inc', '../../Source', log=False)
5134
5135 # Return to SubProcesses dir
5136 os.chdir(pathdir)
5137
5138 # Add subprocess to subproc.mg
5139 filename = 'subproc.mg'
5140 files.append_to_file(filename,
5141 self.write_subproc,
5142 subprocdir)
5143
5144 # Return to original dir
5145 os.chdir(cwd)
5146
5147 if not tot_calls:
5148 tot_calls = 0
5149 return tot_calls
5150 5151 #=========================================================================== 5152 # write_super_auto_dsig_file 5153 #===========================================================================
5154 - def write_super_auto_dsig_file(self, writer, subproc_group):
5155 """Write the auto_dsig.f file selecting between the subprocesses 5156 in subprocess group mode""" 5157 5158 replace_dict = {} 5159 5160 # Extract version number and date from VERSION file 5161 info_lines = self.get_mg5_info_lines() 5162 replace_dict['info_lines'] = info_lines 5163 5164 matrix_elements = subproc_group.get('matrix_elements') 5165 5166 # Extract process info lines 5167 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 5168 matrix_elements]) 5169 replace_dict['process_lines'] = process_lines 5170 5171 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 5172 replace_dict['nexternal'] = nexternal 5173 5174 replace_dict['nsprocs'] = 2*len(matrix_elements) 5175 5176 # Generate dsig definition line 5177 dsig_def_line = "DOUBLE PRECISION " + \ 5178 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 5179 range(len(matrix_elements))]) 5180 replace_dict["dsig_def_line"] = dsig_def_line 5181 5182 # Generate dsig process lines 5183 call_dsig_proc_lines = [] 5184 for iproc in range(len(matrix_elements)): 5185 call_dsig_proc_lines.append(\ 5186 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 5187 {"num": iproc + 1, 5188 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 5189 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 5190 5191 ncomb=matrix_elements[0].get_helicity_combinations() 5192 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 5193 5194 if writer: 5195 file = open(pjoin(_file_path, \ 5196 'iolibs/template_files/super_auto_dsig_group_v4.inc')).read() 5197 file = file % replace_dict 5198 5199 # Write the file 5200 writer.writelines(file) 5201 else: 5202 return replace_dict
5203 5204 #=========================================================================== 5205 # write_mirrorprocs 5206 #===========================================================================
5207 - def write_mirrorprocs(self, writer, subproc_group):
5208 """Write the mirrorprocs.inc file determining which processes have 5209 IS mirror process in subprocess group mode.""" 5210 5211 lines = [] 5212 bool_dict = {True: '.true.', False: '.false.'} 5213 matrix_elements = subproc_group.get('matrix_elements') 5214 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 5215 (len(matrix_elements), 5216 ",".join([bool_dict[me.get('has_mirror_process')] for \ 5217 me in matrix_elements]))) 5218 # Write the file 5219 writer.writelines(lines)
5220 5221 #=========================================================================== 5222 # write_addmothers 5223 #===========================================================================
5224 - def write_addmothers(self, writer):
5225 """Write the SubProcess/addmothers.f""" 5226 5227 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 5228 5229 text = open(path).read() % {'iconfig': 'lconfig'} 5230 writer.write(text) 5231 5232 return True
5233 5234 5235 #=========================================================================== 5236 # write_coloramps_file 5237 #===========================================================================
5238 - def write_coloramps_file(self, writer, diagrams_for_config, maxflows, 5239 matrix_elements):
5240 """Write the coloramps.inc file for MadEvent in Subprocess group mode""" 5241 5242 # Create a map from subprocess (matrix element) to a list of 5243 # the diagrams corresponding to each config 5244 5245 lines = [] 5246 5247 subproc_to_confdiag = {} 5248 for config in diagrams_for_config: 5249 for subproc, diag in enumerate(config): 5250 try: 5251 subproc_to_confdiag[subproc].append(diag) 5252 except KeyError: 5253 subproc_to_confdiag[subproc] = [diag] 5254 5255 for subproc in sorted(subproc_to_confdiag.keys()): 5256 lines.extend(self.get_icolamp_lines(subproc_to_confdiag[subproc], 5257 matrix_elements[subproc], 5258 subproc + 1)) 5259 5260 lines.insert(0, "logical icolamp(%d,%d,%d)" % \ 5261 (maxflows, 5262 len(diagrams_for_config), 5263 len(matrix_elements))) 5264 5265 # Write the file 5266 writer.writelines(lines) 5267 5268 return True
5269 5270 #=========================================================================== 5271 # write_config_subproc_map_file 5272 #===========================================================================
5273 - def write_config_subproc_map_file(self, writer, config_subproc_map):
5274 """Write the config_subproc_map.inc file for subprocess groups""" 5275 5276 lines = [] 5277 # Output only configs that have some corresponding diagrams 5278 iconfig = 0 5279 for config in config_subproc_map: 5280 if set(config) == set([0]): 5281 continue 5282 lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" % \ 5283 (iconfig + 1, len(config), 5284 ",".join([str(i) for i in config]))) 5285 iconfig += 1 5286 # Write the file 5287 writer.writelines(lines) 5288 5289 return True
5290 5291 #=========================================================================== 5292 # read_write_good_hel 5293 #===========================================================================
5294 - def read_write_good_hel(self, ncomb):
5295 """return the code to read/write the good_hel common_block""" 5296 5297 convert = {'ncomb' : ncomb} 5298 5299 output = """ 5300 subroutine write_good_hel(stream_id) 5301 implicit none 5302 integer stream_id 5303 INTEGER NCOMB 5304 PARAMETER ( NCOMB=%(ncomb)d) 5305 LOGICAL GOODHEL(NCOMB, 2) 5306 INTEGER NTRY(2) 5307 common/BLOCK_GOODHEL/NTRY,GOODHEL 5308 write(stream_id,*) GOODHEL 5309 return 5310 end 5311 5312 5313 subroutine read_good_hel(stream_id) 5314 implicit none 5315 include 'genps.inc' 5316 integer stream_id 5317 INTEGER NCOMB 5318 PARAMETER ( NCOMB=%(ncomb)d) 5319 LOGICAL GOODHEL(NCOMB, 2) 5320 INTEGER NTRY(2) 5321 common/BLOCK_GOODHEL/NTRY,GOODHEL 5322 read(stream_id,*) GOODHEL 5323 NTRY(1) = MAXTRIES + 1 5324 NTRY(2) = MAXTRIES + 1 5325 return 5326 end 5327 5328 subroutine init_good_hel() 5329 implicit none 5330 INTEGER NCOMB 5331 PARAMETER ( NCOMB=%(ncomb)d) 5332 LOGICAL GOODHEL(NCOMB, 2) 5333 INTEGER NTRY(2) 5334 INTEGER I 5335 5336 do i=1,NCOMB 5337 GOODHEL(I,1) = .false. 5338 GOODHEL(I,2) = .false. 5339 enddo 5340 NTRY(1) = 0 5341 NTRY(2) = 0 5342 end 5343 5344 integer function get_maxsproc() 5345 implicit none 5346 include 'maxamps.inc' 5347 5348 get_maxsproc = maxsproc 5349 return 5350 end 5351 5352 """ % convert 5353 5354 return output
5355 5356 5357 5358 #=========================================================================== 5359 # write_configs_file 5360 #===========================================================================
5361 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
5362 """Write the configs.inc file with topology information for a 5363 subprocess group. Use the first subprocess with a diagram for each 5364 configuration.""" 5365 5366 matrix_elements = subproc_group.get('matrix_elements') 5367 model = matrix_elements[0].get('processes')[0].get('model') 5368 5369 diagrams = [] 5370 config_numbers = [] 5371 for iconfig, config in enumerate(diagrams_for_config): 5372 # Check if any diagrams correspond to this config 5373 if set(config) == set([0]): 5374 continue 5375 subproc_diags = [] 5376 for s,d in enumerate(config): 5377 if d: 5378 subproc_diags.append(matrix_elements[s].\ 5379 get('diagrams')[d-1]) 5380 else: 5381 subproc_diags.append(None) 5382 diagrams.append(subproc_diags) 5383 config_numbers.append(iconfig + 1) 5384 5385 # Extract number of external particles 5386 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 5387 5388 return len(diagrams), \ 5389 self.write_configs_file_from_diagrams(writer, diagrams, 5390 config_numbers, 5391 nexternal, ninitial, 5392 model)
5393
5394 #===========================================================================
5395 # write_run_config_file
5396 #===========================================================================
5397 - def write_run_config_file(self, writer):
5398 """Write the run_configs.inc file for MadEvent""" 5399 5400 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 5401 if self.proc_characteristic['loop_induced']: 5402 job_per_chan = 1 5403 else: 5404 job_per_chan = 2 5405 text = open(path).read() % {'chanperjob':job_per_chan} 5406 writer.write(text) 5407 return True
5408 5409 5410 #=========================================================================== 5411 # write_leshouche_file 5412 #===========================================================================
5413 - def write_leshouche_file(self, writer, subproc_group):
5414 """Write the leshouche.inc file for MG4""" 5415 5416 all_lines = [] 5417 5418 for iproc, matrix_element in \ 5419 enumerate(subproc_group.get('matrix_elements')): 5420 all_lines.extend(self.get_leshouche_lines(matrix_element, 5421 iproc)) 5422 # Write the file 5423 writer.writelines(all_lines) 5424 return True
5425 5426
5427 - def finalize(self,*args, **opts):
5428
5429 super(ProcessExporterFortranMEGroup, self).finalize(*args, **opts)
5430 # ensure that the grouping information is set to the correct value
5431 self.proc_characteristic['grouped_matrix'] = True
5432 5433 5434 #=============================================================================== 5435 # UFO_model_to_mg4 5436 #=============================================================================== 5437 5438 python_to_fortran = lambda x: parsers.UFOExpressionParserFortran().parse(x)
5439 5440 -class UFO_model_to_mg4(object):
5441 """ A converter of the UFO-MG5 Model to the MG4 format """ 5442 5443 # The list below shows the only variables the user is allowed to change by 5444 # himself for each PS point. If he changes any other, then calling 5445 # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not 5446 # correctly account for the change. 5447 PS_dependent_key = ['aS','MU_R'] 5448 mp_complex_format = 'complex*32' 5449 mp_real_format = 'real*16' 5450 # Warning, it is crucial none of the couplings/parameters of the model 5451 # starts with this prefix. I should add a check for this. 5452 # You can change it as the global variable to check_param_card.ParamCard 5453 mp_prefix = check_param_card.ParamCard.mp_prefix 5454
5455 - def __init__(self, model, output_path, opt=None):
5456 """ initialization of the objects """ 5457 5458 self.model = model 5459 self.model_name = model['name'] 5460 self.dir_path = output_path 5461 5462 self.opt = {'complex_mass': False, 'export_format': 'madevent', 'mp':True, 5463 'loop_induced': False} 5464 if opt: 5465 self.opt.update(opt) 5466 5467 self.coups_dep = [] # (name, expression, type) 5468 self.coups_indep = [] # (name, expression, type) 5469 self.params_dep = [] # (name, expression, type) 5470 self.params_indep = [] # (name, expression, type) 5471 self.params_ext = [] # external parameter 5472 self.p_to_f = parsers.UFOExpressionParserFortran(self.model) 5473 self.mp_p_to_f = parsers.UFOExpressionParserMPFortran(self.model)
5474
5475 - def pass_parameter_to_case_insensitive(self):
5476 """modify the parameter if some of them are identical up to the case""" 5477 5478 lower_dict={} 5479 duplicate = set() 5480 keys = self.model['parameters'].keys() 5481 for key in keys: 5482 for param in self.model['parameters'][key]: 5483 lower_name = param.name.lower() 5484 if not lower_name: 5485 continue 5486 try: 5487 lower_dict[lower_name].append(param) 5488 except KeyError,error: 5489 lower_dict[lower_name] = [param] 5490 else: 5491 duplicate.add(lower_name) 5492 logger.debug('%s is define both as lower case and upper case.' 5493 % lower_name) 5494 if not duplicate: 5495 return 5496 5497 re_expr = r'''\b(%s)\b''' 5498 to_change = [] 5499 change={} 5500 for value in duplicate: 5501 for i, var in enumerate(lower_dict[value]): 5502 to_change.append(var.name) 5503 new_name = '%s%s' % (var.name.lower(), 5504 ('__%d'%(i+1) if i>0 else '')) 5505 change[var.name] = new_name 5506 var.name = new_name 5507 5508 # Apply the modification to the map_CTcoup_CTparam of the model 5509 # if it has one (giving for each coupling the CT parameters whcih 5510 # are necessary and which should be exported to the model. 5511 if hasattr(self.model,'map_CTcoup_CTparam'): 5512 for coup, ctparams in self.model.map_CTcoup_CTparam: 5513 for i, ctparam in enumerate(ctparams): 5514 try: 5515 self.model.map_CTcoup_CTparam[coup][i] = change[ctparam] 5516 except KeyError: 5517 pass 5518 5519 replace = lambda match_pattern: change[match_pattern.groups()[0]] 5520 rep_pattern = re.compile(re_expr % '|'.join(to_change)) 5521 5522 # change parameters 5523 for key in keys: 5524 if key == ('external',): 5525 continue 5526 for param in self.model['parameters'][key]: 5527 param.expr = rep_pattern.sub(replace, param.expr) 5528 5529 # change couplings 5530 for key in self.model['couplings'].keys(): 5531 for coup in self.model['couplings'][key]: 5532 coup.expr = rep_pattern.sub(replace, coup.expr) 5533 5534 # change mass/width 5535 for part in self.model['particles']: 5536 if str(part.get('mass')) in to_change: 5537 part.set('mass', rep_pattern.sub(replace, str(part.get('mass')))) 5538 if str(part.get('width')) in to_change: 5539 part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
5540
5541 - def refactorize(self, wanted_couplings = []):
5542 """modify the couplings to fit with MG4 convention """ 5543 5544 # Keep only separation in alphaS 5545 keys = self.model['parameters'].keys() 5546 keys.sort(key=len) 5547 for key in keys: 5548 to_add = [o for o in self.model['parameters'][key] if o.name] 5549 5550 if key == ('external',): 5551 self.params_ext += to_add 5552 elif any([(k in key) for k in self.PS_dependent_key]): 5553 self.params_dep += to_add 5554 else: 5555 self.params_indep += to_add 5556 # same for couplings 5557 keys = self.model['couplings'].keys() 5558 keys.sort(key=len) 5559 for key, coup_list in self.model['couplings'].items(): 5560 if any([(k in key) for k in self.PS_dependent_key]): 5561 self.coups_dep += [c for c in coup_list if 5562 (not wanted_couplings or c.name in \ 5563 wanted_couplings)] 5564 else: 5565 self.coups_indep += [c for c in coup_list if 5566 (not wanted_couplings or c.name in \ 5567 wanted_couplings)] 5568 5569 # MG4 use G and not aS as it basic object for alphas related computation 5570 #Pass G in the independant list 5571 if 'G' in self.params_dep: 5572 index = self.params_dep.index('G') 5573 G = self.params_dep.pop(index) 5574 # G.expr = '2*cmath.sqrt(as*pi)' 5575 # self.params_indep.insert(0, self.params_dep.pop(index)) 5576 # No need to add it if not defined 5577 5578 if 'aS' not in self.params_ext: 5579 logger.critical('aS not define as external parameter adding it!') 5580 #self.model['parameters']['aS'] = base_objects.ParamCardVariable('aS', 0.138,'DUMMY',(1,)) 5581 self.params_indep.append( base_objects. ModelVariable('aS', '0.138','real')) 5582 self.params_indep.append( base_objects. ModelVariable('G', '4.1643','real'))
5583 - def build(self, wanted_couplings = [], full=True):
5584 """modify the couplings to fit with MG4 convention and creates all the 5585 different files""" 5586 5587 self.pass_parameter_to_case_insensitive() 5588 self.refactorize(wanted_couplings) 5589 5590 # write the files 5591 if full: 5592 if wanted_couplings: 5593 # extract the wanted ct parameters 5594 self.extract_needed_CTparam(wanted_couplings=wanted_couplings) 5595 self.write_all()
5596 5597
5598 - def open(self, name, comment='c', format='default'):
5599 """ Open the file name in the correct directory and with a valid 5600 header.""" 5601 5602 file_path = pjoin(self.dir_path, name) 5603 5604 if format == 'fortran': 5605 fsock = writers.FortranWriter(file_path, 'w') 5606 else: 5607 fsock = open(file_path, 'w') 5608 5609 file.writelines(fsock, comment * 77 + '\n') 5610 file.writelines(fsock,'%(comment)s written by the UFO converter\n' % \ 5611 {'comment': comment + (6 - len(comment)) * ' '}) 5612 file.writelines(fsock, comment * 77 + '\n\n') 5613 return fsock
5614 5615
5616 - def write_all(self):
5617 """ write all the files """ 5618 #write the part related to the external parameter 5619 self.create_ident_card() 5620 self.create_param_read() 5621 5622 #write the definition of the parameter 5623 self.create_input() 5624 self.create_intparam_def(dp=True,mp=False) 5625 if self.opt['mp']: 5626 self.create_intparam_def(dp=False,mp=True) 5627 5628 # definition of the coupling. 5629 self.create_actualize_mp_ext_param_inc() 5630 self.create_coupl_inc() 5631 self.create_write_couplings() 5632 self.create_couplings() 5633 5634 # the makefile 5635 self.create_makeinc() 5636 self.create_param_write() 5637 5638 # The model functions 5639 self.create_model_functions_inc() 5640 self.create_model_functions_def() 5641 5642 # The param_card.dat 5643 self.create_param_card() 5644 5645 5646 # All the standard files 5647 self.copy_standard_file()
5648 5649 ############################################################################ 5650 ## ROUTINE CREATING THE FILES ############################################ 5651 ############################################################################ 5652
5653 - def copy_standard_file(self):
5654 """Copy the standard files for the fortran model.""" 5655 5656 #copy the library files 5657 file_to_link = ['formats.inc','printout.f', \ 5658 'rw_para.f', 'testprog.f'] 5659 5660 for filename in file_to_link: 5661 cp( MG5DIR + '/models/template_files/fortran/' + filename, \ 5662 self.dir_path) 5663 5664 file = open(os.path.join(MG5DIR,\ 5665 'models/template_files/fortran/rw_para.f')).read() 5666 5667 includes=["include \'coupl.inc\'","include \'input.inc\'", 5668 "include \'model_functions.inc\'"] 5669 if self.opt['mp']: 5670 includes.extend(["include \'mp_coupl.inc\'","include \'mp_input.inc\'"]) 5671 # In standalone and madloop we do no use the compiled param card but 5672 # still parse the .dat one so we must load it. 5673 if self.opt['loop_induced']: 5674 #loop induced follow MadEvent way to handle the card. 5675 load_card = '' 5676 lha_read_filename='lha_read.f' 5677 elif self.opt['export_format'] in ['madloop','madloop_optimized', 'madloop_matchbox']: 5678 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5679 lha_read_filename='lha_read_mp.f' 5680 elif self.opt['export_format'].startswith('standalone') \ 5681 or self.opt['export_format'] in ['madweight', 'plugin']\ 5682 or self.opt['export_format'].startswith('matchbox'): 5683 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5684 lha_read_filename='lha_read.f' 5685 else: 5686 load_card = '' 5687 lha_read_filename='lha_read.f' 5688 cp( MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \ 5689 os.path.join(self.dir_path,'lha_read.f')) 5690 5691 file=file%{'includes':'\n '.join(includes), 5692 'load_card':load_card} 5693 writer=open(os.path.join(self.dir_path,'rw_para.f'),'w') 5694 writer.writelines(file) 5695 writer.close() 5696 5697 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 5698 or self.opt['loop_induced']: 5699 cp( MG5DIR + '/models/template_files/fortran/makefile_madevent', 5700 self.dir_path + '/makefile') 5701 if self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']: 5702 path = pjoin(self.dir_path, 'makefile') 5703 text = open(path).read() 5704 text = text.replace('madevent','aMCatNLO') 5705 open(path, 'w').writelines(text) 5706 elif self.opt['export_format'] in ['standalone', 'standalone_msP','standalone_msF', 5707 'madloop','madloop_optimized', 'standalone_rw', 5708 'madweight','matchbox','madloop_matchbox', 'plugin']: 5709 cp( MG5DIR + '/models/template_files/fortran/makefile_standalone', 5710 self.dir_path + '/makefile') 5711 #elif self.opt['export_format'] in []: 5712 #pass 5713 else: 5714 raise MadGraph5Error('Unknown format')
5715
5716 - def create_coupl_inc(self):
5717 """ write coupling.inc """ 5718 5719 fsock = self.open('coupl.inc', format='fortran') 5720 if self.opt['mp']: 5721 mp_fsock = self.open('mp_coupl.inc', format='fortran') 5722 mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\ 5723 format='fortran') 5724 5725 # Write header 5726 header = """double precision G 5727 common/strong/ G 5728 5729 double complex gal(2) 5730 common/weak/ gal 5731 5732 double precision MU_R 5733 common/rscale/ MU_R 5734 5735 double precision Nf 5736 parameter(Nf=%d) 5737 """ % self.model.get_nflav() 5738 5739 fsock.writelines(header) 5740 5741 if self.opt['mp']: 5742 header = """%(real_mp_format)s %(mp_prefix)sG 5743 common/MP_strong/ %(mp_prefix)sG 5744 5745 %(complex_mp_format)s %(mp_prefix)sgal(2) 5746 common/MP_weak/ %(mp_prefix)sgal 5747 5748 %(complex_mp_format)s %(mp_prefix)sMU_R 5749 common/MP_rscale/ %(mp_prefix)sMU_R 5750 5751 """ 5752 5753 5754 5755 5756 mp_fsock.writelines(header%{'real_mp_format':self.mp_real_format, 5757 'complex_mp_format':self.mp_complex_format, 5758 'mp_prefix':self.mp_prefix}) 5759 mp_fsock_same_name.writelines(header%{'real_mp_format':self.mp_real_format, 5760 'complex_mp_format':self.mp_complex_format, 5761 'mp_prefix':''}) 5762 5763 # Write the Mass definition/ common block 5764 masses = set() 5765 widths = set() 5766 if self.opt['complex_mass']: 5767 complex_mass = set() 5768 5769 for particle in self.model.get('particles'): 5770 #find masses 5771 one_mass = particle.get('mass') 5772 if one_mass.lower() != 'zero': 5773 masses.add(one_mass) 5774 5775 # find width 5776 one_width = particle.get('width') 5777 if one_width.lower() != 'zero': 5778 widths.add(one_width) 5779 if self.opt['complex_mass'] and one_mass.lower() != 'zero': 5780 complex_mass.add('CMASS_%s' % one_mass) 5781 5782 if masses: 5783 fsock.writelines('double precision '+','.join(masses)+'\n') 5784 fsock.writelines('common/masses/ '+','.join(masses)+'\n\n') 5785 if self.opt['mp']: 5786 mp_fsock_same_name.writelines(self.mp_real_format+' '+\ 5787 ','.join(masses)+'\n') 5788 mp_fsock_same_name.writelines('common/MP_masses/ '+\ 5789 ','.join(masses)+'\n\n') 5790 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5791 self.mp_prefix+m for m in masses])+'\n') 5792 mp_fsock.writelines('common/MP_masses/ '+\ 5793 ','.join([self.mp_prefix+m for m in masses])+'\n\n') 5794 5795 if widths: 5796 fsock.writelines('double precision '+','.join(widths)+'\n') 5797 fsock.writelines('common/widths/ '+','.join(widths)+'\n\n') 5798 if self.opt['mp']: 5799 mp_fsock_same_name.writelines(self.mp_real_format+' '+\ 5800 ','.join(widths)+'\n') 5801 mp_fsock_same_name.writelines('common/MP_widths/ '+\ 5802 ','.join(widths)+'\n\n') 5803 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5804 self.mp_prefix+w for w in widths])+'\n') 5805 mp_fsock.writelines('common/MP_widths/ '+\ 5806 ','.join([self.mp_prefix+w for w in widths])+'\n\n') 5807 5808 # Write the Couplings 5809 coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep] 5810 fsock.writelines('double complex '+', '.join(coupling_list)+'\n') 5811 fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n') 5812 if self.opt['mp']: 5813 mp_fsock_same_name.writelines(self.mp_complex_format+' '+\ 5814 ','.join(coupling_list)+'\n') 5815 mp_fsock_same_name.writelines('common/MP_couplings/ '+\ 5816 ','.join(coupling_list)+'\n\n') 5817 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5818 self.mp_prefix+c for c in coupling_list])+'\n') 5819 mp_fsock.writelines('common/MP_couplings/ '+\ 
5820 ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n') 5821 5822 # Write complex mass for complex mass scheme (if activated) 5823 if self.opt['complex_mass'] and complex_mass: 5824 fsock.writelines('double complex '+', '.join(complex_mass)+'\n') 5825 fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n') 5826 if self.opt['mp']: 5827 mp_fsock_same_name.writelines(self.mp_complex_format+' '+\ 5828 ','.join(complex_mass)+'\n') 5829 mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\ 5830 ','.join(complex_mass)+'\n\n') 5831 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5832 self.mp_prefix+cm for cm in complex_mass])+'\n') 5833 mp_fsock.writelines('common/MP_complex_mass/ '+\ 5834 ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
5835
5836 - def create_write_couplings(self):
5837 """ write the file coupl_write.inc """ 5838 5839 fsock = self.open('coupl_write.inc', format='fortran') 5840 5841 fsock.writelines("""write(*,*) ' Couplings of %s' 5842 write(*,*) ' ---------------------------------' 5843 write(*,*) ' '""" % self.model_name) 5844 def format(coupl): 5845 return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}
5846 5847 # Write the Couplings 5848 lines = [format(coupl) for coupl in self.coups_dep + self.coups_indep] 5849 fsock.writelines('\n'.join(lines)) 5850 5851
5852 - def create_input(self):
5853 """create input.inc containing the definition of the parameters""" 5854 5855 fsock = self.open('input.inc', format='fortran') 5856 if self.opt['mp']: 5857 mp_fsock = self.open('mp_input.inc', format='fortran') 5858 5859 #find mass/ width since they are already define 5860 already_def = set() 5861 for particle in self.model.get('particles'): 5862 already_def.add(particle.get('mass').lower()) 5863 already_def.add(particle.get('width').lower()) 5864 if self.opt['complex_mass']: 5865 already_def.add('cmass_%s' % particle.get('mass').lower()) 5866 5867 is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \ 5868 name.lower() not in already_def 5869 5870 real_parameters = [param.name for param in self.params_dep + 5871 self.params_indep if param.type == 'real' 5872 and is_valid(param.name)] 5873 5874 real_parameters += [param.name for param in self.params_ext 5875 if param.type == 'real'and 5876 is_valid(param.name)] 5877 5878 # check the parameter is a CT parameter or not 5879 # if yes, just use the needed ones 5880 real_parameters = [param for param in real_parameters \ 5881 if self.check_needed_param(param)] 5882 5883 fsock.writelines('double precision '+','.join(real_parameters)+'\n') 5884 fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n') 5885 if self.opt['mp']: 5886 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5887 self.mp_prefix+p for p in real_parameters])+'\n') 5888 mp_fsock.writelines('common/MP_params_R/ '+','.join([\ 5889 self.mp_prefix+p for p in real_parameters])+'\n\n') 5890 5891 complex_parameters = [param.name for param in self.params_dep + 5892 self.params_indep if param.type == 'complex' and 5893 is_valid(param.name)] 5894 5895 # check the parameter is a CT parameter or not 5896 # if yes, just use the needed ones 5897 complex_parameters = [param for param in complex_parameters \ 5898 if self.check_needed_param(param)] 5899 5900 if complex_parameters: 5901 fsock.writelines('double complex '+','.join(complex_parameters)+'\n') 5902 fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n') 5903 if self.opt['mp']: 5904 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5905 self.mp_prefix+p for p in complex_parameters])+'\n') 5906 mp_fsock.writelines('common/MP_params_C/ '+','.join([\ 5907 self.mp_prefix+p for p in complex_parameters])+'\n\n')
5908
5909 - def check_needed_param(self, param):
5910 """ Returns whether the parameter in argument is needed for this 5911 specific computation or not.""" 5912 5913 # If this is a leading order model or if there was no CT parameter 5914 # employed in this NLO model, one can directly return that the 5915 # parameter is needed since only CTParameters are filtered. 5916 if not hasattr(self, 'allCTparameters') or \ 5917 self.allCTparameters is None or self.usedCTparameters is None or \ 5918 len(self.allCTparameters)==0: 5919 return True 5920 5921 # We must allow the conjugate shorthand for the complex parameter as 5922 # well so we check wether either the parameter name or its name with 5923 # 'conjg__' substituted with '' is present in the list. 5924 # This is acceptable even if some parameter had an original name 5925 # including 'conjg__' in it, because at worst we export a parameter 5926 # was not needed. 5927 param = param.lower() 5928 cjg_param = param.replace('conjg__','',1) 5929 5930 # First make sure it is a CTparameter 5931 if param not in self.allCTparameters and \ 5932 cjg_param not in self.allCTparameters: 5933 return True 5934 5935 # Now check if it is in the list of CTparameters actually used 5936 return (param in self.usedCTparameters or \ 5937 cjg_param in self.usedCTparameters)
5938
5939 - def extract_needed_CTparam(self,wanted_couplings=[]):
5940 """ Extract what are the needed CT parameters given the wanted_couplings""" 5941 5942 if not hasattr(self.model,'map_CTcoup_CTparam') or not wanted_couplings: 5943 # Setting these lists to none wil disable the filtering in 5944 # check_needed_param 5945 self.allCTparameters = None 5946 self.usedCTparameters = None 5947 return 5948 5949 # All CTparameters appearin in all CT couplings 5950 allCTparameters=self.model.map_CTcoup_CTparam.values() 5951 # Define in this class the list of all CT parameters 5952 self.allCTparameters=list(\ 5953 set(itertools.chain.from_iterable(allCTparameters))) 5954 5955 # All used CT couplings 5956 w_coupls = [coupl.lower() for coupl in wanted_couplings] 5957 allUsedCTCouplings = [coupl for coupl in 5958 self.model.map_CTcoup_CTparam.keys() if coupl.lower() in w_coupls] 5959 5960 # Now define the list of all CT parameters that are actually used 5961 self.usedCTparameters=list(\ 5962 set(itertools.chain.from_iterable([ 5963 self.model.map_CTcoup_CTparam[coupl] for coupl in allUsedCTCouplings 5964 ]))) 5965 5966 # Now at last, make these list case insensitive 5967 self.allCTparameters = [ct.lower() for ct in self.allCTparameters] 5968 self.usedCTparameters = [ct.lower() for ct in self.usedCTparameters]
5969
5970 - def create_intparam_def(self, dp=True, mp=False):
5971 """ create intparam_definition.inc setting the internal parameters. 5972 Output the double precision and/or the multiple precision parameters 5973 depending on the parameters dp and mp. If mp only, then the file names 5974 get the 'mp_' prefix. 5975 """ 5976 5977 fsock = self.open('%sintparam_definition.inc'% 5978 ('mp_' if mp and not dp else ''), format='fortran') 5979 5980 fsock.write_comments(\ 5981 "Parameters that should not be recomputed event by event.\n") 5982 fsock.writelines("if(readlha) then\n") 5983 if dp: 5984 fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n") 5985 if mp: 5986 fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n") 5987 5988 for param in self.params_indep: 5989 if param.name == 'ZERO': 5990 continue 5991 # check whether the parameter is a CT parameter 5992 # if yes,just used the needed ones 5993 if not self.check_needed_param(param.name): 5994 continue 5995 if dp: 5996 fsock.writelines("%s = %s\n" % (param.name, 5997 self.p_to_f.parse(param.expr))) 5998 if mp: 5999 fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name, 6000 self.mp_p_to_f.parse(param.expr))) 6001 6002 fsock.writelines('endif') 6003 6004 fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n') 6005 if dp: 6006 fsock.writelines("aS = G**2/4/pi\n") 6007 if mp: 6008 fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n") 6009 for param in self.params_dep: 6010 # check whether the parameter is a CT parameter 6011 # if yes,just used the needed ones 6012 if not self.check_needed_param(param.name): 6013 continue 6014 if dp: 6015 fsock.writelines("%s = %s\n" % (param.name, 6016 self.p_to_f.parse(param.expr))) 6017 elif mp: 6018 fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name, 6019 self.mp_p_to_f.parse(param.expr))) 6020 6021 fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n") 6022 if ('aEWM1',) in self.model['parameters']: 6023 if dp: 6024 fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1) 6025 gal(2) = 1d0 6026 """) 6027 elif mp: 6028 fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1) 6029 %(mp_prefix)sgal(2) = 1d0 6030 """ %{'mp_prefix':self.mp_prefix}) 6031 pass 6032 # in Gmu scheme, aEWM1 is not external but Gf is an exteranl variable 6033 elif ('Gf',) in self.model['parameters']: 6034 if dp: 6035 fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*DSQRT(1D0-MDL_MW**2/MDL_MZ**2)*DSQRT(MDL_Gf) 6036 gal(2) = 1d0 6037 """) 6038 elif mp: 6039 fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*SQRT(1e0_16-MP__MDL_MW**2/MP__MDL_MZ**2)*SQRT(SQRT(2e0_16)*MP__MDL_Gf) 6040 %(mp_prefix)sgal(2) = 1d0 6041 """ %{'mp_prefix':self.mp_prefix}) 6042 pass 6043 else: 6044 if dp: 6045 logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE') 6046 fsock.writelines(""" gal(1) = 1d0 6047 gal(2) = 1d0 6048 """) 6049 elif mp: 6050 fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16 6051 %(mp_prefix)sgal(2) = 1e0_16 6052 """%{'mp_prefix':self.mp_prefix})
6053 6054
6055 - def create_couplings(self):
6056 """ create couplings.f and all couplingsX.f """ 6057 6058 nb_def_by_file = 25 6059 6060 self.create_couplings_main(nb_def_by_file) 6061 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6062 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6063 6064 for i in range(nb_coup_indep): 6065 # For the independent couplings, we compute the double and multiple 6066 # precision ones together 6067 data = self.coups_indep[nb_def_by_file * i: 6068 min(len(self.coups_indep), nb_def_by_file * (i+1))] 6069 self.create_couplings_part(i + 1, data, dp=True, mp=self.opt['mp']) 6070 6071 for i in range(nb_coup_dep): 6072 # For the dependent couplings, we compute the double and multiple 6073 # precision ones in separate subroutines. 6074 data = self.coups_dep[nb_def_by_file * i: 6075 min(len(self.coups_dep), nb_def_by_file * (i+1))] 6076 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6077 dp=True,mp=False) 6078 if self.opt['mp']: 6079 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6080 dp=False,mp=True)
6081 6082
6083 - def create_couplings_main(self, nb_def_by_file=25):
6084 """ create couplings.f """ 6085 6086 fsock = self.open('couplings.f', format='fortran') 6087 6088 fsock.writelines("""subroutine coup() 6089 6090 implicit none 6091 double precision PI, ZERO 6092 logical READLHA 6093 parameter (PI=3.141592653589793d0) 6094 parameter (ZERO=0d0) 6095 include \'model_functions.inc\'""") 6096 if self.opt['mp']: 6097 fsock.writelines("""%s MP__PI, MP__ZERO 6098 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 6099 parameter (MP__ZERO=0e0_16) 6100 include \'mp_input.inc\' 6101 include \'mp_coupl.inc\' 6102 """%self.mp_real_format) 6103 fsock.writelines("""include \'input.inc\' 6104 include \'coupl.inc\' 6105 READLHA = .true. 6106 include \'intparam_definition.inc\'""") 6107 if self.opt['mp']: 6108 fsock.writelines("""include \'mp_intparam_definition.inc\'\n""") 6109 6110 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6111 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6112 6113 fsock.writelines('\n'.join(\ 6114 ['call coup%s()' % (i + 1) for i in range(nb_coup_indep)])) 6115 6116 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 6117 6118 fsock.writelines('\n'.join(\ 6119 ['call coup%s()' % (nb_coup_indep + i + 1) \ 6120 for i in range(nb_coup_dep)])) 6121 if self.opt['mp']: 6122 fsock.writelines('\n'.join(\ 6123 ['call mp_coup%s()' % (nb_coup_indep + i + 1) \ 6124 for i in range(nb_coup_dep)])) 6125 fsock.writelines('''\n return \n end\n''') 6126 6127 fsock.writelines("""subroutine update_as_param() 6128 6129 implicit none 6130 double precision PI, ZERO 6131 logical READLHA 6132 parameter (PI=3.141592653589793d0) 6133 parameter (ZERO=0d0) 6134 include \'model_functions.inc\'""") 6135 fsock.writelines("""include \'input.inc\' 6136 include \'coupl.inc\' 6137 READLHA = .false.""") 6138 fsock.writelines(""" 6139 include \'intparam_definition.inc\'\n 6140 """) 6141 6142 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6143 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6144 6145 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 6146 6147 fsock.writelines('\n'.join(\ 6148 ['call coup%s()' % (nb_coup_indep + i + 1) \ 6149 for i in range(nb_coup_dep)])) 6150 fsock.writelines('''\n return \n end\n''') 6151 6152 fsock.writelines("""subroutine update_as_param2(mu_r2,as2) 6153 6154 implicit none 6155 double precision PI 6156 parameter (PI=3.141592653589793d0) 6157 double precision mu_r2, as2 6158 include \'model_functions.inc\'""") 6159 fsock.writelines("""include \'input.inc\' 6160 include \'coupl.inc\'""") 6161 fsock.writelines(""" 6162 if (mu_r2.gt.0d0) MU_R = mu_r2 6163 G = SQRT(4.0d0*PI*AS2) 6164 AS = as2 6165 6166 CALL UPDATE_AS_PARAM() 6167 """) 6168 fsock.writelines('''\n return \n end\n''') 6169 6170 if self.opt['mp']: 6171 fsock.writelines("""subroutine mp_update_as_param() 6172 6173 implicit none 6174 logical READLHA 6175 include \'model_functions.inc\'""") 6176 fsock.writelines("""%s MP__PI, MP__ZERO 6177 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 6178 parameter (MP__ZERO=0e0_16) 6179 include \'mp_input.inc\' 6180 include \'mp_coupl.inc\' 6181 """%self.mp_real_format) 6182 fsock.writelines("""include \'input.inc\' 6183 include \'coupl.inc\' 6184 include \'actualize_mp_ext_params.inc\' 6185 READLHA = .false. 
6186 include \'mp_intparam_definition.inc\'\n 6187 """) 6188 6189 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6190 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6191 6192 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 6193 6194 fsock.writelines('\n'.join(\ 6195 ['call mp_coup%s()' % (nb_coup_indep + i + 1) \ 6196 for i in range(nb_coup_dep)])) 6197 fsock.writelines('''\n return \n end\n''')
6198
6199 - def create_couplings_part(self, nb_file, data, dp=True, mp=False):
6200 """ create couplings[nb_file].f containing information coming from data. 6201 Outputs the computation of the double precision and/or the multiple 6202 precision couplings depending on the parameters dp and mp. 6203 If mp is True and dp is False, then the prefix 'MP_' is appended to the 6204 filename and subroutine name. 6205 """ 6206 6207 fsock = self.open('%scouplings%s.f' %('mp_' if mp and not dp else '', 6208 nb_file), format='fortran') 6209 fsock.writelines("""subroutine %scoup%s() 6210 6211 implicit none 6212 include \'model_functions.inc\'"""%('mp_' if mp and not dp else '',nb_file)) 6213 if dp: 6214 fsock.writelines(""" 6215 double precision PI, ZERO 6216 parameter (PI=3.141592653589793d0) 6217 parameter (ZERO=0d0) 6218 include 'input.inc' 6219 include 'coupl.inc'""") 6220 if mp: 6221 fsock.writelines("""%s MP__PI, MP__ZERO 6222 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 6223 parameter (MP__ZERO=0e0_16) 6224 include \'mp_input.inc\' 6225 include \'mp_coupl.inc\' 6226 """%self.mp_real_format) 6227 6228 for coupling in data: 6229 if dp: 6230 fsock.writelines('%s = %s' % (coupling.name, 6231 self.p_to_f.parse(coupling.expr))) 6232 if mp: 6233 fsock.writelines('%s%s = %s' % (self.mp_prefix,coupling.name, 6234 self.mp_p_to_f.parse(coupling.expr))) 6235 fsock.writelines('end')
6236
6237 - def create_model_functions_inc(self):
6238 """ Create model_functions.inc which contains the various declarations 6239 of auxiliary functions which might be used in the couplings expressions 6240 """ 6241 6242 additional_fct = [] 6243 # check for functions define in the UFO model 6244 ufo_fct = self.model.get('functions') 6245 if ufo_fct: 6246 for fct in ufo_fct: 6247 # already handle by default 6248 if fct.name not in ["complexconjugate", "re", "im", "sec", 6249 "csc", "asec", "acsc", "theta_function", "cond", 6250 "condif", "reglogp", "reglogm", "reglog", "recms", "arg", "cot", 6251 "grreglog","regsqrt"]: 6252 additional_fct.append(fct.name) 6253 6254 6255 fsock = self.open('model_functions.inc', format='fortran') 6256 fsock.writelines("""double complex cond 6257 double complex condif 6258 double complex reglog 6259 double complex reglogp 6260 double complex reglogm 6261 double complex recms 6262 double complex arg 6263 double complex grreglog 6264 double complex regsqrt 6265 %s 6266 """ % "\n".join([" double complex %s" % i for i in additional_fct])) 6267 6268 6269 if self.opt['mp']: 6270 fsock.writelines("""%(complex_mp_format)s mp_cond 6271 %(complex_mp_format)s mp_condif 6272 %(complex_mp_format)s mp_reglog 6273 %(complex_mp_format)s mp_reglogp 6274 %(complex_mp_format)s mp_reglogm 6275 %(complex_mp_format)s mp_recms 6276 %(complex_mp_format)s mp_arg 6277 %(complex_mp_format)s mp_grreglog 6278 %(complex_mp_format)s mp_regsqrt 6279 %(additional)s 6280 """ %\ 6281 {"additional": "\n".join([" %s mp_%s" % (self.mp_complex_format, i) for i in additional_fct]), 6282 'complex_mp_format':self.mp_complex_format 6283 })
6284
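A minimal sketch of the filter above, with hypothetical UFO function names (not part of the module):

# Illustrative sketch only; 'myFF' stands in for a user-defined UFO function.
handled = set(["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc",
               "theta_function", "cond", "condif", "reglogp", "reglogm",
               "reglog", "recms", "arg", "cot", "grreglog", "regsqrt"])
ufo_fct_names = ['myFF', 'reglog', 'cond']
additional_fct = [name for name in ufo_fct_names if name not in handled]
print('\n'.join('          double complex %s' % name for name in additional_fct))
#           double complex myFF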
6285 - def create_model_functions_def(self):
6286 """ Create model_functions.f which contains the various definitions 6287 of auxiliary functions which might be used in the couplings expressions 6288 Add the functions.f functions for formfactors support 6289 """ 6290 6291 fsock = self.open('model_functions.f', format='fortran') 6292 fsock.writelines("""double complex function cond(condition,truecase,falsecase) 6293 implicit none 6294 double complex condition,truecase,falsecase 6295 if(condition.eq.(0.0d0,0.0d0)) then 6296 cond=truecase 6297 else 6298 cond=falsecase 6299 endif 6300 end 6301 6302 double complex function condif(condition,truecase,falsecase) 6303 implicit none 6304 logical condition 6305 double complex truecase,falsecase 6306 if(condition) then 6307 condif=truecase 6308 else 6309 condif=falsecase 6310 endif 6311 end 6312 6313 double complex function recms(condition,expr) 6314 implicit none 6315 logical condition 6316 double complex expr 6317 if(condition)then 6318 recms=expr 6319 else 6320 recms=dcmplx(dble(expr)) 6321 endif 6322 end 6323 6324 double complex function reglog(arg) 6325 implicit none 6326 double complex TWOPII 6327 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6328 double complex arg 6329 if(arg.eq.(0.0d0,0.0d0)) then 6330 reglog=(0.0d0,0.0d0) 6331 else 6332 reglog=log(arg) 6333 endif 6334 end 6335 6336 double complex function reglogp(arg) 6337 implicit none 6338 double complex TWOPII 6339 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6340 double complex arg 6341 if(arg.eq.(0.0d0,0.0d0))then 6342 reglogp=(0.0d0,0.0d0) 6343 else 6344 if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then 6345 reglogp=log(arg) + TWOPII 6346 else 6347 reglogp=log(arg) 6348 endif 6349 endif 6350 end 6351 6352 double complex function reglogm(arg) 6353 implicit none 6354 double complex TWOPII 6355 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6356 double complex arg 6357 if(arg.eq.(0.0d0,0.0d0))then 6358 reglogm=(0.0d0,0.0d0) 6359 else 6360 if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then 6361 reglogm=log(arg) - TWOPII 6362 else 6363 reglogm=log(arg) 6364 endif 6365 endif 6366 end 6367 6368 double complex function regsqrt(arg_in) 6369 implicit none 6370 double complex arg_in 6371 double complex arg 6372 arg=arg_in 6373 if(dabs(dimag(arg)).eq.0.0d0)then 6374 arg=dcmplx(dble(arg),0.0d0) 6375 endif 6376 if(dabs(dble(arg)).eq.0.0d0)then 6377 arg=dcmplx(0.0d0,dimag(arg)) 6378 endif 6379 regsqrt=sqrt(arg) 6380 end 6381 6382 double complex function grreglog(logsw,expr1_in,expr2_in) 6383 implicit none 6384 double complex TWOPII 6385 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6386 double complex expr1_in,expr2_in 6387 double complex expr1,expr2 6388 double precision logsw 6389 double precision imagexpr 6390 logical firstsheet 6391 expr1=expr1_in 6392 expr2=expr2_in 6393 if(dabs(dimag(expr1)).eq.0.0d0)then 6394 expr1=dcmplx(dble(expr1),0.0d0) 6395 endif 6396 if(dabs(dble(expr1)).eq.0.0d0)then 6397 expr1=dcmplx(0.0d0,dimag(expr1)) 6398 endif 6399 if(dabs(dimag(expr2)).eq.0.0d0)then 6400 expr2=dcmplx(dble(expr2),0.0d0) 6401 endif 6402 if(dabs(dble(expr2)).eq.0.0d0)then 6403 expr2=dcmplx(0.0d0,dimag(expr2)) 6404 endif 6405 if(expr1.eq.(0.0d0,0.0d0))then 6406 grreglog=(0.0d0,0.0d0) 6407 else 6408 imagexpr=dimag(expr1)*dimag(expr2) 6409 firstsheet=imagexpr.ge.0.0d0 6410 firstsheet=firstsheet.or.dble(expr1).ge.0.0d0 6411 firstsheet=firstsheet.or.dble(expr2).ge.0.0d0 6412 if(firstsheet)then 6413 grreglog=log(expr1) 6414 else 6415 if(dimag(expr1).gt.0.0d0)then 6416 grreglog=log(expr1) - 
logsw*TWOPII 6417 else 6418 grreglog=log(expr1) + logsw*TWOPII 6419 endif 6420 endif 6421 endif 6422 end 6423 6424 double complex function arg(comnum) 6425 implicit none 6426 double complex comnum 6427 double complex iim 6428 iim = (0.0d0,1.0d0) 6429 if(comnum.eq.(0.0d0,0.0d0)) then 6430 arg=(0.0d0,0.0d0) 6431 else 6432 arg=log(comnum/abs(comnum))/iim 6433 endif 6434 end""") 6435 if self.opt['mp']: 6436 fsock.writelines(""" 6437 6438 %(complex_mp_format)s function mp_cond(condition,truecase,falsecase) 6439 implicit none 6440 %(complex_mp_format)s condition,truecase,falsecase 6441 if(condition.eq.(0.0e0_16,0.0e0_16)) then 6442 mp_cond=truecase 6443 else 6444 mp_cond=falsecase 6445 endif 6446 end 6447 6448 %(complex_mp_format)s function mp_condif(condition,truecase,falsecase) 6449 implicit none 6450 logical condition 6451 %(complex_mp_format)s truecase,falsecase 6452 if(condition) then 6453 mp_condif=truecase 6454 else 6455 mp_condif=falsecase 6456 endif 6457 end 6458 6459 %(complex_mp_format)s function mp_recms(condition,expr) 6460 implicit none 6461 logical condition 6462 %(complex_mp_format)s expr 6463 if(condition)then 6464 mp_recms=expr 6465 else 6466 mp_recms=cmplx(real(expr),kind=16) 6467 endif 6468 end 6469 6470 %(complex_mp_format)s function mp_reglog(arg) 6471 implicit none 6472 %(complex_mp_format)s TWOPII 6473 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6474 %(complex_mp_format)s arg 6475 if(arg.eq.(0.0e0_16,0.0e0_16)) then 6476 mp_reglog=(0.0e0_16,0.0e0_16) 6477 else 6478 mp_reglog=log(arg) 6479 endif 6480 end 6481 6482 %(complex_mp_format)s function mp_reglogp(arg) 6483 implicit none 6484 %(complex_mp_format)s TWOPII 6485 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6486 %(complex_mp_format)s arg 6487 if(arg.eq.(0.0e0_16,0.0e0_16))then 6488 mp_reglogp=(0.0e0_16,0.0e0_16) 6489 else 6490 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then 6491 mp_reglogp=log(arg) + TWOPII 6492 else 6493 mp_reglogp=log(arg) 6494 endif 6495 endif 6496 end 6497 6498 %(complex_mp_format)s function mp_reglogm(arg) 6499 implicit none 6500 %(complex_mp_format)s TWOPII 6501 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6502 %(complex_mp_format)s arg 6503 if(arg.eq.(0.0e0_16,0.0e0_16))then 6504 mp_reglogm=(0.0e0_16,0.0e0_16) 6505 else 6506 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then 6507 mp_reglogm=log(arg) - TWOPII 6508 else 6509 mp_reglogm=log(arg) 6510 endif 6511 endif 6512 end 6513 6514 %(complex_mp_format)s function mp_regsqrt(arg_in) 6515 implicit none 6516 %(complex_mp_format)s arg_in 6517 %(complex_mp_format)s arg 6518 arg=arg_in 6519 if(abs(imagpart(arg)).eq.0.0e0_16)then 6520 arg=cmplx(real(arg,kind=16),0.0e0_16) 6521 endif 6522 if(abs(real(arg,kind=16)).eq.0.0e0_16)then 6523 arg=cmplx(0.0e0_16,imagpart(arg)) 6524 endif 6525 mp_regsqrt=sqrt(arg) 6526 end 6527 6528 6529 %(complex_mp_format)s function mp_grreglog(logsw,expr1_in,expr2_in) 6530 implicit none 6531 %(complex_mp_format)s TWOPII 6532 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6533 %(complex_mp_format)s expr1_in,expr2_in 6534 %(complex_mp_format)s expr1,expr2 6535 %(real_mp_format)s logsw 6536 %(real_mp_format)s imagexpr 6537 logical firstsheet 6538 expr1=expr1_in 6539 expr2=expr2_in 6540 if(abs(imagpart(expr1)).eq.0.0e0_16)then 6541 expr1=cmplx(real(expr1,kind=16),0.0e0_16) 6542 endif 6543 
if(abs(real(expr1,kind=16)).eq.0.0e0_16)then 6544 expr1=cmplx(0.0e0_16,imagpart(expr1)) 6545 endif 6546 if(abs(imagpart(expr2)).eq.0.0e0_16)then 6547 expr2=cmplx(real(expr2,kind=16),0.0e0_16) 6548 endif 6549 if(abs(real(expr2,kind=16)).eq.0.0e0_16)then 6550 expr2=cmplx(0.0e0_16,imagpart(expr2)) 6551 endif 6552 if(expr1.eq.(0.0e0_16,0.0e0_16))then 6553 mp_grreglog=(0.0e0_16,0.0e0_16) 6554 else 6555 imagexpr=imagpart(expr1)*imagpart(expr2) 6556 firstsheet=imagexpr.ge.0.0e0_16 6557 firstsheet=firstsheet.or.real(expr1,kind=16).ge.0.0e0_16 6558 firstsheet=firstsheet.or.real(expr2,kind=16).ge.0.0e0_16 6559 if(firstsheet)then 6560 mp_grreglog=log(expr1) 6561 else 6562 if(imagpart(expr1).gt.0.0e0_16)then 6563 mp_grreglog=log(expr1) - logsw*TWOPII 6564 else 6565 mp_grreglog=log(expr1) + logsw*TWOPII 6566 endif 6567 endif 6568 endif 6569 end 6570 6571 %(complex_mp_format)s function mp_arg(comnum) 6572 implicit none 6573 %(complex_mp_format)s comnum 6574 %(complex_mp_format)s imm 6575 imm = (0.0e0_16,1.0e0_16) 6576 if(comnum.eq.(0.0e0_16,0.0e0_16)) then 6577 mp_arg=(0.0e0_16,0.0e0_16) 6578 else 6579 mp_arg=log(comnum/abs(comnum))/imm 6580 endif 6581 end"""%{'complex_mp_format':self.mp_complex_format,'real_mp_format':self.mp_real_format}) 6582 6583 6584 #check for the file functions.f 6585 model_path = self.model.get('modelpath') 6586 if os.path.exists(pjoin(model_path,'Fortran','functions.f')): 6587 fsock.write_comment_line(' USER DEFINE FUNCTIONS ') 6588 input = pjoin(model_path,'Fortran','functions.f') 6589 file.writelines(fsock, open(input).read()) 6590 fsock.write_comment_line(' END USER DEFINE FUNCTIONS ') 6591 6592 # check for functions define in the UFO model 6593 ufo_fct = self.model.get('functions') 6594 if ufo_fct: 6595 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ') 6596 for fct in ufo_fct: 6597 # already handle by default 6598 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif", 6599 "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms","arg", 6600 "grreglog","regsqrt"]: 6601 ufo_fct_template = """ 6602 double complex function %(name)s(%(args)s) 6603 implicit none 6604 double complex %(args)s 6605 %(definitions)s 6606 %(name)s = %(fct)s 6607 6608 return 6609 end 6610 """ 6611 str_fct = self.p_to_f.parse(fct.expr) 6612 if not self.p_to_f.to_define: 6613 definitions = [] 6614 else: 6615 definitions=[] 6616 for d in self.p_to_f.to_define: 6617 if d == 'pi': 6618 definitions.append(' double precision pi') 6619 definitions.append(' data pi /3.1415926535897932d0/') 6620 else: 6621 definitions.append(' double complex %s' % d) 6622 6623 text = ufo_fct_template % { 6624 'name': fct.name, 6625 'args': ", ".join(fct.arguments), 6626 'fct': str_fct, 6627 'definitions': '\n'.join(definitions) 6628 } 6629 6630 fsock.writelines(text) 6631 if self.opt['mp']: 6632 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP') 6633 for fct in ufo_fct: 6634 # already handle by default 6635 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc","condif", 6636 "theta_function", "cond", "reglog", "reglogp","reglogm", "recms","arg", 6637 "grreglog","regsqrt"]: 6638 ufo_fct_template = """ 6639 %(complex_mp_format)s function mp_%(name)s(mp__%(args)s) 6640 implicit none 6641 %(complex_mp_format)s mp__%(args)s 6642 %(definitions)s 6643 mp_%(name)s = %(fct)s 6644 6645 return 6646 end 6647 """ 6648 str_fct = self.mp_p_to_f.parse(fct.expr) 6649 if not self.mp_p_to_f.to_define: 6650 definitions = [] 6651 else: 6652 definitions=[] 6653 
for d in self.mp_p_to_f.to_define: 6654 if d == 'pi': 6655 definitions.append(' %s mp__pi' % self.mp_real_format) 6656 definitions.append(' data mp__pi /3.141592653589793238462643383279502884197e+00_16/') 6657 else: 6658 definitions.append(' %s mp_%s' % (self.mp_complex_format,d)) 6659 text = ufo_fct_template % { 6660 'name': fct.name, 6661 'args': ", mp__".join(fct.arguments), 6662 'fct': str_fct, 6663 'definitions': '\n'.join(definitions), 6664 'complex_mp_format': self.mp_complex_format 6665 } 6666 fsock.writelines(text) 6667 6668 6669 6670 fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
6671 6672 6673
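To see how the UFO-function template above is filled, here is a minimal standalone sketch with a hypothetical function (in the module the Fortran expression comes from self.p_to_f.parse):

# Illustrative sketch only; 'myFF' and its expression are made up.
ufo_fct_template = """
          double complex function %(name)s(%(args)s)
          implicit none
          double complex %(args)s
          %(definitions)s
          %(name)s = %(fct)s

          return
          end
          """
print(ufo_fct_template % {'name': 'myFF',
                          'args': ', '.join(['x', 'y']),
                          'fct': 'x*log(y)',   # assumed to be already in Fortran syntax
                          'definitions': ''})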
6674 - def create_makeinc(self):
6675 """create makeinc.inc containing the file to compile """ 6676 6677 fsock = self.open('makeinc.inc', comment='#') 6678 text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o' 6679 text += ' model_functions.o ' 6680 6681 nb_coup_indep = 1 + len(self.coups_dep) // 25 6682 nb_coup_dep = 1 + len(self.coups_indep) // 25 6683 couplings_files=['couplings%s.o' % (i+1) \ 6684 for i in range(nb_coup_dep + nb_coup_indep) ] 6685 if self.opt['mp']: 6686 couplings_files+=['mp_couplings%s.o' % (i+1) for i in \ 6687 range(nb_coup_dep,nb_coup_dep + nb_coup_indep) ] 6688 text += ' '.join(couplings_files) 6689 fsock.writelines(text)
6690
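The hard-coded 25 above presumably has to stay in step with nb_def_by_file used when the coupling files are written. A minimal sketch of the resulting object list, for hypothetical counts (not part of the module):

# Illustrative sketch only: 3 independent and 1 dependent coupling files, mp enabled.
nb_coup_indep, nb_coup_dep, mp = 3, 1, True
couplings_files = ['couplings%s.o' % (i + 1) for i in range(nb_coup_indep + nb_coup_dep)]
if mp:
    couplings_files += ['mp_couplings%s.o' % (i + 1)
                        for i in range(nb_coup_indep, nb_coup_indep + nb_coup_dep)]
print(' '.join(couplings_files))
# couplings1.o couplings2.o couplings3.o couplings4.o mp_couplings4.o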
6691 - def create_param_write(self):
6692 """ create param_write """ 6693 6694 fsock = self.open('param_write.inc', format='fortran') 6695 6696 fsock.writelines("""write(*,*) ' External Params' 6697 write(*,*) ' ---------------------------------' 6698 write(*,*) ' '""") 6699 def format(name): 6700 return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
6701 6702 # Write the external parameter 6703 lines = [format(param.name) for param in self.params_ext] 6704 fsock.writelines('\n'.join(lines)) 6705 6706 fsock.writelines("""write(*,*) ' Internal Params' 6707 write(*,*) ' ---------------------------------' 6708 write(*,*) ' '""") 6709 lines = [format(data.name) for data in self.params_indep 6710 if data.name != 'ZERO' and self.check_needed_param(data.name)] 6711 fsock.writelines('\n'.join(lines)) 6712 fsock.writelines("""write(*,*) ' Internal Params evaluated point by point' 6713 write(*,*) ' ----------------------------------------' 6714 write(*,*) ' '""") 6715 lines = [format(data.name) for data in self.params_dep \ 6716 if self.check_needed_param(data.name)] 6717 6718 fsock.writelines('\n'.join(lines)) 6719 6720 6721
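A one-line sketch of the write statements generated above, for a hypothetical parameter name:

# Illustrative sketch only; 'MB' is a made-up parameter name.
def fmt(name):
    return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
print(fmt('MB'))
# write(*,*) 'MB = ', MB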
6722 - def create_ident_card(self):
6723 """ create the ident_card.dat """ 6724 6725 def format(parameter): 6726 """return the line for the ident_card corresponding to this parameter""" 6727 colum = [parameter.lhablock.lower()] + \ 6728 [str(value) for value in parameter.lhacode] + \ 6729 [parameter.name] 6730 if not parameter.name: 6731 return '' 6732 return ' '.join(colum)+'\n'
6733 6734 fsock = self.open('ident_card.dat') 6735 6736 external_param = [format(param) for param in self.params_ext] 6737 fsock.writelines('\n'.join(external_param)) 6738
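A sketch of a single ident_card line, for a hypothetical external parameter (lhablock 'mass', lhacode [5], name 'MB'):

# Illustrative sketch only.
colum = ['mass'] + [str(value) for value in [5]] + ['MB']
print(' '.join(colum))
# mass 5 MB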
6739 - def create_actualize_mp_ext_param_inc(self):
6740 """ create the actualize_mp_ext_params.inc code """ 6741 6742 # In principle one should actualize all external, but for now, it is 6743 # hardcoded that only AS and MU_R can by dynamically changed by the user 6744 # so that we only update those ones. 6745 # Of course, to be on the safe side, one could decide to update all 6746 # external parameters. 6747 update_params_list=[p for p in self.params_ext if p.name in 6748 self.PS_dependent_key] 6749 6750 res_strings = ["%(mp_prefix)s%(name)s=%(name)s"\ 6751 %{'mp_prefix':self.mp_prefix,'name':param.name}\ 6752 for param in update_params_list] 6753 # When read_lha is false, it is G which is taken in input and not AS, so 6754 # this is what should be reset here too. 6755 if 'aS' in [param.name for param in update_params_list]: 6756 res_strings.append("%(mp_prefix)sG=G"%{'mp_prefix':self.mp_prefix}) 6757 6758 fsock = self.open('actualize_mp_ext_params.inc', format='fortran') 6759 fsock.writelines('\n'.join(res_strings))
6760
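A minimal sketch of the lines generated above, assuming the usual 'MP__' prefix and that aS and MU_R are the phase-space-dependent external parameters:

# Illustrative sketch only; the prefix and parameter names are assumptions.
mp_prefix = 'MP__'
update_params = ['aS', 'MU_R']
res_strings = ['%s%s=%s' % (mp_prefix, name, name) for name in update_params]
if 'aS' in update_params:
    res_strings.append('%sG=G' % mp_prefix)
print('\n'.join(res_strings))
# MP__aS=aS
# MP__MU_R=MU_R
# MP__G=G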
6761 - def create_param_read(self):
6762 """create param_read""" 6763 6764 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 6765 or self.opt['loop_induced']: 6766 fsock = self.open('param_read.inc', format='fortran') 6767 fsock.writelines(' include \'../param_card.inc\'') 6768 return 6769 6770 def format_line(parameter): 6771 """return the line for the ident_card corresponding to this 6772 parameter""" 6773 template = \ 6774 """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \ 6775 % {'name': parameter.name, 6776 'value': self.p_to_f.parse(str(parameter.value.real))} 6777 if self.opt['mp']: 6778 template = template+ \ 6779 ("\n call MP_LHA_get_real(npara,param,value,'%(name)s',"+ 6780 "%(mp_prefix)s%(name)s,%(value)s)") \ 6781 % {'name': parameter.name,'mp_prefix': self.mp_prefix, 6782 'value': self.mp_p_to_f.parse(str(parameter.value.real))} 6783 return template 6784 6785 fsock = self.open('param_read.inc', format='fortran') 6786 res_strings = [format_line(param) \ 6787 for param in self.params_ext] 6788 6789 # Correct width sign for Majorana particles (where the width 6790 # and mass need to have the same sign) 6791 for particle in self.model.get('particles'): 6792 if particle.is_fermion() and particle.get('self_antipart') and \ 6793 particle.get('width').lower() != 'zero': 6794 6795 res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \ 6796 {'width': particle.get('width'), 'mass': particle.get('mass')}) 6797 if self.opt['mp']: 6798 res_strings.append(\ 6799 ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,'+\ 6800 '%(mp_pref)s%(mass)s)')%{'width': particle.get('width'),\ 6801 'mass': particle.get('mass'),'mp_pref':self.mp_prefix}) 6802 6803 fsock.writelines('\n'.join(res_strings)) 6804 6805 6806 @staticmethod
6807 - def create_param_card_static(model, output_path, rule_card_path=False, 6808 mssm_convert=True):
6809 """ create the param_card.dat for a givent model --static method-- """ 6810 #1. Check if a default param_card is present: 6811 done = False 6812 if hasattr(model, 'restrict_card') and isinstance(model.restrict_card, str): 6813 restrict_name = os.path.basename(model.restrict_card)[9:-4] 6814 model_path = model.get('modelpath') 6815 if os.path.exists(pjoin(model_path,'paramcard_%s.dat' % restrict_name)): 6816 done = True 6817 files.cp(pjoin(model_path,'paramcard_%s.dat' % restrict_name), 6818 output_path) 6819 if not done: 6820 param_writer.ParamCardWriter(model, output_path) 6821 6822 if rule_card_path: 6823 if hasattr(model, 'rule_card'): 6824 model.rule_card.write_file(rule_card_path) 6825 6826 if mssm_convert: 6827 model_name = model.get('name') 6828 # IF MSSM convert the card to SLAH1 6829 if model_name == 'mssm' or model_name.startswith('mssm-'): 6830 import models.check_param_card as translator 6831 # Check the format of the param_card for Pythia and make it correct 6832 if rule_card_path: 6833 translator.make_valid_param_card(output_path, rule_card_path) 6834 translator.convert_to_slha1(output_path)
6835
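A short sketch of how the restricted default card name is derived above, for a hypothetical restriction file:

# Illustrative sketch only; the path is made up.
import os
restrict_card = '/path/to/model/restrict_no_b_mass.dat'
restrict_name = os.path.basename(restrict_card)[9:-4]   # strip 'restrict_' and '.dat'
print('paramcard_%s.dat' % restrict_name)
# paramcard_no_b_mass.dat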
6836 - def create_param_card(self):
6837 """ create the param_card.dat """ 6838 6839 rule_card = pjoin(self.dir_path, 'param_card_rule.dat') 6840 if not hasattr(self.model, 'rule_card'): 6841 rule_card=False 6842 self.create_param_card_static(self.model, 6843 output_path=pjoin(self.dir_path, 'param_card.dat'), 6844 rule_card_path=rule_card, 6845 mssm_convert=True)
6846
6847 -def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True, cmd_options={}):
6848 """ Determine which Export_v4 class is required. cmd is the command 6849 interface containing all potential usefull information. 6850 The output_type argument specifies from which context the output 6851 is called. It is 'madloop' for MadLoop5, 'amcatnlo' for FKS5 output 6852 and 'default' for tree-level outputs.""" 6853 6854 opt = dict(cmd.options) 6855 opt['output_options'] = cmd_options 6856 6857 # ========================================================================== 6858 # First check whether Ninja must be installed. 6859 # Ninja would only be required if: 6860 # a) Loop optimized output is selected 6861 # b) the process gathered from the amplitude generated use loops 6862 6863 if len(cmd._curr_amps)>0: 6864 try: 6865 curr_proc = cmd._curr_amps[0].get('process') 6866 except base_objects.PhysicsObject.PhysicsObjectError: 6867 curr_proc = None 6868 elif hasattr(cmd,'_fks_multi_proc') and \ 6869 len(cmd._fks_multi_proc.get('process_definitions'))>0: 6870 curr_proc = cmd._fks_multi_proc.get('process_definitions')[0] 6871 else: 6872 curr_proc = None 6873 6874 requires_reduction_tool = opt['loop_optimized_output'] and \ 6875 (not curr_proc is None) and \ 6876 (curr_proc.get('perturbation_couplings') != [] and \ 6877 not curr_proc.get('NLO_mode') in [None,'real','tree','LO','LOonly']) 6878 6879 # An installation is required then, but only if the specified path is the 6880 # default local one and that the Ninja library appears missing. 6881 if requires_reduction_tool: 6882 cmd.install_reduction_library() 6883 6884 # ========================================================================== 6885 # First treat the MadLoop5 standalone case 6886 MadLoop_SA_options = {'clean': not noclean, 6887 'complex_mass':cmd.options['complex_mass_scheme'], 6888 'export_format':'madloop', 6889 'mp':True, 6890 'loop_dir': os.path.join(cmd._mgme_dir,'Template','loop_material'), 6891 'cuttools_dir': cmd._cuttools_dir, 6892 'iregi_dir':cmd._iregi_dir, 6893 'pjfry_dir':cmd.options['pjfry'], 6894 'golem_dir':cmd.options['golem'], 6895 'samurai_dir':cmd.options['samurai'], 6896 'ninja_dir':cmd.options['ninja'], 6897 'collier_dir':cmd.options['collier'], 6898 'fortran_compiler':cmd.options['fortran_compiler'], 6899 'f2py_compiler':cmd.options['f2py_compiler'], 6900 'output_dependencies':cmd.options['output_dependencies'], 6901 'SubProc_prefix':'P', 6902 'compute_color_flows':cmd.options['loop_color_flows'], 6903 'mode': 'reweight' if cmd._export_format == "standalone_rw" else '', 6904 'cluster_local_path': cmd.options['cluster_local_path'], 6905 'output_options': cmd_options 6906 } 6907 6908 if output_type.startswith('madloop'): 6909 import madgraph.loop.loop_exporters as loop_exporters 6910 if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')): 6911 ExporterClass=None 6912 if not cmd.options['loop_optimized_output']: 6913 ExporterClass=loop_exporters.LoopProcessExporterFortranSA 6914 else: 6915 if output_type == "madloop": 6916 ExporterClass=loop_exporters.LoopProcessOptimizedExporterFortranSA 6917 MadLoop_SA_options['export_format'] = 'madloop_optimized' 6918 elif output_type == "madloop_matchbox": 6919 ExporterClass=loop_exporters.LoopProcessExporterFortranMatchBox 6920 MadLoop_SA_options['export_format'] = 'madloop_matchbox' 6921 else: 6922 raise Exception, "output_type not recognize %s" % output_type 6923 return ExporterClass(cmd._export_dir, MadLoop_SA_options) 6924 else: 6925 raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory'+\ 6926 ' in %s'%str(cmd._mgme_dir)) 
6927 6928 # Then treat the aMC@NLO output 6929 elif output_type=='amcatnlo': 6930 import madgraph.iolibs.export_fks as export_fks 6931 ExporterClass=None 6932 amcatnlo_options = dict(opt) 6933 amcatnlo_options.update(MadLoop_SA_options) 6934 amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0 6935 if not cmd.options['loop_optimized_output']: 6936 logger.info("Writing out the aMC@NLO code") 6937 ExporterClass = export_fks.ProcessExporterFortranFKS 6938 amcatnlo_options['export_format']='FKS5_default' 6939 else: 6940 logger.info("Writing out the aMC@NLO code, using optimized Loops") 6941 ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS 6942 amcatnlo_options['export_format']='FKS5_optimized' 6943 return ExporterClass(cmd._export_dir, amcatnlo_options) 6944 6945 6946 # Then the default tree-level output 6947 elif output_type=='default': 6948 assert group_subprocesses in [True, False] 6949 6950 opt = dict(opt) 6951 opt.update({'clean': not noclean, 6952 'complex_mass': cmd.options['complex_mass_scheme'], 6953 'export_format':cmd._export_format, 6954 'mp': False, 6955 'sa_symmetry':False, 6956 'model': cmd._curr_model.get('name'), 6957 'v5_model': False if cmd._model_v4_path else True }) 6958 6959 format = cmd._export_format #shortcut 6960 6961 if format in ['standalone_msP', 'standalone_msF', 'standalone_rw']: 6962 opt['sa_symmetry'] = True 6963 elif format == 'plugin': 6964 opt['sa_symmetry'] = cmd._export_plugin.sa_symmetry 6965 6966 loop_induced_opt = dict(opt) 6967 loop_induced_opt.update(MadLoop_SA_options) 6968 loop_induced_opt['export_format'] = 'madloop_optimized' 6969 loop_induced_opt['SubProc_prefix'] = 'PV' 6970 # For loop_induced output with MadEvent, we must have access to the 6971 # color flows. 6972 loop_induced_opt['compute_color_flows'] = True 6973 for key in opt: 6974 if key not in loop_induced_opt: 6975 loop_induced_opt[key] = opt[key] 6976 6977 # Madevent output supports MadAnalysis5 6978 if format in ['madevent']: 6979 opt['madanalysis5'] = cmd.options['madanalysis5_path'] 6980 6981 if format == 'matrix' or format.startswith('standalone'): 6982 return ProcessExporterFortranSA(cmd._export_dir, opt, format=format) 6983 6984 elif format in ['madevent'] and group_subprocesses: 6985 if isinstance(cmd._curr_amps[0], 6986 loop_diagram_generation.LoopAmplitude): 6987 import madgraph.loop.loop_exporters as loop_exporters 6988 return loop_exporters.LoopInducedExporterMEGroup( 6989 cmd._export_dir,loop_induced_opt) 6990 else: 6991 return ProcessExporterFortranMEGroup(cmd._export_dir,opt) 6992 elif format in ['madevent']: 6993 if isinstance(cmd._curr_amps[0], 6994 loop_diagram_generation.LoopAmplitude): 6995 import madgraph.loop.loop_exporters as loop_exporters 6996 return loop_exporters.LoopInducedExporterMENoGroup( 6997 cmd._export_dir,loop_induced_opt) 6998 else: 6999 return ProcessExporterFortranME(cmd._export_dir,opt) 7000 elif format in ['matchbox']: 7001 return ProcessExporterFortranMatchBox(cmd._export_dir,opt) 7002 elif cmd._export_format in ['madweight'] and group_subprocesses: 7003 7004 return ProcessExporterFortranMWGroup(cmd._export_dir, opt) 7005 elif cmd._export_format in ['madweight']: 7006 return ProcessExporterFortranMW(cmd._export_dir, opt) 7007 elif format == 'plugin': 7008 if isinstance(cmd._curr_amps[0], 7009 loop_diagram_generation.LoopAmplitude): 7010 return cmd._export_plugin(cmd._export_dir, loop_induced_opt) 7011 else: 7012 return cmd._export_plugin(cmd._export_dir, opt) 7013 7014 else: 7015 raise Exception, 'Wrong 
export_v4 format' 7016 else: 7017 raise MadGraph5Error, 'Output type %s not recognized in ExportV4Factory.' % output_type
7018
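A stripped-down, standalone picture of the dispatch performed by ExportV4Factory; it ignores the loop-induced and plugin branches and only returns class names as strings, whereas the real factory inspects the cmd object and instantiates the exporter:

# Illustrative sketch only, not part of the module.
def which_exporter(output_type, export_format, group_subprocesses=True):
    if output_type.startswith('madloop'):
        return 'loop_exporters.LoopProcessExporterFortranSA (or the optimized/matchbox variant)'
    if output_type == 'amcatnlo':
        return 'export_fks.ProcessExporterFortranFKS (or the optimized variant)'
    if export_format == 'matrix' or export_format.startswith('standalone'):
        return 'ProcessExporterFortranSA'
    if export_format == 'madevent':
        return 'ProcessExporterFortranMEGroup' if group_subprocesses else 'ProcessExporterFortranME'
    if export_format == 'madweight':
        return 'ProcessExporterFortranMWGroup' if group_subprocesses else 'ProcessExporterFortranMW'
    if export_format == 'matchbox':
        return 'ProcessExporterFortranMatchBox'
    raise ValueError('Wrong export_v4 format')

print(which_exporter('default', 'madevent'))
# ProcessExporterFortranMEGroup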
7019 7020 7021 7022 #=============================================================================== 7023 # ProcessExporterFortranMWGroup 7024 #=============================================================================== 7025 -class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
7026 """Class to take care of exporting a set of matrix elements to 7027 MadEvent subprocess group format.""" 7028 7029 matrix_file = "matrix_madweight_group_v4.inc" 7030 grouped_mode = 'madweight' 7031 #=========================================================================== 7032 # generate_subprocess_directory 7033 #===========================================================================
7034 - def generate_subprocess_directory(self, subproc_group, 7035 fortran_model, 7036 group_number):
7037 """Generate the Pn directory for a subprocess group in MadEvent, 7038 including the necessary matrix_N.f files, configs.inc and various 7039 other helper files.""" 7040 7041 if not isinstance(subproc_group, group_subprocs.SubProcessGroup): 7042 raise base_objects.PhysicsObject.PhysicsObjectError,\ 7043 "subproc_group object not SubProcessGroup" 7044 7045 if not self.model: 7046 self.model = subproc_group.get('matrix_elements')[0].\ 7047 get('processes')[0].get('model') 7048 7049 pathdir = os.path.join(self.dir_path, 'SubProcesses') 7050 7051 # Create the directory PN in the specified path 7052 subprocdir = "P%d_%s" % (subproc_group.get('number'), 7053 subproc_group.get('name')) 7054 try: 7055 os.mkdir(pjoin(pathdir, subprocdir)) 7056 except os.error as error: 7057 logger.warning(error.strerror + " " + subprocdir) 7058 7059 7060 logger.info('Creating files in directory %s' % subprocdir) 7061 Ppath = pjoin(pathdir, subprocdir) 7062 7063 # Create the matrix.f files, auto_dsig.f files and all inc files 7064 # for all subprocesses in the group 7065 7066 maxamps = 0 7067 maxflows = 0 7068 tot_calls = 0 7069 7070 matrix_elements = subproc_group.get('matrix_elements') 7071 7072 for ime, matrix_element in \ 7073 enumerate(matrix_elements): 7074 filename = pjoin(Ppath, 'matrix%d.f' % (ime+1)) 7075 calls, ncolor = \ 7076 self.write_matrix_element_v4(writers.FortranWriter(filename), 7077 matrix_element, 7078 fortran_model, 7079 str(ime+1), 7080 subproc_group.get('diagram_maps')[\ 7081 ime]) 7082 7083 filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime+1)) 7084 self.write_auto_dsig_file(writers.FortranWriter(filename), 7085 matrix_element, 7086 str(ime+1)) 7087 7088 # Keep track of needed quantities 7089 tot_calls += int(calls) 7090 maxflows = max(maxflows, ncolor) 7091 maxamps = max(maxamps, len(matrix_element.get('diagrams'))) 7092 7093 # Draw diagrams 7094 filename = pjoin(Ppath, "matrix%d.ps" % (ime+1)) 7095 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 7096 get('diagrams'), 7097 filename, 7098 model = \ 7099 matrix_element.get('processes')[0].\ 7100 get('model'), 7101 amplitude=True) 7102 logger.info("Generating Feynman diagrams for " + \ 7103 matrix_element.get('processes')[0].nice_string()) 7104 plot.draw() 7105 7106 # Extract number of external particles 7107 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 7108 7109 # Generate a list of diagrams corresponding to each configuration 7110 # [[d1, d2, ...,dn],...] 
where 1,2,...,n is the subprocess number 7111 # If a subprocess has no diagrams for this config, the number is 0 7112 7113 subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs') 7114 7115 filename = pjoin(Ppath, 'auto_dsig.f') 7116 self.write_super_auto_dsig_file(writers.FortranWriter(filename), 7117 subproc_group) 7118 7119 filename = pjoin(Ppath,'configs.inc') 7120 nconfigs, s_and_t_channels = self.write_configs_file(\ 7121 writers.FortranWriter(filename), 7122 subproc_group, 7123 subproc_diagrams_for_config) 7124 7125 filename = pjoin(Ppath, 'leshouche.inc') 7126 self.write_leshouche_file(writers.FortranWriter(filename), 7127 subproc_group) 7128 7129 filename = pjoin(Ppath, 'phasespace.inc') 7130 self.write_phasespace_file(writers.FortranWriter(filename), 7131 nconfigs) 7132 7133 7134 filename = pjoin(Ppath, 'maxamps.inc') 7135 self.write_maxamps_file(writers.FortranWriter(filename), 7136 maxamps, 7137 maxflows, 7138 max([len(me.get('processes')) for me in \ 7139 matrix_elements]), 7140 len(matrix_elements)) 7141 7142 filename = pjoin(Ppath, 'mirrorprocs.inc') 7143 self.write_mirrorprocs(writers.FortranWriter(filename), 7144 subproc_group) 7145 7146 filename = pjoin(Ppath, 'nexternal.inc') 7147 self.write_nexternal_file(writers.FortranWriter(filename), 7148 nexternal, ninitial) 7149 7150 filename = pjoin(Ppath, 'pmass.inc') 7151 self.write_pmass_file(writers.FortranWriter(filename), 7152 matrix_element) 7153 7154 filename = pjoin(Ppath, 'props.inc') 7155 self.write_props_file(writers.FortranWriter(filename), 7156 matrix_element, 7157 s_and_t_channels) 7158 7159 # filename = pjoin(Ppath, 'processes.dat') 7160 # files.write_to_file(filename, 7161 # self.write_processes_file, 7162 # subproc_group) 7163 7164 # Generate jpgs -> pass in make_html 7165 #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl')) 7166 7167 linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f'] 7168 7169 for file in linkfiles: 7170 ln('../%s' % file, cwd=Ppath) 7171 7172 ln('nexternal.inc', '../../Source', cwd=Ppath, log=False) 7173 ln('leshouche.inc', '../../Source', cwd=Ppath, log=False) 7174 ln('maxamps.inc', '../../Source', cwd=Ppath, log=False) 7175 ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath) 7176 ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath) 7177 ln('phasespace.inc', '../', log=True, cwd=Ppath) 7178 if not tot_calls: 7179 tot_calls = 0 7180 return tot_calls
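A small sketch of the directory and per-matrix-element file names produced above, for a hypothetical subprocess group:

# Illustrative sketch only; the group number/name and matrix-element count are made up.
group_number, group_name, n_me = 3, 'qq_ttx', 2
print("P%d_%s" % (group_number, group_name))
print([('matrix%d.f' % (ime + 1), 'auto_dsig%d.f' % (ime + 1)) for ime in range(n_me)])
# P3_qq_ttx
# [('matrix1.f', 'auto_dsig1.f'), ('matrix2.f', 'auto_dsig2.f')]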
7181 7182 7183 #=========================================================================== 7184 # Helper functions 7185 #===========================================================================
7186 - def modify_grouping(self, matrix_element):
7187 """allow to modify the grouping (if grouping is in place) 7188 return two value: 7189 - True/False if the matrix_element was modified 7190 - the new(or old) matrix element""" 7191 7192 return True, matrix_element.split_lepton_grouping()
7193 7194 #=========================================================================== 7195 # write_super_auto_dsig_file 7196 #===========================================================================
7197 - def write_super_auto_dsig_file(self, writer, subproc_group):
7198 """Write the auto_dsig.f file selecting between the subprocesses 7199 in subprocess group mode""" 7200 7201 replace_dict = {} 7202 7203 # Extract version number and date from VERSION file 7204 info_lines = self.get_mg5_info_lines() 7205 replace_dict['info_lines'] = info_lines 7206 7207 matrix_elements = subproc_group.get('matrix_elements') 7208 7209 # Extract process info lines 7210 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 7211 matrix_elements]) 7212 replace_dict['process_lines'] = process_lines 7213 7214 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 7215 replace_dict['nexternal'] = nexternal 7216 7217 replace_dict['nsprocs'] = 2*len(matrix_elements) 7218 7219 # Generate dsig definition line 7220 dsig_def_line = "DOUBLE PRECISION " + \ 7221 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 7222 range(len(matrix_elements))]) 7223 replace_dict["dsig_def_line"] = dsig_def_line 7224 7225 # Generate dsig process lines 7226 call_dsig_proc_lines = [] 7227 for iproc in range(len(matrix_elements)): 7228 call_dsig_proc_lines.append(\ 7229 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 7230 {"num": iproc + 1, 7231 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 7232 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 7233 7234 if writer: 7235 file = open(os.path.join(_file_path, \ 7236 'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')).read() 7237 file = file % replace_dict 7238 # Write the file 7239 writer.writelines(file) 7240 else: 7241 return replace_dict
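A minimal sketch of the selection lines built above, for two hypothetical subprocesses in the group:

# Illustrative sketch only.
nprocs = 2
print("DOUBLE PRECISION " + ",".join("DSIG%d" % (i + 1) for i in range(nprocs)))
for i in range(nprocs):
    print("IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE)" % {"num": i + 1})
# DOUBLE PRECISION DSIG1,DSIG2
# IF(IPROC.EQ.1) DSIGPROC=DSIG1(P1,WGT,IMODE)
# IF(IPROC.EQ.2) DSIGPROC=DSIG2(P1,WGT,IMODE)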
7242 7243 #=========================================================================== 7244 # write_mirrorprocs 7245 #===========================================================================
7246 - def write_mirrorprocs(self, writer, subproc_group):
7247 """Write the mirrorprocs.inc file determining which processes have 7248 IS mirror process in subprocess group mode.""" 7249 7250 lines = [] 7251 bool_dict = {True: '.true.', False: '.false.'} 7252 matrix_elements = subproc_group.get('matrix_elements') 7253 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 7254 (len(matrix_elements), 7255 ",".join([bool_dict[me.get('has_mirror_process')] for \ 7256 me in matrix_elements]))) 7257 # Write the file 7258 writer.writelines(lines)
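The DATA line produced above, sketched for three hypothetical matrix elements where only the second has an initial-state mirror process:

# Illustrative sketch only.
bool_dict = {True: '.true.', False: '.false.'}
has_mirror = [False, True, False]
print("DATA (MIRRORPROCS(I),I=1,%d)/%s/" %
      (len(has_mirror), ",".join(bool_dict[m] for m in has_mirror)))
# DATA (MIRRORPROCS(I),I=1,3)/.false.,.true.,.false./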
7259 7260 #=========================================================================== 7261 # write_configs_file 7262 #===========================================================================
7263 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
7264 """Write the configs.inc file with topology information for a 7265 subprocess group. Use the first subprocess with a diagram for each 7266 configuration.""" 7267 7268 matrix_elements = subproc_group.get('matrix_elements') 7269 model = matrix_elements[0].get('processes')[0].get('model') 7270 7271 diagrams = [] 7272 config_numbers = [] 7273 for iconfig, config in enumerate(diagrams_for_config): 7274 # Check if any diagrams correspond to this config 7275 if set(config) == set([0]): 7276 continue 7277 subproc_diags = [] 7278 for s,d in enumerate(config): 7279 if d: 7280 subproc_diags.append(matrix_elements[s].\ 7281 get('diagrams')[d-1]) 7282 else: 7283 subproc_diags.append(None) 7284 diagrams.append(subproc_diags) 7285 config_numbers.append(iconfig + 1) 7286 7287 # Extract number of external particles 7288 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 7289 7290 return len(diagrams), \ 7291 self.write_configs_file_from_diagrams(writer, diagrams, 7292 config_numbers, 7293 nexternal, ninitial, 7294 matrix_elements[0],model)
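A minimal sketch of how diagrams_for_config is read above: one diagram number per subprocess for each configuration, with 0 meaning no diagram, and configurations without any diagram dropped:

# Illustrative sketch only; the mapping is made up (3 configs, 2 subprocesses).
diagrams_for_config = [[1, 1], [0, 0], [2, 0]]
config_numbers = [i + 1 for i, conf in enumerate(diagrams_for_config)
                  if set(conf) != set([0])]
print(config_numbers)
# [1, 3]   (configuration 2 has no diagram in any subprocess and is skipped)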
7295 7296 #=========================================================================== 7297 # write_run_configs_file 7298 #===========================================================================
7299 - def write_run_config_file(self, writer):
7300 """Write the run_configs.inc file for MadEvent""" 7301 7302 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 7303 text = open(path).read() % {'chanperjob':'2'} 7304 writer.write(text) 7305 return True
7306 7307 7308 #=========================================================================== 7309 # write_leshouche_file 7310 #===========================================================================
7311 - def write_leshouche_file(self, writer, subproc_group):
7312 """Write the leshouche.inc file for MG4""" 7313 7314 all_lines = [] 7315 7316 for iproc, matrix_element in \ 7317 enumerate(subproc_group.get('matrix_elements')): 7318 all_lines.extend(self.get_leshouche_lines(matrix_element, 7319 iproc)) 7320 7321 # Write the file 7322 writer.writelines(all_lines) 7323 7324 return True
7325