Package madgraph :: Package iolibs :: Module export_v4
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Methods and classes to export matrix elements to v4 format.""" 
  16   
  17  import copy 
  18  from cStringIO import StringIO 
  19  from distutils import dir_util 
  20  import itertools 
  21  import fractions 
  22  import glob 
  23  import logging 
  24  import math 
  25  import os 
  26  import re 
  27  import shutil 
  28  import subprocess 
  29  import sys 
  30  import time 
  31  import traceback 
  32   
  33  import aloha 
  34   
  35  import madgraph.core.base_objects as base_objects 
  36  import madgraph.core.color_algebra as color 
  37  import madgraph.core.helas_objects as helas_objects 
  38  import madgraph.iolibs.drawing_eps as draw 
  39  import madgraph.iolibs.files as files 
  40  import madgraph.iolibs.group_subprocs as group_subprocs 
  41  import madgraph.iolibs.file_writers as writers 
  42  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  43  import madgraph.iolibs.template_files as template_files 
  44  import madgraph.iolibs.ufo_expression_parsers as parsers 
  45  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  46  import madgraph.interface.common_run_interface as common_run_interface 
  47  import madgraph.various.diagram_symmetry as diagram_symmetry 
  48  import madgraph.various.misc as misc 
  49  import madgraph.various.banner as banner_mod 
  50  import madgraph.various.process_checks as process_checks 
  51  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  52  import aloha.create_aloha as create_aloha 
  53  import models.import_ufo as import_ufo 
  54  import models.write_param_card as param_writer 
  55  import models.check_param_card as check_param_card 
  56   
  57   
  58  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  59  from madgraph.iolibs.files import cp, ln, mv 
  60   
  61  from madgraph import InvalidCmd 
  62   
# Shorthand for os.path.join, used pervasively in this module.
pjoin = os.path.join

# Root directory of the madgraph package (parent of the iolibs directory),
# with a trailing slash.
_file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/'
logger = logging.getLogger('madgraph.export_v4')

# Fallback compiler executables used when the user configuration does not
# specify any.
default_compiler= {'fortran': 'gfortran',
                       'f2py': 'f2py',
                       'cpp':'g++'}
class VirtualExporter(object):
    """Base class defining the interface madgraph expects from an exporter.

    Concrete exporters override the class attributes below and the hook
    methods; the defaults here implement a do-nothing exporter.
    """

    # exporter variables which modify the way madgraph interacts with this class

    # This variable changes the type of object called within
    # 'generate_subprocess_directory' functions.
    # False to avoid grouping (only identical matrix element are merged)
    # 'madevent' group the massless quark and massless lepton
    # 'madweight' group the gluon with the massless quark
    grouped_mode = 'madevent'

    # If grouped_mode=False, uu~ and u~u will be called independently.
    # Putting sa_symmetry generates only one of the two matrix-element.
    sa_symmetry = False

    # Ask madgraph to check if the directory already exists and propose to the
    # user to remove it first if this is the case
    check = True

    # [Template, None, dir]
    # - Template, madgraph will call copy_template
    # - dir, madgraph will just create an empty directory for initialisation
    # - None, madgraph do nothing for initialisation
    output = 'Template'

    # language of the output: 'v4' for Fortran output, 'cpp' for C++ output
    exporter = 'v4'

    def __init__(self, dir_path="", opt=None):
        """dir_path: output directory; opt: dictionary with all the optional
        arguments passed at output time."""
        return

    def copy_template(self, model):
        """Hook called when output='Template' to populate the directory."""
        return

    def generate_subprocess_directory(self, subproc_group, helicity_model, me=None):
        """For ungrouped mode the signature is
        (self, matrix_element, helicity_model, me_number).
        Return an integer stating the number of calls to helicity routines."""
        return 0

    def convert_model(self, model, wanted_lorentz=[], wanted_couplings=[]):
        """Hook to write out the model (couplings/lorentz) for this output."""
        return

    def finalize(self, matrix_element, cmdhistory, MG5options, outputflag):
        """Hook called once all subprocess directories have been generated."""
        return

    def pass_information_from_cmd(self, cmd):
        """pass information from the command interface to the exporter.
        Please do not modify any object of the interface from the exporter.
        """
        return

    def modify_grouping(self, matrix_element):
        """Return (modified_flag, matrix_element); default leaves it untouched."""
        return False, matrix_element

    def export_model_files(self, model_v4_path):
        # Use call syntax for raise (the Python2-only "raise E, msg" statement
        # form is invalid in Python 3); the old unreachable `return` after the
        # raise has been dropped.
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")

    def export_helas(self, HELAS_PATH):
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")
#===============================================================================
# ProcessExporterFortran
#===============================================================================
class ProcessExporterFortran(VirtualExporter):
    """Class to take care of exporting a set of matrix elements to
    Fortran (v4) format."""

    # Default exporter options; instances copy and overlay these in __init__.
    default_opt = {'clean': False, 'complex_mass':False,
                   'export_format':'madevent', 'mp': False,
                   'v5_model': True,
                   'output_options':{}
                   }
    # Fortran (v4) output does not group subprocesses by default.
    grouped_mode = False
    def __init__(self, dir_path = "", opt=None):
        """Initiate the ProcessExporterFortran with directory information"""
        # Root of the MG5 installation (source of templates and vendored code).
        self.mgme_dir = MG5DIR
        # Output directory of the process being exported.
        self.dir_path = dir_path
        self.model = None

        # Start from the class defaults and overlay the user-supplied options.
        self.opt = dict(self.default_opt)
        if opt:
            self.opt.update(opt)

        self.cmd_options = self.opt['output_options']

        #place holder to pass information to the run_interface
        self.proc_characteristic = banner_mod.ProcCharacteristic()
162 163 164 #=========================================================================== 165 # process exporter fortran switch between group and not grouped 166 #===========================================================================
167 - def export_processes(self, matrix_elements, fortran_model):
168 """Make the switch between grouped and not grouped output""" 169 170 calls = 0 171 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 172 for (group_number, me_group) in enumerate(matrix_elements): 173 calls = calls + self.generate_subprocess_directory(\ 174 me_group, fortran_model, group_number) 175 else: 176 for me_number, me in enumerate(matrix_elements.get_matrix_elements()): 177 calls = calls + self.generate_subprocess_directory(\ 178 me, fortran_model, me_number) 179 180 return calls
    #===========================================================================
    # create the run_card
    #===========================================================================
    def create_run_card(self, matrix_elements, history):
        """Write run_card.dat and run_card_default.dat in the Cards directory,
        tailoring the defaults to the process when matrix elements are known."""


        # bypass this for the loop-check
        import madgraph.loop.loop_helas_objects as loop_helas_objects
        if isinstance(matrix_elements, loop_helas_objects.LoopHelasMatrixElement):
            matrix_elements = None

        run_card = banner_mod.RunCard()


        default=True
        if isinstance(matrix_elements, group_subprocs.SubProcessGroupList):
            # grouped output: flatten the processes of every subprocess group
            processes = [me.get('processes') for megroup in matrix_elements
                         for me in megroup['matrix_elements']]
        elif matrix_elements:
            processes = [me.get('processes')
                         for me in matrix_elements['matrix_elements']]
        else:
            # no matrix element available: keep the generic run_card defaults
            default =False

        if default:
            run_card.create_default_for_process(self.proc_characteristic,
                                                history,
                                                processes)


        run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'))
        run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))
216 217 218 #=========================================================================== 219 # copy the Template in a new directory. 220 #===========================================================================
221 - def copy_template(self, model):
222 """create the directory run_name as a copy of the MadEvent 223 Template, and clean the directory 224 """ 225 226 #First copy the full template tree if dir_path doesn't exit 227 if not os.path.isdir(self.dir_path): 228 assert self.mgme_dir, \ 229 "No valid MG_ME path given for MG4 run directory creation." 230 logger.info('initialize a new directory: %s' % \ 231 os.path.basename(self.dir_path)) 232 shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'), 233 self.dir_path, True) 234 # distutils.dir_util.copy_tree since dir_path already exists 235 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 236 self.dir_path) 237 # copy plot_card 238 for card in ['plot_card']: 239 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 240 try: 241 shutil.copy(pjoin(self.dir_path, 'Cards',card + '.dat'), 242 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 243 except IOError: 244 logger.warning("Failed to copy " + card + ".dat to default") 245 elif os.getcwd() == os.path.realpath(self.dir_path): 246 logger.info('working in local directory: %s' % \ 247 os.path.realpath(self.dir_path)) 248 # distutils.dir_util.copy_tree since dir_path already exists 249 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/LO'), 250 self.dir_path) 251 # for name in misc.glob('Template/LO/*', self.mgme_dir): 252 # name = os.path.basename(name) 253 # filname = pjoin(self.mgme_dir, 'Template','LO',name) 254 # if os.path.isfile(filename): 255 # files.cp(filename, pjoin(self.dir_path,name)) 256 # elif os.path.isdir(filename): 257 # shutil.copytree(filename, pjoin(self.dir_path,name), True) 258 # distutils.dir_util.copy_tree since dir_path already exists 259 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 260 self.dir_path) 261 # Copy plot_card 262 for card in ['plot_card']: 263 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 264 try: 265 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'), 266 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 267 
except IOError: 268 logger.warning("Failed to copy " + card + ".dat to default") 269 elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')): 270 assert self.mgme_dir, \ 271 "No valid MG_ME path given for MG4 run directory creation." 272 try: 273 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 274 except IOError: 275 MG5_version = misc.get_pkg_info() 276 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write(MG5_version['version']) 277 278 #Ensure that the Template is clean 279 if self.opt['clean']: 280 logger.info('remove old information in %s' % \ 281 os.path.basename(self.dir_path)) 282 if os.environ.has_key('MADGRAPH_BASE'): 283 misc.call([pjoin('bin', 'internal', 'clean_template'), 284 '--web'], cwd=self.dir_path) 285 else: 286 try: 287 misc.call([pjoin('bin', 'internal', 'clean_template')], \ 288 cwd=self.dir_path) 289 except Exception, why: 290 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 291 % (os.path.basename(self.dir_path),why)) 292 293 #Write version info 294 MG_version = misc.get_pkg_info() 295 open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 296 MG_version['version']) 297 298 # add the makefile in Source directory 299 filename = pjoin(self.dir_path,'Source','makefile') 300 self.write_source_makefile(writers.FileWriter(filename)) 301 302 # add the DiscreteSampler information 303 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'DiscreteSampler.f'), 304 pjoin(self.dir_path, 'Source')) 305 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'StringCast.f'), 306 pjoin(self.dir_path, 'Source')) 307 308 # We need to create the correct open_data for the pdf 309 self.write_pdf_opendata()
    #===========================================================================
    # Call MadAnalysis5 to generate the default cards for this process
    #===========================================================================
    def create_default_madanalysis5_cards(self, history, proc_defs, processes,
                                          ma5_path, output_dir, levels = ['parton','hadron']):
        """ Call MA5 so that it writes default cards for both parton and
        post-shower levels, tailored for this particular process."""

        if len(levels)==0:
            return
        start = time.time()
        logger.info('Generating MadAnalysis5 default cards tailored to this process')
        try:
            # loglevel=100 keeps the MA5 interpreter quiet
            MA5_interpreter = common_run_interface.CommonRunCmd.\
                              get_MadAnalysis5_interpreter(MG5DIR,ma5_path,loglevel=100)
        except (Exception, SystemExit) as e:
            # Best effort only: a missing/broken MA5 must not abort the output.
            logger.warning('Fail to create a MadAnalysis5 instance. Therefore the default analysis with MadAnalysis5 will be empty.')
            return
        if MA5_interpreter is None:
            return

        MA5_main = MA5_interpreter.main
        for lvl in ['parton','hadron']:
            if lvl in levels:
                card_to_generate = pjoin(output_dir,'madanalysis5_%s_card_default.dat'%lvl)
                try:
                    text = MA5_main.madgraph.generate_card(history, proc_defs, processes,lvl)
                except (Exception, SystemExit) as e:
                    # keep the default card (skip only)
                    logger.warning('MadAnalysis5 failed to write a %s-level'%lvl+
                                   ' default analysis card for this process.')
                    logger.warning('Therefore, %s-level default analysis with MadAnalysis5 will be empty.'%lvl)
                    # Log the full traceback at debug level for diagnosis.
                    error=StringIO()
                    traceback.print_exc(file=error)
                    logger.debug('MadAnalysis5 error was:')
                    logger.debug('-'*60)
                    logger.debug(error.getvalue()[:-1])
                    logger.debug('-'*60)
                else:
                    open(card_to_generate,'w').write(text)
        stop = time.time()
        # Only report the timing when card generation was noticeably slow.
        if stop-start >1:
            logger.info('Cards created in %.2fs' % (stop-start))
355 356 #=========================================================================== 357 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 358 #===========================================================================
359 - def write_procdef_mg5(self, file_pos, modelname, process_str):
360 """ write an equivalent of the MG4 proc_card in order that all the Madevent 361 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 362 363 proc_card_template = template_files.mg4_proc_card.mg4_template 364 process_template = template_files.mg4_proc_card.process_template 365 process_text = '' 366 coupling = '' 367 new_process_content = [] 368 369 370 # First find the coupling and suppress the coupling from process_str 371 #But first ensure that coupling are define whithout spaces: 372 process_str = process_str.replace(' =', '=') 373 process_str = process_str.replace('= ', '=') 374 process_str = process_str.replace(',',' , ') 375 #now loop on the element and treat all the coupling 376 for info in process_str.split(): 377 if '=' in info: 378 coupling += info + '\n' 379 else: 380 new_process_content.append(info) 381 # Recombine the process_str (which is the input process_str without coupling 382 #info) 383 process_str = ' '.join(new_process_content) 384 385 #format the SubProcess 386 replace_dict = {'process': process_str, 387 'coupling': coupling} 388 process_text += process_template.substitute(replace_dict) 389 390 replace_dict = {'process': process_text, 391 'model': modelname, 392 'multiparticle':''} 393 text = proc_card_template.substitute(replace_dict) 394 395 if file_pos: 396 ff = open(file_pos, 'w') 397 ff.write(text) 398 ff.close() 399 else: 400 return replace_dict
    def pass_information_from_cmd(self, cmd):
        """Pass information for MA5"""

        # Remember the current process definitions so that MadAnalysis5
        # default cards can be tailored to them at finalize time.
        self.proc_defs = cmd._curr_proc_defs
    #===========================================================================
    # Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz
    #===========================================================================
    def finalize(self, matrix_elements, history='', mg5options={}, flaglist=[]):
        """Function to finalize v4 directory, for inheritance."""

        # Write the run_card first, then the MadAnalysis5 cards.
        self.create_run_card(matrix_elements, history)
        self.create_MA5_cards(matrix_elements, history)
416
417 - def create_MA5_cards(self,matrix_elements,history):
418 """ A wrapper around the creation of the MA5 cards so that it can be 419 bypassed by daughter classes (i.e. in standalone).""" 420 if 'madanalysis5_path' in self.opt and not \ 421 self.opt['madanalysis5_path'] is None and not self.proc_defs is None: 422 processes = None 423 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 424 processes = [me.get('processes') for megroup in matrix_elements 425 for me in megroup['matrix_elements']] 426 elif matrix_elements: 427 processes = [me.get('processes') 428 for me in matrix_elements['matrix_elements']] 429 430 self.create_default_madanalysis5_cards( 431 history, self.proc_defs, processes, 432 self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'), 433 levels = ['hadron','parton']) 434 435 for level in ['hadron','parton']: 436 # Copying these cards turn on the use of MadAnalysis5 by default. 437 if os.path.isfile(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level)): 438 shutil.copy(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level), 439 pjoin(self.dir_path,'Cards','madanalysis5_%s_card.dat'%level))
    #===========================================================================
    # Create the proc_characteristic file passing information to the run_interface
    #===========================================================================
    def create_proc_charac(self, matrix_elements=None, history="", **opts):
        """Dump self.proc_characteristic to SubProcesses/proc_characteristics
        so the run interface can read process-level information back."""

        self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics'))
    #===========================================================================
    # write_matrix_element_v4
    #===========================================================================
    def write_matrix_element_v4(self):
        """Function to write a matrix.f file, for inheritance.
        """
        # Daughter classes implement the actual matrix.f writing.
        pass
    #===========================================================================
    # write_pdf_opendata
    #===========================================================================
    def write_pdf_opendata(self):
        """ modify the pdf opendata file, to allow direct access to cluster node
        repository if configure"""

        if not self.opt["cluster_local_path"]:
            changer = {"pdf_systemwide": ""}
        else:
            # Fortran snippet trying several candidate cluster-local locations
            # for the pdf table, chaining them through the ERR= jumps.
            to_add = """
              tempname='%(path)s'//Tablefile
              open(IU,file=tempname,status='old',ERR=1)
              return
 1            tempname='%(path)s/Pdfdata/'//Tablefile
              open(IU,file=tempname,status='old',ERR=2)
              return
 2            tempname='%(path)s/lhapdf'//Tablefile
              open(IU,file=tempname,status='old',ERR=3)
              return
 3            tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile
              open(IU,file=tempname,status='old',ERR=4)
              return
 4            tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile
              open(IU,file=tempname,status='old',ERR=5)
              return
              """ % {"path" : self.opt["cluster_local_path"]}

            changer = {"pdf_systemwide": to_add}


        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "opendata.f"))
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"),"r").read()
        ff.writelines(template % changer)

        # Do the same for lhapdf set
        if not self.opt["cluster_local_path"]:
            changer = {"cluster_specific_path": ""}
        else:
            # Same idea for the lhapdf PDFsets directory.
            to_add="""
              LHAPath='%(path)s/PDFsets'
              Inquire(File=LHAPath, exist=exists)
              if(exists)return
              LHAPath='%(path)s/../lhapdf/pdfsets/6.1/'
              Inquire(File=LHAPath, exist=exists)
              if(exists)return
              LHAPath='%(path)s/../lhapdf/pdfsets/'
              Inquire(File=LHAPath, exist=exists)
              if(exists)return
              LHAPath='./PDFsets'
              """ % {"path" : self.opt["cluster_local_path"]}
            changer = {"cluster_specific_path": to_add}

        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"))
        #ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w")
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"),"r").read()
        ff.writelines(template % changer)


        return
516 517 518 519 #=========================================================================== 520 # write_maxparticles_file 521 #===========================================================================
522 - def write_maxparticles_file(self, writer, matrix_elements):
523 """Write the maxparticles.inc file for MadEvent""" 524 525 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 526 maxparticles = max([me.get_nexternal_ninitial()[0] for me in \ 527 matrix_elements.get('matrix_elements')]) 528 else: 529 maxparticles = max([me.get_nexternal_ninitial()[0] \ 530 for me in matrix_elements]) 531 532 lines = "integer max_particles\n" 533 lines += "parameter(max_particles=%d)" % maxparticles 534 535 # Write the file 536 writer.writelines(lines) 537 538 return True
539 540 541 #=========================================================================== 542 # export the model 543 #===========================================================================
544 - def export_model_files(self, model_path):
545 """Configure the files/link of the process according to the model""" 546 547 # Import the model 548 for file in os.listdir(model_path): 549 if os.path.isfile(pjoin(model_path, file)): 550 shutil.copy2(pjoin(model_path, file), \ 551 pjoin(self.dir_path, 'Source', 'MODEL'))
552 553 567 575 576 577 #=========================================================================== 578 # export the helas routine 579 #===========================================================================
580 - def export_helas(self, helas_path):
581 """Configure the files/link of the process according to the model""" 582 583 # Import helas routine 584 for filename in os.listdir(helas_path): 585 filepos = pjoin(helas_path, filename) 586 if os.path.isfile(filepos): 587 if filepos.endswith('Makefile.template'): 588 cp(filepos, self.dir_path + '/Source/DHELAS/Makefile') 589 elif filepos.endswith('Makefile'): 590 pass 591 else: 592 cp(filepos, self.dir_path + '/Source/DHELAS')
    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                         fortran_model,
                                         me_number):
        """Routine to generate a subprocess directory (for inheritance)"""

        # Daughter classes implement the actual directory generation and
        # return the number of helicity-routine calls.
        pass
616 617 #=========================================================================== 618 # get_source_libraries_list 619 #===========================================================================
620 - def get_source_libraries_list(self):
621 """ Returns the list of libraries to be compiling when compiling the 622 SOURCE directory. It is different for loop_induced processes and 623 also depends on the value of the 'output_dependencies' option""" 624 625 return ['$(LIBDIR)libdhelas.$(libext)', 626 '$(LIBDIR)libpdf.$(libext)', 627 '$(LIBDIR)libmodel.$(libext)', 628 '$(LIBDIR)libcernlib.$(libext)', 629 '$(LIBDIR)libbias.$(libext)']
    #===========================================================================
    # write_source_makefile
    #===========================================================================
    def write_source_makefile(self, writer):
        """Write the Source directory makefile for MG4 from its template.
        (The previous docstring wrongly said nexternal.inc.)"""

        path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source')
        set_of_lib = ' '.join(['$(LIBRARIES)']+self.get_source_libraries_list())
        # The mssm model needs an extra treatcards step through MG5_param.dat.
        if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'):
            model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make
MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param
param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n'''
        else:
            model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make
param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n'''

        replace_dict= {'libraries': set_of_lib,
                       'model':model_line,
                       'additional_dsample': '',
                       'additional_dependencies':''}

        if writer:
            # Substitute into the template and write the makefile out.
            text = open(path).read() % replace_dict
            writer.write(text)

        return replace_dict
657 658 #=========================================================================== 659 # write_nexternal_madspin 660 #===========================================================================
661 - def write_nexternal_madspin(self, writer, nexternal, ninitial):
662 """Write the nexternal_prod.inc file for madspin""" 663 664 replace_dict = {} 665 666 replace_dict['nexternal'] = nexternal 667 replace_dict['ninitial'] = ninitial 668 669 file = """ \ 670 integer nexternal_prod 671 parameter (nexternal_prod=%(nexternal)d) 672 integer nincoming_prod 673 parameter (nincoming_prod=%(ninitial)d)""" % replace_dict 674 675 # Write the file 676 if writer: 677 writer.writelines(file) 678 return True 679 else: 680 return replace_dict
681 682 #=========================================================================== 683 # write_helamp_madspin 684 #===========================================================================
685 - def write_helamp_madspin(self, writer, ncomb):
686 """Write the helamp.inc file for madspin""" 687 688 replace_dict = {} 689 690 replace_dict['ncomb'] = ncomb 691 692 file = """ \ 693 integer ncomb1 694 parameter (ncomb1=%(ncomb)d) 695 double precision helamp(ncomb1) 696 common /to_helamp/helamp """ % replace_dict 697 698 # Write the file 699 if writer: 700 writer.writelines(file) 701 return True 702 else: 703 return replace_dict
704 705 706 707 #=========================================================================== 708 # write_nexternal_file 709 #===========================================================================
710 - def write_nexternal_file(self, writer, nexternal, ninitial):
711 """Write the nexternal.inc file for MG4""" 712 713 replace_dict = {} 714 715 replace_dict['nexternal'] = nexternal 716 replace_dict['ninitial'] = ninitial 717 718 file = """ \ 719 integer nexternal 720 parameter (nexternal=%(nexternal)d) 721 integer nincoming 722 parameter (nincoming=%(ninitial)d)""" % replace_dict 723 724 # Write the file 725 if writer: 726 writer.writelines(file) 727 return True 728 else: 729 return replace_dict
730 #=========================================================================== 731 # write_pmass_file 732 #===========================================================================
733 - def write_pmass_file(self, writer, matrix_element):
734 """Write the pmass.inc file for MG4""" 735 736 model = matrix_element.get('processes')[0].get('model') 737 738 lines = [] 739 for wf in matrix_element.get_external_wavefunctions(): 740 mass = model.get('particle_dict')[wf.get('pdg_code')].get('mass') 741 if mass.lower() != "zero": 742 mass = "abs(%s)" % mass 743 744 lines.append("pmass(%d)=%s" % \ 745 (wf.get('number_external'), mass)) 746 747 # Write the file 748 writer.writelines(lines) 749 750 return True
751 752 #=========================================================================== 753 # write_ngraphs_file 754 #===========================================================================
755 - def write_ngraphs_file(self, writer, nconfigs):
756 """Write the ngraphs.inc file for MG4. Needs input from 757 write_configs_file.""" 758 759 file = " integer n_max_cg\n" 760 file = file + "parameter (n_max_cg=%d)" % nconfigs 761 762 # Write the file 763 writer.writelines(file) 764 765 return True
    #===========================================================================
    # write_leshouche_file
    #===========================================================================
    def write_leshouche_file(self, writer, matrix_element):
        """Write the leshouche.inc file for MG4"""

        # Write the file; numproc=0 since there is one process per file here.
        writer.writelines(self.get_leshouche_lines(matrix_element, 0))

        return True
    #===========================================================================
    # get_leshouche_lines
    #===========================================================================
    def get_leshouche_lines(self, matrix_element, numproc):
        """Return the Fortran DATA statements (IDUP/MOTHUP/ICOLUP) that make
        up the leshouche.inc file for MG4."""

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        lines = []
        for iproc, proc in enumerate(matrix_element.get('processes')):
            legs = proc.get_legs_with_decays()
            # PDG ids of all external legs for this (sub)process
            lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \
                         (iproc + 1, numproc+1, nexternal,
                          ",".join([str(l.get('id')) for l in legs])))
            if iproc == 0 and numproc == 0:
                # Mother information: initial legs have no mother, final legs
                # point back at the initial ones.
                for i in [1, 2]:
                    lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \
                                 (i, nexternal,
                                  ",".join([ "%3r" % 0 ] * ninitial + \
                                           [ "%3r" % i ] * (nexternal - ninitial))))

            # Here goes the color connections corresponding to the JAMPs
            # Only one output, for the first subproc!
            if iproc == 0:
                # If no color basis, just output trivial color flow
                if not matrix_element.get('color_basis'):
                    for i in [1, 2]:
                        lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \
                                     (i, numproc+1,nexternal,
                                      ",".join([ "%3r" % 0 ] * nexternal)))

                else:
                    # First build a color representation dictionnary
                    repr_dict = {}
                    for l in legs:
                        # sign flips the representation for initial-state legs
                        repr_dict[l.get('number')] = \
                            proc.get('model').get_particle(l.get('id')).get_color()\
                            * (-1)**(1+l.get('state'))
                    # Get the list of color flows
                    color_flow_list = \
                        matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
                                                                                   ninitial)
                    # And output them properly
                    for cf_i, color_flow_dict in enumerate(color_flow_list):
                        for i in [0, 1]:
                            lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \
                                         (i + 1, cf_i + 1, numproc+1, nexternal,
                                          ",".join(["%3r" % color_flow_dict[l.get('number')][i] \
                                                    for l in legs])))

        return lines
830 831 832 833 834 #=========================================================================== 835 # write_maxamps_file 836 #===========================================================================
837 - def write_maxamps_file(self, writer, maxamps, maxflows, 838 maxproc,maxsproc):
839 """Write the maxamps.inc file for MG4.""" 840 841 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 842 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 843 (maxamps, maxflows) 844 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 845 (maxproc, maxsproc) 846 847 # Write the file 848 writer.writelines(file) 849 850 return True
851 852 853 #=========================================================================== 854 # Routines to output UFO models in MG4 format 855 #=========================================================================== 856
    def convert_model(self, model, wanted_lorentz = [],
                      wanted_couplings = []):
        """ Create a full valid MG4 model from a MG5 model (coming from UFO)"""
        # NOTE(review): the mutable default arguments are shared across calls;
        # both lists are only read here, so this is harmless in practice.

        # Make sure aloha is in quadruple precision if needed
        old_aloha_mp=aloha.mp_precision
        aloha.mp_precision=self.opt['mp']

        # create the MODEL
        write_dir=pjoin(self.dir_path, 'Source', 'MODEL')
        # NOTE(review): 'self.opt + self.proc_characteristic' implies these
        # support concatenation/merging with '+' -- confirm their actual types.
        model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic)
        model_builder.build(wanted_couplings)

        # Backup the loop mode, because it can be changed in what follows.
        old_loop_mode = aloha.loop_mode

        # Create the aloha model or use the existing one (for loop exporters
        # this is useful as the aloha model will be used again in the
        # LoopHelasMatrixElements generated). We do not save the model generated
        # here if it didn't exist already because it would be a waste of
        # memory for tree level applications since aloha is only needed at the
        # time of creating the aloha fortran subroutines.
        if hasattr(self, 'aloha_model'):
            aloha_model = self.aloha_model
        else:
            aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath')))
        aloha_model.add_Lorentz_object(model.get('lorentz'))

        # Compute the subroutines
        if wanted_lorentz:
            aloha_model.compute_subset(wanted_lorentz)
        else:
            aloha_model.compute_all(save=False)

        # Write them out
        write_dir=pjoin(self.dir_path, 'Source', 'DHELAS')
        aloha_model.write(write_dir, 'Fortran')

        # Revert the original aloha loop mode
        aloha.loop_mode = old_loop_mode

        #copy Helas Template
        cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile')
        # A loop-capable aloha_functions file is needed as soon as any wanted
        # lorentz structure carries an 'L' tag in its tag list.
        if any([any(['L' in tag for tag in d[1]]) for d in wanted_lorentz]):
            cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f',
               write_dir+'/aloha_functions.f')
            aloha_model.loop_mode = False
        else:
            cp(MG5DIR + '/aloha/template_files/aloha_functions.f',
               write_dir+'/aloha_functions.f')
        create_aloha.write_aloha_file_inc(write_dir, '.f', '.o')

        # Make final link in the Process
        self.make_model_symbolic_link()

        # Re-establish original aloha mode
        aloha.mp_precision=old_aloha_mp
914 915 916 #=========================================================================== 917 # Helper functions 918 #===========================================================================
919 - def modify_grouping(self, matrix_element):
920 """allow to modify the grouping (if grouping is in place) 921 return two value: 922 - True/False if the matrix_element was modified 923 - the new(or old) matrix element""" 924 925 return False, matrix_element
926 927 #=========================================================================== 928 # Helper functions 929 #===========================================================================
930 - def get_mg5_info_lines(self):
931 """Return info lines for MG5, suitable to place at beginning of 932 Fortran files""" 933 934 info = misc.get_pkg_info() 935 info_lines = "" 936 if info and info.has_key('version') and info.has_key('date'): 937 info_lines = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \ 938 (info['version'], info['date']) 939 info_lines = info_lines + \ 940 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 941 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 942 else: 943 info_lines = "# Generated by MadGraph5_aMC@NLO\n" + \ 944 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 945 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 946 947 return info_lines
948
949 - def get_process_info_lines(self, matrix_element):
950 """Return info lines describing the processes for this matrix element""" 951 952 return"\n".join([ "C " + process.nice_string().replace('\n', '\nC * ') \ 953 for process in matrix_element.get('processes')])
954 955
956 - def get_helicity_lines(self, matrix_element,array_name='NHEL'):
957 """Return the Helicity matrix definition lines for this matrix element""" 958 959 helicity_line_list = [] 960 i = 0 961 for helicities in matrix_element.get_helicity_matrix(): 962 i = i + 1 963 int_list = [i, len(helicities)] 964 int_list.extend(helicities) 965 helicity_line_list.append(\ 966 ("DATA ("+array_name+"(I,%4r),I=1,%d) /" + \ 967 ",".join(['%2r'] * len(helicities)) + "/") % tuple(int_list)) 968 969 return "\n".join(helicity_line_list)
970
971 - def get_ic_line(self, matrix_element):
972 """Return the IC definition line coming after helicities, required by 973 switchmom in madevent""" 974 975 nexternal = matrix_element.get_nexternal_ninitial()[0] 976 int_list = range(1, nexternal + 1) 977 978 return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, 979 ",".join([str(i) for \ 980 i in int_list]))
981
982 - def set_chosen_SO_index(self, process, squared_orders):
983 """ From the squared order constraints set by the user, this function 984 finds what indices of the squared_orders list the user intends to pick. 985 It returns this as a string of comma-separated successive '.true.' or 986 '.false.' for each index.""" 987 988 user_squared_orders = process.get('squared_orders') 989 split_orders = process.get('split_orders') 990 991 if len(user_squared_orders)==0: 992 return ','.join(['.true.']*len(squared_orders)) 993 994 res = [] 995 for sqsos in squared_orders: 996 is_a_match = True 997 for user_sqso, value in user_squared_orders.items(): 998 if (process.get_squared_order_type(user_sqso) =='==' and \ 999 value!=sqsos[split_orders.index(user_sqso)]) or \ 1000 (process.get_squared_order_type(user_sqso) in ['<=','='] and \ 1001 value<sqsos[split_orders.index(user_sqso)]) or \ 1002 (process.get_squared_order_type(user_sqso) == '>' and \ 1003 value>=sqsos[split_orders.index(user_sqso)]): 1004 is_a_match = False 1005 break 1006 res.append('.true.' if is_a_match else '.false.') 1007 1008 return ','.join(res)
1009
1010 - def get_split_orders_lines(self, orders, array_name, n=5):
1011 """ Return the split orders definition as defined in the list orders and 1012 for the name of the array 'array_name'. Split rows in chunks of size n.""" 1013 1014 ret_list = [] 1015 for index, order in enumerate(orders): 1016 for k in xrange(0, len(order), n): 1017 ret_list.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" % \ 1018 (array_name,index + 1, k + 1, min(k + n, len(order)), 1019 ','.join(["%5r" % i for i in order[k:k + n]]))) 1020 return ret_list
1021
1022 - def format_integer_list(self, list, name, n=5):
1023 """ Return an initialization of the python list in argument following 1024 the fortran syntax using the data keyword assignment, filling an array 1025 of name 'name'. It splits rows in chunks of size n.""" 1026 1027 ret_list = [] 1028 for k in xrange(0, len(list), n): 1029 ret_list.append("DATA (%s(i),i=%3r,%3r) /%s/" % \ 1030 (name, k + 1, min(k + n, len(list)), 1031 ','.join(["%5r" % i for i in list[k:k + n]]))) 1032 return ret_list
1033
    def get_color_data_lines(self, matrix_element, n=6):
        """Return the color matrix definition lines for this matrix element. Split
        rows in chunks of size n."""

        # No color matrix: emit the trivial 1x1 color matrix (Denom=1, CF=1).
        if not matrix_element.get('color_matrix'):
            return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"]
        else:
            ret_list = []
            my_cs = color.ColorString()
            for index, denominator in \
                enumerate(matrix_element.get('color_matrix').\
                                             get_line_denominators()):
                # First write the common denominator for this color matrix line
                ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator))
                # Then write the numerators for the matrix elements
                num_list = matrix_element.get('color_matrix').\
                                            get_line_numerators(index, denominator)

                # Chunk the row into DATA statements of at most n entries.
                for k in xrange(0, len(num_list), n):
                    ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \
                                    (index + 1, k + 1, min(k + n, len(num_list)),
                                     ','.join(["%5r" % i for i in num_list[k:k + n]])))
                # Append a Fortran comment documenting which color structure
                # this matrix row corresponds to (sorted color-basis order
                # matches the row ordering used elsewhere in this class).
                my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index])
                ret_list.append("C %s" % repr(my_cs))
            return ret_list
1059 1060
1061 - def get_den_factor_line(self, matrix_element):
1062 """Return the denominator factor line for this matrix element""" 1063 1064 return "DATA IDEN/%2r/" % \ 1065 matrix_element.get_denominator_factor()
1066
    def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
        """Return the ICOLAMP matrix, showing which JAMPs contribute to
        which configs (diagrams)."""

        ret_list = []

        # Maps Python booleans to Fortran logical literals.
        booldict = {False: ".false.", True: ".true."}

        if not matrix_element.get('color_basis'):
            # No color, so only one color factor. Simply write a ".true."
            # for each config (i.e., each diagram with only 3 particle
            # vertices
            configs = len(mapconfigs)
            ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \
                            (num_matrix_element, configs,
                             ','.join([".true." for i in range(configs)])))
            return ret_list

        # There is a color basis - create a list showing which JAMPs have
        # contributions to which configs

        # Only want to include leading color flows, so find max_Nc
        color_basis = matrix_element.get('color_basis')

        # We don't want to include the power of Nc's which come from the potential
        # loop color trace (i.e. in the case of a closed fermion loop for example)
        # so we subtract it here when computing max_Nc
        max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in
                                                  color_basis.values()],[]))

        # Crate dictionary between diagram number and JAMP number
        diag_jamp = {}
        for ijamp, col_basis_elem in \
                    enumerate(sorted(matrix_element.get('color_basis').keys())):
            for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]:
                # Only use color flows with Nc == max_Nc. However, notice that
                # we don't want to include the Nc power coming from the loop
                # in this counting.
                if (diag_tuple[4]-diag_tuple[5]) == max_Nc:
                    diag_num = diag_tuple[0] + 1
                    # Add this JAMP number to this diag_num
                    diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \
                                          [ijamp+1]

        # NOTE(review): relies on the loop variable 'ijamp' leaking out of the
        # for-loop above; if color_basis were empty this would raise NameError,
        # but the empty case is handled by the early return.
        colamps = ijamp + 1
        for iconfig, num_diag in enumerate(mapconfigs):
            if num_diag == 0:
                continue

            # List of True or False
            # NOTE(review): diag_jamp[num_diag] raises KeyError if a mapped
            # diagram has no leading-color flow -- presumably cannot happen
            # for configs surviving the num_diag == 0 filter; confirm.
            bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)]
            # Add line
            ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \
                                (iconfig+1, num_matrix_element, colamps,
                                 ','.join(["%s" % booldict[b] for b in \
                                           bool_list])))

        return ret_list
1125
    def get_amp2_lines(self, matrix_element, config_map = []):
        """Return the amp2(i) = sum(amp for diag(i))^2 lines"""
        # NOTE(review): the mutable default 'config_map' is only read, never
        # mutated, so sharing it across calls is harmless.

        # nexternal/ninitial are computed but not used below; kept as-is.
        nexternal, ninitial = matrix_element.get_nexternal_ninitial()
        # Get minimum legs in a vertex
        vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \
                     matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        ret_lines = []
        if config_map:
            # In this case, we need to sum up all amplitudes that have
            # identical topologies, as given by the config_map (which
            # gives the topology/config for each of the diagrams
            diagrams = matrix_element.get('diagrams')
            # Combine the diagrams with identical topologies
            config_to_diag_dict = {}
            for idiag, diag in enumerate(matrix_element.get('diagrams')):
                # config number 0 marks diagrams excluded from any config.
                if config_map[idiag] == 0:
                    continue
                try:
                    config_to_diag_dict[config_map[idiag]].append(idiag)
                except KeyError:
                    config_to_diag_dict[config_map[idiag]] = [idiag]
            # Write out the AMP2s summing squares of amplitudes belonging
            # to eiher the same diagram or different diagrams with
            # identical propagator properties.  Note that we need to use
            # AMP2 number corresponding to the first diagram number used
            # for that AMP2.
            for config in sorted(config_to_diag_dict.keys()):

                line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \
                       {"num": (config_to_diag_dict[config][0] + 1)}

                # Concatenate all amplitudes of all diagrams in this config.
                amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \
                                sum([diagrams[idiag].get('amplitudes') for \
                                     idiag in config_to_diag_dict[config]], [])])

                # Not using \sum |M|^2 anymore since this creates troubles
                # when ckm is not diagonal due to the JIM mechanism.
                if '+' in amp:
                    line += "(%s)*dconjg(%s)" % (amp, amp)
                else:
                    line += "%s*dconjg(%s)" % (amp, amp)
                ret_lines.append(line)
        else:
            for idiag, diag in enumerate(matrix_element.get('diagrams')):
                # Ignore any diagrams with 4-particle vertices.
                if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert:
                    continue
                # Now write out the expression for AMP2, meaning the sum of
                # squared amplitudes belonging to the same diagram
                line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)}
                line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \
                                  {"num": a.get('number')} for a in \
                                  diag.get('amplitudes')])
                ret_lines.append(line)

        return ret_lines
1185 1186 #=========================================================================== 1187 # Returns the data statements initializing the coeffictients for the JAMP 1188 # decomposition. It is used when the JAMP initialization is decided to be 1189 # done through big arrays containing the projection coefficients. 1190 #===========================================================================
    def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\
                       n=50, Nc_value=3):
        """This functions return the lines defining the DATA statement setting
        the coefficients building the JAMPS out of the AMPS. Split rows in
        bunches of size n.
        One can specify the color_basis from which the color amplitudes originates
        so that there are commentaries telling what color structure each JAMP
        corresponds to."""

        # Sanity check: must be a non-empty list of lists.
        if(not isinstance(color_amplitudes,list) or
           not (color_amplitudes and isinstance(color_amplitudes[0],list))):
            raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_coefs"

        res_list = []
        my_cs = color.ColorString()
        for index, coeff_list in enumerate(color_amplitudes):
            # Create the list of the complete numerical coefficient.
            # Each coefficient is ((ff_number, frac, is_imaginary, Nc_power),
            # amp_number) -- see self.coeff for the same layout.
            coefs_list=[coefficient[0][0]*coefficient[0][1]*\
                        (fractions.Fraction(Nc_value)**coefficient[0][3]) for \
                        coefficient in coeff_list]
            # Create the list of the numbers of the contributing amplitudes.
            # Mutliply by -1 for those which have an imaginary coefficient.
            ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \
                              for coefficient in coeff_list]
            # Find the common denominator.
            # NOTE(review): Python-2 only -- relies on the builtin 'reduce'
            # and on fractions.gcd (both removed/relocated in Python 3).
            commondenom=abs(reduce(fractions.gcd, coefs_list).denominator)
            # Integer numerators once everything is on the common denominator.
            num_list=[(coefficient*commondenom).numerator \
                      for coefficient in coefs_list]
            res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\
                                                         index+1,len(num_list)))
            res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\
                                                         index+1,commondenom))
            if color_basis:
                # Comment line documenting the color structure of this JAMP.
                my_cs.from_immutable(sorted(color_basis.keys())[index])
                res_list.append("C %s" % repr(my_cs))
            # Chunk both arrays into DATA statements of at most n entries.
            for k in xrange(0, len(num_list), n):
                res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \
                    (tag_letter,index + 1, k + 1, min(k + n, len(num_list)),
                     ','.join(["%6r" % i for i in num_list[k:k + n]])))
                res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \
                    (tag_letter,index + 1, k + 1, min(k + n, len(num_list)),
                     ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]])))
                pass
        return res_list
1235 1236
    def get_JAMP_lines_split_order(self, col_amps, split_order_amps,
                                   split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
        """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
        defined as a matrix element or directly as a color_amplitudes dictionary.
        The split_order_amps specifies the group of amplitudes sharing the same
        amplitude orders which should be put in together in a given set of JAMPS.
        The split_order_amps is supposed to have the format of the second output
        of the function get_split_orders_mapping function in helas_objects.py.
        The split_order_names is optional (it should correspond to the process
        'split_orders' attribute) and only present to provide comments in the
        JAMP definitions in the code."""

        # Let the user call get_JAMP_lines_split_order directly from a
        # The second %s is filled now with the offending value; the first %s
        # stays unformatted and is filled at each raise site below.
        error_msg="Malformed '%s' argument passed to the "+\
                  "get_JAMP_lines_split_order function: %s"%str(split_order_amps)
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            color_amplitudes=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                color_amplitudes=col_amps
            else:
                raise MadGraph5Error, error_msg%'col_amps'
        else:
            raise MadGraph5Error, error_msg%'col_amps'

        # Verify the sanity of the split_order_amps and split_order_names args
        if isinstance(split_order_amps,list):
            for elem in split_order_amps:
                if len(elem)!=2:
                    raise MadGraph5Error, error_msg%'split_order_amps'
                # Check the first element of the two lists to make sure they are
                # integers, although in principle they should all be integers.
                if not isinstance(elem[0],tuple) or \
                   not isinstance(elem[1],tuple) or \
                   not isinstance(elem[0][0],int) or \
                   not isinstance(elem[1][0],int):
                    raise MadGraph5Error, error_msg%'split_order_amps'
        else:
            raise MadGraph5Error, error_msg%'split_order_amps'

        if not split_order_names is None:
            if isinstance(split_order_names,list):
                # Should specify the same number of names as there are elements
                # in the key of the split_order_amps.
                if len(split_order_names)!=len(split_order_amps[0][0]):
                    raise MadGraph5Error, error_msg%'split_order_names'
                # Check the first element of the list to be a string
                if not isinstance(split_order_names[0],str):
                    raise MadGraph5Error, error_msg%'split_order_names'
            else:
                raise MadGraph5Error, error_msg%'split_order_names'

        # Now scan all contributing orders to be individually computed and
        # construct the list of color_amplitudes for JAMP to be constructed
        # accordingly.
        res_list=[]
        for i, amp_order in enumerate(split_order_amps):
            # Keep, for each JAMP, only the contributions whose amplitude
            # number belongs to this order group (Python-2 filter -> list).
            col_amps_order = []
            for jamp in color_amplitudes:
                col_amps_order.append(filter(lambda col_amp:
                                             col_amp[1] in amp_order[1],jamp))
            if split_order_names:
                res_list.append('C JAMPs contributing to orders '+' '.join(
                    ['%s=%i'%order for order in zip(split_order_names,
                                                    amp_order[0])]))
            # The matchbox export additionally needs leading-color JAMPs.
            if self.opt['export_format'] in ['madloop_matchbox']:
                res_list.extend(self.get_JAMP_lines(col_amps_order,
                    JAMP_format="JAMP(%s,{0})".format(str(i+1)),
                    JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1))))
            else:
                res_list.extend(self.get_JAMP_lines(col_amps_order,
                    JAMP_format="JAMP(%s,{0})".format(str(i+1))))

        return res_list
1311 1312
    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)",
                       split=-1):
        """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
        defined as a matrix element or directly as a color_amplitudes dictionary,
        Jamp_formatLC should be define to allow to add LeadingColor computation
        (usefull for MatchBox)
        The split argument defines how the JAMP lines should be split in order
        not to be too long."""

        # Let the user call get_JAMP_lines directly from a MatrixElement or from
        # the color amplitudes lists.
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            color_amplitudes=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                color_amplitudes=col_amps
            else:
                raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines"
        else:
            raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines"


        res_list = []
        for i, coeff_list in enumerate(color_amplitudes):
            # It might happen that coeff_list is empty if this function was
            # called from get_JAMP_lines_split_order (i.e. if some color flow
            # does not contribute at all for a given order).
            # In this case we simply set it to 0.
            if coeff_list==[]:
                res_list.append(((JAMP_format+"=0D0") % str(i + 1)))
                continue
            # Break the JAMP definition into 'n=split' pieces to avoid having
            # arbitrarly long lines.  split<=0 means a single piece.
            first=True
            n = (len(coeff_list)+1 if split<=0 else split)
            # Destructively consumes coeff_list chunk by chunk.
            while coeff_list!=[]:
                coefs=coeff_list[:n]
                coeff_list=coeff_list[n:]
                # Continuation pieces start with 'JAMP(i)=JAMP(i)' so the
                # previously accumulated value is carried over.
                res = ((JAMP_format+"=") % str(i + 1)) + \
                      ((JAMP_format % str(i + 1)) if not first and split>0 else '')

                first=False
                # Optimization: if all contributions to that color basis element have
                # the same coefficient (up to a sign), put it in front
                list_fracs = [abs(coefficient[0][1]) for coefficient in coefs]
                common_factor = False
                diff_fracs = list(set(list_fracs))
                if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1:
                    common_factor = True
                    global_factor = diff_fracs[0]
                    res = res + '%s(' % self.coeff(1, global_factor, False, 0)

                # loop for JAMP
                for (coefficient, amp_number) in coefs:
                    if not coefficient:
                        continue
                    if common_factor:
                        # Only the sign of the fraction remains per term; the
                        # magnitude was factored out in global_factor above.
                        res = (res + "%s" + AMP_format) % \
                              (self.coeff(coefficient[0],
                                          coefficient[1] / abs(coefficient[1]),
                                          coefficient[2],
                                          coefficient[3]),
                               str(amp_number))
                    else:
                        res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0],
                                                                      coefficient[1],
                                                                      coefficient[2],
                                                                      coefficient[3]),
                                                           str(amp_number))

                if common_factor:
                    res = res + ')'

                res_list.append(res)

        return res_list
1389
    def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
        """Generate the PDF lines for the auto_dsig.f file.

        Returns a triple of Fortran snippets (each without trailing newline):
        PDF variable declarations, their DATA initializations, and the code
        computing PD(IPROC) for every process of the matrix element.
        """

        processes = matrix_element.get('processes')
        model = processes[0].get('model')

        pdf_definition_lines = ""
        pdf_data_lines = ""
        pdf_lines = ""

        if ninitial == 1:
            # Decay process: no PDFs; every subprocess gets weight 1.
            pdf_lines = "PD(0) = 0d0\nIPROC = 0\n"
            for i, proc in enumerate(processes):
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n"
                pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n"
        else:
            # Pick out all initial state particles for the two beams
            initial_states = [sorted(list(set([p.get_initial_pdg(1) for \
                                               p in processes]))),
                              sorted(list(set([p.get_initial_pdg(2) for \
                                               p in processes])))]

            # Prepare all variable names
            pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \
                              sum(initial_states,[])])
            # Sanitize particle names into valid Fortran identifiers.
            for key,val in pdf_codes.items():
                pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m')

            # Set conversion from PDG code to number used in PDF calls
            pdgtopdf = {21: 0, 22: 7}

            # Fill in missing entries of pdgtopdf
            for pdg in sum(initial_states,[]):
                if not pdg in pdgtopdf and not pdg in pdgtopdf.values():
                    pdgtopdf[pdg] = pdg
                elif pdg not in pdgtopdf and pdg in pdgtopdf.values():
                    # If any particle has pdg code 7, we need to use something else
                    pdgtopdf[pdg] = 6000000 + pdg

            # Get PDF variable declarations for all initial states
            for i in [0,1]:
                pdf_definition_lines += "DOUBLE PRECISION " + \
                                       ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                                 for pdg in \
                                                 initial_states[i]]) + \
                                                 "\n"

            # Get PDF data lines for all initial states
            for i in [0,1]:
                pdf_data_lines += "DATA " + \
                                       ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                                 for pdg in initial_states[i]]) + \
                                       "/%d*1D0/" % len(initial_states[i]) + \
                                       "\n"

            # Get PDF lines for all different initial states
            for i, init_states in enumerate(initial_states):
                # Subprocess groups address beams through the IB() mapping.
                if subproc_group:
                    pdf_lines = pdf_lines + \
                           "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \
                                 % (i + 1, i + 1)
                else:
                    pdf_lines = pdf_lines + \
                           "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \
                                 % (i + 1, i + 1)

                for nbi,initial_state in enumerate(init_states):
                    if initial_state in pdf_codes.keys():
                        if subproc_group:
                            pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP, 1," + \
                                         "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \
                                         (pdf_codes[initial_state],
                                          i + 1, i + 1, pdgtopdf[initial_state],
                                          i + 1, i + 1)
                        else:
                            pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP, %d," + \
                                         "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \
                                         (pdf_codes[initial_state],
                                          i + 1, i + 1, pdgtopdf[initial_state],
                                          i + 1,
                                          i + 1, i + 1)
                pdf_lines = pdf_lines + "ENDIF\n"

            # Add up PDFs for the different initial state particles
            pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n"
            for proc in processes:
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC)="
                for ibeam in [1, 2]:
                    initial_state = proc.get_initial_pdg(ibeam)
                    if initial_state in pdf_codes.keys():
                        pdf_lines = pdf_lines + "%s%d*" % \
                                    (pdf_codes[initial_state], ibeam)
                    else:
                        pdf_lines = pdf_lines + "1d0*"
                # Remove last "*" from pdf_lines
                pdf_lines = pdf_lines[:-1] + "\n"
                pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n"

        # Remove last line break from the return variables
        return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1496 1497 #=========================================================================== 1498 # write_props_file 1499 #===========================================================================
1500 - def write_props_file(self, writer, matrix_element, s_and_t_channels):
1501 """Write the props.inc file for MadEvent. Needs input from 1502 write_configs_file.""" 1503 1504 lines = [] 1505 1506 particle_dict = matrix_element.get('processes')[0].get('model').\ 1507 get('particle_dict') 1508 1509 for iconf, configs in enumerate(s_and_t_channels): 1510 for vertex in configs[0] + configs[1][:-1]: 1511 leg = vertex.get('legs')[-1] 1512 if leg.get('id') not in particle_dict: 1513 # Fake propagator used in multiparticle vertices 1514 mass = 'zero' 1515 width = 'zero' 1516 pow_part = 0 1517 else: 1518 particle = particle_dict[leg.get('id')] 1519 # Get mass 1520 if particle.get('mass').lower() == 'zero': 1521 mass = particle.get('mass') 1522 else: 1523 mass = "abs(%s)" % particle.get('mass') 1524 # Get width 1525 if particle.get('width').lower() == 'zero': 1526 width = particle.get('width') 1527 else: 1528 width = "abs(%s)" % particle.get('width') 1529 1530 pow_part = 1 + int(particle.is_boson()) 1531 1532 lines.append("prmass(%d,%d) = %s" % \ 1533 (leg.get('number'), iconf + 1, mass)) 1534 lines.append("prwidth(%d,%d) = %s" % \ 1535 (leg.get('number'), iconf + 1, width)) 1536 lines.append("pow(%d,%d) = %d" % \ 1537 (leg.get('number'), iconf + 1, pow_part)) 1538 1539 # Write the file 1540 writer.writelines(lines) 1541 1542 return True
1543 1544 #=========================================================================== 1545 # write_configs_file 1546 #===========================================================================
1547 - def write_configs_file(self, writer, matrix_element):
1548 """Write the configs.inc file for MadEvent""" 1549 1550 # Extract number of external particles 1551 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1552 1553 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 1554 mapconfigs = [c[0] for c in configs] 1555 model = matrix_element.get('processes')[0].get('model') 1556 return mapconfigs, self.write_configs_file_from_diagrams(writer, 1557 [[c[1]] for c in configs], 1558 mapconfigs, 1559 nexternal, ninitial, 1560 model)
1561 1562 #=========================================================================== 1563 # write_configs_file_from_diagrams 1564 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses."""

        lines = []

        # Collects, per emitted config, [s_channel_vertices, t_channel_vertices]
        # of the first contributing subprocess; returned to the caller.
        s_and_t_channels = []

        # Smallest maximal vertex size over all configs; configs containing
        # any larger (multi-particle) vertex are skipped below.
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Fictitious PDG code used to label internal legs without a real one.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any(vert > minvert for vert in [d for d in helas_diags if d]\
                   [0].get_vertex_leg_numbers()) :
                # Only 3-vertices allowed in configs.inc except for vertices
                # which originate from a shrunk loop.
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model, new_pdg))
                else:
                    # Placeholder; empty_verts is resized in-place below so
                    # all these entries share the same padded list.
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = zip(*[s for s,t in stchannels])
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                     mapconfigs[iconfig]))

            for verts in allchannels:
                # s-channel entries are tuples across subprocesses; pick the
                # first non-None one as the representative vertex.
                if verts in schannels:
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, len(daughters),
                              ",".join([str(d) for d in daughters])))
                if verts in schannels:
                    # One sprop PDG per subprocess (0 when it does not
                    # contribute), so multiprocesses can pick the right one.
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join([str(d) for d in pdgs])))
                    lines.append("data tprid(%d,%d)/0/" % \
                                 (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append("data tprid(%d,%d)/%d/" % \
                                 (last_leg.get('number'), nconfigs,
                                  abs(last_leg.get('id'))))
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        lines.append("# Number of configs")
        lines.append("data mapconfig(0)/%d/" % nconfigs)

        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
1683 1684 #=========================================================================== 1685 # Global helper methods 1686 #=========================================================================== 1687
1688 - def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1689 """Returns a nicely formatted string for the coefficients in JAMP lines""" 1690 1691 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power 1692 1693 if total_coeff == 1: 1694 if is_imaginary: 1695 return '+imag1*' 1696 else: 1697 return '+' 1698 elif total_coeff == -1: 1699 if is_imaginary: 1700 return '-imag1*' 1701 else: 1702 return '-' 1703 1704 res_str = '%+iD0' % total_coeff.numerator 1705 1706 if total_coeff.denominator != 1: 1707 # Check if total_coeff is an integer 1708 res_str = res_str + '/%iD0' % total_coeff.denominator 1709 1710 if is_imaginary: 1711 res_str = res_str + '*imag1' 1712 1713 return res_str + '*'
1714 1715
1716 - def set_fortran_compiler(self, default_compiler, force=False):
1717 """Set compiler based on what's available on the system""" 1718 1719 # Check for compiler 1720 if default_compiler['fortran'] and misc.which(default_compiler['fortran']): 1721 f77_compiler = default_compiler['fortran'] 1722 elif misc.which('gfortran'): 1723 f77_compiler = 'gfortran' 1724 elif misc.which('g77'): 1725 f77_compiler = 'g77' 1726 elif misc.which('f77'): 1727 f77_compiler = 'f77' 1728 elif default_compiler['fortran']: 1729 logger.warning('No Fortran Compiler detected! Please install one') 1730 f77_compiler = default_compiler['fortran'] # maybe misc fail so try with it 1731 else: 1732 raise MadGraph5Error, 'No Fortran Compiler detected! Please install one' 1733 logger.info('Use Fortran compiler ' + f77_compiler) 1734 1735 1736 # Check for compiler. 1. set default. 1737 if default_compiler['f2py']: 1738 f2py_compiler = default_compiler['f2py'] 1739 else: 1740 f2py_compiler = '' 1741 # Try to find the correct one. 1742 if default_compiler['f2py'] and misc.which(default_compiler['f2py']): 1743 f2py_compiler = default_compiler['f2py'] 1744 elif misc.which('f2py'): 1745 f2py_compiler = 'f2py' 1746 elif sys.version_info[1] == 6: 1747 if misc.which('f2py-2.6'): 1748 f2py_compiler = 'f2py-2.6' 1749 elif misc.which('f2py2.6'): 1750 f2py_compiler = 'f2py2.6' 1751 elif sys.version_info[1] == 7: 1752 if misc.which('f2py-2.7'): 1753 f2py_compiler = 'f2py-2.7' 1754 elif misc.which('f2py2.7'): 1755 f2py_compiler = 'f2py2.7' 1756 1757 to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler} 1758 1759 1760 self.replace_make_opt_f_compiler(to_replace) 1761 # Replace also for Template but not for cluster 1762 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite: 1763 self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO')) 1764 1765 return f77_compiler
1766 1767 # an alias for backward compatibility 1768 set_compiler = set_fortran_compiler 1769 1770
1771 - def set_cpp_compiler(self, default_compiler, force=False):
1772 """Set compiler based on what's available on the system""" 1773 1774 # Check for compiler 1775 if default_compiler and misc.which(default_compiler): 1776 compiler = default_compiler 1777 elif misc.which('g++'): 1778 #check if clang version 1779 p = misc.Popen(['g++', '--version'], stdout=subprocess.PIPE, 1780 stderr=subprocess.PIPE) 1781 out, _ = p.communicate() 1782 if 'clang' in out and misc.which('clang'): 1783 compiler = 'clang' 1784 else: 1785 compiler = 'g++' 1786 elif misc.which('c++'): 1787 compiler = 'c++' 1788 elif misc.which('clang'): 1789 compiler = 'clang' 1790 elif default_compiler: 1791 logger.warning('No c++ Compiler detected! Please install one') 1792 compiler = default_compiler # maybe misc fail so try with it 1793 else: 1794 raise MadGraph5Error, 'No c++ Compiler detected! Please install one' 1795 logger.info('Use c++ compiler ' + compiler) 1796 self.replace_make_opt_c_compiler(compiler) 1797 # Replace also for Template but not for cluster 1798 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite and \ 1799 not __debug__ and not os.path.exists(pjoin(MG5DIR,'bin','create_release.py')): 1800 self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO')) 1801 1802 return compiler
1803 1804
1805 - def replace_make_opt_f_compiler(self, compilers, root_dir = ""):
1806 """Set FC=compiler in Source/make_opts""" 1807 1808 assert isinstance(compilers, dict) 1809 1810 mod = False #avoid to rewrite the file if not needed 1811 if not root_dir: 1812 root_dir = self.dir_path 1813 1814 compiler= compilers['fortran'] 1815 f2py_compiler = compilers['f2py'] 1816 if not f2py_compiler: 1817 f2py_compiler = 'f2py' 1818 for_update= {'DEFAULT_F_COMPILER':compiler, 1819 'DEFAULT_F2PY_COMPILER':f2py_compiler} 1820 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1821 1822 try: 1823 common_run_interface.CommonRunCmd.update_make_opts_full( 1824 make_opts, for_update) 1825 except IOError: 1826 if root_dir == self.dir_path: 1827 logger.info('Fail to set compiler. Trying to continue anyway.')
1828
1829 - def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
1830 """Set CXX=compiler in Source/make_opts. 1831 The version is also checked, in order to set some extra flags 1832 if the compiler is clang (on MACOS)""" 1833 1834 is_clang = misc.detect_if_cpp_compiler_is_clang(compiler) 1835 is_lc = misc.detect_cpp_std_lib_dependence(compiler) == '-lc++' 1836 1837 # list of the variable to set in the make_opts file 1838 for_update= {'DEFAULT_CPP_COMPILER':compiler, 1839 'MACFLAG':'-mmacosx-version-min=10.7' if is_clang and is_lc else '', 1840 'STDLIB': '-lc++' if is_lc else '-lstdc++', 1841 'STDLIB_FLAG': '-stdlib=libc++' if is_lc and is_clang else '' 1842 } 1843 1844 if not root_dir: 1845 root_dir = self.dir_path 1846 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1847 1848 try: 1849 common_run_interface.CommonRunCmd.update_make_opts_full( 1850 make_opts, for_update) 1851 except IOError: 1852 if root_dir == self.dir_path: 1853 logger.info('Fail to set compiler. Trying to continue anyway.') 1854 1855 return
1856
1857 #=============================================================================== 1858 # ProcessExporterFortranSA 1859 #=============================================================================== 1860 -class ProcessExporterFortranSA(ProcessExporterFortran):
1861 """Class to take care of exporting a set of matrix elements to 1862 MadGraph v4 StandAlone format.""" 1863 1864 matrix_template = "matrix_standalone_v4.inc" 1865
1866 - def __init__(self, *args,**opts):
1867 """add the format information compare to standard init""" 1868 1869 if 'format' in opts: 1870 self.format = opts['format'] 1871 del opts['format'] 1872 else: 1873 self.format = 'standalone' 1874 1875 self.prefix_info = {} 1876 ProcessExporterFortran.__init__(self, *args, **opts)
1877
    def copy_template(self, model):
        """Additional actions needed for setup of Template

        Creates the standalone directory skeleton (Source, SubProcesses,
        bin, lib, Cards), copies the version files, the SubProcesses
        makefile, the check_sa.f driver (plain standalone format only) and
        make_opts, then writes the Source makefile.  Does nothing if the
        output directory already exists.
        """

        #First copy the full template tree if dir_path doesn't exit
        if os.path.isdir(self.dir_path):
            return
        
        logger.info('initialize a new standalone directory: %s' % \
                        os.path.basename(self.dir_path))
        temp_dir = pjoin(self.mgme_dir, 'Template/LO')
        
        # Create the directory structure
        os.mkdir(self.dir_path)
        os.mkdir(pjoin(self.dir_path, 'Source'))
        os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL'))
        os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS'))
        os.mkdir(pjoin(self.dir_path, 'SubProcesses'))
        os.mkdir(pjoin(self.dir_path, 'bin'))
        os.mkdir(pjoin(self.dir_path, 'bin', 'internal'))
        os.mkdir(pjoin(self.dir_path, 'lib'))
        os.mkdir(pjoin(self.dir_path, 'Cards'))
        
        # Information at top-level
        #Write version info
        shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path)
        try:
            shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path)
        except IOError:
            # No MGME version file available: synthesize one from the MG5 info.
            MG5_version = misc.get_pkg_info()
            open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \
                "5." + MG5_version['version'])
        
        # Add file in SubProcesses
        shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'),
                    pjoin(self.dir_path, 'SubProcesses', 'makefileP'))
        
        if self.format == 'standalone':
            shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'),
                        pjoin(self.dir_path, 'SubProcesses', 'check_sa.f'))
        
        # Add file in Source
        shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'),
                    pjoin(self.dir_path, 'Source'))
        # add the makefile
        filename = pjoin(self.dir_path, 'Source', 'makefile')
        self.write_source_makefile(writers.FileWriter(filename))
1926 1927 #=========================================================================== 1928 # export model files 1929 #===========================================================================
    def export_model_files(self, model_path):
        """export the model dependent files for V4 model

        After the generic export, appends the update_as_param() helper to
        couplings.f and patches check_sa.f so that setpara re-reads the
        full param_card.
        """

        super(ProcessExporterFortranSA, self).export_model_files(model_path)
        # Add the routine update_as_param in v4 model
        # This is a function created in the UFO
        text="""
        subroutine update_as_param()
          call setpara('param_card.dat',.false.)
          return
        end
        """
        ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'), 'a')
        ff.write(text)
        ff.close()

        # Patch the driver: pass .true. so the whole card is (re)read.
        text = open(pjoin(self.dir_path, 'SubProcesses', 'check_sa.f')).read()
        text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)')
        fsock = open(pjoin(self.dir_path, 'SubProcesses', 'check_sa.f'), 'w')
        fsock.write(text)
        fsock.close()

        self.make_model_symbolic_link()
1953 1954 #=========================================================================== 1955 # Make the Helas and Model directories for Standalone directory 1956 #===========================================================================
1957 - def make(self):
1958 """Run make in the DHELAS and MODEL directories, to set up 1959 everything for running standalone 1960 """ 1961 1962 source_dir = pjoin(self.dir_path, "Source") 1963 logger.info("Running make for Helas") 1964 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 1965 logger.info("Running make for Model") 1966 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran')
1967 1968 #=========================================================================== 1969 # Create proc_card_mg5.dat for Standalone directory 1970 #===========================================================================
1971 - def finalize(self, matrix_elements, history, mg5options, flaglist):
1972 """Finalize Standalone MG4 directory by 1973 generation proc_card_mg5.dat 1974 generate a global makefile 1975 """ 1976 1977 compiler = {'fortran': mg5options['fortran_compiler'], 1978 'cpp': mg5options['cpp_compiler'], 1979 'f2py': mg5options['f2py_compiler']} 1980 1981 self.compiler_choice(compiler) 1982 self.make() 1983 1984 # Write command history as proc_card_mg5 1985 if history and os.path.isdir(pjoin(self.dir_path, 'Cards')): 1986 output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat') 1987 history.write(output_file) 1988 1989 ProcessExporterFortran.finalize(self, matrix_elements, 1990 history, mg5options, flaglist) 1991 open(pjoin(self.dir_path,'__init__.py'),'w') 1992 open(pjoin(self.dir_path,'SubProcesses','__init__.py'),'w') 1993 1994 if 'mode' in self.opt and self.opt['mode'] == "reweight": 1995 #add the module to hande the NLO weight 1996 files.copytree(pjoin(MG5DIR, 'Template', 'RWGTNLO'), 1997 pjoin(self.dir_path, 'Source')) 1998 files.copytree(pjoin(MG5DIR, 'Template', 'NLO', 'Source', 'PDF'), 1999 pjoin(self.dir_path, 'Source', 'PDF')) 2000 self.write_pdf_opendata() 2001 2002 if self.prefix_info: 2003 self.write_f2py_splitter() 2004 self.write_f2py_makefile() 2005 self.write_f2py_check_sa(matrix_elements, 2006 pjoin(self.dir_path,'SubProcesses','check_sa.py')) 2007 else: 2008 # create a single makefile to compile all the subprocesses 2009 text = '''\n# For python linking (require f2py part of numpy)\nifeq ($(origin MENUM),undefined)\n MENUM=2\nendif\n''' 2010 deppython = '' 2011 for Pdir in os.listdir(pjoin(self.dir_path,'SubProcesses')): 2012 if os.path.isdir(pjoin(self.dir_path, 'SubProcesses', Pdir)): 2013 text += '%(0)s/matrix$(MENUM)py.so:\n\tcd %(0)s;make matrix$(MENUM)py.so\n'% {'0': Pdir} 2014 deppython += ' %(0)s/matrix$(MENUM)py.so ' % {'0': Pdir} 2015 text+='all: %s\n\techo \'done\'' % deppython 2016 2017 ff = open(pjoin(self.dir_path, 'SubProcesses', 'makefile'),'a') 2018 ff.write(text) 2019 ff.close()
2020
2021 - def write_f2py_splitter(self):
2022 """write a function to call the correct matrix element""" 2023 2024 template = """ 2025 %(python_information)s 2026 subroutine smatrixhel(pdgs, npdg, p, ALPHAS, SCALE2, nhel, ANS) 2027 IMPLICIT NONE 2028 2029 CF2PY double precision, intent(in), dimension(0:3,npdg) :: p 2030 CF2PY integer, intent(in), dimension(npdg) :: pdgs 2031 CF2PY integer, intent(in) :: npdg 2032 CF2PY double precision, intent(out) :: ANS 2033 CF2PY double precision, intent(in) :: ALPHAS 2034 CF2PY double precision, intent(in) :: SCALE2 2035 integer pdgs(*) 2036 integer npdg, nhel 2037 double precision p(*) 2038 double precision ANS, ALPHAS, PI,SCALE2 2039 include 'coupl.inc' 2040 2041 PI = 3.141592653589793D0 2042 G = 2* DSQRT(ALPHAS*PI) 2043 CALL UPDATE_AS_PARAM() 2044 if (scale2.ne.0d0) stop 1 2045 2046 %(smatrixhel)s 2047 2048 return 2049 end 2050 2051 SUBROUTINE INITIALISE(PATH) 2052 C ROUTINE FOR F2PY to read the benchmark point. 2053 IMPLICIT NONE 2054 CHARACTER*512 PATH 2055 CF2PY INTENT(IN) :: PATH 2056 CALL SETPARA(PATH) !first call to setup the paramaters 2057 RETURN 2058 END 2059 2060 subroutine get_pdg_order(PDG) 2061 IMPLICIT NONE 2062 CF2PY INTEGER, intent(out) :: PDG(%(nb_me)i,%(maxpart)i) 2063 INTEGER PDG(%(nb_me)i,%(maxpart)i), PDGS(%(nb_me)i,%(maxpart)i) 2064 DATA PDGS/ %(pdgs)s / 2065 PDG = PDGS 2066 RETURN 2067 END 2068 2069 subroutine get_prefix(PREFIX) 2070 IMPLICIT NONE 2071 CF2PY CHARACTER*20, intent(out) :: PREFIX(%(nb_me)i) 2072 character*20 PREFIX(%(nb_me)i),PREF(%(nb_me)i) 2073 DATA PREF / '%(prefix)s'/ 2074 PREFIX = PREF 2075 RETURN 2076 END 2077 2078 2079 """ 2080 2081 allids = self.prefix_info.keys() 2082 allprefix = [self.prefix_info[key][0] for key in allids] 2083 min_nexternal = min([len(ids) for ids in allids]) 2084 max_nexternal = max([len(ids) for ids in allids]) 2085 2086 info = [] 2087 for key, (prefix, tag) in self.prefix_info.items(): 2088 info.append('#PY %s : %s # %s' % (tag, key, prefix)) 2089 2090 2091 text = [] 2092 for n_ext in 
range(min_nexternal, max_nexternal+1): 2093 current = [ids for ids in allids if len(ids)==n_ext] 2094 if not current: 2095 continue 2096 if min_nexternal != max_nexternal: 2097 if n_ext == min_nexternal: 2098 text.append(' if (npdg.eq.%i)then' % n_ext) 2099 else: 2100 text.append(' else if (npdg.eq.%i)then' % n_ext) 2101 for ii,pdgs in enumerate(current): 2102 condition = '.and.'.join(['%i.eq.pdgs(%i)' %(pdg, i+1) for i, pdg in enumerate(pdgs)]) 2103 if ii==0: 2104 text.append( ' if(%s) then ! %i' % (condition, i)) 2105 else: 2106 text.append( ' else if(%s) then ! %i' % (condition,i)) 2107 text.append(' call %ssmatrixhel(p, nhel, ans)' % self.prefix_info[pdgs][0]) 2108 text.append(' endif') 2109 #close the function 2110 if min_nexternal != max_nexternal: 2111 text.append('endif') 2112 2113 formatting = {'python_information':'\n'.join(info), 2114 'smatrixhel': '\n'.join(text), 2115 'maxpart': max_nexternal, 2116 'nb_me': len(allids), 2117 'pdgs': ','.join(str(pdg[i]) if i<len(pdg) else '0' 2118 for i in range(max_nexternal) for pdg in allids), 2119 'prefix':'\',\''.join(allprefix) 2120 } 2121 formatting['lenprefix'] = len(formatting['prefix']) 2122 text = template % formatting 2123 fsock = writers.FortranWriter(pjoin(self.dir_path, 'SubProcesses', 'all_matrix.f'),'w') 2124 fsock.writelines(text) 2125 fsock.close()
2126
2127 - def write_f2py_check_sa(self, matrix_element, writer):
2128 """ Write the general check_sa.py in SubProcesses that calls all processes successively.""" 2129 # To be implemented. It is just an example file, i.e. not crucial. 2130 return
2131
2132 - def write_f2py_makefile(self):
2133 """ """ 2134 # Add file in SubProcesses 2135 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f2py'), 2136 pjoin(self.dir_path, 'SubProcesses', 'makefile'))
2137
2138 - def create_MA5_cards(self,*args,**opts):
2139 """ Overload the function of the mother so as to bypass this in StandAlone.""" 2140 pass
2141
2142 - def compiler_choice(self, compiler):
2143 """ Different daughter classes might want different compilers. 2144 So this function is meant to be overloaded if desired.""" 2145 2146 self.set_compiler(compiler)
2147 2148 #=========================================================================== 2149 # generate_subprocess_directory 2150 #===========================================================================
2151 - def generate_subprocess_directory(self, matrix_element, 2152 fortran_model, number):
2153 """Generate the Pxxxxx directory for a subprocess in MG4 standalone, 2154 including the necessary matrix.f and nexternal.inc files""" 2155 2156 cwd = os.getcwd() 2157 # Create the directory PN_xx_xxxxx in the specified path 2158 dirpath = pjoin(self.dir_path, 'SubProcesses', \ 2159 "P%s" % matrix_element.get('processes')[0].shell_string()) 2160 2161 if self.opt['sa_symmetry']: 2162 # avoid symmetric output 2163 for i,proc in enumerate(matrix_element.get('processes')): 2164 2165 tag = proc.get_tag() 2166 legs = proc.get('legs')[:] 2167 leg0 = proc.get('legs')[0] 2168 leg1 = proc.get('legs')[1] 2169 if not leg1.get('state'): 2170 proc.get('legs')[0] = leg1 2171 proc.get('legs')[1] = leg0 2172 flegs = proc.get('legs')[2:] 2173 for perm in itertools.permutations(flegs): 2174 for i,p in enumerate(perm): 2175 proc.get('legs')[i+2] = p 2176 dirpath2 = pjoin(self.dir_path, 'SubProcesses', \ 2177 "P%s" % proc.shell_string()) 2178 #restore original order 2179 proc.get('legs')[2:] = legs[2:] 2180 if os.path.exists(dirpath2): 2181 proc.get('legs')[:] = legs 2182 return 0 2183 proc.get('legs')[:] = legs 2184 2185 try: 2186 os.mkdir(dirpath) 2187 except os.error as error: 2188 logger.warning(error.strerror + " " + dirpath) 2189 2190 #try: 2191 # os.chdir(dirpath) 2192 #except os.error: 2193 # logger.error('Could not cd to directory %s' % dirpath) 2194 # return 0 2195 2196 logger.info('Creating files in directory %s' % dirpath) 2197 2198 # Extract number of external particles 2199 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2200 2201 # Create the matrix.f file and the nexternal.inc file 2202 if self.opt['export_format']=='standalone_msP': 2203 filename = pjoin(dirpath, 'matrix_prod.f') 2204 else: 2205 filename = pjoin(dirpath, 'matrix.f') 2206 2207 proc_prefix = '' 2208 if 'prefix' in self.cmd_options: 2209 if self.cmd_options['prefix'] == 'int': 2210 proc_prefix = 'M%s_' % number 2211 elif self.cmd_options['prefix'] == 'proc': 2212 proc_prefix = 
matrix_element.get('processes')[0].shell_string().split('_',1)[1] 2213 else: 2214 raise Exception, '--prefix options supports only \'int\' and \'proc\'' 2215 for proc in matrix_element.get('processes'): 2216 ids = [l.get('id') for l in proc.get('legs_with_decays')] 2217 self.prefix_info[tuple(ids)] = [proc_prefix, proc.get_tag()] 2218 2219 calls = self.write_matrix_element_v4( 2220 writers.FortranWriter(filename), 2221 matrix_element, 2222 fortran_model, 2223 proc_prefix=proc_prefix) 2224 2225 if self.opt['export_format'] == 'standalone_msP': 2226 filename = pjoin(dirpath,'configs_production.inc') 2227 mapconfigs, s_and_t_channels = self.write_configs_file(\ 2228 writers.FortranWriter(filename), 2229 matrix_element) 2230 2231 filename = pjoin(dirpath,'props_production.inc') 2232 self.write_props_file(writers.FortranWriter(filename), 2233 matrix_element, 2234 s_and_t_channels) 2235 2236 filename = pjoin(dirpath,'nexternal_prod.inc') 2237 self.write_nexternal_madspin(writers.FortranWriter(filename), 2238 nexternal, ninitial) 2239 2240 if self.opt['export_format']=='standalone_msF': 2241 filename = pjoin(dirpath, 'helamp.inc') 2242 ncomb=matrix_element.get_helicity_combinations() 2243 self.write_helamp_madspin(writers.FortranWriter(filename), 2244 ncomb) 2245 2246 filename = pjoin(dirpath, 'nexternal.inc') 2247 self.write_nexternal_file(writers.FortranWriter(filename), 2248 nexternal, ninitial) 2249 2250 filename = pjoin(dirpath, 'pmass.inc') 2251 self.write_pmass_file(writers.FortranWriter(filename), 2252 matrix_element) 2253 2254 filename = pjoin(dirpath, 'ngraphs.inc') 2255 self.write_ngraphs_file(writers.FortranWriter(filename), 2256 len(matrix_element.get_all_amplitudes())) 2257 2258 # Generate diagrams 2259 filename = pjoin(dirpath, "matrix.ps") 2260 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 2261 get('diagrams'), 2262 filename, 2263 model=matrix_element.get('processes')[0].\ 2264 get('model'), 2265 amplitude=True) 2266 
logger.info("Generating Feynman diagrams for " + \ 2267 matrix_element.get('processes')[0].nice_string()) 2268 plot.draw() 2269 2270 linkfiles = ['check_sa.f', 'coupl.inc'] 2271 2272 if proc_prefix and os.path.exists(pjoin(dirpath, '..', 'check_sa.f')): 2273 text = open(pjoin(dirpath, '..', 'check_sa.f')).read() 2274 pat = re.compile('smatrix', re.I) 2275 new_text, n = re.subn(pat, '%ssmatrix' % proc_prefix, text) 2276 with open(pjoin(dirpath, 'check_sa.f'),'w') as f: 2277 f.write(new_text) 2278 linkfiles.pop(0) 2279 2280 for file in linkfiles: 2281 ln('../%s' % file, cwd=dirpath) 2282 ln('../makefileP', name='makefile', cwd=dirpath) 2283 # Return to original PWD 2284 #os.chdir(cwd) 2285 2286 if not calls: 2287 calls = 0 2288 return calls
2289 2290 2291 #=========================================================================== 2292 # write_source_makefile 2293 #===========================================================================
2294 - def write_source_makefile(self, writer):
2295 """Write the nexternal.inc file for MG4""" 2296 2297 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 2298 set_of_lib = '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)' 2299 model_line='''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n''' 2300 2301 replace_dict= {'libraries': set_of_lib, 2302 'model':model_line, 2303 'additional_dsample': '', 2304 'additional_dependencies':''} 2305 2306 text = open(path).read() % replace_dict 2307 2308 if writer: 2309 writer.write(text) 2310 2311 return replace_dict
2312 2313 #=========================================================================== 2314 # write_matrix_element_v4 2315 #===========================================================================
2316 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 2317 write=True, proc_prefix=''):
2318 """Export a matrix element to a matrix.f file in MG4 standalone format 2319 if write is on False, just return the replace_dict and not write anything.""" 2320 2321 2322 if not matrix_element.get('processes') or \ 2323 not matrix_element.get('diagrams'): 2324 return 0 2325 2326 if writer: 2327 if not isinstance(writer, writers.FortranWriter): 2328 raise writers.FortranWriter.FortranWriterError(\ 2329 "writer not FortranWriter but %s" % type(writer)) 2330 # Set lowercase/uppercase Fortran code 2331 writers.FortranWriter.downcase = False 2332 2333 2334 if not self.opt.has_key('sa_symmetry'): 2335 self.opt['sa_symmetry']=False 2336 2337 2338 # The proc_id is for MadEvent grouping which is never used in SA. 2339 replace_dict = {'global_variable':'', 'amp2_lines':'', 2340 'proc_prefix':proc_prefix, 'proc_id':''} 2341 2342 # Extract helas calls 2343 helas_calls = fortran_model.get_matrix_element_calls(\ 2344 matrix_element) 2345 2346 replace_dict['helas_calls'] = "\n".join(helas_calls) 2347 2348 # Extract version number and date from VERSION file 2349 info_lines = self.get_mg5_info_lines() 2350 replace_dict['info_lines'] = info_lines 2351 2352 # Extract process info lines 2353 process_lines = self.get_process_info_lines(matrix_element) 2354 replace_dict['process_lines'] = process_lines 2355 2356 # Extract number of external particles 2357 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2358 replace_dict['nexternal'] = nexternal 2359 replace_dict['nincoming'] = ninitial 2360 2361 # Extract ncomb 2362 ncomb = matrix_element.get_helicity_combinations() 2363 replace_dict['ncomb'] = ncomb 2364 2365 # Extract helicity lines 2366 helicity_lines = self.get_helicity_lines(matrix_element) 2367 replace_dict['helicity_lines'] = helicity_lines 2368 2369 # Extract overall denominator 2370 # Averaging initial state color, spin, and identical FS particles 2371 replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element) 2372 2373 # Extract ngraphs 2374 
ngraphs = matrix_element.get_number_of_amplitudes() 2375 replace_dict['ngraphs'] = ngraphs 2376 2377 # Extract nwavefuncs 2378 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2379 replace_dict['nwavefuncs'] = nwavefuncs 2380 2381 # Extract ncolor 2382 ncolor = max(1, len(matrix_element.get('color_basis'))) 2383 replace_dict['ncolor'] = ncolor 2384 2385 replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor() 2386 replace_dict['beamone_helavgfactor'], replace_dict['beamtwo_helavgfactor'] =\ 2387 matrix_element.get_beams_hel_avg_factor() 2388 2389 # Extract color data lines 2390 color_data_lines = self.get_color_data_lines(matrix_element) 2391 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2392 2393 if self.opt['export_format']=='standalone_msP': 2394 # For MadSpin need to return the AMP2 2395 amp2_lines = self.get_amp2_lines(matrix_element, [] ) 2396 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2397 replace_dict['global_variable'] = \ 2398 " Double Precision amp2(NGRAPHS)\n common/to_amps/ amp2\n" 2399 2400 # JAMP definition, depends on the number of independent split orders 2401 split_orders=matrix_element.get('processes')[0].get('split_orders') 2402 2403 if len(split_orders)==0: 2404 replace_dict['nSplitOrders']='' 2405 # Extract JAMP lines 2406 jamp_lines = self.get_JAMP_lines(matrix_element) 2407 # Consider the output of a dummy order 'ALL_ORDERS' for which we 2408 # set all amplitude order to weight 1 and only one squared order 2409 # contribution which is of course ALL_ORDERS=2. 2410 squared_orders = [(2,),] 2411 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 2412 replace_dict['chosen_so_configs'] = '.TRUE.' 
2413 replace_dict['nSqAmpSplitOrders']=1 2414 replace_dict['split_order_str_list']='' 2415 else: 2416 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 2417 replace_dict['nAmpSplitOrders']=len(amp_orders) 2418 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 2419 replace_dict['nSplitOrders']=len(split_orders) 2420 replace_dict['split_order_str_list']=str(split_orders) 2421 amp_so = self.get_split_orders_lines( 2422 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 2423 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 2424 replace_dict['ampsplitorders']='\n'.join(amp_so) 2425 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 2426 jamp_lines = self.get_JAMP_lines_split_order(\ 2427 matrix_element,amp_orders,split_order_names=split_orders) 2428 2429 # Now setup the array specifying what squared split order is chosen 2430 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 2431 matrix_element.get('processes')[0],squared_orders) 2432 2433 # For convenience we also write the driver check_sa_splitOrders.f 2434 # that explicitely writes out the contribution from each squared order. 
2435 # The original driver still works and is compiled with 'make' while 2436 # the splitOrders one is compiled with 'make check_sa_born_splitOrders' 2437 check_sa_writer=writers.FortranWriter('check_sa_born_splitOrders.f') 2438 self.write_check_sa_splitOrders(squared_orders,split_orders, 2439 nexternal,ninitial,proc_prefix,check_sa_writer) 2440 2441 if write: 2442 writers.FortranWriter('nsqso_born.inc').writelines( 2443 """INTEGER NSQSO_BORN 2444 PARAMETER (NSQSO_BORN=%d)"""%replace_dict['nSqAmpSplitOrders']) 2445 2446 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2447 2448 matrix_template = self.matrix_template 2449 if self.opt['export_format']=='standalone_msP' : 2450 matrix_template = 'matrix_standalone_msP_v4.inc' 2451 elif self.opt['export_format']=='standalone_msF': 2452 matrix_template = 'matrix_standalone_msF_v4.inc' 2453 elif self.opt['export_format']=='matchbox': 2454 replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id') 2455 replace_dict["color_information"] = self.get_color_string_lines(matrix_element) 2456 2457 if len(split_orders)>0: 2458 if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']: 2459 logger.debug("Warning: The export format %s is not "+\ 2460 " available for individual ME evaluation of given coupl. 
orders."+\ 2461 " Only the total ME will be computed.", self.opt['export_format']) 2462 elif self.opt['export_format'] in ['madloop_matchbox']: 2463 replace_dict["color_information"] = self.get_color_string_lines(matrix_element) 2464 matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc" 2465 else: 2466 matrix_template = "matrix_standalone_splitOrders_v4.inc" 2467 2468 replace_dict['template_file'] = pjoin(_file_path, 'iolibs', 'template_files', matrix_template) 2469 replace_dict['template_file2'] = pjoin(_file_path, \ 2470 'iolibs/template_files/split_orders_helping_functions.inc') 2471 if write and writer: 2472 path = replace_dict['template_file'] 2473 content = open(path).read() 2474 content = content % replace_dict 2475 # Write the file 2476 writer.writelines(content) 2477 # Add the helper functions. 2478 if len(split_orders)>0: 2479 content = '\n' + open(replace_dict['template_file2'])\ 2480 .read()%replace_dict 2481 writer.writelines(content) 2482 return len(filter(lambda call: call.find('#') != 0, helas_calls)) 2483 else: 2484 replace_dict['return_value'] = len(filter(lambda call: call.find('#') != 0, helas_calls)) 2485 return replace_dict # for subclass update
2486
2487 - def write_check_sa_splitOrders(self,squared_orders, split_orders, nexternal, 2488 nincoming, proc_prefix, writer):
2489 """ Write out a more advanced version of the check_sa drivers that 2490 individually returns the matrix element for each contributing squared 2491 order.""" 2492 2493 check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \ 2494 'template_files', 'check_sa_splitOrders.f')).read() 2495 printout_sq_orders=[] 2496 for i, squared_order in enumerate(squared_orders): 2497 sq_orders=[] 2498 for j, sqo in enumerate(squared_order): 2499 sq_orders.append('%s=%d'%(split_orders[j],sqo)) 2500 printout_sq_orders.append(\ 2501 "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\ 2502 %(i+1,' '.join(sq_orders),i+1)) 2503 printout_sq_orders='\n'.join(printout_sq_orders) 2504 replace_dict = {'printout_sqorders':printout_sq_orders, 2505 'nSplitOrders':len(squared_orders), 2506 'nexternal':nexternal, 2507 'nincoming':nincoming, 2508 'proc_prefix':proc_prefix} 2509 2510 if writer: 2511 writer.writelines(check_sa_content % replace_dict) 2512 else: 2513 return replace_dict
2514
2515 -class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
2516 """class to take care of exporting a set of matrix element for the Matchbox 2517 code in the case of Born only routine""" 2518 2519 default_opt = {'clean': False, 'complex_mass':False, 2520 'export_format':'matchbox', 'mp': False, 2521 'sa_symmetry': True} 2522 2523 #specific template of the born 2524 2525 2526 matrix_template = "matrix_standalone_matchbox.inc" 2527 2528 @staticmethod
    def get_color_string_lines(matrix_element):
        """Return the Fortran lines defining the color-string lookup for this
        matrix element: an if/elseif chain mapping (in1, in2) index pairs to
        color-structure entries, terminated by an 'out = - 1' fallback.

        Only T(...) and Tr(...) color structures are supported; any other
        structure raises MadGraph5Error."""

        # No color matrix at all: every lookup trivially returns 1.
        if not matrix_element.get('color_matrix'):
            return "\n".join(["out = 1"])

        #start the real work
        # Only the number of rows is used below; the denominator values
        # themselves are not needed here.
        color_denominators = matrix_element.get('color_matrix').\
                                                 get_line_denominators()
        matrix_strings = []
        my_cs = color.ColorString()
        for i_color in xrange(len(color_denominators)):
            # Then write the numerators for the matrix elements
            my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color])
            t_str=repr(my_cs)
            t_match=re.compile(r"(\w+)\(([\s\d+\,]*)\)")
            # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall:
            # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')]
            all_matches = t_match.findall(t_str)
            output = {}
            arg=[]
            for match in all_matches:
                ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')]
                if ctype in ['ColorOne' ]:
                    continue
                if ctype not in ['T', 'Tr' ]:
                    raise MadGraph5Error, 'Color Structure not handled by Matchbox: %s' % ctype
                # '0' acts as a separator between successive color factors.
                tmparg += ['0']
                arg +=tmparg
            # Flatten this row's arguments into (row, column) -> value entries.
            for j, v in enumerate(arg):
                output[(i_color,j)] = v

            # NOTE(review): iteration order of this dict is arbitrary in
            # Python 2; harmless here since the generated if/elseif chain
            # covers every key exactly once.
            for key in output:
                if matrix_strings == []:
                    #first entry
                    matrix_strings.append("""
          if (in1.eq.%s.and.in2.eq.%s)then
              out = %s
          """ % (key[0], key[1], output[key]))
                else:
                    #not first entry
                    matrix_strings.append("""
          elseif (in1.eq.%s.and.in2.eq.%s)then
              out = %s
          """ % (key[0], key[1], output[key]))
        # Close the chain with a default branch, or return a bare default if
        # no entry was generated at all.
        if len(matrix_strings):
            matrix_strings.append(" else \n out = - 1 \n endif")
        else:
            return "\n out = - 1 \n "
        return "\n".join(matrix_strings)
2580
    def make(self,*args,**opts):
        """No compilation step is needed for the Matchbox output; override
        the parent exporter's make() with a no-op."""
        pass
2583
    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1,
                       JAMP_formatLC=None):

        """Return the JAMP definition lines, followed by the leading-color
        (1/NC-free) part of the colorflow written with the JAMP_formatLC
        format (default: 'LN' prefixed to JAMP_format).

        NOTE(review): the `split` argument is accepted but both parent calls
        below hard-code split=-1 — confirm this is intentional."""

        if not JAMP_formatLC:
            JAMP_formatLC= "LN%s" % JAMP_format

        # Accept either a HelasMatrixElement or an already-built list of
        # color amplitudes (list of lists); reject anything else.
        error_msg="Malformed '%s' argument passed to the get_JAMP_lines"
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            col_amps=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                col_amps=col_amps
            else:
                raise MadGraph5Error, error_msg % 'col_amps'
        else:
            raise MadGraph5Error, error_msg % 'col_amps'

        # Full-color JAMPs from the parent exporter.
        text = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps,
                                    JAMP_format=JAMP_format,
                                    AMP_format=AMP_format,
                                    split=-1)


        # Filter the col_ampls to generate only those without any 1/NC terms
        # (coefficient[3] holds the 1/NC power; keep only power 0).
        LC_col_amps = []
        for coeff_list in col_amps:
            to_add = []
            for (coefficient, amp_number) in coeff_list:
                if coefficient[3]==0:
                    to_add.append( (coefficient, amp_number) )
            LC_col_amps.append(to_add)

        text += super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps,
                                    JAMP_format=JAMP_formatLC,
                                    AMP_format=AMP_format,
                                    split=-1)

        return text
2625



#===============================================================================
# ProcessExporterFortranMW
#===============================================================================
class ProcessExporterFortranMW(ProcessExporterFortran):
    """Class to take care of exporting a set of matrix elements to
    MadGraph v4 - MadWeight format."""

    # Template consumed by write_matrix_element_v4 for matrix.f generation.
    matrix_file="matrix_standalone_v4.inc"
    def copy_template(self, model):
        """Set up the MadWeight-specific Template layout on top of the
        generic one: copy the MadWeight tree and python package, move the
        MW-specific setrun.f/run.inc into Source, write run_config.inc,
        run the pass_to_madweight script, copy the python helpers, generate
        the MW cuts.f and write the Source makefile.
        """

        super(ProcessExporterFortranMW, self).copy_template(model)

        # Add the MW specific file
        shutil.copytree(pjoin(MG5DIR,'Template','MadWeight'),
                               pjoin(self.dir_path, 'Source','MadWeight'), True)
        shutil.copytree(pjoin(MG5DIR,'madgraph','madweight'),
                        pjoin(self.dir_path, 'bin','internal','madweight'), True)
        # The MadWeight variants of setrun.f/run.inc replace the generic ones.
        files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','setrun.f'),
                 pjoin(self.dir_path, 'Source','setrun.f'))
        files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','run.inc'),
                 pjoin(self.dir_path, 'Source','run.inc'))
        # File created from Template (Different in some child class)
        filename = os.path.join(self.dir_path,'Source','run_config.inc')
        self.write_run_config_file(writers.FortranWriter(filename))

        try:
            # NOTE(review): the two os.open(devnull) descriptors are never
            # closed, leaking one fd pair per call.
            subprocess.call([os.path.join(self.dir_path, 'Source','MadWeight','bin','internal','pass_to_madweight')],
                            stdout = os.open(os.devnull, os.O_RDWR),
                            stderr = os.open(os.devnull, os.O_RDWR),
                            cwd=self.dir_path)
        except OSError:
            # Probably madweight already called
            pass

        # Copy the different python file in the Template
        self.copy_python_file()
        # create the appropriate cuts.f
        self.get_mw_cuts_version()

        # add the makefile in Source directory
        filename = os.path.join(self.dir_path,'Source','makefile')
        self.write_source_makefile(writers.FortranWriter(filename))
2674 2675 2676 2677 2678 #=========================================================================== 2679 # convert_model 2680 #===========================================================================
    def convert_model(self, model, wanted_lorentz = [],
                      wanted_couplings = []):
        """Convert the UFO model as the parent exporter does, then copy the
        model source into bin/internal/ufomodel (minus caches/data files)
        and export the restriction card, if any, as restrict_default.dat.

        NOTE(review): the mutable default arguments are kept for interface
        compatibility; they are only passed through, never mutated here."""

        super(ProcessExporterFortranMW,self).convert_model(model,
                                               wanted_lorentz, wanted_couplings)

        IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~')
        # Remove any stale copy before copying the fresh model over.
        try:
            shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel'))
        except OSError as error:
            pass
        model_path = model.get('modelpath')
        # This is not safe if there is a '##' or '-' in the path.
        shutil.copytree(model_path,
                        pjoin(self.dir_path,'bin','internal','ufomodel'),
                        ignore=shutil.ignore_patterns(*IGNORE_PATTERNS))
        if hasattr(model, 'restrict_card'):
            out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel',
                             'restrict_default.dat')
            # restrict_card may be an in-memory ParamCard or a file path.
            if isinstance(model.restrict_card, check_param_card.ParamCard):
                model.restrict_card.write(out_path)
            else:
                files.cp(model.restrict_card, out_path)
2704 2705 #=========================================================================== 2706 # generate_subprocess_directory 2707 #===========================================================================
2708 - def copy_python_file(self):
2709 """copy the python file require for the Template""" 2710 2711 # madevent interface 2712 cp(_file_path+'/interface/madweight_interface.py', 2713 self.dir_path+'/bin/internal/madweight_interface.py') 2714 cp(_file_path+'/interface/extended_cmd.py', 2715 self.dir_path+'/bin/internal/extended_cmd.py') 2716 cp(_file_path+'/interface/common_run_interface.py', 2717 self.dir_path+'/bin/internal/common_run_interface.py') 2718 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 2719 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 2720 cp(_file_path+'/iolibs/save_load_object.py', 2721 self.dir_path+'/bin/internal/save_load_object.py') 2722 cp(_file_path+'/madevent/gen_crossxhtml.py', 2723 self.dir_path+'/bin/internal/gen_crossxhtml.py') 2724 cp(_file_path+'/madevent/sum_html.py', 2725 self.dir_path+'/bin/internal/sum_html.py') 2726 cp(_file_path+'/various/FO_analyse_card.py', 2727 self.dir_path+'/bin/internal/FO_analyse_card.py') 2728 cp(_file_path+'/iolibs/file_writers.py', 2729 self.dir_path+'/bin/internal/file_writers.py') 2730 #model file 2731 cp(_file_path+'../models/check_param_card.py', 2732 self.dir_path+'/bin/internal/check_param_card.py') 2733 2734 #madevent file 2735 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 2736 cp(_file_path+'/various/lhe_parser.py', 2737 self.dir_path+'/bin/internal/lhe_parser.py') 2738 2739 cp(_file_path+'/various/banner.py', 2740 self.dir_path+'/bin/internal/banner.py') 2741 cp(_file_path+'/various/shower_card.py', 2742 self.dir_path+'/bin/internal/shower_card.py') 2743 cp(_file_path+'/various/cluster.py', 2744 self.dir_path+'/bin/internal/cluster.py') 2745 2746 # logging configuration 2747 cp(_file_path+'/interface/.mg5_logging.conf', 2748 self.dir_path+'/bin/internal/me5_logging.conf') 2749 cp(_file_path+'/interface/coloring_logging.py', 2750 self.dir_path+'/bin/internal/coloring_logging.py')
2751 2752 2753 #=========================================================================== 2754 # Change the version of cuts.f to the one compatible with MW 2755 #===========================================================================
    def get_mw_cuts_version(self, outpath=None):
        """create the appropriate cuts.f 
        This is based on the one associated to ME output but:
        1) No clustering (=> remove initcluster/setclscales)
        2) Adding the definition of cut_bw at the file.

        outpath: None (default SubProcesses/cuts.f), a path string, or an
        already-open file-like object.
        """

        template = open(pjoin(MG5DIR,'Template','LO','SubProcesses','cuts.f'))

        text = StringIO()
        #1) remove all dependencies in ickkw >1:
        # Skip the whole 'if(xqcut.gt.0d0...' block by counting nested
        # if/endif pairs; lines are written only while nb_if == 0.
        # NOTE(review): if the xqcut line itself matches the if(...)then
        # regex below, nb_if jumps to 2 for that line and one extra endif is
        # consumed — confirm against the actual cuts.f template.
        nb_if = 0
        for line in template:
            if 'if(xqcut.gt.0d0' in line:
                nb_if = 1
            if nb_if == 0:
                text.write(line)
                continue
            if re.search(r'if\(.*\)\s*then', line):
                nb_if += 1
            elif 'endif' in line:
                nb_if -= 1

        #2) add fake cut_bw (have to put the true one later)
        text.write("""
      logical function cut_bw(p)
      include 'madweight_param.inc'
      double precision p(*)
      if (bw_cut) then
        cut_bw = .true.
      else
        stop 1
      endif
      return
      end
""")

        final = text.getvalue()
        #3) remove the call to initcluster:
        # 'template' is reused here as the final text string (it was the
        # input file object above).
        template = final.replace('call initcluster', '! Remove for MW!call initcluster')
        template = template.replace('genps.inc', 'maxparticles.inc')
        #Now we can write it
        if not outpath:
            fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w')
        elif isinstance(outpath, str):
            fsock = open(outpath, 'w')
        else:
            fsock = outpath
        fsock.write(template)
2805 2806 2807 2808 #=========================================================================== 2809 # Make the Helas and Model directories for Standalone directory 2810 #===========================================================================
2811 - def make(self):
2812 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 2813 everything for running madweight 2814 """ 2815 2816 source_dir = os.path.join(self.dir_path, "Source") 2817 logger.info("Running make for Helas") 2818 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2819 logger.info("Running make for Model") 2820 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 2821 logger.info("Running make for PDF") 2822 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 2823 logger.info("Running make for CERNLIB") 2824 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 2825 logger.info("Running make for GENERIC") 2826 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 2827 logger.info("Running make for blocks") 2828 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 2829 logger.info("Running make for tools") 2830 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
2831 2832 #=========================================================================== 2833 # Create proc_card_mg5.dat for MadWeight directory 2834 #===========================================================================
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize Standalone MG4 directory by generation proc_card_mg5.dat

        Also writes maxparticles.inc (linked into the MadWeight blocks/tools
        directories), sets the compilers, compiles the libraries and then
        delegates to the generic ProcessExporterFortran.finalize."""

        compiler = {'fortran': mg5options['fortran_compiler'],
                    'cpp': mg5options['cpp_compiler'],
                    'f2py': mg5options['f2py_compiler']}



        #proc_charac
        self.create_proc_charac()

        # Write maxparticles.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path,'Source','maxparticles.inc')
        self.write_maxparticles_file(writers.FortranWriter(filename),
                                     matrix_elements)
        # MadWeight's blocks/ and tools/ both need a link to maxparticles.inc.
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source','MadWeight','blocks'))
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source','MadWeight','tools'))

        self.set_compiler(compiler)
        self.make()

        # Write command history as proc_card_mg5
        if os.path.isdir(os.path.join(self.dir_path, 'Cards')):
            output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        ProcessExporterFortran.finalize(self, matrix_elements,
                                        history, mg5options, flaglist)
2866 2867 2868 2869 #=========================================================================== 2870 # create the run_card for MW 2871 #===========================================================================
2872 - def create_run_card(self, matrix_elements, history):
2873 """ """ 2874 2875 run_card = banner_mod.RunCard() 2876 2877 # pass to default for MW 2878 run_card["run_tag"] = "\'not_use\'" 2879 run_card["fixed_ren_scale"] = "T" 2880 run_card["fixed_fac_scale"] = "T" 2881 run_card.remove_all_cut() 2882 2883 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 2884 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2885 python_template=True) 2886 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'), 2887 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2888 python_template=True)
2889 2890 #=========================================================================== 2891 # export model files 2892 #===========================================================================
    def export_model_files(self, model_path):
        """export the model dependent files for V4 model

        On top of the generic export: append an update_as_param() wrapper to
        couplings.f, patch setrun.f and initialization.f so setpara is
        called with the explicit .true. flag, and create the model symbolic
        links."""

        super(ProcessExporterFortranMW,self).export_model_files(model_path)
        # Add the routine update_as_param in v4 model
        # This is a function created in the UFO
        text="""
        subroutine update_as_param()
          call setpara('param_card.dat',.false.)
          return
        end
        """
        ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a')
        ff.write(text)
        ff.close()

        # Modify setrun.f
        text = open(os.path.join(self.dir_path,'Source','setrun.f')).read()
        text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)')
        fsock = open(os.path.join(self.dir_path,'Source','setrun.f'), 'w')
        fsock.write(text)
        fsock.close()

        # Modify initialization.f
        text = open(os.path.join(self.dir_path,'SubProcesses','initialization.f')).read()
        text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)')
        fsock = open(os.path.join(self.dir_path,'SubProcesses','initialization.f'), 'w')
        fsock.write(text)
        fsock.close()


        self.make_model_symbolic_link()
2925 2926 #=========================================================================== 2927 # generate_subprocess_directory 2928 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model,number):
        """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format,
        including the necessary matrix.f and nexternal.inc files.

        Writes matrix.f, auto_dsig.f, configs.inc, nexternal.inc,
        leshouche.inc, props.inc, pmass.inc, ngraphs.inc, maxamps.inc and
        phasespace.inc, draws the Feynman diagrams, and links the shared
        files.  Returns the number of helas calls written (0 if none)."""

        cwd = os.getcwd()
        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = os.path.join(self.dir_path, 'SubProcesses', \
                       "P%s" % matrix_element.get('processes')[0].shell_string())

        try:
            os.mkdir(dirpath)
        except os.error as error:
            # Directory probably exists already; warn and reuse it.
            logger.warning(error.strerror + " " + dirpath)

        #try:
        #    os.chdir(dirpath)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % dirpath)
        #    return 0

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        filename = pjoin(dirpath,'matrix.f')
        calls,ncolor = self.write_matrix_element_v4(
                                  writers.FortranWriter(filename),
                                  matrix_element,
                                  fortran_model)

        filename = pjoin(dirpath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(dirpath, 'configs.inc')
        mapconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(dirpath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(dirpath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(matrix_element.get_all_amplitudes()))

        filename = pjoin(dirpath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                ncolor,
                                len(matrix_element.get('processes')),
                                1)

        filename = pjoin(dirpath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                   len(matrix_element.get('diagrams')),
                                   )

        # Generate diagrams
        filename = pjoin(dirpath, "matrix.ps")
        plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                             get('diagrams'),
                                          filename,
                                          model=matrix_element.get('processes')[0].\
                                             get('model'),
                                          amplitude='')
        logger.info("Generating Feynman diagrams for " + \
                     matrix_element.get('processes')[0].nice_string())
        plot.draw()

        #import genps.inc and maxconfigs.inc into Subprocesses
        ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False)
        #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False)

        linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f', 'genps.inc']

        # NOTE(review): the links are created relative to the original
        # working directory (the os.chdir above is commented out) — confirm
        # starting_dir=cwd is the intended anchor.
        for file in linkfiles:
            ln('../%s' % file, starting_dir=cwd)

        ln('nexternal.inc', '../../Source', log=False, cwd=dirpath)
        ln('leshouche.inc', '../../Source', log=False, cwd=dirpath)
        ln('maxamps.inc', '../../Source', log=False, cwd=dirpath)
        ln('phasespace.inc', '../', log=True, cwd=dirpath)
        # Return to original PWD
        #os.chdir(cwd)

        if not calls:
            calls = 0
        return calls
3035 3036 #=========================================================================== 3037 # write_matrix_element_v4 3038 #===========================================================================
3039 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
3040 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 3041 3042 if not matrix_element.get('processes') or \ 3043 not matrix_element.get('diagrams'): 3044 return 0 3045 3046 if writer: 3047 if not isinstance(writer, writers.FortranWriter): 3048 raise writers.FortranWriter.FortranWriterError(\ 3049 "writer not FortranWriter") 3050 3051 # Set lowercase/uppercase Fortran code 3052 writers.FortranWriter.downcase = False 3053 3054 replace_dict = {} 3055 3056 # Extract version number and date from VERSION file 3057 info_lines = self.get_mg5_info_lines() 3058 replace_dict['info_lines'] = info_lines 3059 3060 # Extract process info lines 3061 process_lines = self.get_process_info_lines(matrix_element) 3062 replace_dict['process_lines'] = process_lines 3063 3064 # Set proc_id 3065 replace_dict['proc_id'] = proc_id 3066 3067 # Extract number of external particles 3068 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3069 replace_dict['nexternal'] = nexternal 3070 3071 # Extract ncomb 3072 ncomb = matrix_element.get_helicity_combinations() 3073 replace_dict['ncomb'] = ncomb 3074 3075 # Extract helicity lines 3076 helicity_lines = self.get_helicity_lines(matrix_element) 3077 replace_dict['helicity_lines'] = helicity_lines 3078 3079 # Extract overall denominator 3080 # Averaging initial state color, spin, and identical FS particles 3081 den_factor_line = self.get_den_factor_line(matrix_element) 3082 replace_dict['den_factor_line'] = den_factor_line 3083 3084 # Extract ngraphs 3085 ngraphs = matrix_element.get_number_of_amplitudes() 3086 replace_dict['ngraphs'] = ngraphs 3087 3088 # Extract nwavefuncs 3089 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3090 replace_dict['nwavefuncs'] = nwavefuncs 3091 3092 # Extract ncolor 3093 ncolor = max(1, len(matrix_element.get('color_basis'))) 3094 replace_dict['ncolor'] = ncolor 3095 3096 # Extract color data lines 3097 color_data_lines = self.get_color_data_lines(matrix_element) 3098 
replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3099 3100 # Extract helas calls 3101 helas_calls = fortran_model.get_matrix_element_calls(\ 3102 matrix_element) 3103 3104 replace_dict['helas_calls'] = "\n".join(helas_calls) 3105 3106 # Extract JAMP lines 3107 jamp_lines = self.get_JAMP_lines(matrix_element) 3108 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 3109 3110 replace_dict['template_file'] = os.path.join(_file_path, \ 3111 'iolibs/template_files/%s' % self.matrix_file) 3112 replace_dict['template_file2'] = '' 3113 3114 if writer: 3115 file = open(replace_dict['template_file']).read() 3116 file = file % replace_dict 3117 # Write the file 3118 writer.writelines(file) 3119 return len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor 3120 else: 3121 replace_dict['return_value'] = (len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor)
3122 3123 #=========================================================================== 3124 # write_source_makefile 3125 #===========================================================================
3126 - def write_source_makefile(self, writer):
3127 """Write the nexternal.inc file for madweight""" 3128 3129 3130 path = os.path.join(_file_path,'iolibs','template_files','madweight_makefile_source') 3131 set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)' 3132 text = open(path).read() % {'libraries': set_of_lib} 3133 writer.write(text) 3134 3135 return True
3136
3137 - def write_phasespace_file(self, writer, nb_diag):
3138 """ """ 3139 3140 template = """ include 'maxparticles.inc' 3141 integer max_branches 3142 parameter (max_branches=max_particles-1) 3143 integer max_configs 3144 parameter (max_configs=%(nb_diag)s) 3145 3146 c channel position 3147 integer config_pos,perm_pos 3148 common /to_config/config_pos,perm_pos 3149 3150 """ 3151 3152 writer.write(template % {'nb_diag': nb_diag})
3153 3154 3155 #=========================================================================== 3156 # write_auto_dsig_file 3157 #===========================================================================
    def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
        """Write the auto_dsig.f file for the differential cross section
        calculation, includes pdf call information (MadWeight format)

        Returns 0 for an empty matrix element; with no writer, returns the
        populated replace_dict instead of writing the file."""

        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        nexternal, ninitial = matrix_element.get_nexternal_ninitial()

        # Only decays (1 incoming) and collisions (2 incoming) are supported.
        if ninitial < 1 or ninitial > 2:
            raise writers.FortranWriter.FortranWriterError, \
                  """Need ninitial = 1 or 2 to write auto_dsig file"""

        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Set proc_id
        replace_dict['proc_id'] = proc_id
        replace_dict['numproc'] = 1

        # Set dsig_line
        if ninitial == 1:
            # No conversion, since result of decay should be given in GeV
            dsig_line = "pd(0)*dsiguu"
        else:
            # Convert result (in GeV) to pb
            dsig_line = "pd(0)*conv*dsiguu"

        replace_dict['dsig_line'] = dsig_line

        # Extract pdf lines
        pdf_vars, pdf_data, pdf_lines = \
                  self.get_pdf_lines(matrix_element, ninitial, proc_id != "")
        replace_dict['pdf_vars'] = pdf_vars
        replace_dict['pdf_data'] = pdf_data
        replace_dict['pdf_lines'] = pdf_lines

        # Lines that differ between subprocess group and regular
        if proc_id:
            replace_dict['numproc'] = int(proc_id)
            replace_dict['passcuts_begin'] = ""
            replace_dict['passcuts_end'] = ""
            # Set lines for subprocess group version
            # Set define_iconfigs_lines
            replace_dict['define_subdiag_lines'] = \
                 """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
                 COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
        else:
            # Regular (non-grouped) version wraps the call in PASSCUTS.
            replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN"
            replace_dict['passcuts_end'] = "ENDIF"
            replace_dict['define_subdiag_lines'] = ""

        if writer:
            file = open(os.path.join(_file_path, \
                             'iolibs/template_files/auto_dsig_mw.inc')).read()

            file = file % replace_dict
            # Write the file
            writer.writelines(file)
        else:
            return replace_dict
3227 #=========================================================================== 3228 # write_configs_file 3229 #===========================================================================
3230 - def write_configs_file(self, writer, matrix_element):
3231 """Write the configs.inc file for MadEvent""" 3232 3233 # Extract number of external particles 3234 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3235 3236 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 3237 mapconfigs = [c[0] for c in configs] 3238 model = matrix_element.get('processes')[0].get('model') 3239 return mapconfigs, self.write_configs_file_from_diagrams(writer, 3240 [[c[1]] for c in configs], 3241 mapconfigs, 3242 nexternal, ninitial,matrix_element, model)
3243 3244 #=========================================================================== 3245 # write_run_configs_file 3246 #===========================================================================
3247 - def write_run_config_file(self, writer):
3248 """Write the run_configs.inc file for MadWeight""" 3249 3250 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 3251 text = open(path).read() % {'chanperjob':'5'} 3252 writer.write(text) 3253 return True
3254 3255 #=========================================================================== 3256 # write_configs_file_from_diagrams 3257 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, matrix_element, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns the list of (s-channels, t-channels) per kept config."""

        lines = []

        particle_dict = matrix_element.get('processes')[0].get('model').\
                        get('particle_dict')

        s_and_t_channels = []

        # Maximum vertex size per config; configs with vertices larger than
        # the overall minimum are dropped below (only 3-vertices kept).
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]

        minvert = min(vert_list) if vert_list!=[] else 0
        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Fake PDG id used for multiparticle propagators.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any([vert > minvert for vert in
                    [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
                # Only 3-vertices allowed in configs.inc
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial,model,new_pdg))
                else:
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = zip(*[s for s,t in stchannels])
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            #lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("* %d %d " % (nconfigs,
                                       mapconfigs[iconfig]))

            for verts in allchannels:
                if verts in schannels:
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                line=str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1])
                # lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                #              (last_leg.get('number'), nconfigs, len(daughters),
                #               ",".join([str(d) for d in daughters])))

                if last_leg.get('id') == 21 and 21 not in particle_dict:
                    # Fake propagator used in multiparticle vertices
                    mass = 'zero'
                    width = 'zero'
                    pow_part = 0
                else:
                    if (last_leg.get('id')!=7):
                        particle = particle_dict[last_leg.get('id')]
                        # Get mass
                        mass = particle.get('mass')
                        # Get width
                        width = particle.get('width')
                    else : # fake propagator used in multiparticle vertices
                        mass= 'zero'
                        width= 'zero'

                line=line+" "+mass+" "+width+" "

                if verts in schannels:
                    # pdgs is kept for the commented-out sprop data lines
                    # below; only the appended text line is emitted today.
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append(line+" S "+str(last_leg.get('id')))
                    # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                    #              (last_leg.get('number'), nconfigs, nsubprocs,
                    #               ",".join([str(d) for d in pdgs])))
                    # lines.append("data tprid(%d,%d)/0/" % \
                    #              (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append(line+" T "+str(last_leg.get('id')))
                    # lines.append("data tprid(%d,%d)/%d/" % \
                    #              (last_leg.get('number'), nconfigs,
                    #               abs(last_leg.get('id'))))
                    # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                    #              (last_leg.get('number'), nconfigs, nsubprocs,
                    #               ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        # lines.append("# Number of configs")
        # lines.append("data mapconfig(0)/%d/" % nconfigs)
        lines.append(" * ") # a line with just a star indicates this is the end of file
        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
3400
3401 3402 #=============================================================================== 3403 # ProcessExporterFortranME 3404 #=============================================================================== 3405 -class ProcessExporterFortranME(ProcessExporterFortran):
3406 """Class to take care of exporting a set of matrix elements to 3407 MadEvent format.""" 3408 3409 matrix_file = "matrix_madevent_v4.inc" 3410
3411 - def copy_template(self, model):
3412 """Additional actions needed for setup of Template 3413 """ 3414 3415 super(ProcessExporterFortranME, self).copy_template(model) 3416 3417 # File created from Template (Different in some child class) 3418 filename = pjoin(self.dir_path,'Source','run_config.inc') 3419 self.write_run_config_file(writers.FortranWriter(filename)) 3420 3421 # The next file are model dependant (due to SLAH convention) 3422 self.model_name = model.get('name') 3423 # Add the symmetry.f 3424 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3425 self.write_symmetry(writers.FortranWriter(filename)) 3426 # 3427 filename = pjoin(self.dir_path,'SubProcesses','addmothers.f') 3428 self.write_addmothers(writers.FortranWriter(filename)) 3429 # Copy the different python file in the Template 3430 self.copy_python_file()
3431 3432 3433 3434 3435 3436 #=========================================================================== 3437 # generate_subprocess_directory 3438 #===========================================================================
3439 - def copy_python_file(self):
3440 """copy the python file require for the Template""" 3441 3442 # madevent interface 3443 cp(_file_path+'/interface/madevent_interface.py', 3444 self.dir_path+'/bin/internal/madevent_interface.py') 3445 cp(_file_path+'/interface/extended_cmd.py', 3446 self.dir_path+'/bin/internal/extended_cmd.py') 3447 cp(_file_path+'/interface/common_run_interface.py', 3448 self.dir_path+'/bin/internal/common_run_interface.py') 3449 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3450 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3451 cp(_file_path+'/iolibs/save_load_object.py', 3452 self.dir_path+'/bin/internal/save_load_object.py') 3453 cp(_file_path+'/iolibs/file_writers.py', 3454 self.dir_path+'/bin/internal/file_writers.py') 3455 #model file 3456 cp(_file_path+'../models/check_param_card.py', 3457 self.dir_path+'/bin/internal/check_param_card.py') 3458 3459 #copy all the file present in madevent directory 3460 for name in os.listdir(pjoin(_file_path, 'madevent')): 3461 if name not in ['__init__.py'] and name.endswith('.py'): 3462 cp(_file_path+'/madevent/'+name, self.dir_path+'/bin/internal/') 3463 3464 #madevent file 3465 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3466 cp(_file_path+'/various/lhe_parser.py', 3467 self.dir_path+'/bin/internal/lhe_parser.py') 3468 cp(_file_path+'/various/banner.py', 3469 self.dir_path+'/bin/internal/banner.py') 3470 cp(_file_path+'/various/histograms.py', 3471 self.dir_path+'/bin/internal/histograms.py') 3472 cp(_file_path+'/various/plot_djrs.py', 3473 self.dir_path+'/bin/internal/plot_djrs.py') 3474 cp(_file_path+'/various/systematics.py', self.dir_path+'/bin/internal/systematics.py') 3475 3476 cp(_file_path+'/various/cluster.py', 3477 self.dir_path+'/bin/internal/cluster.py') 3478 cp(_file_path+'/madevent/combine_runs.py', 3479 self.dir_path+'/bin/internal/combine_runs.py') 3480 # logging configuration 3481 cp(_file_path+'/interface/.mg5_logging.conf', 3482 
self.dir_path+'/bin/internal/me5_logging.conf') 3483 cp(_file_path+'/interface/coloring_logging.py', 3484 self.dir_path+'/bin/internal/coloring_logging.py') 3485 # shower card and FO_analyse_card. 3486 # Although not needed, it is imported by banner.py 3487 cp(_file_path+'/various/shower_card.py', 3488 self.dir_path+'/bin/internal/shower_card.py') 3489 cp(_file_path+'/various/FO_analyse_card.py', 3490 self.dir_path+'/bin/internal/FO_analyse_card.py')
3491 3492
3493 - def convert_model(self, model, wanted_lorentz = [], 3494 wanted_couplings = []):
3495 3496 super(ProcessExporterFortranME,self).convert_model(model, 3497 wanted_lorentz, wanted_couplings) 3498 3499 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 3500 try: 3501 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 3502 except OSError as error: 3503 pass 3504 model_path = model.get('modelpath') 3505 # This is not safe if there is a '##' or '-' in the path. 3506 shutil.copytree(model_path, 3507 pjoin(self.dir_path,'bin','internal','ufomodel'), 3508 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 3509 if hasattr(model, 'restrict_card'): 3510 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 3511 'restrict_default.dat') 3512 if isinstance(model.restrict_card, check_param_card.ParamCard): 3513 model.restrict_card.write(out_path) 3514 else: 3515 files.cp(model.restrict_card, out_path)
3516 3517 #=========================================================================== 3518 # export model files 3519 #===========================================================================
3520 - def export_model_files(self, model_path):
3521 """export the model dependent files""" 3522 3523 super(ProcessExporterFortranME,self).export_model_files(model_path) 3524 3525 # Add the routine update_as_param in v4 model 3526 # This is a function created in the UFO 3527 text=""" 3528 subroutine update_as_param() 3529 call setpara('param_card.dat',.false.) 3530 return 3531 end 3532 """ 3533 ff = open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3534 ff.write(text) 3535 ff.close() 3536 3537 # Add the symmetry.f 3538 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3539 self.write_symmetry(writers.FortranWriter(filename), v5=False) 3540 3541 # Modify setrun.f 3542 text = open(pjoin(self.dir_path,'Source','setrun.f')).read() 3543 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3544 fsock = open(pjoin(self.dir_path,'Source','setrun.f'), 'w') 3545 fsock.write(text) 3546 fsock.close() 3547 3548 self.make_model_symbolic_link()
3549 3550 #=========================================================================== 3551 # generate_subprocess_directory 3552 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model,
                                      me_number):
        """Generate the Pxxxxx directory for a subprocess in MG4 madevent,
        including the necessary matrix.f and various helper files.

        Args:
            matrix_element: HelasMatrixElement to export.
            fortran_model: helas call writer used to produce the Fortran
                amplitude calls in matrix.f.
            me_number: sequential number of this matrix element, written
                to iproc.dat and passed to the matrix.f writer.

        Returns:
            int: the number of helas calls written to matrix.f (0 when
            the write returned nothing).
        """

        cwd = os.getcwd()
        path = pjoin(self.dir_path, 'SubProcesses')

        # Remember the model of the first process if not already known
        if not self.model:
            self.model = matrix_element.get('processes')[0].get('model')

        #os.chdir(path)
        # Create the directory PN_xx_xxxxx in the specified path
        subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string()
        try:
            os.mkdir(pjoin(path,subprocdir))
        except os.error as error:
            # Directory may already exist; warn but keep going
            logger.warning(error.strerror + " " + subprocdir)

        #try:
        #    os.chdir(subprocdir)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % subprocdir)
        #    return 0

        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(path, subprocdir)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Add the driver.f
        ncomb = matrix_element.get_helicity_combinations()
        filename = pjoin(Ppath,'driver.f')
        self.write_driver(writers.FortranWriter(filename),ncomb,n_grouped_proc=1,
                          v5=self.opt['v5_model'])

        # Create the matrix.f file, auto_dsig.f file and all inc files
        filename = pjoin(Ppath, 'matrix.f')
        calls, ncolor = \
            self.write_matrix_element_v4(writers.FortranWriter(filename),
                              matrix_element, fortran_model, subproc_number = me_number)

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        # configs.inc also provides the s/t channel decomposition and the
        # per-config QCD coupling counts reused by the files below
        filename = pjoin(Ppath, 'configs.inc')
        mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(Ppath, 'config_nqcd.inc')
        self.write_config_nqcd_file(writers.FortranWriter(filename),
                                    nqcd_list)

        filename = pjoin(Ppath, 'config_subproc_map.inc')
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           s_and_t_channels)

        filename = pjoin(Ppath, 'coloramps.inc')
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  mapconfigs,
                                  matrix_element)

        filename = pjoin(Ppath, 'get_color.f')
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'decayBW.inc')
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        filename = pjoin(Ppath, 'dname.mg')
        self.write_dname_file(writers.FileWriter(filename),
                              "P"+matrix_element.get('processes')[0].shell_string())

        filename = pjoin(Ppath, 'iproc.dat')
        self.write_iproc_file(writers.FortranWriter(filename),
                              me_number)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                ncolor,
                                len(matrix_element.get('processes')),
                                1)

        filename = pjoin(Ppath, 'mg.sym')
        self.write_mg_sym_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'ncombs.inc')
        self.write_ncombs_file(writers.FortranWriter(filename),
                               nexternal)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(Ppath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(mapconfigs))

        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # Find config symmetries and permutations
        symmetry, perms, ident_perms = \
                  diagram_symmetry.find_symmetry(matrix_element)

        filename = pjoin(Ppath, 'symswap.inc')
        self.write_symswap_file(writers.FortranWriter(filename),
                                ident_perms)

        filename = pjoin(Ppath, 'symfact_orig.dat')
        self.write_symfact_file(open(filename, 'w'), symmetry)

        # Generate diagrams
        filename = pjoin(Ppath, "matrix.ps")
        plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                             get('diagrams'),
                                          filename,
                                          model=matrix_element.get('processes')[0].\
                                             get('model'),
                                          amplitude=True)
        logger.info("Generating Feynman diagrams for " + \
                     matrix_element.get('processes')[0].nice_string())
        plot.draw()

        # Symlink the shared SubProcesses helper files into this P* dir
        self.link_files_in_SubProcess(Ppath)

        #import nexternal/leshouche in Source
        ln(pjoin(Ppath,'nexternal.inc'), pjoin(self.dir_path,'Source'), log=False)
        ln(pjoin(Ppath,'leshouche.inc'), pjoin(self.dir_path,'Source'), log=False)
        ln(pjoin(Ppath,'maxamps.inc'), pjoin(self.dir_path,'Source'), log=False)
        # Return to SubProcesses dir
        #os.chdir(os.path.pardir)

        # Add subprocess to subproc.mg
        filename = pjoin(path, 'subproc.mg')
        files.append_to_file(filename,
                             self.write_subproc,
                             subprocdir)

        # Return to original dir
        #os.chdir(cwd)

        # Generate info page
        gen_infohtml.make_info_html(self.dir_path)


        if not calls:
            calls = 0
        return calls
3723 3724 link_Sub_files = ['addmothers.f', 3725 'cluster.f', 3726 'cluster.inc', 3727 'coupl.inc', 3728 'cuts.f', 3729 'cuts.inc', 3730 'genps.f', 3731 'genps.inc', 3732 'idenparts.f', 3733 'initcluster.f', 3734 'makefile', 3735 'message.inc', 3736 'myamp.f', 3737 'reweight.f', 3738 'run.inc', 3739 'maxconfigs.inc', 3740 'maxparticles.inc', 3741 'run_config.inc', 3742 'lhe_event_infos.inc', 3743 'setcuts.f', 3744 'setscales.f', 3745 'sudakov.inc', 3746 'symmetry.f', 3747 'unwgt.f', 3748 'dummy_fct.f' 3749 ] 3750 3764 3765
3766 - def finalize(self, matrix_elements, history, mg5options, flaglist):
3767 """Finalize ME v4 directory by creating jpeg diagrams, html 3768 pages,proc_card_mg5.dat and madevent.tar.gz.""" 3769 3770 if 'nojpeg' in flaglist: 3771 makejpg = False 3772 else: 3773 makejpg = True 3774 if 'online' in flaglist: 3775 online = True 3776 else: 3777 online = False 3778 3779 compiler = {'fortran': mg5options['fortran_compiler'], 3780 'cpp': mg5options['cpp_compiler'], 3781 'f2py': mg5options['f2py_compiler']} 3782 3783 # indicate that the output type is not grouped 3784 if not isinstance(self, ProcessExporterFortranMEGroup): 3785 self.proc_characteristic['grouped_matrix'] = False 3786 self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme'] 3787 # indicate the PDG of all initial particle 3788 try: 3789 pdgs1 = [p.get_initial_pdg(1) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)] 3790 pdgs2 = [p.get_initial_pdg(2) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)] 3791 except AttributeError: 3792 pdgs1 = [p.get_initial_pdg(1) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)] 3793 pdgs2 = [p.get_initial_pdg(2) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)] 3794 self.proc_characteristic['pdg_initial1'] = pdgs1 3795 self.proc_characteristic['pdg_initial2'] = pdgs2 3796 3797 3798 modelname = self.opt['model'] 3799 if modelname == 'mssm' or modelname.startswith('mssm-'): 3800 param_card = pjoin(self.dir_path, 'Cards','param_card.dat') 3801 mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 3802 check_param_card.convert_to_mg5card(param_card, mg5_param) 3803 check_param_card.check_valid_param_card(mg5_param) 3804 3805 # Add the combine_events.f modify param_card path/number of @X 3806 filename = pjoin(self.dir_path,'Source','combine_events.f') 3807 try: 3808 nb_proc =[p.get('id') 
for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')] 3809 except AttributeError: 3810 nb_proc =[p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')] 3811 nb_proc = len(set(nb_proc)) 3812 self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted 3813 # Write maxconfigs.inc based on max of ME's/subprocess groups 3814 filename = pjoin(self.dir_path,'Source','maxconfigs.inc') 3815 self.write_maxconfigs_file(writers.FortranWriter(filename), 3816 matrix_elements) 3817 3818 # Write maxparticles.inc based on max of ME's/subprocess groups 3819 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 3820 self.write_maxparticles_file(writers.FortranWriter(filename), 3821 matrix_elements) 3822 3823 # Touch "done" file 3824 os.system('touch %s/done' % pjoin(self.dir_path,'SubProcesses')) 3825 3826 # Check for compiler 3827 self.set_compiler(compiler) 3828 self.set_cpp_compiler(compiler['cpp']) 3829 3830 3831 old_pos = os.getcwd() 3832 subpath = pjoin(self.dir_path, 'SubProcesses') 3833 3834 P_dir_list = [proc for proc in os.listdir(subpath) 3835 if os.path.isdir(pjoin(subpath,proc)) and proc[0] == 'P'] 3836 3837 devnull = os.open(os.devnull, os.O_RDWR) 3838 # Convert the poscript in jpg files (if authorize) 3839 if makejpg: 3840 try: 3841 os.remove(pjoin(self.dir_path,'HTML','card.jpg')) 3842 except Exception, error: 3843 pass 3844 3845 if misc.which('gs'): 3846 logger.info("Generate jpeg diagrams") 3847 for Pdir in P_dir_list: 3848 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 3849 stdout = devnull, cwd=pjoin(subpath, Pdir)) 3850 3851 logger.info("Generate web pages") 3852 # Create the WebPage using perl script 3853 3854 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \ 3855 stdout = devnull,cwd=pjoin(self.dir_path)) 3856 3857 #os.chdir(os.path.pardir) 3858 3859 obj = gen_infohtml.make_info_html(self.dir_path) 3860 3861 if 
online: 3862 nb_channel = obj.rep_rule['nb_gen_diag'] 3863 open(pjoin(self.dir_path, 'Online'),'w').write(str(nb_channel)) 3864 #add the information to proc_charac 3865 self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag'] 3866 3867 # Write command history as proc_card_mg5 3868 if os.path.isdir(pjoin(self.dir_path,'Cards')): 3869 output_file = pjoin(self.dir_path,'Cards', 'proc_card_mg5.dat') 3870 history.write(output_file) 3871 3872 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3873 stdout = devnull) 3874 3875 #crate the proc_characteristic file 3876 self.create_proc_charac(matrix_elements, history) 3877 3878 # create the run_card 3879 ProcessExporterFortran.finalize(self, matrix_elements, history, mg5options, flaglist) 3880 3881 # Run "make" to generate madevent.tar.gz file 3882 if os.path.exists(pjoin(self.dir_path,'SubProcesses', 'subproc.mg')): 3883 if os.path.exists(pjoin(self.dir_path,'madevent.tar.gz')): 3884 os.remove(pjoin(self.dir_path,'madevent.tar.gz')) 3885 misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')], 3886 stdout = devnull, cwd=self.dir_path) 3887 3888 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3889 stdout = devnull, cwd=self.dir_path)
3890 3891 3892 3893 3894 3895 3896 #return to the initial dir 3897 #os.chdir(old_pos) 3898 3899 #=========================================================================== 3900 # write_matrix_element_v4 3901 #===========================================================================
3902 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 3903 proc_id = "", config_map = [], subproc_number = ""):
3904 """Export a matrix element to a matrix.f file in MG4 madevent format""" 3905 3906 if not matrix_element.get('processes') or \ 3907 not matrix_element.get('diagrams'): 3908 return 0 3909 3910 if writer: 3911 if not isinstance(writer, writers.FortranWriter): 3912 raise writers.FortranWriter.FortranWriterError(\ 3913 "writer not FortranWriter") 3914 # Set lowercase/uppercase Fortran code 3915 writers.FortranWriter.downcase = False 3916 3917 # The proc prefix is not used for MadEvent output so it can safely be set 3918 # to an empty string. 3919 replace_dict = {'proc_prefix':''} 3920 3921 # Extract helas calls 3922 helas_calls = fortran_model.get_matrix_element_calls(\ 3923 matrix_element) 3924 3925 replace_dict['helas_calls'] = "\n".join(helas_calls) 3926 3927 3928 # Extract version number and date from VERSION file 3929 info_lines = self.get_mg5_info_lines() 3930 replace_dict['info_lines'] = info_lines 3931 3932 # Extract process info lines 3933 process_lines = self.get_process_info_lines(matrix_element) 3934 replace_dict['process_lines'] = process_lines 3935 3936 # Set proc_id 3937 replace_dict['proc_id'] = proc_id 3938 3939 # Extract ncomb 3940 ncomb = matrix_element.get_helicity_combinations() 3941 replace_dict['ncomb'] = ncomb 3942 3943 # Extract helicity lines 3944 helicity_lines = self.get_helicity_lines(matrix_element) 3945 replace_dict['helicity_lines'] = helicity_lines 3946 3947 # Extract IC line 3948 ic_line = self.get_ic_line(matrix_element) 3949 replace_dict['ic_line'] = ic_line 3950 3951 # Extract overall denominator 3952 # Averaging initial state color, spin, and identical FS particles 3953 den_factor_line = self.get_den_factor_line(matrix_element) 3954 replace_dict['den_factor_line'] = den_factor_line 3955 3956 # Extract ngraphs 3957 ngraphs = matrix_element.get_number_of_amplitudes() 3958 replace_dict['ngraphs'] = ngraphs 3959 3960 # Extract ndiags 3961 ndiags = len(matrix_element.get('diagrams')) 3962 replace_dict['ndiags'] = ndiags 3963 3964 # 
Set define_iconfigs_lines 3965 replace_dict['define_iconfigs_lines'] = \ 3966 """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG 3967 COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG""" 3968 3969 if proc_id: 3970 # Set lines for subprocess group version 3971 # Set define_iconfigs_lines 3972 replace_dict['define_iconfigs_lines'] += \ 3973 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 3974 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 3975 # Set set_amp2_line 3976 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \ 3977 proc_id 3978 else: 3979 # Standard running 3980 # Set set_amp2_line 3981 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT" 3982 3983 # Extract nwavefuncs 3984 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3985 replace_dict['nwavefuncs'] = nwavefuncs 3986 3987 # Extract ncolor 3988 ncolor = max(1, len(matrix_element.get('color_basis'))) 3989 replace_dict['ncolor'] = ncolor 3990 3991 # Extract color data lines 3992 color_data_lines = self.get_color_data_lines(matrix_element) 3993 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3994 3995 3996 # Set the size of Wavefunction 3997 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 3998 replace_dict['wavefunctionsize'] = 18 3999 else: 4000 replace_dict['wavefunctionsize'] = 6 4001 4002 # Extract amp2 lines 4003 amp2_lines = self.get_amp2_lines(matrix_element, config_map) 4004 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 4005 4006 # The JAMP definition depends on the splitting order 4007 split_orders=matrix_element.get('processes')[0].get('split_orders') 4008 if len(split_orders)>0: 4009 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 4010 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 4011 matrix_element.get('processes')[0],squared_orders) 4012 else: 4013 # Consider the output of a dummy order 'ALL_ORDERS' for which we 4014 # set all amplitude order to weight 1 and only one squared order 4015 # 
contribution which is of course ALL_ORDERS=2. 4016 squared_orders = [(2,),] 4017 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 4018 replace_dict['chosen_so_configs'] = '.TRUE.' 4019 4020 replace_dict['nAmpSplitOrders']=len(amp_orders) 4021 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 4022 replace_dict['split_order_str_list']=str(split_orders) 4023 replace_dict['nSplitOrders']=max(len(split_orders),1) 4024 amp_so = self.get_split_orders_lines( 4025 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 4026 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 4027 replace_dict['ampsplitorders']='\n'.join(amp_so) 4028 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 4029 4030 4031 # Extract JAMP lines 4032 # If no split_orders then artificiall add one entry called 'ALL_ORDERS' 4033 jamp_lines = self.get_JAMP_lines_split_order(\ 4034 matrix_element,amp_orders,split_order_names= 4035 split_orders if len(split_orders)>0 else ['ALL_ORDERS']) 4036 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 4037 4038 replace_dict['template_file'] = pjoin(_file_path, \ 4039 'iolibs/template_files/%s' % self.matrix_file) 4040 replace_dict['template_file2'] = pjoin(_file_path, \ 4041 'iolibs/template_files/split_orders_helping_functions.inc') 4042 if writer: 4043 file = open(replace_dict['template_file']).read() 4044 file = file % replace_dict 4045 # Add the split orders helper functions. 4046 file = file + '\n' + open(replace_dict['template_file2'])\ 4047 .read()%replace_dict 4048 # Write the file 4049 writer.writelines(file) 4050 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor 4051 else: 4052 replace_dict['return_value'] = (len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor) 4053 return replace_dict
4054 4055 #=========================================================================== 4056 # write_auto_dsig_file 4057 #===========================================================================
4058 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
4059 """Write the auto_dsig.f file for the differential cross section 4060 calculation, includes pdf call information""" 4061 4062 if not matrix_element.get('processes') or \ 4063 not matrix_element.get('diagrams'): 4064 return 0 4065 4066 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 4067 self.proc_characteristic['ninitial'] = ninitial 4068 self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal) 4069 4070 # Add information relevant for MLM matching: 4071 # Maximum QCD power in all the contributions 4072 max_qcd_order = 0 4073 for diag in matrix_element.get('diagrams'): 4074 orders = diag.calculate_orders() 4075 if 'QCD' in orders: 4076 max_qcd_order = max(max_qcd_order,orders['QCD']) 4077 max_n_light_final_partons = max(len([1 for id in proc.get_final_ids() 4078 if proc.get('model').get_particle(id).get('mass')=='ZERO' and 4079 proc.get('model').get_particle(id).get('color')>1]) 4080 for proc in matrix_element.get('processes')) 4081 # Maximum number of final state light jets to be matched 4082 self.proc_characteristic['max_n_matched_jets'] = max( 4083 self.proc_characteristic['max_n_matched_jets'], 4084 min(max_qcd_order,max_n_light_final_partons)) 4085 4086 # List of default pdgs to be considered for the CKKWl merging cut 4087 self.proc_characteristic['colored_pdgs'] = \ 4088 sorted(list(set([abs(p.get('pdg_code')) for p in 4089 matrix_element.get('processes')[0].get('model').get('particles') if 4090 p.get('color')>1]))) 4091 4092 if ninitial < 1 or ninitial > 2: 4093 raise writers.FortranWriter.FortranWriterError, \ 4094 """Need ninitial = 1 or 2 to write auto_dsig file""" 4095 4096 replace_dict = {} 4097 4098 # Extract version number and date from VERSION file 4099 info_lines = self.get_mg5_info_lines() 4100 replace_dict['info_lines'] = info_lines 4101 4102 # Extract process info lines 4103 process_lines = self.get_process_info_lines(matrix_element) 4104 replace_dict['process_lines'] = process_lines 4105 
4106 # Set proc_id 4107 replace_dict['proc_id'] = proc_id 4108 replace_dict['numproc'] = 1 4109 4110 # Set dsig_line 4111 if ninitial == 1: 4112 # No conversion, since result of decay should be given in GeV 4113 dsig_line = "pd(0)*dsiguu" 4114 else: 4115 # Convert result (in GeV) to pb 4116 dsig_line = "pd(0)*conv*dsiguu" 4117 4118 replace_dict['dsig_line'] = dsig_line 4119 4120 # Extract pdf lines 4121 pdf_vars, pdf_data, pdf_lines = \ 4122 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 4123 replace_dict['pdf_vars'] = pdf_vars 4124 replace_dict['pdf_data'] = pdf_data 4125 replace_dict['pdf_lines'] = pdf_lines 4126 4127 # Lines that differ between subprocess group and regular 4128 if proc_id: 4129 replace_dict['numproc'] = int(proc_id) 4130 replace_dict['passcuts_begin'] = "" 4131 replace_dict['passcuts_end'] = "" 4132 # Set lines for subprocess group version 4133 # Set define_iconfigs_lines 4134 replace_dict['define_subdiag_lines'] = \ 4135 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 4136 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 4137 replace_dict['cutsdone'] = "" 4138 else: 4139 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 4140 replace_dict['passcuts_end'] = "ENDIF" 4141 replace_dict['define_subdiag_lines'] = "" 4142 replace_dict['cutsdone'] = " cutsdone=.false.\n cutspassed=.false." 4143 4144 if not isinstance(self, ProcessExporterFortranMEGroup): 4145 ncomb=matrix_element.get_helicity_combinations() 4146 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 4147 else: 4148 replace_dict['read_write_good_hel'] = "" 4149 4150 context = {'read_write_good_hel':True} 4151 4152 if writer: 4153 file = open(pjoin(_file_path, \ 4154 'iolibs/template_files/auto_dsig_v4.inc')).read() 4155 file = file % replace_dict 4156 4157 # Write the file 4158 writer.writelines(file, context=context) 4159 else: 4160 return replace_dict, context
4161 #=========================================================================== 4162 # write_coloramps_file 4163 #===========================================================================
4164 - def write_coloramps_file(self, writer, mapconfigs, matrix_element):
4165 """Write the coloramps.inc file for MadEvent""" 4166 4167 lines = self.get_icolamp_lines(mapconfigs, matrix_element, 1) 4168 lines.insert(0, "logical icolamp(%d,%d,1)" % \ 4169 (max(len(matrix_element.get('color_basis').keys()), 1), 4170 len(mapconfigs))) 4171 4172 4173 # Write the file 4174 writer.writelines(lines) 4175 4176 return True
4177 4178 #=========================================================================== 4179 # write_colors_file 4180 #===========================================================================
    def write_colors_file(self, writer, matrix_elements):
        """Write the get_color.f file for MadEvent, which returns color
        for all particles used in the matrix element.

        The generated Fortran function get_color(ipdg) maps every PDG code
        appearing in the wavefunctions or external legs to its color
        representation, with an extra entry for the dummy particle used in
        multiparticle vertices and an error branch for unknown codes."""

        # Accept a single matrix element as well as a list of them.
        if isinstance(matrix_elements, helas_objects.HelasMatrixElement):
            matrix_elements = [matrix_elements]

        model = matrix_elements[0].get('processes')[0].get('model')

        # We need the both particle and antiparticle wf_ids, since the identity
        # depends on the direction of the wf.
        wf_ids = set(sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \
                                    for wf in d.get('wavefunctions')],[]) \
                               for d in me.get('diagrams')], []) \
                          for me in matrix_elements], []))

        # Same for the external legs (decay products included).
        leg_ids = set(sum([sum([sum([[l.get('id'),
                                      model.get_particle(l.get('id')).get_anti_pdg_code()] \
                                     for l in p.get_legs_with_decays()], []) \
                                for p in me.get('processes')], []) \
                           for me in matrix_elements], []))
        particle_ids = sorted(list(wf_ids.union(leg_ids)))

        # First particle opens the if-chain; the rest are else-if branches.
        lines = """function get_color(ipdg)
        implicit none
        integer get_color, ipdg

        if(ipdg.eq.%d)then
        get_color=%d
        return
        """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color())

        for part_id in particle_ids[1:]:
            lines += """else if(ipdg.eq.%d)then
        get_color=%d
        return
        """ % (part_id, model.get_particle(part_id).get_color())
        # Dummy particle for multiparticle vertices with pdg given by
        # first code not in the model
        lines += """else if(ipdg.eq.%d)then
c       This is dummy particle used in multiparticle vertices
        get_color=2
        return
        """ % model.get_first_non_pdg()
        lines += """else
        write(*,*)'Error: No color given for pdg ',ipdg
        get_color=0
        return
        endif
        end
        """

        # Write the file
        writer.writelines(lines)

        return True
4237 4238 #=========================================================================== 4239 # write_config_nqcd_file 4240 #===========================================================================
4241 - def write_config_nqcd_file(self, writer, nqcd_list):
4242 """Write the config_nqcd.inc with the number of QCD couplings 4243 for each config""" 4244 4245 lines = [] 4246 for iconf, n in enumerate(nqcd_list): 4247 lines.append("data nqcd(%d)/%d/" % (iconf+1, n)) 4248 4249 # Write the file 4250 writer.writelines(lines) 4251 4252 return True
4253 4254 #=========================================================================== 4255 # write_maxconfigs_file 4256 #===========================================================================
4257 - def write_maxconfigs_file(self, writer, matrix_elements):
4258 """Write the maxconfigs.inc file for MadEvent""" 4259 4260 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 4261 maxconfigs = max([me.get_num_configs() for me in \ 4262 matrix_elements.get('matrix_elements')]) 4263 else: 4264 maxconfigs = max([me.get_num_configs() for me in matrix_elements]) 4265 4266 lines = "integer lmaxconfigs\n" 4267 lines += "parameter(lmaxconfigs=%d)" % maxconfigs 4268 4269 # Write the file 4270 writer.writelines(lines) 4271 4272 return True
4273 4274 #=========================================================================== 4275 # read_write_good_hel 4276 #===========================================================================
    def read_write_good_hel(self, ncomb):
        """Return the Fortran source (as a string) for the routines that
        read, write and initialize the good_hel common block, plus a
        get_maxsproc helper.

        ncomb: number of helicity combinations; substituted into the
        Fortran PARAMETER NCOMB.  Single-process version: get_maxsproc
        always returns 1 (the grouped subclass overrides this).
        """

        convert = {'ncomb' : ncomb}
        # The Fortran text is emitted verbatim; only %(ncomb)d is filled in.
        output = """
        subroutine write_good_hel(stream_id)
        implicit none
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB)
        INTEGER NTRY
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        write(stream_id,*) GOODHEL
        return
        end


        subroutine read_good_hel(stream_id)
        implicit none
        include 'genps.inc'
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB)
        INTEGER NTRY
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        read(stream_id,*) GOODHEL
        NTRY = MAXTRIES + 1
        return
        end

        subroutine init_good_hel()
        implicit none
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB)
        INTEGER NTRY
        INTEGER I

        do i=1,NCOMB
        GOODHEL(I) = .false.
        enddo
        NTRY = 0
        end

        integer function get_maxsproc()
        implicit none
        get_maxsproc = 1
        return
        end

        """ % convert

        return output
4332 4333 #=========================================================================== 4334 # write_config_subproc_map_file 4335 #===========================================================================
4336 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
4337 """Write a dummy config_subproc.inc file for MadEvent""" 4338 4339 lines = [] 4340 4341 for iconfig in range(len(s_and_t_channels)): 4342 lines.append("DATA CONFSUB(1,%d)/1/" % \ 4343 (iconfig + 1)) 4344 4345 # Write the file 4346 writer.writelines(lines) 4347 4348 return True
4349 4350 #=========================================================================== 4351 # write_configs_file 4352 #===========================================================================
4353 - def write_configs_file(self, writer, matrix_element):
4354 """Write the configs.inc file for MadEvent""" 4355 4356 # Extract number of external particles 4357 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 4358 4359 model = matrix_element.get('processes')[0].get('model') 4360 configs = [(i+1, d) for (i, d) in \ 4361 enumerate(matrix_element.get('diagrams'))] 4362 mapconfigs = [c[0] for c in configs] 4363 return mapconfigs, self.write_configs_file_from_diagrams(writer, 4364 [[c[1]] for c in configs], 4365 mapconfigs, 4366 nexternal, ninitial, 4367 model)
4368 4369 #=========================================================================== 4370 # write_run_configs_file 4371 #===========================================================================
4372 - def write_run_config_file(self, writer):
4373 """Write the run_configs.inc file for MadEvent""" 4374 4375 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 4376 4377 if self.proc_characteristic['loop_induced']: 4378 job_per_chan = 1 4379 else: 4380 job_per_chan = 5 4381 4382 if writer: 4383 text = open(path).read() % {'chanperjob': job_per_chan} 4384 writer.write(text) 4385 return True 4386 else: 4387 return {'chanperjob': job_per_chan}
4388 4389 #=========================================================================== 4390 # write_configs_file_from_diagrams 4391 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns (s_and_t_channels, nqcd_list): per written config, the
        [s-channels, t-channels] pair of the first contributing
        subprocess, and the number of QCD couplings of that config.
        """

        lines = []

        s_and_t_channels = []

        nqcd_list = []

        # Largest vertex size (number of legs) per config, taken from the
        # first contributing diagram; empty lists are skipped.
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Dummy PDG code for multiparticle vertices: first code not in model.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any([vert > minvert for vert in
                    [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
                # Only 3-vertices allowed in configs.inc
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config.
            # NOTE: all missing subprocesses share the SAME empty_verts
            # list object; it is resized in place below so every
            # placeholder entry tracks the real vertex count.
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model,
                                                           new_pdg))
                else:
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                # NOTE(review): relies on zip() returning a list (Python 2);
                # under Python 3 `schannels + tchannels` below would fail.
                schannels = zip(*[s for s,t in stchannels])
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                     mapconfigs[iconfig]))
            # Number of QCD couplings in this diagram, from the first
            # contributing subprocess (0 if QCD order absent).
            nqcd = 0
            for h in helas_diags:
                if h:
                    try:
                        nqcd = h.calculate_orders()['QCD']
                    except KeyError:
                        pass
                    break
                else:
                    continue

            nqcd_list.append(nqcd)

            for verts in allchannels:
                if verts in schannels:
                    # s-channel: tuple over subprocesses; use the first
                    # real vertex for numbering.
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, len(daughters),
                              ",".join([str(d) for d in daughters])))
                if verts in schannels:
                    # One PDG code per subprocess (0 where it does not
                    # contribute), so multiprocess picks the right one.
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join([str(d) for d in pdgs])))
                    lines.append("data tprid(%d,%d)/0/" % \
                                 (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append("data tprid(%d,%d)/%d/" % \
                                 (last_leg.get('number'), nconfigs,
                                  abs(last_leg.get('id'))))
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        lines.append("# Number of configs")
        lines.append("data mapconfig(0)/%d/" % nconfigs)

        # Write the file
        writer.writelines(lines)

        return s_and_t_channels, nqcd_list
4525 4526 #=========================================================================== 4527 # write_decayBW_file 4528 #===========================================================================
4529 - def write_decayBW_file(self, writer, s_and_t_channels):
4530 """Write the decayBW.inc file for MadEvent""" 4531 4532 lines = [] 4533 4534 booldict = {None: "0", True: "1", False: "2"} 4535 4536 for iconf, config in enumerate(s_and_t_channels): 4537 schannels = config[0] 4538 for vertex in schannels: 4539 # For the resulting leg, pick out whether it comes from 4540 # decay or not, as given by the onshell flag 4541 leg = vertex.get('legs')[-1] 4542 lines.append("data gForceBW(%d,%d)/%s/" % \ 4543 (leg.get('number'), iconf + 1, 4544 booldict[leg.get('onshell')])) 4545 4546 # Write the file 4547 writer.writelines(lines) 4548 4549 return True
4550 4551 #=========================================================================== 4552 # write_dname_file 4553 #===========================================================================
4554 - def write_dname_file(self, writer, dir_name):
4555 """Write the dname.mg file for MG4""" 4556 4557 line = "DIRNAME=%s" % dir_name 4558 4559 # Write the file 4560 writer.write(line + "\n") 4561 4562 return True
4563 4564 #=========================================================================== 4565 # write_driver 4566 #===========================================================================
4567 - def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
4568 """Write the SubProcess/driver.f file for MG4""" 4569 4570 path = pjoin(_file_path,'iolibs','template_files','madevent_driver.f') 4571 4572 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4573 card = 'Source/MODEL/MG5_param.dat' 4574 else: 4575 card = 'param_card.dat' 4576 # Requiring each helicity configuration to be probed by 10 points for 4577 # matrix element before using the resulting grid for MC over helicity 4578 # sampling. 4579 # We multiply this by 2 because each grouped subprocess is called at most 4580 # twice for each IMIRROR. 4581 replace_dict = {'param_card_name':card, 4582 'ncomb':ncomb, 4583 'hel_init_points':n_grouped_proc*10*2} 4584 if not v5: 4585 replace_dict['secondparam']=',.true.' 4586 else: 4587 replace_dict['secondparam']='' 4588 4589 if writer: 4590 text = open(path).read() % replace_dict 4591 writer.write(text) 4592 return True 4593 else: 4594 return replace_dict
4595 4596 #=========================================================================== 4597 # write_addmothers 4598 #===========================================================================
4599 - def write_addmothers(self, writer):
4600 """Write the SubProcess/addmothers.f""" 4601 4602 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 4603 4604 text = open(path).read() % {'iconfig': 'diag_number'} 4605 writer.write(text) 4606 4607 return True
4608 4609 4610 #=========================================================================== 4611 # write_combine_events 4612 #===========================================================================
4613 - def write_combine_events(self, writer, nb_proc=100):
4614 """Write the SubProcess/driver.f file for MG4""" 4615 4616 path = pjoin(_file_path,'iolibs','template_files','madevent_combine_events.f') 4617 4618 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4619 card = 'Source/MODEL/MG5_param.dat' 4620 else: 4621 card = 'param_card.dat' 4622 4623 #set maxpup (number of @X in the process card) 4624 4625 text = open(path).read() % {'param_card_name':card, 'maxpup':nb_proc+1} 4626 #the +1 is just a security. This is not needed but I feel(OM) safer with it. 4627 writer.write(text) 4628 4629 return True
4630 4631 4632 #=========================================================================== 4633 # write_symmetry 4634 #===========================================================================
4635 - def write_symmetry(self, writer, v5=True):
4636 """Write the SubProcess/driver.f file for ME""" 4637 4638 path = pjoin(_file_path,'iolibs','template_files','madevent_symmetry.f') 4639 4640 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4641 card = 'Source/MODEL/MG5_param.dat' 4642 else: 4643 card = 'param_card.dat' 4644 4645 if v5: 4646 replace_dict = {'param_card_name':card, 'setparasecondarg':''} 4647 else: 4648 replace_dict= {'param_card_name':card, 'setparasecondarg':',.true.'} 4649 4650 if writer: 4651 text = open(path).read() 4652 text = text % replace_dict 4653 writer.write(text) 4654 return True 4655 else: 4656 return replace_dict
4657 4658 4659 4660 #=========================================================================== 4661 # write_iproc_file 4662 #===========================================================================
4663 - def write_iproc_file(self, writer, me_number):
4664 """Write the iproc.dat file for MG4""" 4665 line = "%d" % (me_number + 1) 4666 4667 # Write the file 4668 for line_to_write in writer.write_line(line): 4669 writer.write(line_to_write) 4670 return True
4671 4672 #=========================================================================== 4673 # write_mg_sym_file 4674 #===========================================================================
4675 - def write_mg_sym_file(self, writer, matrix_element):
4676 """Write the mg.sym file for MadEvent.""" 4677 4678 lines = [] 4679 4680 # Extract process with all decays included 4681 final_legs = filter(lambda leg: leg.get('state') == True, 4682 matrix_element.get('processes')[0].get_legs_with_decays()) 4683 4684 ninitial = len(filter(lambda leg: leg.get('state') == False, 4685 matrix_element.get('processes')[0].get('legs'))) 4686 4687 identical_indices = {} 4688 4689 # Extract identical particle info 4690 for i, leg in enumerate(final_legs): 4691 if leg.get('id') in identical_indices: 4692 identical_indices[leg.get('id')].append(\ 4693 i + ninitial + 1) 4694 else: 4695 identical_indices[leg.get('id')] = [i + ninitial + 1] 4696 4697 # Remove keys which have only one particle 4698 for key in identical_indices.keys(): 4699 if len(identical_indices[key]) < 2: 4700 del identical_indices[key] 4701 4702 # Write mg.sym file 4703 lines.append(str(len(identical_indices.keys()))) 4704 for key in identical_indices.keys(): 4705 lines.append(str(len(identical_indices[key]))) 4706 for number in identical_indices[key]: 4707 lines.append(str(number)) 4708 4709 # Write the file 4710 writer.writelines(lines) 4711 4712 return True
4713 4714 #=========================================================================== 4715 # write_mg_sym_file 4716 #===========================================================================
4717 - def write_default_mg_sym_file(self, writer):
4718 """Write the mg.sym file for MadEvent.""" 4719 4720 lines = "0" 4721 4722 # Write the file 4723 writer.writelines(lines) 4724 4725 return True
4726 4727 #=========================================================================== 4728 # write_ncombs_file 4729 #===========================================================================
4730 - def write_ncombs_file(self, writer, nexternal):
4731 """Write the ncombs.inc file for MadEvent.""" 4732 4733 # ncomb (used for clustering) is 2^nexternal 4734 file = " integer n_max_cl\n" 4735 file = file + "parameter (n_max_cl=%d)" % (2 ** nexternal) 4736 4737 # Write the file 4738 writer.writelines(file) 4739 4740 return True
4741 4742 #=========================================================================== 4743 # write_processes_file 4744 #===========================================================================
4745 - def write_processes_file(self, writer, subproc_group):
4746 """Write the processes.dat file with info about the subprocesses 4747 in this group.""" 4748 4749 lines = [] 4750 4751 for ime, me in \ 4752 enumerate(subproc_group.get('matrix_elements')): 4753 lines.append("%s %s" % (str(ime+1) + " " * (7-len(str(ime+1))), 4754 ",".join(p.base_string() for p in \ 4755 me.get('processes')))) 4756 if me.get('has_mirror_process'): 4757 mirror_procs = [copy.copy(p) for p in me.get('processes')] 4758 for proc in mirror_procs: 4759 legs = copy.copy(proc.get('legs_with_decays')) 4760 legs.insert(0, legs.pop(1)) 4761 proc.set("legs_with_decays", legs) 4762 lines.append("mirror %s" % ",".join(p.base_string() for p in \ 4763 mirror_procs)) 4764 else: 4765 lines.append("mirror none") 4766 4767 # Write the file 4768 writer.write("\n".join(lines)) 4769 4770 return True
4771 4772 #=========================================================================== 4773 # write_symswap_file 4774 #===========================================================================
4775 - def write_symswap_file(self, writer, ident_perms):
4776 """Write the file symswap.inc for MG4 by comparing diagrams using 4777 the internal matrix element value functionality.""" 4778 4779 lines = [] 4780 4781 # Write out lines for symswap.inc file (used to permute the 4782 # external leg momenta 4783 for iperm, perm in enumerate(ident_perms): 4784 lines.append("data (isym(i,%d),i=1,nexternal)/%s/" % \ 4785 (iperm+1, ",".join([str(i+1) for i in perm]))) 4786 lines.append("data nsym/%d/" % len(ident_perms)) 4787 4788 # Write the file 4789 writer.writelines(lines) 4790 4791 return True
4792 4793 #=========================================================================== 4794 # write_symfact_file 4795 #===========================================================================
4796 - def write_symfact_file(self, writer, symmetry):
4797 """Write the files symfact.dat for MG4 by comparing diagrams using 4798 the internal matrix element value functionality.""" 4799 4800 pos = max(2, int(math.ceil(math.log10(len(symmetry))))) 4801 form = "%"+str(pos)+"r %"+str(pos+1)+"r" 4802 # Write out lines for symswap.inc file (used to permute the 4803 # external leg momenta 4804 lines = [ form %(i+1, s) for i,s in enumerate(symmetry) if s != 0] 4805 # Write the file 4806 writer.write('\n'.join(lines)) 4807 writer.write('\n') 4808 4809 return True
4810 4811 #=========================================================================== 4812 # write_symperms_file 4813 #===========================================================================
4814 - def write_symperms_file(self, writer, perms):
4815 """Write the symperms.inc file for subprocess group, used for 4816 symmetric configurations""" 4817 4818 lines = [] 4819 for iperm, perm in enumerate(perms): 4820 lines.append("data (perms(i,%d),i=1,nexternal)/%s/" % \ 4821 (iperm+1, ",".join([str(i+1) for i in perm]))) 4822 4823 # Write the file 4824 writer.writelines(lines) 4825 4826 return True
4827 4828 #=========================================================================== 4829 # write_subproc 4830 #===========================================================================
4831 - def write_subproc(self, writer, subprocdir):
4832 """Append this subprocess to the subproc.mg file for MG4""" 4833 4834 # Write line to file 4835 writer.write(subprocdir + "\n") 4836 4837 return True
4838
4839 #=============================================================================== 4840 # ProcessExporterFortranMEGroup 4841 #=============================================================================== 4842 -class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
4843 """Class to take care of exporting a set of matrix elements to 4844 MadEvent subprocess group format.""" 4845 4846 matrix_file = "matrix_madevent_group_v4.inc" 4847 grouped_mode = 'madevent' 4848 #=========================================================================== 4849 # generate_subprocess_directory 4850 #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                      fortran_model,
                                      group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files.

        Returns the total number of helas calls (0 on failure to enter
        the directory).  Side effects: chdir into/out of the SubProcesses
        tree, creation of the P<n>_<name> directory and many files in it.
        """

        assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \
                          "subproc_group object not SubProcessGroup"

        # Lazily record the model from the first process if not yet set.
        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        cwd = os.getcwd()
        path = pjoin(self.dir_path, 'SubProcesses')

        os.chdir(path)
        pathdir = os.getcwd()

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(subprocdir)
        except os.error as error:
            # Directory may already exist; only warn.
            logger.warning(error.strerror + " " + subprocdir)

        try:
            os.chdir(subprocdir)
        except os.error:
            logger.error('Could not cd to directory %s' % subprocdir)
            return 0

        logger.info('Creating files in directory %s' % subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0
        maxflows = 0
        tot_calls = 0

        matrix_elements = subproc_group.get('matrix_elements')

        # Add the driver.f, all grouped ME's must share the same number of
        # helicity configuration
        ncomb = matrix_elements[0].get_helicity_combinations()
        for me in matrix_elements[1:]:
            if ncomb!=me.get_helicity_combinations():
                raise MadGraph5Error, "All grouped processes must share the "+\
                                      "same number of helicity configurations."

        filename = 'driver.f'
        self.write_driver(writers.FortranWriter(filename),ncomb,
                          n_grouped_proc=len(matrix_elements), v5=self.opt['v5_model'])

        # One matrix<i>.f / auto_dsig<i>.f / diagram drawing per ME.
        for ime, matrix_element in \
                enumerate(matrix_elements):
            filename = 'matrix%d.f' % (ime+1)
            calls, ncolor = \
                   self.write_matrix_element_v4(writers.FortranWriter(filename),
                                                matrix_element,
                                                fortran_model,
                                                proc_id=str(ime+1),
                                                config_map=subproc_group.get('diagram_maps')[ime],
                                                subproc_number=group_number)

            filename = 'auto_dsig%d.f' % (ime+1)
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime+1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams
            filename = "matrix%d.ps" % (ime+1)
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                              get('diagrams'),
                                              filename,
                                              model = \
                                              matrix_element.get('processes')[0].\
                                              get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        # Extract number of external particles
        # NOTE(review): uses matrix_element left over from the loop above —
        # presumably all MEs of a group share the same externals; confirm.
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
        # If a subprocess has no diagrams for this config, the number is 0
        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = 'auto_dsig.f'
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = 'coloramps.inc'
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  subproc_diagrams_for_config,
                                  maxflows,
                                  matrix_elements)

        filename = 'get_color.f'
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_elements)

        filename = 'config_subproc_map.inc'
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           subproc_diagrams_for_config)

        filename = 'configs.inc'
        nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = 'config_nqcd.inc'
        self.write_config_nqcd_file(writers.FortranWriter(filename),
                                    nqcd_list)

        filename = 'decayBW.inc'
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        filename = 'dname.mg'
        self.write_dname_file(writers.FortranWriter(filename),
                              subprocdir)

        filename = 'iproc.dat'
        self.write_iproc_file(writers.FortranWriter(filename),
                              group_number)

        filename = 'leshouche.inc'
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = 'maxamps.inc'
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        # Note that mg.sym is not relevant for this case
        filename = 'mg.sym'
        self.write_default_mg_sym_file(writers.FortranWriter(filename))

        filename = 'mirrorprocs.inc'
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = 'ncombs.inc'
        self.write_ncombs_file(writers.FortranWriter(filename),
                               nexternal)

        filename = 'nexternal.inc'
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = 'ngraphs.inc'
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                nconfigs)

        filename = 'pmass.inc'
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = 'props.inc'
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = 'processes.dat'
        files.write_to_file(filename,
                            self.write_processes_file,
                            subproc_group)

        # Find config symmetries and permutations
        symmetry, perms, ident_perms = \
                  diagram_symmetry.find_symmetry(subproc_group)

        filename = 'symswap.inc'
        self.write_symswap_file(writers.FortranWriter(filename),
                                ident_perms)

        filename = 'symfact_orig.dat'
        self.write_symfact_file(open(filename, 'w'), symmetry)

        filename = 'symperms.inc'
        self.write_symperms_file(writers.FortranWriter(filename),
                                 perms)

        # Generate jpgs -> pass in make_html
        #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl'))

        self.link_files_in_SubProcess(pjoin(pathdir,subprocdir))

        #import nexternal/leshouch in Source
        ln('nexternal.inc', '../../Source', log=False)
        ln('leshouche.inc', '../../Source', log=False)
        ln('maxamps.inc', '../../Source', log=False)

        # Return to SubProcesses dir)
        os.chdir(pathdir)

        # Add subprocess to subproc.mg
        filename = 'subproc.mg'
        files.append_to_file(filename,
                             self.write_subproc,
                             subprocdir)

        # Return to original dir
        os.chdir(cwd)

        if not tot_calls:
            tot_calls = 0
        return tot_calls
5076 5077 #=========================================================================== 5078 # write_super_auto_dsig_file 5079 #===========================================================================
    def write_super_auto_dsig_file(self, writer, subproc_group):
        """Write the auto_dsig.f file selecting between the subprocesses
        in subprocess group mode.

        With a writer, fill the template and write it; without one,
        return the substitution dictionary instead.
        """

        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        matrix_elements = subproc_group.get('matrix_elements')

        # Extract process info lines
        process_lines = '\n'.join([self.get_process_info_lines(me) for me in \
                                   matrix_elements])
        replace_dict['process_lines'] = process_lines

        nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial()
        replace_dict['nexternal'] = nexternal

        # NOTE(review): factor 2 presumably accounts for the IS mirror of
        # each matrix element — confirm against the template.
        replace_dict['nsprocs'] = 2*len(matrix_elements)

        # Generate dsig definition line
        dsig_def_line = "DOUBLE PRECISION " + \
                        ",".join(["DSIG%d" % (iproc + 1) for iproc in \
                                  range(len(matrix_elements))])
        replace_dict["dsig_def_line"] = dsig_def_line

        # Generate dsig process lines: dispatch on IPROC to DSIG<i>.
        call_dsig_proc_lines = []
        for iproc in range(len(matrix_elements)):
            call_dsig_proc_lines.append(\
                "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \
                {"num": iproc + 1,
                 "proc": matrix_elements[iproc].get('processes')[0].base_string()})
        replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines)

        # Good-helicity bookkeeping routines (grouped version).
        ncomb=matrix_elements[0].get_helicity_combinations()
        replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb)

        if writer:
            file = open(pjoin(_file_path, \
                       'iolibs/template_files/super_auto_dsig_group_v4.inc')).read()
            file = file % replace_dict

            # Write the file
            writer.writelines(file)
        else:
            return replace_dict
5129 5130 #=========================================================================== 5131 # write_mirrorprocs 5132 #===========================================================================
5133 - def write_mirrorprocs(self, writer, subproc_group):
5134 """Write the mirrorprocs.inc file determining which processes have 5135 IS mirror process in subprocess group mode.""" 5136 5137 lines = [] 5138 bool_dict = {True: '.true.', False: '.false.'} 5139 matrix_elements = subproc_group.get('matrix_elements') 5140 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 5141 (len(matrix_elements), 5142 ",".join([bool_dict[me.get('has_mirror_process')] for \ 5143 me in matrix_elements]))) 5144 # Write the file 5145 writer.writelines(lines)
5146 5147 #=========================================================================== 5148 # write_addmothers 5149 #===========================================================================
5150 - def write_addmothers(self, writer):
5151 """Write the SubProcess/addmothers.f""" 5152 5153 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 5154 5155 text = open(path).read() % {'iconfig': 'lconfig'} 5156 writer.write(text) 5157 5158 return True
5159 5160 5161 #=========================================================================== 5162 # write_coloramps_file 5163 #===========================================================================
5164 - def write_coloramps_file(self, writer, diagrams_for_config, maxflows, 5165 matrix_elements):
5166 """Write the coloramps.inc file for MadEvent in Subprocess group mode""" 5167 5168 # Create a map from subprocess (matrix element) to a list of 5169 # the diagrams corresponding to each config 5170 5171 lines = [] 5172 5173 subproc_to_confdiag = {} 5174 for config in diagrams_for_config: 5175 for subproc, diag in enumerate(config): 5176 try: 5177 subproc_to_confdiag[subproc].append(diag) 5178 except KeyError: 5179 subproc_to_confdiag[subproc] = [diag] 5180 5181 for subproc in sorted(subproc_to_confdiag.keys()): 5182 lines.extend(self.get_icolamp_lines(subproc_to_confdiag[subproc], 5183 matrix_elements[subproc], 5184 subproc + 1)) 5185 5186 lines.insert(0, "logical icolamp(%d,%d,%d)" % \ 5187 (maxflows, 5188 len(diagrams_for_config), 5189 len(matrix_elements))) 5190 5191 # Write the file 5192 writer.writelines(lines) 5193 5194 return True
5195 5196 #=========================================================================== 5197 # write_config_subproc_map_file 5198 #===========================================================================
5199 - def write_config_subproc_map_file(self, writer, config_subproc_map):
5200 """Write the config_subproc_map.inc file for subprocess groups""" 5201 5202 lines = [] 5203 # Output only configs that have some corresponding diagrams 5204 iconfig = 0 5205 for config in config_subproc_map: 5206 if set(config) == set([0]): 5207 continue 5208 lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" % \ 5209 (iconfig + 1, len(config), 5210 ",".join([str(i) for i in config]))) 5211 iconfig += 1 5212 # Write the file 5213 writer.writelines(lines) 5214 5215 return True
5216 5217 #=========================================================================== 5218 # read_write_good_hel 5219 #===========================================================================
5220 - def read_write_good_hel(self, ncomb):
5221 """return the code to read/write the good_hel common_block""" 5222 5223 convert = {'ncomb' : ncomb} 5224 5225 output = """ 5226 subroutine write_good_hel(stream_id) 5227 implicit none 5228 integer stream_id 5229 INTEGER NCOMB 5230 PARAMETER ( NCOMB=%(ncomb)d) 5231 LOGICAL GOODHEL(NCOMB, 2) 5232 INTEGER NTRY(2) 5233 common/BLOCK_GOODHEL/NTRY,GOODHEL 5234 write(stream_id,*) GOODHEL 5235 return 5236 end 5237 5238 5239 subroutine read_good_hel(stream_id) 5240 implicit none 5241 include 'genps.inc' 5242 integer stream_id 5243 INTEGER NCOMB 5244 PARAMETER ( NCOMB=%(ncomb)d) 5245 LOGICAL GOODHEL(NCOMB, 2) 5246 INTEGER NTRY(2) 5247 common/BLOCK_GOODHEL/NTRY,GOODHEL 5248 read(stream_id,*) GOODHEL 5249 NTRY(1) = MAXTRIES + 1 5250 NTRY(2) = MAXTRIES + 1 5251 return 5252 end 5253 5254 subroutine init_good_hel() 5255 implicit none 5256 INTEGER NCOMB 5257 PARAMETER ( NCOMB=%(ncomb)d) 5258 LOGICAL GOODHEL(NCOMB, 2) 5259 INTEGER NTRY(2) 5260 INTEGER I 5261 5262 do i=1,NCOMB 5263 GOODHEL(I,1) = .false. 5264 GOODHEL(I,2) = .false. 5265 enddo 5266 NTRY(1) = 0 5267 NTRY(2) = 0 5268 end 5269 5270 integer function get_maxsproc() 5271 implicit none 5272 include 'maxamps.inc' 5273 5274 get_maxsproc = maxsproc 5275 return 5276 end 5277 5278 """ % convert 5279 5280 return output
5281 5282 5283 5284 #=========================================================================== 5285 # write_configs_file 5286 #===========================================================================
    def write_configs_file(self, writer, subproc_group, diagrams_for_config):
        """Write the configs.inc file with topology information for a
        subprocess group. Use the first subprocess with a diagram for each
        configuration.

        writer: output writer for configs.inc
        subproc_group: the subprocess group being exported
        diagrams_for_config: per config, the (1-based) diagram number that
            each matrix element contributes (0 = no diagram for this config)
        Returns (number of configs actually written, return value of
        write_configs_file_from_diagrams).
        """

        matrix_elements = subproc_group.get('matrix_elements')
        model = matrix_elements[0].get('processes')[0].get('model')

        diagrams = []
        config_numbers = []
        for iconfig, config in enumerate(diagrams_for_config):
            # Check if any diagrams correspond to this config
            # (an all-zero row means no subprocess contributes).
            if set(config) == set([0]):
                continue
            subproc_diags = []
            for s, d in enumerate(config):
                if d:
                    # d is 1-based, hence the d-1 list index
                    subproc_diags.append(matrix_elements[s].\
                                         get('diagrams')[d-1])
                else:
                    subproc_diags.append(None)
            diagrams.append(subproc_diags)
            # configs keep their original 1-based numbering even when some
            # are skipped
            config_numbers.append(iconfig + 1)

        # Extract number of external particles
        (nexternal, ninitial) = subproc_group.get_nexternal_ninitial()

        return len(diagrams), \
               self.write_configs_file_from_diagrams(writer, diagrams,
                                                     config_numbers,
                                                     nexternal, ninitial,
                                                     model)
5319 5320 #=========================================================================== 5321 # write_run_configs_file 5322 #===========================================================================
5323 - def write_run_config_file(self, writer):
5324 """Write the run_configs.inc file for MadEvent""" 5325 5326 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 5327 if self.proc_characteristic['loop_induced']: 5328 job_per_chan = 1 5329 else: 5330 job_per_chan = 2 5331 text = open(path).read() % {'chanperjob':job_per_chan} 5332 writer.write(text) 5333 return True
5334 5335 5336 #=========================================================================== 5337 # write_leshouche_file 5338 #===========================================================================
5339 - def write_leshouche_file(self, writer, subproc_group):
5340 """Write the leshouche.inc file for MG4""" 5341 5342 all_lines = [] 5343 5344 for iproc, matrix_element in \ 5345 enumerate(subproc_group.get('matrix_elements')): 5346 all_lines.extend(self.get_leshouche_lines(matrix_element, 5347 iproc)) 5348 # Write the file 5349 writer.writelines(all_lines) 5350 return True
5351 5352
    def finalize(self, *args, **opts):
        """Run the parent finalization, then record that this output uses
        grouped matrix elements."""

        super(ProcessExporterFortranMEGroup, self).finalize(*args, **opts)
        # ensure that the grouping information is on the correct value
        self.proc_characteristic['grouped_matrix'] = True
5358 5359 5360 #=============================================================================== 5361 # UFO_model_to_mg4 5362 #=============================================================================== 5363 5364 python_to_fortran = lambda x: parsers.UFOExpressionParserFortran().parse(x)
5365 5366 -class UFO_model_to_mg4(object):
5367 """ A converter of the UFO-MG5 Model to the MG4 format """ 5368 5369 # The list below shows the only variables the user is allowed to change by 5370 # himself for each PS point. If he changes any other, then calling 5371 # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not 5372 # correctly account for the change. 5373 PS_dependent_key = ['aS','MU_R'] 5374 mp_complex_format = 'complex*32' 5375 mp_real_format = 'real*16' 5376 # Warning, it is crucial none of the couplings/parameters of the model 5377 # starts with this prefix. I should add a check for this. 5378 # You can change it as the global variable to check_param_card.ParamCard 5379 mp_prefix = check_param_card.ParamCard.mp_prefix 5380
    def __init__(self, model, output_path, opt=None):
        """ initialization of the objects

        model: the UFO-MG5 model to convert
        output_path: directory where the fortran model files are written
        opt: optional dict overriding the default export options
             (complex_mass, export_format, mp, loop_induced)
        """

        self.model = model
        self.model_name = model['name']
        self.dir_path = output_path

        # Default export options, possibly overridden by the caller.
        self.opt = {'complex_mass': False, 'export_format': 'madevent', 'mp': True,
                    'loop_induced': False}
        if opt:
            self.opt.update(opt)

        # Couplings/parameters split by dependence on aS/MU_R; these lists
        # are filled by refactorize().
        self.coups_dep = []    # (name, expression, type)
        self.coups_indep = []  # (name, expression, type)
        self.params_dep = []   # (name, expression, type)
        self.params_indep = [] # (name, expression, type)
        self.params_ext = []   # external parameter
        # Expression translators to double / multiple precision Fortran.
        self.p_to_f = parsers.UFOExpressionParserFortran()
        self.mp_p_to_f = parsers.UFOExpressionParserMPFortran()
5400
5402 """modify the parameter if some of them are identical up to the case""" 5403 5404 lower_dict={} 5405 duplicate = set() 5406 keys = self.model['parameters'].keys() 5407 for key in keys: 5408 for param in self.model['parameters'][key]: 5409 lower_name = param.name.lower() 5410 if not lower_name: 5411 continue 5412 try: 5413 lower_dict[lower_name].append(param) 5414 except KeyError,error: 5415 lower_dict[lower_name] = [param] 5416 else: 5417 duplicate.add(lower_name) 5418 logger.debug('%s is define both as lower case and upper case.' 5419 % lower_name) 5420 if not duplicate: 5421 return 5422 5423 re_expr = r'''\b(%s)\b''' 5424 to_change = [] 5425 change={} 5426 for value in duplicate: 5427 for i, var in enumerate(lower_dict[value]): 5428 to_change.append(var.name) 5429 new_name = '%s%s' % (var.name.lower(), 5430 ('__%d'%(i+1) if i>0 else '')) 5431 change[var.name] = new_name 5432 var.name = new_name 5433 5434 # Apply the modification to the map_CTcoup_CTparam of the model 5435 # if it has one (giving for each coupling the CT parameters whcih 5436 # are necessary and which should be exported to the model. 
5437 if hasattr(self.model,'map_CTcoup_CTparam'): 5438 for coup, ctparams in self.model.map_CTcoup_CTparam: 5439 for i, ctparam in enumerate(ctparams): 5440 try: 5441 self.model.map_CTcoup_CTparam[coup][i] = change[ctparam] 5442 except KeyError: 5443 pass 5444 5445 replace = lambda match_pattern: change[match_pattern.groups()[0]] 5446 rep_pattern = re.compile(re_expr % '|'.join(to_change)) 5447 5448 # change parameters 5449 for key in keys: 5450 if key == ('external',): 5451 continue 5452 for param in self.model['parameters'][key]: 5453 param.expr = rep_pattern.sub(replace, param.expr) 5454 5455 # change couplings 5456 for key in self.model['couplings'].keys(): 5457 for coup in self.model['couplings'][key]: 5458 coup.expr = rep_pattern.sub(replace, coup.expr) 5459 5460 # change mass/width 5461 for part in self.model['particles']: 5462 if str(part.get('mass')) in to_change: 5463 part.set('mass', rep_pattern.sub(replace, str(part.get('mass')))) 5464 if str(part.get('width')) in to_change: 5465 part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
5466
5467 - def refactorize(self, wanted_couplings = []):
5468 """modify the couplings to fit with MG4 convention """ 5469 5470 # Keep only separation in alphaS 5471 keys = self.model['parameters'].keys() 5472 keys.sort(key=len) 5473 for key in keys: 5474 to_add = [o for o in self.model['parameters'][key] if o.name] 5475 5476 if key == ('external',): 5477 self.params_ext += to_add 5478 elif any([(k in key) for k in self.PS_dependent_key]): 5479 self.params_dep += to_add 5480 else: 5481 self.params_indep += to_add 5482 # same for couplings 5483 keys = self.model['couplings'].keys() 5484 keys.sort(key=len) 5485 for key, coup_list in self.model['couplings'].items(): 5486 if any([(k in key) for k in self.PS_dependent_key]): 5487 self.coups_dep += [c for c in coup_list if 5488 (not wanted_couplings or c.name in \ 5489 wanted_couplings)] 5490 else: 5491 self.coups_indep += [c for c in coup_list if 5492 (not wanted_couplings or c.name in \ 5493 wanted_couplings)] 5494 5495 # MG4 use G and not aS as it basic object for alphas related computation 5496 #Pass G in the independant list 5497 if 'G' in self.params_dep: 5498 index = self.params_dep.index('G') 5499 G = self.params_dep.pop(index) 5500 # G.expr = '2*cmath.sqrt(as*pi)' 5501 # self.params_indep.insert(0, self.params_dep.pop(index)) 5502 # No need to add it if not defined 5503 5504 if 'aS' not in self.params_ext: 5505 logger.critical('aS not define as external parameter adding it!') 5506 #self.model['parameters']['aS'] = base_objects.ParamCardVariable('aS', 0.138,'DUMMY',(1,)) 5507 self.params_indep.append( base_objects. ModelVariable('aS', '0.138','real')) 5508 self.params_indep.append( base_objects. ModelVariable('G', '4.1643','real'))
    def build(self, wanted_couplings=[], full=True):
        """modify the couplings to fit with MG4 convention and creates all the
        different files

        wanted_couplings: restrict the exported couplings to this list
            (empty list = keep everything)
        full: when True, also write out all the model files
        """

        self.pass_parameter_to_case_insensitive()
        self.refactorize(wanted_couplings)

        # write the files
        if full:
            if wanted_couplings:
                # extract the wanted ct parameters
                self.extract_needed_CTparam(wanted_couplings=wanted_couplings)
            self.write_all()
5522 5523
    def open(self, name, comment='c', format='default'):
        """ Open the file name in the correct directory and with a valid
        header.

        name: file name relative to the model output directory
        comment: comment character used to build the banner lines
        format: 'fortran' wraps the stream in a FortranWriter, anything
            else opens a plain file
        Returns the opened stream, banner already written.
        """

        file_path = pjoin(self.dir_path, name)

        if format == 'fortran':
            fsock = writers.FortranWriter(file_path, 'w')
        else:
            fsock = open(file_path, 'w')

        # NOTE(review): file.writelines(fsock, ...) calls the python2 builtin
        # file type's method as an unbound method, which bypasses any
        # writelines override of FortranWriter so the banner is written
        # verbatim -- presumably intentional, but python2-only; confirm
        # before porting.
        file.writelines(fsock, comment * 77 + '\n')
        file.writelines(fsock, '%(comment)s written by the UFO converter\n' % \
                        {'comment': comment + (6 - len(comment)) * ' '})
        file.writelines(fsock, comment * 77 + '\n\n')
        return fsock
5540 5541
    def write_all(self):
        """ write all the files of the fortran model directory """

        # write the part related to the external parameter
        self.create_ident_card()
        self.create_param_read()

        # write the definition of the parameter (double precision, and
        # multiple precision when requested)
        self.create_input()
        self.create_intparam_def(dp=True, mp=False)
        if self.opt['mp']:
            self.create_intparam_def(dp=False, mp=True)

        # definition of the coupling.
        self.create_actualize_mp_ext_param_inc()
        self.create_coupl_inc()
        self.create_write_couplings()
        self.create_couplings()

        # the makefile
        self.create_makeinc()
        self.create_param_write()

        # The model functions
        self.create_model_functions_inc()
        self.create_model_functions_def()

        # The param_card.dat
        self.create_param_card()

        # All the standard files
        self.copy_standard_file()
5574 5575 ############################################################################ 5576 ## ROUTINE CREATING THE FILES ############################################ 5577 ############################################################################ 5578
    def copy_standard_file(self):
        """Copy the standard files for the fortran model."""

        # copy the library files
        file_to_link = ['formats.inc', 'printout.f', \
                        'rw_para.f', 'testprog.f']

        for filename in file_to_link:
            cp(MG5DIR + '/models/template_files/fortran/' + filename, \
               self.dir_path)

        # rw_para.f is a template: fill in the include list and the card
        # loading call matching the export format, then rewrite it.
        file = open(os.path.join(MG5DIR, \
                    'models/template_files/fortran/rw_para.f')).read()

        includes = ["include \'coupl.inc\'", "include \'input.inc\'",
                    "include \'model_functions.inc\'"]
        if self.opt['mp']:
            includes.extend(["include \'mp_coupl.inc\'", "include \'mp_input.inc\'"])
        # In standalone and madloop we do no use the compiled param card but
        # still parse the .dat one so we must load it.
        if self.opt['loop_induced']:
            # loop induced follow MadEvent way to handle the card.
            load_card = ''
            lha_read_filename = 'lha_read.f'
        elif self.opt['export_format'] in ['madloop', 'madloop_optimized', 'madloop_matchbox']:
            load_card = 'call LHA_loadcard(param_name,npara,param,value)'
            lha_read_filename = 'lha_read_mp.f'
        elif self.opt['export_format'].startswith('standalone') \
             or self.opt['export_format'] in ['madweight', 'plugin'] \
             or self.opt['export_format'].startswith('matchbox'):
            load_card = 'call LHA_loadcard(param_name,npara,param,value)'
            lha_read_filename = 'lha_read.f'
        else:
            load_card = ''
            lha_read_filename = 'lha_read.f'
        # whichever variant was chosen is installed under the generic name
        cp(MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \
           os.path.join(self.dir_path, 'lha_read.f'))

        file = file % {'includes': '\n '.join(includes),
                       'load_card': load_card}
        writer = open(os.path.join(self.dir_path, 'rw_para.f'), 'w')
        writer.writelines(file)
        writer.close()

        # Pick the makefile matching the export flavour.
        if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \
           or self.opt['loop_induced']:
            cp(MG5DIR + '/models/template_files/fortran/makefile_madevent',
               self.dir_path + '/makefile')
            if self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']:
                # aMC@NLO reuses the madevent makefile with renamed targets
                path = pjoin(self.dir_path, 'makefile')
                text = open(path).read()
                text = text.replace('madevent', 'aMCatNLO')
                open(path, 'w').writelines(text)
        elif self.opt['export_format'] in ['standalone', 'standalone_msP', 'standalone_msF',
                                           'madloop', 'madloop_optimized', 'standalone_rw',
                                           'madweight', 'matchbox', 'madloop_matchbox', 'plugin']:
            cp(MG5DIR + '/models/template_files/fortran/makefile_standalone',
               self.dir_path + '/makefile')
        #elif self.opt['export_format'] in []:
        #    pass
        else:
            raise MadGraph5Error('Unknown format')
5641
    def create_coupl_inc(self):
        """ write coupling.inc """

        fsock = self.open('coupl.inc', format='fortran')
        if self.opt['mp']:
            mp_fsock = self.open('mp_coupl.inc', format='fortran')
            # same declarations but without the mp_ prefix on the names
            mp_fsock_same_name = self.open('mp_coupl_same_name.inc', \
                                           format='fortran')

        # Write header: the strong/weak couplings, renormalization scale and
        # number of light flavours shared through common blocks.
        header = """double precision G
                common/strong/ G

                double complex gal(2)
                common/weak/ gal

                double precision MU_R
                common/rscale/ MU_R

                double precision Nf
                parameter(Nf=%d)
                """ % self.model.get_nflav()

        fsock.writelines(header)

        if self.opt['mp']:
            header = """%(real_mp_format)s %(mp_prefix)sG
                common/MP_strong/ %(mp_prefix)sG

                %(complex_mp_format)s %(mp_prefix)sgal(2)
                common/MP_weak/ %(mp_prefix)sgal

                %(complex_mp_format)s %(mp_prefix)sMU_R
                common/MP_rscale/ %(mp_prefix)sMU_R

                """

            mp_fsock.writelines(header % {'real_mp_format': self.mp_real_format,
                                          'complex_mp_format': self.mp_complex_format,
                                          'mp_prefix': self.mp_prefix})
            mp_fsock_same_name.writelines(header % {'real_mp_format': self.mp_real_format,
                                                    'complex_mp_format': self.mp_complex_format,
                                                    'mp_prefix': ''})

        # Write the Mass definition/ common block.
        # NOTE(review): masses/widths are sets, so the declaration order is
        # not guaranteed to be stable across runs.
        masses = set()
        widths = set()
        if self.opt['complex_mass']:
            complex_mass = set()

        for particle in self.model.get('particles'):
            # find masses
            one_mass = particle.get('mass')
            if one_mass.lower() != 'zero':
                masses.add(one_mass)

            # find width
            one_width = particle.get('width')
            if one_width.lower() != 'zero':
                widths.add(one_width)
                # complex-mass scheme: only massive particles with a width
                # get a CMASS_ variable
                if self.opt['complex_mass'] and one_mass.lower() != 'zero':
                    complex_mass.add('CMASS_%s' % one_mass)

        if masses:
            fsock.writelines('double precision ' + ','.join(masses) + '\n')
            fsock.writelines('common/masses/ ' + ','.join(masses) + '\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format + ' ' + \
                                              ','.join(masses) + '\n')
                mp_fsock_same_name.writelines('common/MP_masses/ ' + \
                                              ','.join(masses) + '\n\n')
                mp_fsock.writelines(self.mp_real_format + ' ' + ','.join([\
                    self.mp_prefix + m for m in masses]) + '\n')
                mp_fsock.writelines('common/MP_masses/ ' + \
                                    ','.join([self.mp_prefix + m for m in masses]) + '\n\n')

        if widths:
            fsock.writelines('double precision ' + ','.join(widths) + '\n')
            fsock.writelines('common/widths/ ' + ','.join(widths) + '\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format + ' ' + \
                                              ','.join(widths) + '\n')
                mp_fsock_same_name.writelines('common/MP_widths/ ' + \
                                              ','.join(widths) + '\n\n')
                mp_fsock.writelines(self.mp_real_format + ' ' + ','.join([\
                    self.mp_prefix + w for w in widths]) + '\n')
                mp_fsock.writelines('common/MP_widths/ ' + \
                                    ','.join([self.mp_prefix + w for w in widths]) + '\n\n')

        # Write the Couplings
        coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep]
        fsock.writelines('double complex ' + ', '.join(coupling_list) + '\n')
        fsock.writelines('common/couplings/ ' + ', '.join(coupling_list) + '\n')
        if self.opt['mp']:
            mp_fsock_same_name.writelines(self.mp_complex_format + ' ' + \
                                          ','.join(coupling_list) + '\n')
            mp_fsock_same_name.writelines('common/MP_couplings/ ' + \
                                          ','.join(coupling_list) + '\n\n')
            mp_fsock.writelines(self.mp_complex_format + ' ' + ','.join([\
                self.mp_prefix + c for c in coupling_list]) + '\n')
            mp_fsock.writelines('common/MP_couplings/ ' + \
                                ','.join([self.mp_prefix + c for c in coupling_list]) + '\n\n')

        # Write complex mass for complex mass scheme (if activated)
        if self.opt['complex_mass'] and complex_mass:
            fsock.writelines('double complex ' + ', '.join(complex_mass) + '\n')
            fsock.writelines('common/complex_mass/ ' + ', '.join(complex_mass) + '\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_complex_format + ' ' + \
                                              ','.join(complex_mass) + '\n')
                mp_fsock_same_name.writelines('common/MP_complex_mass/ ' + \
                                              ','.join(complex_mass) + '\n\n')
                mp_fsock.writelines(self.mp_complex_format + ' ' + ','.join([\
                    self.mp_prefix + cm for cm in complex_mass]) + '\n')
                mp_fsock.writelines('common/MP_complex_mass/ ' + \
                                    ','.join([self.mp_prefix + cm for cm in complex_mass]) + '\n\n')
5761
5762 - def create_write_couplings(self):
5763 """ write the file coupl_write.inc """ 5764 5765 fsock = self.open('coupl_write.inc', format='fortran') 5766 5767 fsock.writelines("""write(*,*) ' Couplings of %s' 5768 write(*,*) ' ---------------------------------' 5769 write(*,*) ' '""" % self.model_name) 5770 def format(coupl): 5771 return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}
5772 5773 # Write the Couplings 5774 lines = [format(coupl) for coupl in self.coups_dep + self.coups_indep] 5775 fsock.writelines('\n'.join(lines)) 5776 5777
    def create_input(self):
        """create input.inc containing the definition of the parameters"""

        fsock = self.open('input.inc', format='fortran')
        if self.opt['mp']:
            mp_fsock = self.open('mp_input.inc', format='fortran')

        # find mass/ width since they are already define (in coupl.inc)
        already_def = set()
        for particle in self.model.get('particles'):
            already_def.add(particle.get('mass').lower())
            already_def.add(particle.get('width').lower())
            if self.opt['complex_mass']:
                already_def.add('cmass_%s' % particle.get('mass').lower())

        # G, MU_R and ZERO also live in coupl.inc, so exclude them too
        is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \
                                name.lower() not in already_def

        real_parameters = [param.name for param in self.params_dep +
                           self.params_indep if param.type == 'real'
                           and is_valid(param.name)]

        real_parameters += [param.name for param in self.params_ext
                            if param.type == 'real' and
                            is_valid(param.name)]

        # check the parameter is a CT parameter or not
        # if yes, just use the needed ones
        real_parameters = [param for param in real_parameters \
                           if self.check_needed_param(param)]

        fsock.writelines('double precision ' + ','.join(real_parameters) + '\n')
        fsock.writelines('common/params_R/ ' + ','.join(real_parameters) + '\n\n')
        if self.opt['mp']:
            mp_fsock.writelines(self.mp_real_format + ' ' + ','.join([\
                self.mp_prefix + p for p in real_parameters]) + '\n')
            mp_fsock.writelines('common/MP_params_R/ ' + ','.join([\
                self.mp_prefix + p for p in real_parameters]) + '\n\n')

        # NOTE(review): unlike the real case, external parameters are not
        # added to the complex list -- presumably external parameters are
        # always real; confirm before changing.
        complex_parameters = [param.name for param in self.params_dep +
                              self.params_indep if param.type == 'complex' and
                              is_valid(param.name)]

        # check the parameter is a CT parameter or not
        # if yes, just use the needed ones
        complex_parameters = [param for param in complex_parameters \
                              if self.check_needed_param(param)]

        if complex_parameters:
            fsock.writelines('double complex ' + ','.join(complex_parameters) + '\n')
            fsock.writelines('common/params_C/ ' + ','.join(complex_parameters) + '\n\n')
            if self.opt['mp']:
                mp_fsock.writelines(self.mp_complex_format + ' ' + ','.join([\
                    self.mp_prefix + p for p in complex_parameters]) + '\n')
                mp_fsock.writelines('common/MP_params_C/ ' + ','.join([\
                    self.mp_prefix + p for p in complex_parameters]) + '\n\n')
5834
5835 - def check_needed_param(self, param):
5836 """ Returns whether the parameter in argument is needed for this 5837 specific computation or not.""" 5838 5839 # If this is a leading order model or if there was no CT parameter 5840 # employed in this NLO model, one can directly return that the 5841 # parameter is needed since only CTParameters are filtered. 5842 if not hasattr(self, 'allCTparameters') or \ 5843 self.allCTparameters is None or self.usedCTparameters is None or \ 5844 len(self.allCTparameters)==0: 5845 return True 5846 5847 # We must allow the conjugate shorthand for the complex parameter as 5848 # well so we check wether either the parameter name or its name with 5849 # 'conjg__' substituted with '' is present in the list. 5850 # This is acceptable even if some parameter had an original name 5851 # including 'conjg__' in it, because at worst we export a parameter 5852 # was not needed. 5853 param = param.lower() 5854 cjg_param = param.replace('conjg__','',1) 5855 5856 # First make sure it is a CTparameter 5857 if param not in self.allCTparameters and \ 5858 cjg_param not in self.allCTparameters: 5859 return True 5860 5861 # Now check if it is in the list of CTparameters actually used 5862 return (param in self.usedCTparameters or \ 5863 cjg_param in self.usedCTparameters)
5864
5865 - def extract_needed_CTparam(self,wanted_couplings=[]):
5866 """ Extract what are the needed CT parameters given the wanted_couplings""" 5867 5868 if not hasattr(self.model,'map_CTcoup_CTparam') or not wanted_couplings: 5869 # Setting these lists to none wil disable the filtering in 5870 # check_needed_param 5871 self.allCTparameters = None 5872 self.usedCTparameters = None 5873 return 5874 5875 # All CTparameters appearin in all CT couplings 5876 allCTparameters=self.model.map_CTcoup_CTparam.values() 5877 # Define in this class the list of all CT parameters 5878 self.allCTparameters=list(\ 5879 set(itertools.chain.from_iterable(allCTparameters))) 5880 5881 # All used CT couplings 5882 w_coupls = [coupl.lower() for coupl in wanted_couplings] 5883 allUsedCTCouplings = [coupl for coupl in 5884 self.model.map_CTcoup_CTparam.keys() if coupl.lower() in w_coupls] 5885 5886 # Now define the list of all CT parameters that are actually used 5887 self.usedCTparameters=list(\ 5888 set(itertools.chain.from_iterable([ 5889 self.model.map_CTcoup_CTparam[coupl] for coupl in allUsedCTCouplings 5890 ]))) 5891 5892 # Now at last, make these list case insensitive 5893 self.allCTparameters = [ct.lower() for ct in self.allCTparameters] 5894 self.usedCTparameters = [ct.lower() for ct in self.usedCTparameters]
5895
    def create_intparam_def(self, dp=True, mp=False):
        """ create intparam_definition.inc setting the internal parameters.
        Output the double precision and/or the multiple precision parameters
        depending on the parameters dp and mp. If mp only, then the file names
        get the 'mp_' prefix.
        """

        fsock = self.open('%sintparam_definition.inc' %
                          ('mp_' if mp and not dp else ''), format='fortran')

        fsock.write_comments(\
                "Parameters that should not be recomputed event by event.\n")
        # the readlha guard ensures these are only set when (re)reading the card
        fsock.writelines("if(readlha) then\n")
        if dp:
            fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n")
        if mp:
            fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n")

        for param in self.params_indep:
            if param.name == 'ZERO':
                continue
            # check whether the parameter is a CT parameter
            # if yes,just used the needed ones
            if not self.check_needed_param(param.name):
                continue
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                                self.p_to_f.parse(param.expr)))
            if mp:
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix, param.name,
                                                  self.mp_p_to_f.parse(param.expr)))

        fsock.writelines('endif')

        fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n')
        if dp:
            fsock.writelines("aS = G**2/4/pi\n")
        if mp:
            fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n")
        for param in self.params_dep:
            # check whether the parameter is a CT parameter
            # if yes,just used the needed ones
            if not self.check_needed_param(param.name):
                continue
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                                self.p_to_f.parse(param.expr)))
            # NOTE(review): unlike the params_indep loop above this is an
            # 'elif', so when dp is True the mp assignment of a dependent
            # parameter is never emitted here -- presumably because the mp
            # file is produced by a separate dp=False call; confirm before
            # changing.
            elif mp:
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix, param.name,
                                                  self.mp_p_to_f.parse(param.expr)))

        fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n")
        # aEWM1 scheme: gal(1) = 2*sqrt(pi/aEWM1); 3.5449077... = 2*sqrt(pi)
        if ('aEWM1',) in self.model['parameters']:
            if dp:
                fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1)
 gal(2) = 1d0
""")
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1)
 %(mp_prefix)sgal(2) = 1d0
""" % {'mp_prefix': self.mp_prefix})
                pass
        # in Gmu scheme, aEWM1 is not external but Gf is an exteranl variable
        elif ('Gf',) in self.model['parameters']:
            if dp:
                fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*DSQRT(1D0-MDL_MW**2/MDL_MZ**2)*DSQRT(MDL_Gf)
 gal(2) = 1d0
""")
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*SQRT(1e0_16-MP__MDL_MW**2/MP__MDL_MZ**2)*SQRT(SQRT(2e0_16)*MP__MDL_Gf)
 %(mp_prefix)sgal(2) = 1d0
""" % {'mp_prefix': self.mp_prefix})
                pass
        else:
            # neither scheme available: fall back on unit couplings
            if dp:
                logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE')
                fsock.writelines(""" gal(1) = 1d0
 gal(2) = 1d0
""")
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16
 %(mp_prefix)sgal(2) = 1e0_16
""" % {'mp_prefix': self.mp_prefix})
5979 5980
5981 - def create_couplings(self):
5982 """ create couplings.f and all couplingsX.f """ 5983 5984 nb_def_by_file = 25 5985 5986 self.create_couplings_main(nb_def_by_file) 5987 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 5988 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 5989 5990 for i in range(nb_coup_indep): 5991 # For the independent couplings, we compute the double and multiple 5992 # precision ones together 5993 data = self.coups_indep[nb_def_by_file * i: 5994 min(len(self.coups_indep), nb_def_by_file * (i+1))] 5995 self.create_couplings_part(i + 1, data, dp=True, mp=self.opt['mp']) 5996 5997 for i in range(nb_coup_dep): 5998 # For the dependent couplings, we compute the double and multiple 5999 # precision ones in separate subroutines. 6000 data = self.coups_dep[nb_def_by_file * i: 6001 min(len(self.coups_dep), nb_def_by_file * (i+1))] 6002 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6003 dp=True,mp=False) 6004 if self.opt['mp']: 6005 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6006 dp=False,mp=True)
6007 6008
    def create_couplings_main(self, nb_def_by_file=25):
        """ create couplings.f (the driver routines calling the per-chunk
        coupN/mp_coupN subroutines written by create_couplings_part)."""

        fsock = self.open('couplings.f', format='fortran')

        # coup(): full initialisation, READLHA = .true. so the card-reading
        # guarded block of intparam_definition.inc runs too.
        fsock.writelines("""subroutine coup()

          implicit none
          double precision PI, ZERO
          logical READLHA
          parameter (PI=3.141592653589793d0)
          parameter (ZERO=0d0)
          include \'model_functions.inc\'""")
        if self.opt['mp']:
            fsock.writelines("""%s MP__PI, MP__ZERO
          parameter (MP__PI=3.1415926535897932384626433832795e0_16)
          parameter (MP__ZERO=0e0_16)
          include \'mp_input.inc\'
          include \'mp_coupl.inc\'
        """ % self.mp_real_format)
        fsock.writelines("""include \'input.inc\'
          include \'coupl.inc\'
          READLHA = .true.
          include \'intparam_definition.inc\'""")
        if self.opt['mp']:
            fsock.writelines("""include \'mp_intparam_definition.inc\'\n""")

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        # independent chunks first, then the PS-dependent ones
        fsock.writelines('\n'.join(\
            ['call coup%s()' % (i + 1) for i in range(nb_coup_indep)]))

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        fsock.writelines('\n'.join(\
            ['call coup%s()' % (nb_coup_indep + i + 1) \
             for i in range(nb_coup_dep)]))
        if self.opt['mp']:
            fsock.writelines('\n'.join(\
                ['call mp_coup%s()' % (nb_coup_indep + i + 1) \
                 for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        # update_as_param(): recompute only the aS/MU_R-dependent quantities
        # (READLHA = .false.), so only the dependent chunks are called.
        fsock.writelines("""subroutine update_as_param()

          implicit none
          double precision PI, ZERO
          logical READLHA
          parameter (PI=3.141592653589793d0)
          parameter (ZERO=0d0)
          include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
          include \'coupl.inc\'
          READLHA = .false.""")
        fsock.writelines("""
          include \'intparam_definition.inc\'\n
        """)

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        fsock.writelines('\n'.join(\
            ['call coup%s()' % (nb_coup_indep + i + 1) \
             for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        # update_as_param2(mu_r2, as2): set MU_R/G/AS from the arguments and
        # delegate to update_as_param().
        fsock.writelines("""subroutine update_as_param2(mu_r2,as2)

          implicit none
          double precision PI
          parameter (PI=3.141592653589793d0)
          double precision mu_r2, as2
          include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
          include \'coupl.inc\'""")
        fsock.writelines("""
          if (mu_r2.gt.0d0) MU_R = mu_r2
          G = SQRT(4.0d0*PI*AS2)
          AS = as2

          CALL UPDATE_AS_PARAM()
        """)
        fsock.writelines('''\n return \n end\n''')

        if self.opt['mp']:
            # mp_update_as_param(): multiple precision variant, refreshing
            # the mp external parameters first (actualize_mp_ext_params.inc).
            fsock.writelines("""subroutine mp_update_as_param()

              implicit none
              logical READLHA
              include \'model_functions.inc\'""")
            fsock.writelines("""%s MP__PI, MP__ZERO
              parameter (MP__PI=3.1415926535897932384626433832795e0_16)
              parameter (MP__ZERO=0e0_16)
              include \'mp_input.inc\'
              include \'mp_coupl.inc\'
            """ % self.mp_real_format)
            fsock.writelines("""include \'input.inc\'
              include \'coupl.inc\'
              include \'actualize_mp_ext_params.inc\'
              READLHA = .false.
              include \'mp_intparam_definition.inc\'\n
            """)

            nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
            nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

            fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

            fsock.writelines('\n'.join(\
                ['call mp_coup%s()' % (nb_coup_indep + i + 1) \
                 for i in range(nb_coup_dep)]))
            fsock.writelines('''\n return \n end\n''')
6124
6125 - def create_couplings_part(self, nb_file, data, dp=True, mp=False):
6126 """ create couplings[nb_file].f containing information coming from data. 6127 Outputs the computation of the double precision and/or the multiple 6128 precision couplings depending on the parameters dp and mp. 6129 If mp is True and dp is False, then the prefix 'MP_' is appended to the 6130 filename and subroutine name. 6131 """ 6132 6133 fsock = self.open('%scouplings%s.f' %('mp_' if mp and not dp else '', 6134 nb_file), format='fortran') 6135 fsock.writelines("""subroutine %scoup%s() 6136 6137 implicit none 6138 include \'model_functions.inc\'"""%('mp_' if mp and not dp else '',nb_file)) 6139 if dp: 6140 fsock.writelines(""" 6141 double precision PI, ZERO 6142 parameter (PI=3.141592653589793d0) 6143 parameter (ZERO=0d0) 6144 include 'input.inc' 6145 include 'coupl.inc'""") 6146 if mp: 6147 fsock.writelines("""%s MP__PI, MP__ZERO 6148 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 6149 parameter (MP__ZERO=0e0_16) 6150 include \'mp_input.inc\' 6151 include \'mp_coupl.inc\' 6152 """%self.mp_real_format) 6153 6154 for coupling in data: 6155 if dp: 6156 fsock.writelines('%s = %s' % (coupling.name, 6157 self.p_to_f.parse(coupling.expr))) 6158 if mp: 6159 fsock.writelines('%s%s = %s' % (self.mp_prefix,coupling.name, 6160 self.mp_p_to_f.parse(coupling.expr))) 6161 fsock.writelines('end')
6162
6163 - def create_model_functions_inc(self):
6164 """ Create model_functions.inc which contains the various declarations 6165 of auxiliary functions which might be used in the couplings expressions 6166 """ 6167 6168 additional_fct = [] 6169 # check for functions define in the UFO model 6170 ufo_fct = self.model.get('functions') 6171 if ufo_fct: 6172 for fct in ufo_fct: 6173 # already handle by default 6174 if fct.name not in ["complexconjugate", "re", "im", "sec", 6175 "csc", "asec", "acsc", "theta_function", "cond", 6176 "condif", "reglogp", "reglogm", "reglog", "recms", "arg", "cot"]: 6177 additional_fct.append(fct.name) 6178 6179 6180 fsock = self.open('model_functions.inc', format='fortran') 6181 fsock.writelines("""double complex cond 6182 double complex condif 6183 double complex reglog 6184 double complex reglogp 6185 double complex reglogm 6186 double complex recms 6187 double complex arg 6188 %s 6189 """ % "\n".join([" double complex %s" % i for i in additional_fct])) 6190 6191 6192 if self.opt['mp']: 6193 fsock.writelines("""%(complex_mp_format)s mp_cond 6194 %(complex_mp_format)s mp_condif 6195 %(complex_mp_format)s mp_reglog 6196 %(complex_mp_format)s mp_reglogp 6197 %(complex_mp_format)s mp_reglogm 6198 %(complex_mp_format)s mp_recms 6199 %(complex_mp_format)s mp_arg 6200 %(additional)s 6201 """ %\ 6202 {"additional": "\n".join([" %s mp_%s" % (self.mp_complex_format, i) for i in additional_fct]), 6203 'complex_mp_format':self.mp_complex_format 6204 })
6205
    def create_model_functions_def(self):
        """ Create model_functions.f which contains the various definitions
        of auxiliary functions which might be used in the couplings expressions
        Add the functions.f functions for formfactors support
        """

        fsock = self.open('model_functions.f', format='fortran')
        # Default double-precision helpers: conditional selectors (cond,
        # condif, recms) and origin-regularised logarithms with optional
        # branch-cut shifts (reglog/reglogp/reglogm), plus complex arg().
        fsock.writelines("""double complex function cond(condition,truecase,falsecase)
          implicit none
          double complex condition,truecase,falsecase
          if(condition.eq.(0.0d0,0.0d0)) then
             cond=truecase
          else
             cond=falsecase
          endif
          end

          double complex function condif(condition,truecase,falsecase)
          implicit none
          logical condition
          double complex truecase,falsecase
          if(condition) then
             condif=truecase
          else
             condif=falsecase
          endif
          end

          double complex function recms(condition,expr)
          implicit none
          logical condition
          double complex expr
          if(condition)then
             recms=expr
          else
             recms=dcmplx(dble(expr))
          endif
          end

          double complex function reglog(arg)
          implicit none
          double complex TWOPII
          parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
          double complex arg
          if(arg.eq.(0.0d0,0.0d0)) then
             reglog=(0.0d0,0.0d0)
          else
             reglog=log(arg)
          endif
          end

          double complex function reglogp(arg)
          implicit none
          double complex TWOPII
          parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
          double complex arg
          if(arg.eq.(0.0d0,0.0d0))then
             reglogp=(0.0d0,0.0d0)
          else
             if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then
                reglogp=log(arg) + TWOPII
             else
                reglogp=log(arg)
             endif
          endif
          end

          double complex function reglogm(arg)
          implicit none
          double complex TWOPII
          parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
          double complex arg
          if(arg.eq.(0.0d0,0.0d0))then
             reglogm=(0.0d0,0.0d0)
          else
             if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then
                reglogm=log(arg) - TWOPII
             else
                reglogm=log(arg)
             endif
          endif
          end

          double complex function arg(comnum)
          implicit none
          double complex comnum
          double complex iim
          iim = (0.0d0,1.0d0)
          if(comnum.eq.(0.0d0,0.0d0)) then
             arg=(0.0d0,0.0d0)
          else
             arg=log(comnum/abs(comnum))/iim
          endif
          end""")
        if self.opt['mp']:
            # Multiple-precision mirrors of the helpers above.
            # NOTE(review): the quadruple-precision TWOPII literal below reads
            # 3.14169258..., which disagrees with the double-precision value
            # 3.14159265... used above — this looks like a typo'd pi; confirm
            # against upstream before relying on mp_reglogp/mp_reglogm.
            fsock.writelines("""

          %(complex_mp_format)s function mp_cond(condition,truecase,falsecase)
          implicit none
          %(complex_mp_format)s condition,truecase,falsecase
          if(condition.eq.(0.0e0_16,0.0e0_16)) then
             mp_cond=truecase
          else
             mp_cond=falsecase
          endif
          end

          %(complex_mp_format)s function mp_condif(condition,truecase,falsecase)
          implicit none
          logical condition
          %(complex_mp_format)s truecase,falsecase
          if(condition) then
             mp_condif=truecase
          else
             mp_condif=falsecase
          endif
          end

          %(complex_mp_format)s function mp_recms(condition,expr)
          implicit none
          logical condition
          %(complex_mp_format)s expr
          if(condition)then
             mp_recms=expr
          else
             mp_recms=cmplx(real(expr),kind=16)
          endif
          end

          %(complex_mp_format)s function mp_reglog(arg)
          implicit none
          %(complex_mp_format)s TWOPII
          parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16))
          %(complex_mp_format)s arg
          if(arg.eq.(0.0e0_16,0.0e0_16)) then
             mp_reglog=(0.0e0_16,0.0e0_16)
          else
             mp_reglog=log(arg)
          endif
          end

          %(complex_mp_format)s function mp_reglogp(arg)
          implicit none
          %(complex_mp_format)s TWOPII
          parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16))
          %(complex_mp_format)s arg
          if(arg.eq.(0.0e0_16,0.0e0_16))then
             mp_reglogp=(0.0e0_16,0.0e0_16)
          else
             if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then
                mp_reglogp=log(arg) + TWOPII
             else
                mp_reglogp=log(arg)
             endif
          endif
          end

          %(complex_mp_format)s function mp_reglogm(arg)
          implicit none
          %(complex_mp_format)s TWOPII
          parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16))
          %(complex_mp_format)s arg
          if(arg.eq.(0.0e0_16,0.0e0_16))then
             mp_reglogm=(0.0e0_16,0.0e0_16)
          else
             if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then
                mp_reglogm=log(arg) - TWOPII
             else
                mp_reglogm=log(arg)
             endif
          endif
          end

          %(complex_mp_format)s function mp_arg(comnum)
          implicit none
          %(complex_mp_format)s comnum
          %(complex_mp_format)s imm
          imm = (0.0e0_16,1.0e0_16)
          if(comnum.eq.(0.0e0_16,0.0e0_16)) then
             mp_arg=(0.0e0_16,0.0e0_16)
          else
             mp_arg=log(comnum/abs(comnum))/imm
          endif
          end""" % {'complex_mp_format': self.mp_complex_format})


        #check for the file functions.f
        model_path = self.model.get('modelpath')
        if os.path.exists(pjoin(model_path, 'Fortran', 'functions.f')):
            fsock.write_comment_line(' USER DEFINE FUNCTIONS ')
            # 'input' shadows the builtin of the same name (local scope only).
            input = pjoin(model_path, 'Fortran', 'functions.f')
            # NOTE(review): Python 2 unbound-method call — writes the raw file
            # content through the base file type, bypassing the Fortran
            # writer's reformatting; the opened handle is never closed.
            file.writelines(fsock, open(input).read())
            fsock.write_comment_line(' END USER DEFINE FUNCTIONS ')

        # check for functions define in the UFO model
        ufo_fct = self.model.get('functions')
        if ufo_fct:
            fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ')
            for fct in ufo_fct:
                # already handle by default
                if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif",
                                    "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms", "arg"]:
                    # Template for one double-precision UFO function.
                    ufo_fct_template = """
          double complex function %(name)s(%(args)s)
          implicit none
          double complex %(args)s
          %(definitions)s
          %(name)s = %(fct)s

          return
          end
          """
                    str_fct = self.p_to_f.parse(fct.expr)
                    # Extra declarations requested by the parser (e.g. pi).
                    if not self.p_to_f.to_define:
                        definitions = []
                    else:
                        definitions = []
                        for d in self.p_to_f.to_define:
                            if d == 'pi':
                                definitions.append(' double precision pi')
                                definitions.append(' data pi /3.1415926535897932d0/')
                            else:
                                definitions.append(' double complex %s' % d)

                    text = ufo_fct_template % {
                        'name': fct.name,
                        'args': ", ".join(fct.arguments),
                        'fct': str_fct,
                        'definitions': '\n'.join(definitions)
                    }

                    fsock.writelines(text)
            if self.opt['mp']:
                # Same functions again in multiple precision, with every
                # argument and internal symbol prefixed by mp__ / mp_.
                fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP')
                for fct in ufo_fct:
                    # already handle by default
                    if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif",
                                        "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms", "arg"]:
                        ufo_fct_template = """
          %(complex_mp_format)s function mp_%(name)s(mp__%(args)s)
          implicit none
          %(complex_mp_format)s mp__%(args)s
          %(definitions)s
          mp_%(name)s = %(fct)s

          return
          end
          """
                        str_fct = self.mp_p_to_f.parse(fct.expr)
                        if not self.mp_p_to_f.to_define:
                            definitions = []
                        else:
                            definitions = []
                            for d in self.mp_p_to_f.to_define:
                                if d == 'pi':
                                    definitions.append(' %s mp__pi' % self.mp_real_format)
                                    definitions.append(' data mp__pi /3.141592653589793238462643383279502884197e+00_16/')
                                else:
                                    definitions.append(' %s mp_%s' % (self.mp_complex_format, d))
                        text = ufo_fct_template % {
                            'name': fct.name,
                            'args': ", mp__".join(fct.arguments),
                            'fct': str_fct,
                            'definitions': '\n'.join(definitions),
                            'complex_mp_format': self.mp_complex_format
                        }
                        fsock.writelines(text)



            fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
6477 6478 6479
6480 - def create_makeinc(self):
6481 """create makeinc.inc containing the file to compile """ 6482 6483 fsock = self.open('makeinc.inc', comment='#') 6484 text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o' 6485 text += ' model_functions.o ' 6486 6487 nb_coup_indep = 1 + len(self.coups_dep) // 25 6488 nb_coup_dep = 1 + len(self.coups_indep) // 25 6489 couplings_files=['couplings%s.o' % (i+1) \ 6490 for i in range(nb_coup_dep + nb_coup_indep) ] 6491 if self.opt['mp']: 6492 couplings_files+=['mp_couplings%s.o' % (i+1) for i in \ 6493 range(nb_coup_dep,nb_coup_dep + nb_coup_indep) ] 6494 text += ' '.join(couplings_files) 6495 fsock.writelines(text)
6496
6497 - def create_param_write(self):
6498 """ create param_write """ 6499 6500 fsock = self.open('param_write.inc', format='fortran') 6501 6502 fsock.writelines("""write(*,*) ' External Params' 6503 write(*,*) ' ---------------------------------' 6504 write(*,*) ' '""") 6505 def format(name): 6506 return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
6507 6508 # Write the external parameter 6509 lines = [format(param.name) for param in self.params_ext] 6510 fsock.writelines('\n'.join(lines)) 6511 6512 fsock.writelines("""write(*,*) ' Internal Params' 6513 write(*,*) ' ---------------------------------' 6514 write(*,*) ' '""") 6515 lines = [format(data.name) for data in self.params_indep 6516 if data.name != 'ZERO' and self.check_needed_param(data.name)] 6517 fsock.writelines('\n'.join(lines)) 6518 fsock.writelines("""write(*,*) ' Internal Params evaluated point by point' 6519 write(*,*) ' ----------------------------------------' 6520 write(*,*) ' '""") 6521 lines = [format(data.name) for data in self.params_dep \ 6522 if self.check_needed_param(data.name)] 6523 6524 fsock.writelines('\n'.join(lines)) 6525 6526 6527
6528 - def create_ident_card(self):
6529 """ create the ident_card.dat """ 6530 6531 def format(parameter): 6532 """return the line for the ident_card corresponding to this parameter""" 6533 colum = [parameter.lhablock.lower()] + \ 6534 [str(value) for value in parameter.lhacode] + \ 6535 [parameter.name] 6536 if not parameter.name: 6537 return '' 6538 return ' '.join(colum)+'\n'
6539 6540 fsock = self.open('ident_card.dat') 6541 6542 external_param = [format(param) for param in self.params_ext] 6543 fsock.writelines('\n'.join(external_param)) 6544
6545 - def create_actualize_mp_ext_param_inc(self):
6546 """ create the actualize_mp_ext_params.inc code """ 6547 6548 # In principle one should actualize all external, but for now, it is 6549 # hardcoded that only AS and MU_R can by dynamically changed by the user 6550 # so that we only update those ones. 6551 # Of course, to be on the safe side, one could decide to update all 6552 # external parameters. 6553 update_params_list=[p for p in self.params_ext if p.name in 6554 self.PS_dependent_key] 6555 6556 res_strings = ["%(mp_prefix)s%(name)s=%(name)s"\ 6557 %{'mp_prefix':self.mp_prefix,'name':param.name}\ 6558 for param in update_params_list] 6559 # When read_lha is false, it is G which is taken in input and not AS, so 6560 # this is what should be reset here too. 6561 if 'aS' in [param.name for param in update_params_list]: 6562 res_strings.append("%(mp_prefix)sG=G"%{'mp_prefix':self.mp_prefix}) 6563 6564 fsock = self.open('actualize_mp_ext_params.inc', format='fortran') 6565 fsock.writelines('\n'.join(res_strings))
6566
    def create_param_read(self):
        """Write param_read.inc: Fortran code reading every external
        parameter from the LHA param card (double and, optionally, multiple
        precision), with a Majorana width-sign correction appended."""

        # MadEvent-style outputs read the card through the generated
        # ../param_card.inc instead of per-parameter LHA_get_real calls.
        if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \
           or self.opt['loop_induced']:
            fsock = self.open('param_read.inc', format='fortran')
            fsock.writelines(' include \'../param_card.inc\'')
            return

        def format_line(parameter):
            """Return the LHA_get_real call(s) reading this parameter,
            using the parameter's real part as the default value."""
            template = \
            """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \
            % {'name': parameter.name,
               'value': self.p_to_f.parse(str(parameter.value.real))}
            if self.opt['mp']:
                # Mirror the read into the mp_-prefixed variable.
                template = template + \
                ("\n call MP_LHA_get_real(npara,param,value,'%(name)s'," +
                 "%(mp_prefix)s%(name)s,%(value)s)") \
                 % {'name': parameter.name, 'mp_prefix': self.mp_prefix,
                    'value': self.mp_p_to_f.parse(str(parameter.value.real))}
            return template

        fsock = self.open('param_read.inc', format='fortran')
        res_strings = [format_line(param) \
                       for param in self.params_ext]

        # Correct width sign for Majorana particles (where the width
        # and mass need to have the same sign)
        for particle in self.model.get('particles'):
            if particle.is_fermion() and particle.get('self_antipart') and \
                   particle.get('width').lower() != 'zero':

                res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \
                 {'width': particle.get('width'), 'mass': particle.get('mass')})
                if self.opt['mp']:
                    res_strings.append(\
                      ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,'+\
                       '%(mp_pref)s%(mass)s)')%{'width': particle.get('width'),\
                       'mass': particle.get('mass'),'mp_pref':self.mp_prefix})

        fsock.writelines('\n'.join(res_strings))


    @staticmethod
    def create_param_card_static(model, output_path, rule_card_path=False,
                                 mssm_convert=True):
        """ create the param_card.dat for a givent model --static method-- """
        #1. Check if a default param_card is present:
        done = False
        # A restriction card named restrict_<name>.dat may ship with a
        # matching paramcard_<name>.dat; [9:-4] strips 'restrict_' and '.dat'.
        if hasattr(model, 'restrict_card') and isinstance(model.restrict_card, str):
            restrict_name = os.path.basename(model.restrict_card)[9:-4]
            model_path = model.get('modelpath')
            if os.path.exists(pjoin(model_path, 'paramcard_%s.dat' % restrict_name)):
                done = True
                files.cp(pjoin(model_path, 'paramcard_%s.dat' % restrict_name),
                         output_path)
        if not done:
            # No pre-made card: generate one from the model definition.
            param_writer.ParamCardWriter(model, output_path)

        if rule_card_path:
            if hasattr(model, 'rule_card'):
                model.rule_card.write_file(rule_card_path)

        if mssm_convert:
            model_name = model.get('name')
            # IF MSSM convert the card to SLAH1
            if model_name == 'mssm' or model_name.startswith('mssm-'):
                import models.check_param_card as translator
                # Check the format of the param_card for Pythia and make it correct
                if rule_card_path:
                    translator.make_valid_param_card(output_path, rule_card_path)
                translator.convert_to_slha1(output_path)
6641
6642 - def create_param_card(self):
6643 """ create the param_card.dat """ 6644 6645 rule_card = pjoin(self.dir_path, 'param_card_rule.dat') 6646 if not hasattr(self.model, 'rule_card'): 6647 rule_card=False 6648 self.create_param_card_static(self.model, 6649 output_path=pjoin(self.dir_path, 'param_card.dat'), 6650 rule_card_path=rule_card, 6651 mssm_convert=True)
6652
def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True, cmd_options={}):
    """ Determine which Export_v4 class is required. cmd is the command
    interface containing all potential usefull information.
    The output_type argument specifies from which context the output
    is called. It is 'madloop' for MadLoop5, 'amcatnlo' for FKS5 output
    and 'default' for tree-level outputs."""
    # NOTE(review): cmd_options={} is a mutable default argument shared
    # across calls; it is only stored into option dicts here, but any caller
    # mutation would leak between invocations — consider cmd_options=None.

    opt = dict(cmd.options)
    opt['output_options'] = cmd_options

    # ==========================================================================
    # First check whether Ninja must be installed.
    # Ninja would only be required if:
    #  a) Loop optimized output is selected
    #  b) the process gathered from the amplitude generated use loops

    # Recover the first process, from either the amplitude list or the FKS
    # multi-process container, to inspect its perturbation settings.
    if len(cmd._curr_amps) > 0:
        try:
            curr_proc = cmd._curr_amps[0].get('process')
        except base_objects.PhysicsObject.PhysicsObjectError:
            curr_proc = None
    elif hasattr(cmd, '_fks_multi_proc') and \
                     len(cmd._fks_multi_proc.get('process_definitions')) > 0:
        curr_proc = cmd._fks_multi_proc.get('process_definitions')[0]
    else:
        curr_proc = None

    requires_reduction_tool = opt['loop_optimized_output'] and \
        (not curr_proc is None) and \
        (curr_proc.get('perturbation_couplings') != [] and \
         not curr_proc.get('NLO_mode') in [None, 'real', 'tree', 'LO', 'LOonly'])

    # An installation is required then, but only if the specified path is the
    # default local one and that the Ninja library appears missing.
    if requires_reduction_tool:
        cmd.install_reduction_library()

    # ==========================================================================
    # First treat the MadLoop5 standalone case
    MadLoop_SA_options = {'clean': not noclean,
      'complex_mass': cmd.options['complex_mass_scheme'],
      'export_format': 'madloop',
      'mp': True,
      'loop_dir': os.path.join(cmd._mgme_dir, 'Template', 'loop_material'),
      'cuttools_dir': cmd._cuttools_dir,
      'iregi_dir': cmd._iregi_dir,
      'pjfry_dir': cmd.options['pjfry'],
      'golem_dir': cmd.options['golem'],
      'samurai_dir': cmd.options['samurai'],
      'ninja_dir': cmd.options['ninja'],
      'collier_dir': cmd.options['collier'],
      'fortran_compiler': cmd.options['fortran_compiler'],
      'f2py_compiler': cmd.options['f2py_compiler'],
      'output_dependencies': cmd.options['output_dependencies'],
      'SubProc_prefix': 'P',
      'compute_color_flows': cmd.options['loop_color_flows'],
      'mode': 'reweight' if cmd._export_format == "standalone_rw" else '',
      'cluster_local_path': cmd.options['cluster_local_path'],
      'output_options': cmd_options
      }

    if output_type.startswith('madloop'):
        import madgraph.loop.loop_exporters as loop_exporters
        if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')):
            ExporterClass = None
            if not cmd.options['loop_optimized_output']:
                ExporterClass = loop_exporters.LoopProcessExporterFortranSA
            else:
                if output_type == "madloop":
                    ExporterClass = loop_exporters.LoopProcessOptimizedExporterFortranSA
                    MadLoop_SA_options['export_format'] = 'madloop_optimized'
                elif output_type == "madloop_matchbox":
                    ExporterClass = loop_exporters.LoopProcessExporterFortranMatchBox
                    MadLoop_SA_options['export_format'] = 'madloop_matchbox'
                else:
                    # NOTE(review): message typo — "not recognize".
                    raise Exception, "output_type not recognize %s" % output_type
            return ExporterClass(cmd._export_dir, MadLoop_SA_options)
        else:
            raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory'+\
                                 ' in %s' % str(cmd._mgme_dir))

    # Then treat the aMC@NLO output
    elif output_type == 'amcatnlo':
        import madgraph.iolibs.export_fks as export_fks
        ExporterClass = None
        amcatnlo_options = dict(opt)
        amcatnlo_options.update(MadLoop_SA_options)
        # Multiple precision is only needed when virtual amplitudes exist.
        amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0
        if not cmd.options['loop_optimized_output']:
            logger.info("Writing out the aMC@NLO code")
            ExporterClass = export_fks.ProcessExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_default'
        else:
            logger.info("Writing out the aMC@NLO code, using optimized Loops")
            ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_optimized'
        return ExporterClass(cmd._export_dir, amcatnlo_options)


    # Then the default tree-level output
    elif output_type == 'default':
        assert group_subprocesses in [True, False]

        opt = dict(opt)
        opt.update({'clean': not noclean,
                    'complex_mass': cmd.options['complex_mass_scheme'],
                    'export_format': cmd._export_format,
                    'mp': False,
                    'sa_symmetry': False,
                    'model': cmd._curr_model.get('name'),
                    'v5_model': False if cmd._model_v4_path else True})

        format = cmd._export_format #shortcut

        if format in ['standalone_msP', 'standalone_msF', 'standalone_rw']:
            opt['sa_symmetry'] = True
        elif format == 'plugin':
            opt['sa_symmetry'] = cmd._export_plugin.sa_symmetry

        # Options used if the output turns out to be loop-induced.
        loop_induced_opt = dict(opt)
        loop_induced_opt.update(MadLoop_SA_options)
        loop_induced_opt['export_format'] = 'madloop_optimized'
        loop_induced_opt['SubProc_prefix'] = 'PV'
        # For loop_induced output with MadEvent, we must have access to the
        # color flows.
        loop_induced_opt['compute_color_flows'] = True
        for key in opt:
            if key not in loop_induced_opt:
                loop_induced_opt[key] = opt[key]

        # Madevent output supports MadAnalysis5
        if format in ['madevent']:
            opt['madanalysis5'] = cmd.options['madanalysis5_path']

        if format == 'matrix' or format.startswith('standalone'):
            return ProcessExporterFortranSA(cmd._export_dir, opt, format=format)

        elif format in ['madevent'] and group_subprocesses:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMEGroup(
                                            cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranMEGroup(cmd._export_dir, opt)
        elif format in ['madevent']:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMENoGroup(
                                            cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranME(cmd._export_dir, opt)
        elif format in ['matchbox']:
            return ProcessExporterFortranMatchBox(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight'] and group_subprocesses:

            return ProcessExporterFortranMWGroup(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight']:
            return ProcessExporterFortranMW(cmd._export_dir, opt)
        elif format == 'plugin':
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                return cmd._export_plugin(cmd._export_dir, loop_induced_opt)
            else:
                return cmd._export_plugin(cmd._export_dir, opt)

        else:
            raise Exception, 'Wrong export_v4 format'
    else:
        # NOTE(review): the '%s' placeholder below is never substituted
        # (missing "% output_type"), and 'reckognized' is a typo.
        raise MadGraph5Error, 'Output type %s not reckognized in ExportV4Factory.'
6824
#===============================================================================
# ProcessExporterFortranMWGroup
#===============================================================================
class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
    """Class to take care of exporting a set of matrix elements to
    MadEvent subprocess group format (MadWeight flavour)."""

    # Template used for the grouped matrix_N.f files.
    matrix_file = "matrix_madweight_group_v4.inc"
    # Marker consumed by the machinery that distinguishes grouped outputs.
    grouped_mode = 'madweight'
    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                      fortran_model,
                                      group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files.

        Returns the total number of helas calls of the group (used as an
        overall size estimate by the caller)."""

        if not isinstance(subproc_group, group_subprocs.SubProcessGroup):
            raise base_objects.PhysicsObject.PhysicsObjectError,\
                  "subproc_group object not SubProcessGroup"

        # Lazily pick up the model from the first process of the group.
        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        pathdir = os.path.join(self.dir_path, 'SubProcesses')

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(pjoin(pathdir, subprocdir))
        except os.error as error:
            # Directory may already exist; log and keep going.
            logger.warning(error.strerror + " " + subprocdir)


        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(pathdir, subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0
        maxflows = 0
        tot_calls = 0

        matrix_elements = subproc_group.get('matrix_elements')

        for ime, matrix_element in \
                enumerate(matrix_elements):
            filename = pjoin(Ppath, 'matrix%d.f' % (ime+1))
            calls, ncolor = \
                   self.write_matrix_element_v4(writers.FortranWriter(filename),
                                                matrix_element,
                                                fortran_model,
                                                str(ime+1),
                                                subproc_group.get('diagram_maps')[\
                                                                          ime])

            filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime+1))
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime+1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams
            filename = pjoin(Ppath, "matrix%d.ps" % (ime+1))
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                              get('diagrams'),
                                              filename,
                                              model = \
                                                matrix_element.get('processes')[0].\
                                                                   get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
        # If a subprocess has no diagrams for this config, the number is 0

        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = pjoin(Ppath, 'configs.inc')
        nconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = pjoin(Ppath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                   nconfigs)


        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        filename = pjoin(Ppath, 'mirrorprocs.inc')
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        # NOTE(review): pmass.inc and props.inc are written from the *last*
        # matrix element of the loop above; presumably all members of the
        # group share these data — confirm before refactoring.
        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # filename = pjoin(Ppath, 'processes.dat')
        # files.write_to_file(filename,
        #                     self.write_processes_file,
        #                     subproc_group)

        # Generate jpgs -> pass in make_html
        #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl'))

        # Symlink shared MadWeight sources into the Pn directory.
        linkfiles = ['driver.f', 'cuts.f', 'initialization.f', 'gen_ps.f', 'makefile', 'coupl.inc', 'madweight_param.inc', 'run.inc', 'setscales.f']

        for file in linkfiles:
            ln('../%s' % file, cwd=Ppath)

        ln('nexternal.inc', '../../Source', cwd=Ppath, log=False)
        ln('leshouche.inc', '../../Source', cwd=Ppath, log=False)
        ln('maxamps.inc', '../../Source', cwd=Ppath, log=False)
        ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath)
        ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath)
        ln('phasespace.inc', '../', log=True, cwd=Ppath)
        if not tot_calls:
            tot_calls = 0
        return tot_calls
6987 6988 6989 #=========================================================================== 6990 # Helper functions 6991 #===========================================================================
6992 - def modify_grouping(self, matrix_element):
6993 """allow to modify the grouping (if grouping is in place) 6994 return two value: 6995 - True/False if the matrix_element was modified 6996 - the new(or old) matrix element""" 6997 6998 return True, matrix_element.split_lepton_grouping()
6999 7000 #=========================================================================== 7001 # write_super_auto_dsig_file 7002 #===========================================================================
7003 - def write_super_auto_dsig_file(self, writer, subproc_group):
7004 """Write the auto_dsig.f file selecting between the subprocesses 7005 in subprocess group mode""" 7006 7007 replace_dict = {} 7008 7009 # Extract version number and date from VERSION file 7010 info_lines = self.get_mg5_info_lines() 7011 replace_dict['info_lines'] = info_lines 7012 7013 matrix_elements = subproc_group.get('matrix_elements') 7014 7015 # Extract process info lines 7016 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 7017 matrix_elements]) 7018 replace_dict['process_lines'] = process_lines 7019 7020 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 7021 replace_dict['nexternal'] = nexternal 7022 7023 replace_dict['nsprocs'] = 2*len(matrix_elements) 7024 7025 # Generate dsig definition line 7026 dsig_def_line = "DOUBLE PRECISION " + \ 7027 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 7028 range(len(matrix_elements))]) 7029 replace_dict["dsig_def_line"] = dsig_def_line 7030 7031 # Generate dsig process lines 7032 call_dsig_proc_lines = [] 7033 for iproc in range(len(matrix_elements)): 7034 call_dsig_proc_lines.append(\ 7035 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 7036 {"num": iproc + 1, 7037 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 7038 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 7039 7040 if writer: 7041 file = open(os.path.join(_file_path, \ 7042 'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')).read() 7043 file = file % replace_dict 7044 # Write the file 7045 writer.writelines(file) 7046 else: 7047 return replace_dict
7048 7049 #=========================================================================== 7050 # write_mirrorprocs 7051 #===========================================================================
7052 - def write_mirrorprocs(self, writer, subproc_group):
7053 """Write the mirrorprocs.inc file determining which processes have 7054 IS mirror process in subprocess group mode.""" 7055 7056 lines = [] 7057 bool_dict = {True: '.true.', False: '.false.'} 7058 matrix_elements = subproc_group.get('matrix_elements') 7059 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 7060 (len(matrix_elements), 7061 ",".join([bool_dict[me.get('has_mirror_process')] for \ 7062 me in matrix_elements]))) 7063 # Write the file 7064 writer.writelines(lines)
7065 7066 #=========================================================================== 7067 # write_configs_file 7068 #===========================================================================
7069 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
7070 """Write the configs.inc file with topology information for a 7071 subprocess group. Use the first subprocess with a diagram for each 7072 configuration.""" 7073 7074 matrix_elements = subproc_group.get('matrix_elements') 7075 model = matrix_elements[0].get('processes')[0].get('model') 7076 7077 diagrams = [] 7078 config_numbers = [] 7079 for iconfig, config in enumerate(diagrams_for_config): 7080 # Check if any diagrams correspond to this config 7081 if set(config) == set([0]): 7082 continue 7083 subproc_diags = [] 7084 for s,d in enumerate(config): 7085 if d: 7086 subproc_diags.append(matrix_elements[s].\ 7087 get('diagrams')[d-1]) 7088 else: 7089 subproc_diags.append(None) 7090 diagrams.append(subproc_diags) 7091 config_numbers.append(iconfig + 1) 7092 7093 # Extract number of external particles 7094 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 7095 7096 return len(diagrams), \ 7097 self.write_configs_file_from_diagrams(writer, diagrams, 7098 config_numbers, 7099 nexternal, ninitial, 7100 matrix_elements[0],model)
7101 7102 #=========================================================================== 7103 # write_run_configs_file 7104 #===========================================================================
7105 - def write_run_config_file(self, writer):
7106 """Write the run_configs.inc file for MadEvent""" 7107 7108 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 7109 text = open(path).read() % {'chanperjob':'2'} 7110 writer.write(text) 7111 return True
7112 7113 7114 #=========================================================================== 7115 # write_leshouche_file 7116 #===========================================================================
7117 - def write_leshouche_file(self, writer, subproc_group):
7118 """Write the leshouche.inc file for MG4""" 7119 7120 all_lines = [] 7121 7122 for iproc, matrix_element in \ 7123 enumerate(subproc_group.get('matrix_elements')): 7124 all_lines.extend(self.get_leshouche_lines(matrix_element, 7125 iproc)) 7126 7127 # Write the file 7128 writer.writelines(all_lines) 7129 7130 return True
7131