Package madgraph :: Package iolibs :: Module export_v4
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Methods and classes to export matrix elements to v4 format.""" 
  16   
  17  import copy 
  18  from cStringIO import StringIO 
  19  from distutils import dir_util 
  20  import itertools 
  21  import fractions 
  22  import glob 
  23  import logging 
  24  import math 
  25  import os 
  26  import re 
  27  import shutil 
  28  import subprocess 
  29  import sys 
  30  import time 
  31  import traceback 
  32   
  33  import aloha 
  34   
  35  import madgraph.core.base_objects as base_objects 
  36  import madgraph.core.color_algebra as color 
  37  import madgraph.core.helas_objects as helas_objects 
  38  import madgraph.iolibs.drawing_eps as draw 
  39  import madgraph.iolibs.files as files 
  40  import madgraph.iolibs.group_subprocs as group_subprocs 
  41  import madgraph.iolibs.file_writers as writers 
  42  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  43  import madgraph.iolibs.template_files as template_files 
  44  import madgraph.iolibs.ufo_expression_parsers as parsers 
  45  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  46  import madgraph.interface.common_run_interface as common_run_interface 
  47  import madgraph.various.diagram_symmetry as diagram_symmetry 
  48  import madgraph.various.misc as misc 
  49  import madgraph.various.banner as banner_mod 
  50  import madgraph.various.process_checks as process_checks 
  51  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  52  import aloha.create_aloha as create_aloha 
  53  import models.import_ufo as import_ufo 
  54  import models.write_param_card as param_writer 
  55  import models.check_param_card as check_param_card 
  56   
  57   
  58  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  59  from madgraph.iolibs.files import cp, ln, mv 
  60   
  61  from madgraph import InvalidCmd 
  62   
  63  pjoin = os.path.join 
  64   
  65  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  66  logger = logging.getLogger('madgraph.export_v4') 
  67   
  68  default_compiler= {'fortran': 'gfortran', 
  69                         'f2py': 'f2py', 
  70                         'cpp':'g++'} 
71 72 73 -class VirtualExporter(object):
74 75 #exporter variable who modified the way madgraph interacts with this class 76 77 grouped_mode = 'madevent' 78 # This variable changes the type of object called within 'generate_subprocess_directory' 79 #functions. 80 # False to avoid grouping (only identical matrix element are merged) 81 # 'madevent' group the massless quark and massless lepton 82 # 'madweight' group the gluon with the massless quark 83 sa_symmetry = False 84 # If no grouped_mode=False, uu~ and u~u will be called independently. 85 #Putting sa_symmetry generates only one of the two matrix-element. 86 check = True 87 # Ask madgraph to check if the directory already exists and propose to the user to 88 #remove it first if this is the case 89 output = 'Template' 90 # [Template, None, dir] 91 # - Template, madgraph will call copy_template 92 # - dir, madgraph will just create an empty directory for initialisation 93 # - None, madgraph do nothing for initialisation 94 exporter = 'v4' 95 # language of the output 'v4' for Fortran output 96 # 'cpp' for C++ output 97 98
99 - def __init__(self, dir_path = "", opt=None):
100 return
101
102 - def copy_template(self, model):
103 return
104
105 - def generate_subprocess_directory(self, subproc_group, helicity_model, me=None):
106 # generate_subprocess_directory(self, matrix_element, helicity_model, me_number) [for ungrouped] 107 return 0 # return an integer stating the number of call to helicity routine
108
109 - def convert_model(self, model, wanted_lorentz=[], wanted_couplings=[]):
110 return
111
112 - def finalize(self,matrix_element, cmdhistory, MG5options, outputflag):
113 return
114 115
116 - def pass_information_from_cmd(self, cmd):
117 """pass information from the command interface to the exporter. 118 Please do not modify any object of the interface from the exporter. 119 """ 120 return
121
122 - def modify_grouping(self, matrix_element):
123 return False, matrix_element
124
125 - def export_model_files(self, model_v4_path):
126 raise Exception, "V4 model not supported by this type of exporter. Please use UFO model" 127 return
128
129 - def export_helas(self, HELAS_PATH):
130 raise Exception, "V4 model not supported by this type of exporter. Please use UFO model" 131 return
132
133 #=============================================================================== 134 # ProcessExporterFortran 135 #=============================================================================== 136 -class ProcessExporterFortran(VirtualExporter):
137 """Class to take care of exporting a set of matrix elements to 138 Fortran (v4) format.""" 139 140 default_opt = {'clean': False, 'complex_mass':False, 141 'export_format':'madevent', 'mp': False, 142 'v5_model': True 143 } 144 grouped_mode = False 145
146 - def __init__(self, dir_path = "", opt=None):
147 """Initiate the ProcessExporterFortran with directory information""" 148 self.mgme_dir = MG5DIR 149 self.dir_path = dir_path 150 self.model = None 151 152 self.opt = dict(self.default_opt) 153 if opt: 154 self.opt.update(opt) 155 156 #place holder to pass information to the run_interface 157 self.proc_characteristic = banner_mod.ProcCharacteristic()
158 159 160 #=========================================================================== 161 # process exporter fortran switch between group and not grouped 162 #===========================================================================
163 - def export_processes(self, matrix_elements, fortran_model):
164 """Make the switch between grouped and not grouped output""" 165 166 calls = 0 167 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 168 for (group_number, me_group) in enumerate(matrix_elements): 169 calls = calls + self.generate_subprocess_directory(\ 170 me_group, fortran_model, group_number) 171 else: 172 for me_number, me in enumerate(matrix_elements.get_matrix_elements()): 173 calls = calls + self.generate_subprocess_directory(\ 174 me, fortran_model, me_number) 175 176 return calls
177 178 179 #=========================================================================== 180 # create the run_card 181 #===========================================================================
182 - def create_run_card(self, matrix_elements, history):
183 """ """ 184 185 186 # bypass this for the loop-check 187 import madgraph.loop.loop_helas_objects as loop_helas_objects 188 if isinstance(matrix_elements, loop_helas_objects.LoopHelasMatrixElement): 189 matrix_elements = None 190 191 run_card = banner_mod.RunCard() 192 193 194 default=True 195 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 196 processes = [me.get('processes') for megroup in matrix_elements 197 for me in megroup['matrix_elements']] 198 elif matrix_elements: 199 processes = [me.get('processes') 200 for me in matrix_elements['matrix_elements']] 201 else: 202 default =False 203 204 if default: 205 run_card.create_default_for_process(self.proc_characteristic, 206 history, 207 processes) 208 209 210 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat')) 211 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))
212 213 214 #=========================================================================== 215 # copy the Template in a new directory. 216 #===========================================================================
217 - def copy_template(self, model):
218 """create the directory run_name as a copy of the MadEvent 219 Template, and clean the directory 220 """ 221 222 #First copy the full template tree if dir_path doesn't exit 223 if not os.path.isdir(self.dir_path): 224 assert self.mgme_dir, \ 225 "No valid MG_ME path given for MG4 run directory creation." 226 logger.info('initialize a new directory: %s' % \ 227 os.path.basename(self.dir_path)) 228 shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'), 229 self.dir_path, True) 230 # distutils.dir_util.copy_tree since dir_path already exists 231 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 232 self.dir_path) 233 # copy plot_card 234 for card in ['plot_card']: 235 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 236 try: 237 shutil.copy(pjoin(self.dir_path, 'Cards',card + '.dat'), 238 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 239 except IOError: 240 logger.warning("Failed to copy " + card + ".dat to default") 241 elif os.getcwd() == os.path.realpath(self.dir_path): 242 logger.info('working in local directory: %s' % \ 243 os.path.realpath(self.dir_path)) 244 # distutils.dir_util.copy_tree since dir_path already exists 245 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/LO'), 246 self.dir_path) 247 # for name in misc.glob('Template/LO/*', self.mgme_dir): 248 # name = os.path.basename(name) 249 # filname = pjoin(self.mgme_dir, 'Template','LO',name) 250 # if os.path.isfile(filename): 251 # files.cp(filename, pjoin(self.dir_path,name)) 252 # elif os.path.isdir(filename): 253 # shutil.copytree(filename, pjoin(self.dir_path,name), True) 254 # distutils.dir_util.copy_tree since dir_path already exists 255 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 256 self.dir_path) 257 # Copy plot_card 258 for card in ['plot_card']: 259 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 260 try: 261 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'), 262 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 263 
except IOError: 264 logger.warning("Failed to copy " + card + ".dat to default") 265 elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')): 266 assert self.mgme_dir, \ 267 "No valid MG_ME path given for MG4 run directory creation." 268 try: 269 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 270 except IOError: 271 MG5_version = misc.get_pkg_info() 272 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write(MG5_version['version']) 273 274 #Ensure that the Template is clean 275 if self.opt['clean']: 276 logger.info('remove old information in %s' % \ 277 os.path.basename(self.dir_path)) 278 if os.environ.has_key('MADGRAPH_BASE'): 279 misc.call([pjoin('bin', 'internal', 'clean_template'), 280 '--web'], cwd=self.dir_path) 281 else: 282 try: 283 misc.call([pjoin('bin', 'internal', 'clean_template')], \ 284 cwd=self.dir_path) 285 except Exception, why: 286 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 287 % (os.path.basename(self.dir_path),why)) 288 289 #Write version info 290 MG_version = misc.get_pkg_info() 291 open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 292 MG_version['version']) 293 294 # add the makefile in Source directory 295 filename = pjoin(self.dir_path,'Source','makefile') 296 self.write_source_makefile(writers.FileWriter(filename)) 297 298 # add the DiscreteSampler information 299 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'DiscreteSampler.f'), 300 pjoin(self.dir_path, 'Source')) 301 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'StringCast.f'), 302 pjoin(self.dir_path, 'Source')) 303 304 # We need to create the correct open_data for the pdf 305 self.write_pdf_opendata()
306 307 308 #=========================================================================== 309 # Call MadAnalysis5 to generate the default cards for this process 310 #===========================================================================
311 - def create_default_madanalysis5_cards(self, history, proc_defs, processes, 312 ma5_path, output_dir, levels = ['parton','hadron']):
313 """ Call MA5 so that it writes default cards for both parton and 314 post-shower levels, tailored for this particular process.""" 315 316 if len(levels)==0: 317 return 318 start = time.time() 319 logger.info('Generating MadAnalysis5 default cards tailored to this process') 320 try: 321 MA5_interpreter = common_run_interface.CommonRunCmd.\ 322 get_MadAnalysis5_interpreter(MG5DIR,ma5_path,loglevel=100) 323 except (Exception, SystemExit) as e: 324 logger.warning('Fail to create a MadAnalysis5 instance. Therefore the default analysis with MadAnalysis5 will be empty.') 325 return 326 if MA5_interpreter is None: 327 return 328 329 MA5_main = MA5_interpreter.main 330 for lvl in ['parton','hadron']: 331 if lvl in levels: 332 card_to_generate = pjoin(output_dir,'madanalysis5_%s_card_default.dat'%lvl) 333 try: 334 text = MA5_main.madgraph.generate_card(history, proc_defs, processes,lvl) 335 except (Exception, SystemExit) as e: 336 # keep the default card (skip only) 337 logger.warning('MadAnalysis5 failed to write a %s-level'%lvl+ 338 ' default analysis card for this process.') 339 logger.warning('Therefore, %s-level default analysis with MadAnalysis5 will be empty.'%lvl) 340 error=StringIO() 341 traceback.print_exc(file=error) 342 logger.debug('MadAnalysis5 error was:') 343 logger.debug('-'*60) 344 logger.debug(error.getvalue()[:-1]) 345 logger.debug('-'*60) 346 else: 347 open(card_to_generate,'w').write(text) 348 stop = time.time() 349 if stop-start >1: 350 logger.info('Cards created in %.2fs' % (stop-start))
351 352 #=========================================================================== 353 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 354 #===========================================================================
355 - def write_procdef_mg5(self, file_pos, modelname, process_str):
356 """ write an equivalent of the MG4 proc_card in order that all the Madevent 357 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 358 359 proc_card_template = template_files.mg4_proc_card.mg4_template 360 process_template = template_files.mg4_proc_card.process_template 361 process_text = '' 362 coupling = '' 363 new_process_content = [] 364 365 366 # First find the coupling and suppress the coupling from process_str 367 #But first ensure that coupling are define whithout spaces: 368 process_str = process_str.replace(' =', '=') 369 process_str = process_str.replace('= ', '=') 370 process_str = process_str.replace(',',' , ') 371 #now loop on the element and treat all the coupling 372 for info in process_str.split(): 373 if '=' in info: 374 coupling += info + '\n' 375 else: 376 new_process_content.append(info) 377 # Recombine the process_str (which is the input process_str without coupling 378 #info) 379 process_str = ' '.join(new_process_content) 380 381 #format the SubProcess 382 replace_dict = {'process': process_str, 383 'coupling': coupling} 384 process_text += process_template.substitute(replace_dict) 385 386 replace_dict = {'process': process_text, 387 'model': modelname, 388 'multiparticle':''} 389 text = proc_card_template.substitute(replace_dict) 390 391 if file_pos: 392 ff = open(file_pos, 'w') 393 ff.write(text) 394 ff.close() 395 else: 396 return replace_dict
397 398
399 - def pass_information_from_cmd(self, cmd):
400 """Pass information for MA5""" 401 402 self.proc_defs = cmd._curr_proc_defs
403 404 #=========================================================================== 405 # Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz 406 #===========================================================================
407 - def finalize(self, matrix_elements, history='', mg5options={}, flaglist=[]):
408 """Function to finalize v4 directory, for inheritance.""" 409 410 self.create_run_card(matrix_elements, history) 411 self.create_MA5_cards(matrix_elements, history)
412
413 - def create_MA5_cards(self,matrix_elements,history):
414 """ A wrapper around the creation of the MA5 cards so that it can be 415 bypassed by daughter classes (i.e. in standalone).""" 416 if 'madanalysis5_path' in self.opt and not \ 417 self.opt['madanalysis5_path'] is None and not self.proc_defs is None: 418 processes = None 419 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 420 processes = [me.get('processes') for megroup in matrix_elements 421 for me in megroup['matrix_elements']] 422 elif matrix_elements: 423 processes = [me.get('processes') 424 for me in matrix_elements['matrix_elements']] 425 426 self.create_default_madanalysis5_cards( 427 history, self.proc_defs, processes, 428 self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'), 429 levels = ['hadron','parton']) 430 431 for level in ['hadron','parton']: 432 # Copying these cards turn on the use of MadAnalysis5 by default. 433 if os.path.isfile(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level)): 434 shutil.copy(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level), 435 pjoin(self.dir_path,'Cards','madanalysis5_%s_card.dat'%level))
436 437 #=========================================================================== 438 # Create the proc_characteristic file passing information to the run_interface 439 #===========================================================================
440 - def create_proc_charac(self, matrix_elements=None, history="", **opts):
441 442 self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics'))
443 444 #=========================================================================== 445 # write_matrix_element_v4 446 #===========================================================================
447 - def write_matrix_element_v4(self):
448 """Function to write a matrix.f file, for inheritance. 449 """ 450 pass
451 452 #=========================================================================== 453 # write_pdf_opendata 454 #===========================================================================
455 - def write_pdf_opendata(self):
456 """ modify the pdf opendata file, to allow direct access to cluster node 457 repository if configure""" 458 459 if not self.opt["cluster_local_path"]: 460 changer = {"pdf_systemwide": ""} 461 else: 462 to_add = """ 463 tempname='%(path)s'//Tablefile 464 open(IU,file=tempname,status='old',ERR=1) 465 return 466 1 tempname='%(path)s/Pdfdata/'//Tablefile 467 open(IU,file=tempname,status='old',ERR=2) 468 return 469 2 tempname='%(path)s/lhapdf'//Tablefile 470 open(IU,file=tempname,status='old',ERR=3) 471 return 472 3 tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile 473 open(IU,file=tempname,status='old',ERR=4) 474 return 475 4 tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile 476 open(IU,file=tempname,status='old',ERR=5) 477 return 478 """ % {"path" : self.opt["cluster_local_path"]} 479 480 changer = {"pdf_systemwide": to_add} 481 482 483 ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "opendata.f")) 484 template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"),"r").read() 485 ff.writelines(template % changer) 486 487 # Do the same for lhapdf set 488 if not self.opt["cluster_local_path"]: 489 changer = {"cluster_specific_path": ""} 490 else: 491 to_add=""" 492 LHAPath='%(path)s/PDFsets' 493 Inquire(File=LHAPath, exist=exists) 494 if(exists)return 495 LHAPath='%(path)s/../lhapdf/pdfsets/6.1/' 496 Inquire(File=LHAPath, exist=exists) 497 if(exists)return 498 LHAPath='%(path)s/../lhapdf/pdfsets/' 499 Inquire(File=LHAPath, exist=exists) 500 if(exists)return 501 LHAPath='./PDFsets' 502 """ % {"path" : self.opt["cluster_local_path"]} 503 changer = {"cluster_specific_path": to_add} 504 505 ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f")) 506 #ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w") 507 template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"),"r").read() 508 ff.writelines(template % changer) 509 510 511 return
512 513 514 515 #=========================================================================== 516 # write_maxparticles_file 517 #===========================================================================
518 - def write_maxparticles_file(self, writer, matrix_elements):
519 """Write the maxparticles.inc file for MadEvent""" 520 521 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 522 maxparticles = max([me.get_nexternal_ninitial()[0] for me in \ 523 matrix_elements.get('matrix_elements')]) 524 else: 525 maxparticles = max([me.get_nexternal_ninitial()[0] \ 526 for me in matrix_elements]) 527 528 lines = "integer max_particles\n" 529 lines += "parameter(max_particles=%d)" % maxparticles 530 531 # Write the file 532 writer.writelines(lines) 533 534 return True
535 536 537 #=========================================================================== 538 # export the model 539 #===========================================================================
540 - def export_model_files(self, model_path):
541 """Configure the files/link of the process according to the model""" 542 543 # Import the model 544 for file in os.listdir(model_path): 545 if os.path.isfile(pjoin(model_path, file)): 546 shutil.copy2(pjoin(model_path, file), \ 547 pjoin(self.dir_path, 'Source', 'MODEL'))
548 549 563 571 572 573 #=========================================================================== 574 # export the helas routine 575 #===========================================================================
576 - def export_helas(self, helas_path):
577 """Configure the files/link of the process according to the model""" 578 579 # Import helas routine 580 for filename in os.listdir(helas_path): 581 filepos = pjoin(helas_path, filename) 582 if os.path.isfile(filepos): 583 if filepos.endswith('Makefile.template'): 584 cp(filepos, self.dir_path + '/Source/DHELAS/Makefile') 585 elif filepos.endswith('Makefile'): 586 pass 587 else: 588 cp(filepos, self.dir_path + '/Source/DHELAS')
589 # following lines do the same but whithout symbolic link 590 # 591 #def export_helas(mgme_dir, dir_path): 592 # 593 # # Copy the HELAS directory 594 # helas_dir = pjoin(mgme_dir, 'HELAS') 595 # for filename in os.listdir(helas_dir): 596 # if os.path.isfile(pjoin(helas_dir, filename)): 597 # shutil.copy2(pjoin(helas_dir, filename), 598 # pjoin(dir_path, 'Source', 'DHELAS')) 599 # shutil.move(pjoin(dir_path, 'Source', 'DHELAS', 'Makefile.template'), 600 # pjoin(dir_path, 'Source', 'DHELAS', 'Makefile')) 601 # 602 603 #=========================================================================== 604 # generate_subprocess_directory 605 #===========================================================================
606 - def generate_subprocess_directory(self, matrix_element, 607 fortran_model, 608 me_number):
609 """Routine to generate a subprocess directory (for inheritance)""" 610 611 pass
612 613 #=========================================================================== 614 # get_source_libraries_list 615 #===========================================================================
616 - def get_source_libraries_list(self):
617 """ Returns the list of libraries to be compiling when compiling the 618 SOURCE directory. It is different for loop_induced processes and 619 also depends on the value of the 'output_dependencies' option""" 620 621 return ['$(LIBDIR)libdhelas.$(libext)', 622 '$(LIBDIR)libpdf.$(libext)', 623 '$(LIBDIR)libmodel.$(libext)', 624 '$(LIBDIR)libcernlib.$(libext)', 625 '$(LIBDIR)libbias.$(libext)']
626 627 #=========================================================================== 628 # write_source_makefile 629 #===========================================================================
630 - def write_source_makefile(self, writer):
631 """Write the nexternal.inc file for MG4""" 632 633 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 634 set_of_lib = ' '.join(['$(LIBRARIES)']+self.get_source_libraries_list()) 635 if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'): 636 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 637 MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param 638 param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n''' 639 else: 640 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 641 param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n''' 642 text = open(path).read() % {'libraries': set_of_lib, 'model':model_line} 643 writer.write(text) 644 645 return True
646 647 #=========================================================================== 648 # write_nexternal_madspin 649 #===========================================================================
650 - def write_nexternal_madspin(self, writer, nexternal, ninitial):
651 """Write the nexternal_prod.inc file for madspin""" 652 653 replace_dict = {} 654 655 replace_dict['nexternal'] = nexternal 656 replace_dict['ninitial'] = ninitial 657 658 file = """ \ 659 integer nexternal_prod 660 parameter (nexternal_prod=%(nexternal)d) 661 integer nincoming_prod 662 parameter (nincoming_prod=%(ninitial)d)""" % replace_dict 663 664 # Write the file 665 if writer: 666 writer.writelines(file) 667 return True 668 else: 669 return replace_dict
670 671 #=========================================================================== 672 # write_helamp_madspin 673 #===========================================================================
674 - def write_helamp_madspin(self, writer, ncomb):
675 """Write the helamp.inc file for madspin""" 676 677 replace_dict = {} 678 679 replace_dict['ncomb'] = ncomb 680 681 file = """ \ 682 integer ncomb1 683 parameter (ncomb1=%(ncomb)d) 684 double precision helamp(ncomb1) 685 common /to_helamp/helamp """ % replace_dict 686 687 # Write the file 688 if writer: 689 writer.writelines(file) 690 return True 691 else: 692 return replace_dict
693 694 695 696 #=========================================================================== 697 # write_nexternal_file 698 #===========================================================================
699 - def write_nexternal_file(self, writer, nexternal, ninitial):
700 """Write the nexternal.inc file for MG4""" 701 702 replace_dict = {} 703 704 replace_dict['nexternal'] = nexternal 705 replace_dict['ninitial'] = ninitial 706 707 file = """ \ 708 integer nexternal 709 parameter (nexternal=%(nexternal)d) 710 integer nincoming 711 parameter (nincoming=%(ninitial)d)""" % replace_dict 712 713 # Write the file 714 if writer: 715 writer.writelines(file) 716 return True 717 else: 718 return replace_dict
719 #=========================================================================== 720 # write_pmass_file 721 #===========================================================================
722 - def write_pmass_file(self, writer, matrix_element):
723 """Write the pmass.inc file for MG4""" 724 725 model = matrix_element.get('processes')[0].get('model') 726 727 lines = [] 728 for wf in matrix_element.get_external_wavefunctions(): 729 mass = model.get('particle_dict')[wf.get('pdg_code')].get('mass') 730 if mass.lower() != "zero": 731 mass = "abs(%s)" % mass 732 733 lines.append("pmass(%d)=%s" % \ 734 (wf.get('number_external'), mass)) 735 736 # Write the file 737 writer.writelines(lines) 738 739 return True
740 741 #=========================================================================== 742 # write_ngraphs_file 743 #===========================================================================
744 - def write_ngraphs_file(self, writer, nconfigs):
745 """Write the ngraphs.inc file for MG4. Needs input from 746 write_configs_file.""" 747 748 file = " integer n_max_cg\n" 749 file = file + "parameter (n_max_cg=%d)" % nconfigs 750 751 # Write the file 752 writer.writelines(file) 753 754 return True
755 756 #=========================================================================== 757 # write_leshouche_file 758 #===========================================================================
759 - def write_leshouche_file(self, writer, matrix_element):
760 """Write the leshouche.inc file for MG4""" 761 762 # Write the file 763 writer.writelines(self.get_leshouche_lines(matrix_element, 0)) 764 765 return True
766 767 #=========================================================================== 768 # get_leshouche_lines 769 #===========================================================================
770 - def get_leshouche_lines(self, matrix_element, numproc):
771 """Write the leshouche.inc file for MG4""" 772 773 # Extract number of external particles 774 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 775 776 lines = [] 777 for iproc, proc in enumerate(matrix_element.get('processes')): 778 legs = proc.get_legs_with_decays() 779 lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \ 780 (iproc + 1, numproc+1, nexternal, 781 ",".join([str(l.get('id')) for l in legs]))) 782 if iproc == 0 and numproc == 0: 783 for i in [1, 2]: 784 lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \ 785 (i, nexternal, 786 ",".join([ "%3r" % 0 ] * ninitial + \ 787 [ "%3r" % i ] * (nexternal - ninitial)))) 788 789 # Here goes the color connections corresponding to the JAMPs 790 # Only one output, for the first subproc! 791 if iproc == 0: 792 # If no color basis, just output trivial color flow 793 if not matrix_element.get('color_basis'): 794 for i in [1, 2]: 795 lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \ 796 (i, numproc+1,nexternal, 797 ",".join([ "%3r" % 0 ] * nexternal))) 798 799 else: 800 # First build a color representation dictionnary 801 repr_dict = {} 802 for l in legs: 803 repr_dict[l.get('number')] = \ 804 proc.get('model').get_particle(l.get('id')).get_color()\ 805 * (-1)**(1+l.get('state')) 806 # Get the list of color flows 807 color_flow_list = \ 808 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 809 ninitial) 810 # And output them properly 811 for cf_i, color_flow_dict in enumerate(color_flow_list): 812 for i in [0, 1]: 813 lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \ 814 (i + 1, cf_i + 1, numproc+1, nexternal, 815 ",".join(["%3r" % color_flow_dict[l.get('number')][i] \ 816 for l in legs]))) 817 818 return lines
819 820 821 822 823 #=========================================================================== 824 # write_maxamps_file 825 #===========================================================================
826 - def write_maxamps_file(self, writer, maxamps, maxflows, 827 maxproc,maxsproc):
828 """Write the maxamps.inc file for MG4.""" 829 830 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 831 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 832 (maxamps, maxflows) 833 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 834 (maxproc, maxsproc) 835 836 # Write the file 837 writer.writelines(file) 838 839 return True
840 841 842 #=========================================================================== 843 # Routines to output UFO models in MG4 format 844 #=========================================================================== 845
def convert_model(self, model, wanted_lorentz = [],
                  wanted_couplings = []):
    """ Create a full valid MG4 model from a MG5 model (coming from UFO)

    Writes the MODEL and DHELAS directories under self.dir_path/Source,
    temporarily switching the global aloha precision/loop-mode settings
    and restoring them before returning.

    NOTE(review): the mutable defaults wanted_lorentz/wanted_couplings are
    only read here, never mutated, so the shared-default pitfall does not
    bite in this method."""

    # Make sure aloha is in quadruple precision if needed
    # (global module state; restored at the end of this method).
    old_aloha_mp=aloha.mp_precision
    aloha.mp_precision=self.opt['mp']

    # create the MODEL
    write_dir=pjoin(self.dir_path, 'Source', 'MODEL')
    # NOTE(review): 'self.opt + self.proc_characteristic' relies on these
    # being dict-like objects that support '+' (not plain dicts) — confirm.
    model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic)
    model_builder.build(wanted_couplings)

    # Backup the loop mode, because it can be changed in what follows.
    old_loop_mode = aloha.loop_mode

    # Create the aloha model or use the existing one (for loop exporters
    # this is useful as the aloha model will be used again in the
    # LoopHelasMatrixElements generated). We do not save the model generated
    # here if it didn't exist already because it would be a waste of
    # memory for tree level applications since aloha is only needed at the
    # time of creating the aloha fortran subroutines.
    if hasattr(self, 'aloha_model'):
        aloha_model = self.aloha_model
    else:
        aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath')))
    aloha_model.add_Lorentz_object(model.get('lorentz'))

    # Compute the subroutines
    if wanted_lorentz:
        aloha_model.compute_subset(wanted_lorentz)
    else:
        aloha_model.compute_all(save=False)

    # Write them out
    write_dir=pjoin(self.dir_path, 'Source', 'DHELAS')
    aloha_model.write(write_dir, 'Fortran')

    # Revert the original aloha loop mode
    aloha.loop_mode = old_loop_mode

    #copy Helas Template
    cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile')
    # If any wanted lorentz structure carries an 'L' (loop) tag, ship the
    # loop-capable aloha_functions file instead of the standard one.
    if any([any(['L' in tag for tag in d[1]]) for d in wanted_lorentz]):
        cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f',
           write_dir+'/aloha_functions.f')
        aloha_model.loop_mode = False
    else:
        cp(MG5DIR + '/aloha/template_files/aloha_functions.f',
           write_dir+'/aloha_functions.f')
    create_aloha.write_aloha_file_inc(write_dir, '.f', '.o')

    # Make final link in the Process
    self.make_model_symbolic_link()

    # Re-establish original aloha mode
    aloha.mp_precision=old_aloha_mp

#===========================================================================
# Helper functions
#===========================================================================
def modify_grouping(self, matrix_element):
    """Hook allowing subclasses to alter the subprocess grouping.

    Returns a pair:
      - modified flag (True/False): whether the matrix element was changed
      - the (possibly new) matrix element

    This base implementation never modifies anything."""
    return (False, matrix_element)

#===========================================================================
# Helper functions
#===========================================================================
def get_mg5_info_lines(self):
    """Return info lines for MG5, suitable to place at beginning of
    Fortran files.

    Uses the package version/date when misc.get_pkg_info() provides them,
    otherwise falls back to a generic banner."""

    info = misc.get_pkg_info()
    # Idiom fix: 'has_key' replaced by the 'in' operator (same behavior,
    # also forward-compatible with Python 3).
    if info and 'version' in info and 'date' in info:
        info_lines = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \
                     (info['version'], info['date'])
        info_lines = info_lines + \
                     "# By the MadGraph5_aMC@NLO Development Team\n" + \
                     "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch"
    else:
        info_lines = "# Generated by MadGraph5_aMC@NLO\n" + \
                     "# By the MadGraph5_aMC@NLO Development Team\n" + \
                     "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch"

    return info_lines
def get_process_info_lines(self, matrix_element):
    """Return Fortran comment lines describing the processes contributing
    to this matrix element, one 'C '-prefixed line per process."""

    comment_lines = []
    for process in matrix_element.get('processes'):
        # Continuation lines of a multi-line nice_string keep a comment marker.
        comment_lines.append("C " + process.nice_string().replace('\n', '\nC * '))
    return "\n".join(comment_lines)
def get_helicity_lines(self, matrix_element, array_name='NHEL'):
    """Return the DATA lines filling the helicity matrix 'array_name' for
    this matrix element (one line per helicity combination)."""

    lines = []
    for row, helicities in enumerate(matrix_element.get_helicity_matrix(), 1):
        # First two values are the row index and the number of external legs,
        # followed by the helicity values themselves.
        values = [row, len(helicities)]
        values.extend(helicities)
        template = ("DATA (" + array_name + "(I,%4r),I=1,%d) /" +
                    ",".join(['%2r'] * len(helicities)) + "/")
        lines.append(template % tuple(values))

    return "\n".join(lines)
def get_ic_line(self, matrix_element):
    """Return the IC DATA line (identity permutation of the external legs)
    that follows the helicity lines; required by switchmom in madevent."""

    nexternal = matrix_element.get_nexternal_ninitial()[0]
    # Identity mapping 1..nexternal, written as a comma-separated list.
    indices = ",".join(str(k) for k in range(1, nexternal + 1))
    return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, indices)
def set_chosen_SO_index(self, process, squared_orders):
    """From the squared-order constraints set by the user, decide which
    entries of 'squared_orders' are selected.

    Returns a string of comma-separated '.true.'/'.false.' flags, one per
    squared-order combination."""

    user_sqsos = process.get('squared_orders')
    split_orders = process.get('split_orders')

    # No user constraint at all: keep every combination.
    if not user_sqsos:
        return ','.join(['.true.'] * len(squared_orders))

    flags = []
    for combo in squared_orders:
        matched = True
        for name, target in user_sqsos.items():
            op = process.get_squared_order_type(name)
            actual = combo[split_orders.index(name)]
            # A combination is rejected as soon as one constraint fails.
            if (op == '==' and target != actual) or \
               (op in ['<=', '='] and target < actual) or \
               (op == '>' and target >= actual):
                matched = False
                break
        flags.append('.true.' if matched else '.false.')

    return ','.join(flags)
def get_split_orders_lines(self, orders, array_name, n=5):
    """Return the DATA lines filling 'array_name' with the split-order
    combinations listed in 'orders'; each row is broken into chunks of
    at most n values."""

    lines = []
    for row, combo in enumerate(orders):
        for start in range(0, len(combo), n):
            stop = min(start + n, len(combo))
            chunk = ','.join(["%5r" % value for value in combo[start:stop]])
            lines.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" %
                         (array_name, row + 1, start + 1, stop, chunk))
    return lines
def format_integer_list(self, list, name, n=5):
    """Return Fortran DATA statements initializing the array 'name' with
    the values of the given python list, split in rows of at most n
    entries.

    NOTE(review): the parameter shadows the builtin 'list'; the name is
    kept unchanged for signature compatibility and aliased internally."""

    values = list
    lines = []
    for start in range(0, len(values), n):
        stop = min(start + n, len(values))
        chunk = ','.join(["%5r" % value for value in values[start:stop]])
        lines.append("DATA (%s(i),i=%3r,%3r) /%s/" %
                     (name, start + 1, stop, chunk))
    return lines
def get_color_data_lines(self, matrix_element, n=6):
    """Return the DATA lines defining the color matrix (row denominators
    and CF numerators) of this matrix element; each row of numerators is
    split in chunks of size n."""

    color_matrix = matrix_element.get('color_matrix')
    # Colorless process: trivial 1x1 color matrix.
    if not color_matrix:
        return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"]

    lines = []
    cs = color.ColorString()
    for row, denom in enumerate(color_matrix.get_line_denominators()):
        # Common denominator for this color-matrix row.
        lines.append("DATA Denom(%i)/%i/" % (row + 1, denom))
        numerators = color_matrix.get_line_numerators(row, denom)
        for start in range(0, len(numerators), n):
            stop = min(start + n, len(numerators))
            chunk = ','.join(["%5r" % v for v in numerators[start:stop]])
            lines.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" %
                         (row + 1, start + 1, stop, chunk))
        # Comment documenting which color structure this row corresponds to.
        cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[row])
        lines.append("C %s" % repr(cs))
    return lines
def get_den_factor_line(self, matrix_element):
    """Return the DATA line setting IDEN, the denominator (averaging and
    symmetry) factor of this matrix element."""

    iden = matrix_element.get_denominator_factor()
    return "DATA IDEN/%2r/" % iden
def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
    """Return the ICOLAMP matrix, showing which JAMPs contribute to
    which configs (diagrams).

    Only leading-color flows (Nc power equal to max_Nc) are flagged
    as contributing."""

    ret_list = []

    booldict = {False: ".false.", True: ".true."}

    if not matrix_element.get('color_basis'):
        # No color, so only one color factor. Simply write a ".true."
        # for each config (i.e., each diagram with only 3 particle
        # vertices
        configs = len(mapconfigs)
        ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \
                        (num_matrix_element, configs,
                         ','.join([".true." for i in range(configs)])))
        return ret_list

    # There is a color basis - create a list showing which JAMPs have
    # contributions to which configs

    # Only want to include leading color flows, so find max_Nc
    color_basis = matrix_element.get('color_basis')

    # We don't want to include the power of Nc's which come from the potential
    # loop color trace (i.e. in the case of a closed fermion loop for example)
    # so we subtract it here when computing max_Nc
    max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in
                      color_basis.values()],[]))

    # Crate dictionary between diagram number and JAMP number
    diag_jamp = {}
    for ijamp, col_basis_elem in \
            enumerate(sorted(matrix_element.get('color_basis').keys())):
        for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]:
            # Only use color flows with Nc == max_Nc. However, notice that
            # we don't want to include the Nc power coming from the loop
            # in this counting.
            if (diag_tuple[4]-diag_tuple[5]) == max_Nc:
                diag_num = diag_tuple[0] + 1
                # Add this JAMP number to this diag_num
                diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \
                                      [ijamp+1]

    # NOTE(review): relies on 'ijamp' surviving the loop above, i.e. the
    # color basis being non-empty here (guaranteed by the early return).
    colamps = ijamp + 1
    for iconfig, num_diag in enumerate(mapconfigs):
        if num_diag == 0:
            continue

        # List of True or False
        # NOTE(review): this indexes diag_jamp[num_diag] without a default;
        # presumably every non-zero config has at least one leading-color
        # JAMP — confirm, otherwise this raises KeyError.
        bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)]
        # Add line
        ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \
                        (iconfig+1, num_matrix_element, colamps,
                         ','.join(["%s" % booldict[b] for b in \
                                   bool_list])))

    return ret_list
def get_amp2_lines(self, matrix_element, config_map = []):
    """Return the amp2(i) = sum(amp for diag(i))^2 lines

    If config_map is given (one config number per diagram, 0 meaning
    'no config'), diagrams sharing a config are summed coherently into
    one AMP2 entry; otherwise one AMP2 line is written per diagram.
    NOTE(review): the mutable default config_map is only read, never
    mutated, so it is harmless here."""

    nexternal, ninitial = matrix_element.get_nexternal_ninitial()
    # Get minimum legs in a vertex
    vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \
                 matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]]
    minvert = min(vert_list) if vert_list!=[] else 0

    ret_lines = []
    if config_map:
        # In this case, we need to sum up all amplitudes that have
        # identical topologies, as given by the config_map (which
        # gives the topology/config for each of the diagrams
        diagrams = matrix_element.get('diagrams')
        # Combine the diagrams with identical topologies
        config_to_diag_dict = {}
        for idiag, diag in enumerate(matrix_element.get('diagrams')):
            if config_map[idiag] == 0:
                continue
            try:
                config_to_diag_dict[config_map[idiag]].append(idiag)
            except KeyError:
                config_to_diag_dict[config_map[idiag]] = [idiag]
        # Write out the AMP2s summing squares of amplitudes belonging
        # to eiher the same diagram or different diagrams with
        # identical propagator properties. Note that we need to use
        # AMP2 number corresponding to the first diagram number used
        # for that AMP2.
        for config in sorted(config_to_diag_dict.keys()):

            line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \
                   {"num": (config_to_diag_dict[config][0] + 1)}

            amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \
                            sum([diagrams[idiag].get('amplitudes') for \
                                 idiag in config_to_diag_dict[config]], [])])

            # Not using \sum |M|^2 anymore since this creates troubles
            # when ckm is not diagonal due to the JIM mechanism.
            if '+' in amp:
                line += "(%s)*dconjg(%s)" % (amp, amp)
            else:
                line += "%s*dconjg(%s)" % (amp, amp)
            ret_lines.append(line)
    else:
        for idiag, diag in enumerate(matrix_element.get('diagrams')):
            # Ignore any diagrams with 4-particle vertices.
            if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert:
                continue
            # Now write out the expression for AMP2, meaning the sum of
            # squared amplitudes belonging to the same diagram
            line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)}
            line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \
                              {"num": a.get('number')} for a in \
                              diag.get('amplitudes')])
            ret_lines.append(line)

    return ret_lines

#===========================================================================
# Returns the data statements initializing the coefficients for the JAMP
# decomposition. It is used when the JAMP initialization is decided to be
# done through big arrays containing the projection coefficients.
#===========================================================================
def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\
                   n=50, Nc_value=3):
    """This functions return the lines defining the DATA statement setting
    the coefficients building the JAMPS out of the AMPS. Split rows in
    bunches of size n.
    One can specify the color_basis from which the color amplitudes originates
    so that there are commentaries telling what color structure each JAMP
    corresponds to.

    NOTE(review): Python 2 only — uses the 'raise E, msg' statement form,
    the builtin reduce and fractions.gcd (removed in Python 3.9)."""

    if(not isinstance(color_amplitudes,list) or
       not (color_amplitudes and isinstance(color_amplitudes[0],list))):
        raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_coefs"

    res_list = []
    my_cs = color.ColorString()
    for index, coeff_list in enumerate(color_amplitudes):
        # Create the list of the complete numerical coefficient.
        coefs_list=[coefficient[0][0]*coefficient[0][1]*\
                    (fractions.Fraction(Nc_value)**coefficient[0][3]) for \
                    coefficient in coeff_list]
        # Create the list of the numbers of the contributing amplitudes.
        # Mutliply by -1 for those which have an imaginary coefficient.
        ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \
                         for coefficient in coeff_list]
        # Find the common denominator.
        commondenom=abs(reduce(fractions.gcd, coefs_list).denominator)
        # Integer numerators once rescaled by the common denominator.
        num_list=[(coefficient*commondenom).numerator \
                  for coefficient in coefs_list]
        res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\
                        index+1,len(num_list)))
        res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\
                        index+1,commondenom))
        if color_basis:
            my_cs.from_immutable(sorted(color_basis.keys())[index])
            res_list.append("C %s" % repr(my_cs))
        # Emit the numerators and amplitude numbers in chunks of n.
        for k in xrange(0, len(num_list), n):
            res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \
                (tag_letter,index + 1, k + 1, min(k + n, len(num_list)),
                 ','.join(["%6r" % i for i in num_list[k:k + n]])))
            res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \
                (tag_letter,index + 1, k + 1, min(k + n, len(num_list)),
                 ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]])))
        pass
    return res_list
def get_JAMP_lines_split_order(self, col_amps, split_order_amps,
      split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
    """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
    defined as a matrix element or directly as a color_amplitudes dictionary.
    The split_order_amps specifies the group of amplitudes sharing the same
    amplitude orders which should be put in together in a given set of JAMPS.
    The split_order_amps is supposed to have the format of the second output
    of the function get_split_orders_mapping function in helas_objects.py.
    The split_order_names is optional (it should correspond to the process
    'split_orders' attribute) and only present to provide comments in the
    JAMP definitions in the code."""

    # Let the user call get_JAMP_lines_split_order directly from a
    # NOTE(review): the '%' below binds only to the second string, so
    # error_msg keeps exactly one '%s' placeholder for the later
    # 'error_msg % name' substitutions.
    error_msg="Malformed '%s' argument passed to the "+\
                 "get_JAMP_lines_split_order function: %s"%str(split_order_amps)
    if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
        color_amplitudes=col_amps.get_color_amplitudes()
    elif(isinstance(col_amps,list)):
        if(col_amps and isinstance(col_amps[0],list)):
            color_amplitudes=col_amps
        else:
            raise MadGraph5Error, error_msg%'col_amps'
    else:
        raise MadGraph5Error, error_msg%'col_amps'

    # Verify the sanity of the split_order_amps and split_order_names args
    if isinstance(split_order_amps,list):
        for elem in split_order_amps:
            if len(elem)!=2:
                raise MadGraph5Error, error_msg%'split_order_amps'
            # Check the first element of the two lists to make sure they are
            # integers, although in principle they should all be integers.
            if not isinstance(elem[0],tuple) or \
               not isinstance(elem[1],tuple) or \
               not isinstance(elem[0][0],int) or \
               not isinstance(elem[1][0],int):
                raise MadGraph5Error, error_msg%'split_order_amps'
    else:
        raise MadGraph5Error, error_msg%'split_order_amps'

    if not split_order_names is None:
        if isinstance(split_order_names,list):
            # Should specify the same number of names as there are elements
            # in the key of the split_order_amps.
            if len(split_order_names)!=len(split_order_amps[0][0]):
                raise MadGraph5Error, error_msg%'split_order_names'
            # Check the first element of the list to be a string
            if not isinstance(split_order_names[0],str):
                raise MadGraph5Error, error_msg%'split_order_names'
        else:
            raise MadGraph5Error, error_msg%'split_order_names'

    # Now scan all contributing orders to be individually computed and
    # construct the list of color_amplitudes for JAMP to be constructed
    # accordingly.
    res_list=[]
    for i, amp_order in enumerate(split_order_amps):
        col_amps_order = []
        for jamp in color_amplitudes:
            # Keep only the amplitudes whose number appears in this order group.
            col_amps_order.append(filter(lambda col_amp:
                                         col_amp[1] in amp_order[1],jamp))
        if split_order_names:
            res_list.append('C JAMPs contributing to orders '+' '.join(
                ['%s=%i'%order for order in zip(split_order_names,
                                               amp_order[0])]))
        # NOTE(review): the JAMP_formatLC keyword is not accepted by the
        # get_JAMP_lines defined in this class; presumably the matchbox
        # exporter overrides get_JAMP_lines with that signature — confirm.
        if self.opt['export_format'] in ['madloop_matchbox']:
            res_list.extend(self.get_JAMP_lines(col_amps_order,
                            JAMP_format="JAMP(%s,{0})".format(str(i+1)),
                            JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1))))
        else:
            res_list.extend(self.get_JAMP_lines(col_amps_order,
                            JAMP_format="JAMP(%s,{0})".format(str(i+1))))

    return res_list
def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)",
                   split=-1):
    """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
    defined as a matrix element or directly as a color_amplitudes dictionary,
    Jamp_formatLC should be define to allow to add LeadingColor computation
    (usefull for MatchBox)
    The split argument defines how the JAMP lines should be split in order
    not to be too long.

    NOTE(review): despite the docstring, this signature has no
    JAMP_formatLC parameter; presumably a subclass override adds it."""

    # Let the user call get_JAMP_lines directly from a MatrixElement or from
    # the color amplitudes lists.
    if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
        color_amplitudes=col_amps.get_color_amplitudes()
    elif(isinstance(col_amps,list)):
        if(col_amps and isinstance(col_amps[0],list)):
            color_amplitudes=col_amps
        else:
            raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines"
    else:
        raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines"


    res_list = []
    for i, coeff_list in enumerate(color_amplitudes):
        # It might happen that coeff_list is empty if this function was
        # called from get_JAMP_lines_split_order (i.e. if some color flow
        # does not contribute at all for a given order).
        # In this case we simply set it to 0.
        if coeff_list==[]:
            res_list.append(((JAMP_format+"=0D0") % str(i + 1)))
            continue
        # Break the JAMP definition into 'n=split' pieces to avoid having
        # arbitrarly long lines.
        first=True
        # split<=0 means one single piece containing every coefficient.
        n = (len(coeff_list)+1 if split<=0 else split)
        while coeff_list!=[]:
            coefs=coeff_list[:n]
            coeff_list=coeff_list[n:]
            # Continuation pieces start with 'JAMP(i)=JAMP(i)...'.
            res = ((JAMP_format+"=") % str(i + 1)) + \
                  ((JAMP_format % str(i + 1)) if not first and split>0 else '')

            first=False
            # Optimization: if all contributions to that color basis element have
            # the same coefficient (up to a sign), put it in front
            list_fracs = [abs(coefficient[0][1]) for coefficient in coefs]
            common_factor = False
            diff_fracs = list(set(list_fracs))
            if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1:
                common_factor = True
                global_factor = diff_fracs[0]
                res = res + '%s(' % self.coeff(1, global_factor, False, 0)

            # loop for JAMP
            for (coefficient, amp_number) in coefs:
                if not coefficient:
                    continue
                if common_factor:
                    # Only the sign remains per term; magnitude was factored out.
                    res = (res + "%s" + AMP_format) % \
                          (self.coeff(coefficient[0],
                                      coefficient[1] / abs(coefficient[1]),
                                      coefficient[2],
                                      coefficient[3]),
                           str(amp_number))
                else:
                    res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0],
                                                                  coefficient[1],
                                                                  coefficient[2],
                                                                  coefficient[3]),
                                                       str(amp_number))

            if common_factor:
                res = res + ')'

            res_list.append(res)

    return res_list
def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
    """Generate the PDF lines for the auto_dsig.f file

    Returns a triple (pdf_definition_lines, pdf_data_lines, pdf_lines):
    Fortran declarations, DATA initializations and the runtime PDF
    evaluation/summation code for every contributing process."""

    processes = matrix_element.get('processes')
    model = processes[0].get('model')

    pdf_definition_lines = ""
    pdf_data_lines = ""
    pdf_lines = ""

    if ninitial == 1:
        # Decay process: no PDF convolution, each subprocess gets weight 1.
        pdf_lines = "PD(0) = 0d0\nIPROC = 0\n"
        for i, proc in enumerate(processes):
            process_line = proc.base_string()
            pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
            pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n"
            pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n"
    else:
        # Pick out all initial state particles for the two beams
        initial_states = [sorted(list(set([p.get_initial_pdg(1) for \
                                           p in processes]))),
                          sorted(list(set([p.get_initial_pdg(2) for \
                                           p in processes])))]

        # Prepare all variable names
        pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \
                          sum(initial_states,[])])
        for key,val in pdf_codes.items():
            # Sanitize particle names into valid Fortran identifiers.
            pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m')

        # Set conversion from PDG code to number used in PDF calls
        pdgtopdf = {21: 0, 22: 7}

        # Fill in missing entries of pdgtopdf
        for pdg in sum(initial_states,[]):
            if not pdg in pdgtopdf and not pdg in pdgtopdf.values():
                pdgtopdf[pdg] = pdg
            elif pdg not in pdgtopdf and pdg in pdgtopdf.values():
                # If any particle has pdg code 7, we need to use something else
                pdgtopdf[pdg] = 6000000 + pdg

        # Get PDF variable declarations for all initial states
        for i in [0,1]:
            pdf_definition_lines += "DOUBLE PRECISION " + \
                                    ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                              for pdg in \
                                              initial_states[i]]) + \
                                    "\n"

        # Get PDF data lines for all initial states
        for i in [0,1]:
            pdf_data_lines += "DATA " + \
                              ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                        for pdg in initial_states[i]]) + \
                              "/%d*1D0/" % len(initial_states[i]) + \
                              "\n"

        # Get PDF lines for all different initial states
        for i, init_states in enumerate(initial_states):
            # subproc_group mode indexes beams through the IB() mapping.
            if subproc_group:
                pdf_lines = pdf_lines + \
                            "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \
                            % (i + 1, i + 1)
            else:
                pdf_lines = pdf_lines + \
                            "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \
                            % (i + 1, i + 1)

            for initial_state in init_states:
                if initial_state in pdf_codes.keys():
                    if subproc_group:
                        pdf_lines = pdf_lines + \
                                    ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP," + \
                                     "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \
                                    (pdf_codes[initial_state],
                                     i + 1, i + 1, pdgtopdf[initial_state],
                                     i + 1, i + 1)
                    else:
                        pdf_lines = pdf_lines + \
                                    ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP," + \
                                     "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \
                                    (pdf_codes[initial_state],
                                     i + 1, i + 1, pdgtopdf[initial_state],
                                     i + 1, i + 1)
            pdf_lines = pdf_lines + "ENDIF\n"

        # Add up PDFs for the different initial state particles
        pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n"
        for proc in processes:
            process_line = proc.base_string()
            pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
            pdf_lines = pdf_lines + "\nPD(IPROC)="
            for ibeam in [1, 2]:
                initial_state = proc.get_initial_pdg(ibeam)
                if initial_state in pdf_codes.keys():
                    pdf_lines = pdf_lines + "%s%d*" % \
                                (pdf_codes[initial_state], ibeam)
                else:
                    pdf_lines = pdf_lines + "1d0*"
            # Remove last "*" from pdf_lines
            pdf_lines = pdf_lines[:-1] + "\n"
            pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n"

    # Remove last line break from the return variables
    return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]

#===========================================================================
# write_props_file
#===========================================================================
def write_props_file(self, writer, matrix_element, s_and_t_channels):
    """Write the props.inc file (propagator masses, widths and pole
    powers) for MadEvent. Needs the s- and t-channel information produced
    by write_configs_file. Always returns True."""

    particle_dict = matrix_element.get('processes')[0].get('model').\
                    get('particle_dict')

    lines = []
    for iconf, configs in enumerate(s_and_t_channels):
        # All s-channel vertices plus all t-channel ones except the last
        # (which carries no propagator).
        for vertex in configs[0] + configs[1][:-1]:
            leg = vertex.get('legs')[-1]
            pid = leg.get('id')
            if pid not in particle_dict:
                # Fake propagator used in multiparticle vertices
                mass, width, pow_part = 'zero', 'zero', 0
            else:
                particle = particle_dict[pid]
                # Masses/widths are wrapped in abs() unless identically zero.
                mass = particle.get('mass')
                if mass.lower() != 'zero':
                    mass = "abs(%s)" % mass
                width = particle.get('width')
                if width.lower() != 'zero':
                    width = "abs(%s)" % width
                # Propagator pole power: 2 for bosons, 1 for fermions.
                pow_part = 1 + int(particle.is_boson())

            number = leg.get('number')
            lines.append("prmass(%d,%d) = %s" % (number, iconf + 1, mass))
            lines.append("prwidth(%d,%d) = %s" % (number, iconf + 1, width))
            lines.append("pow(%d,%d) = %d" % (number, iconf + 1, pow_part))

    # Write the file
    writer.writelines(lines)

    return True

#===========================================================================
# write_configs_file
#===========================================================================
def write_configs_file(self, writer, matrix_element):
    """Write the configs.inc file for MadEvent.

    Builds one config per diagram (numbered from 1) and delegates the
    actual writing to write_configs_file_from_diagrams; returns the pair
    (mapconfigs, s_and_t_channels)."""

    # Extract number of external particles
    nexternal, ninitial = matrix_element.get_nexternal_ninitial()

    diagrams = matrix_element.get('diagrams')
    # Diagram numbers are 1-based; one config per diagram.
    mapconfigs = list(range(1, len(diagrams) + 1))
    model = matrix_element.get('processes')[0].get('model')
    return mapconfigs, self.write_configs_file_from_diagrams(
        writer, [[diag] for diag in diagrams], mapconfigs,
        nexternal, ninitial, model)

#===========================================================================
# write_configs_file_from_diagrams
#===========================================================================
def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                     nexternal, ninitial, model):
    """Write the actual configs.inc file.

    configs is the diagrams corresponding to configs (each
    diagrams is a list of corresponding diagrams for all
    subprocesses, with None if there is no corresponding diagrams
    for a given process).
    mapconfigs gives the diagram number for each config.

    For s-channels, we need to output one PDG for each subprocess in
    the subprocess group, in order to be able to pick the right
    one for multiprocesses.

    Returns the list of [s_channels, t_channels] per written config."""

    lines = []

    s_and_t_channels = []

    # Smallest maximum-vertex size over all configs with non-trivial vertices;
    # configs containing larger vertices are skipped below.
    vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                 for config in configs if [d for d in config if d][0].\
                 get_vertex_leg_numbers()!=[]]
    minvert = min(vert_list) if vert_list!=[] else 0

    # Number of subprocesses
    nsubprocs = len(configs[0])

    nconfigs = 0

    # Fake PDG code used for propagators with no real particle id.
    new_pdg = model.get_first_non_pdg()

    for iconfig, helas_diags in enumerate(configs):
        if any(vert > minvert for vert in [d for d in helas_diags if d]\
               [0].get_vertex_leg_numbers()) :
            # Only 3-vertices allowed in configs.inc except for vertices
            # which originate from a shrunk loop.
            continue
        nconfigs += 1

        # Need s- and t-channels for all subprocesses, including
        # those that don't contribute to this config
        # NOTE(review): empty_verts is shared (aliased) into stchannels for
        # the absent subprocesses and resized in place further down.
        empty_verts = []
        stchannels = []
        for h in helas_diags:
            if h:
                # get_s_and_t_channels gives vertices starting from
                # final state external particles and working inwards
                stchannels.append(h.get('amplitudes')[0].\
                                  get_s_and_t_channels(ninitial, model, new_pdg))
            else:
                stchannels.append((empty_verts, None))

        # For t-channels, just need the first non-empty one
        tchannels = [t for s,t in stchannels if t != None][0]

        # For s_and_t_channels (to be used later) use only first config
        s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                 tchannels])

        # Make sure empty_verts is same length as real vertices
        if any([s for s,t in stchannels]):
            empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

            # Reorganize s-channel vertices to get a list of all
            # subprocesses for each vertex
            schannels = zip(*[s for s,t in stchannels])
        else:
            schannels = []

        allchannels = schannels
        if len(tchannels) > 1:
            # Write out tchannels only if there are any non-trivial ones
            allchannels = schannels + tchannels

        # Write out propagators for s-channel and t-channel vertices

        lines.append("# Diagram %d" % (mapconfigs[iconfig]))
        # Correspondance between the config and the diagram = amp2
        lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                 mapconfigs[iconfig]))

        for verts in allchannels:
            # s-channel entries are tuples of per-subprocess vertices;
            # t-channel entries are single vertices.
            if verts in schannels:
                vert = [v for v in verts if v][0]
            else:
                vert = verts
            daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
            last_leg = vert.get('legs')[-1]
            lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                         (last_leg.get('number'), nconfigs, len(daughters),
                          ",".join([str(d) for d in daughters])))
            if verts in schannels:
                # One propagator PDG per subprocess (0 where it is absent).
                pdgs = []
                for v in verts:
                    if v:
                        pdgs.append(v.get('legs')[-1].get('id'))
                    else:
                        pdgs.append(0)
                lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, nsubprocs,
                              ",".join([str(d) for d in pdgs])))
                lines.append("data tprid(%d,%d)/0/" % \
                             (last_leg.get('number'), nconfigs))
            elif verts in tchannels[:-1]:
                lines.append("data tprid(%d,%d)/%d/" % \
                             (last_leg.get('number'), nconfigs,
                              abs(last_leg.get('id'))))
                lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, nsubprocs,
                              ",".join(['0'] * nsubprocs)))

    # Write out number of configs
    lines.append("# Number of configs")
    lines.append("data mapconfig(0)/%d/" % nconfigs)

    # Write the file
    writer.writelines(lines)

    return s_and_t_channels

#===========================================================================
# Global helper methods
#===========================================================================
def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
    """Format one JAMP coefficient as a Fortran factor string.

    The coefficient is ff_number * frac * Nc_value**Nc_power; the result
    ends in '*' (or is a bare sign for +/-1), e.g. '+', '-imag1*',
    '+2D0/3D0*'."""

    total = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power

    # +/-1 (possibly imaginary) collapse to a bare sign prefix.
    if total == 1:
        return '+imag1*' if is_imaginary else '+'
    if total == -1:
        return '-imag1*' if is_imaginary else '-'

    text = '%+iD0' % total.numerator
    if total.denominator != 1:
        # Non-integer coefficient: emit the denominator explicitly.
        text = text + '/%iD0' % total.denominator
    if is_imaginary:
        text = text + '*imag1'
    return text + '*'
def set_fortran_compiler(self, default_compiler, force=False):
    """Set the Fortran (and f2py) compiler based on what's available on
    the system, update make_opts accordingly and return the chosen f77
    compiler name.

    default_compiler: dict with 'fortran' and 'f2py' entries holding the
    user-preferred compiler names (possibly empty strings)."""

    # Check for compiler: user preference first, then common fallbacks.
    if default_compiler['fortran'] and misc.which(default_compiler['fortran']):
        f77_compiler = default_compiler['fortran']
    elif misc.which('gfortran'):
        f77_compiler = 'gfortran'
    elif misc.which('g77'):
        f77_compiler = 'g77'
    elif misc.which('f77'):
        f77_compiler = 'f77'
    elif default_compiler['fortran']:
        logger.warning('No Fortran Compiler detected! Please install one')
        f77_compiler = default_compiler['fortran'] # maybe misc fail so try with it
    else:
        raise MadGraph5Error('No Fortran Compiler detected! Please install one')
    logger.info('Use Fortran compiler ' + f77_compiler)


    # Check for f2py. 1. set default.
    if default_compiler['f2py']:
        f2py_compiler = default_compiler['f2py']
    else:
        f2py_compiler = ''
    # Try to find the correct one.
    if default_compiler['f2py'] and misc.which(default_compiler['f2py']):
        # BUGFIX: the original assigned the whole dict (default_compiler)
        # here instead of its 'f2py' entry.
        f2py_compiler = default_compiler['f2py']
    elif misc.which('f2py'):
        f2py_compiler = 'f2py'
    elif sys.version_info[1] == 6:
        if misc.which('f2py-2.6'):
            f2py_compiler = 'f2py-2.6'
        elif misc.which('f2py2.6'):
            f2py_compiler = 'f2py2.6'
    elif sys.version_info[1] == 7:
        if misc.which('f2py-2.7'):
            f2py_compiler = 'f2py-2.7'
        elif misc.which('f2py2.7'):
            f2py_compiler = 'f2py2.7'

    to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler}


    self.replace_make_opt_f_compiler(to_replace)
    # Replace also for Template but not for cluster
    # ('has_key' replaced by the 'in' operator, same behavior).
    if 'MADGRAPH_DATA' not in os.environ and ReadWrite:
        self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO'))

    return f77_compiler

# an alias for backward compatibility
set_compiler = set_fortran_compiler
    def set_cpp_compiler(self, default_compiler, force=False):
        """Select the C++ compiler based on what is available on the system
        and record it in make_opts.  Returns the chosen compiler name.
        Raises MadGraph5Error when no C++ compiler can be found."""

        # Check for compiler: user choice first, then g++/c++/clang.
        if default_compiler and misc.which(default_compiler):
            compiler = default_compiler
        elif misc.which('g++'):
            #check if clang version (on MacOS 'g++' is often a clang alias)
            p = misc.Popen(['g++', '--version'], stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE)
            out, _ = p.communicate()
            if 'clang' in out and misc.which('clang'):
                compiler = 'clang'
            else:
                compiler = 'g++'
        elif misc.which('c++'):
            compiler = 'c++'
        elif misc.which('clang'):
            compiler = 'clang'
        elif default_compiler:
            logger.warning('No c++ Compiler detected! Please install one')
            compiler = default_compiler # maybe misc fail so try with it
        else:
            raise MadGraph5Error, 'No c++ Compiler detected! Please install one'
        logger.info('Use c++ compiler ' + compiler)
        self.replace_make_opt_c_compiler(compiler)
        # Replace also for Template but not for cluster
        # NOTE: 'not __debug__' means the Template copy is only updated when
        # running with python -O (i.e. not in a development checkout).
        if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite and \
           not __debug__ and not os.path.exists(pjoin(MG5DIR,'bin','create_release.py')):
            self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO'))

        return compiler
1791 1792
1793 - def replace_make_opt_f_compiler(self, compilers, root_dir = ""):
1794 """Set FC=compiler in Source/make_opts""" 1795 1796 assert isinstance(compilers, dict) 1797 1798 mod = False #avoid to rewrite the file if not needed 1799 if not root_dir: 1800 root_dir = self.dir_path 1801 1802 compiler= compilers['fortran'] 1803 f2py_compiler = compilers['f2py'] 1804 if not f2py_compiler: 1805 f2py_compiler = 'f2py' 1806 for_update= {'DEFAULT_F_COMPILER':compiler, 1807 'DEFAULT_F2PY_COMPILER':f2py_compiler} 1808 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1809 1810 try: 1811 common_run_interface.CommonRunCmd.update_make_opts_full( 1812 make_opts, for_update) 1813 except IOError: 1814 if root_dir == self.dir_path: 1815 logger.info('Fail to set compiler. Trying to continue anyway.')
1816
1817 - def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
1818 """Set CXX=compiler in Source/make_opts. 1819 The version is also checked, in order to set some extra flags 1820 if the compiler is clang (on MACOS)""" 1821 1822 is_clang = misc.detect_if_cpp_compiler_is_clang(compiler) 1823 is_lc = misc.detect_cpp_std_lib_dependence(compiler) == '-lc++' 1824 1825 # list of the variable to set in the make_opts file 1826 for_update= {'DEFAULT_CPP_COMPILER':compiler, 1827 'MACFLAG':'-mmacosx-version-min=10.7' if is_clang and is_lc else '', 1828 'STDLIB': '-lc++' if is_lc else '-lstdc++', 1829 'STDLIB_FLAG': '-stdlib=libc++' if is_lc and is_clang else '' 1830 } 1831 1832 if not root_dir: 1833 root_dir = self.dir_path 1834 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1835 1836 try: 1837 common_run_interface.CommonRunCmd.update_make_opts_full( 1838 make_opts, for_update) 1839 except IOError: 1840 if root_dir == self.dir_path: 1841 logger.info('Fail to set compiler. Trying to continue anyway.') 1842 1843 return
1844
#===============================================================================
# ProcessExporterFortranSA
#===============================================================================
class ProcessExporterFortranSA(ProcessExporterFortran):
    """Class to take care of exporting a set of matrix elements to
    MadGraph v4 StandAlone format."""

    # Fortran template used to write matrix.f; daughter classes (e.g.
    # MatchBox below) override this with their own template file.
    matrix_template = "matrix_standalone_v4.inc"
1854 - def __init__(self, *args, **opts):
1855 """add the format information compare to standard init""" 1856 1857 if 'format' in opts: 1858 self.format = opts['format'] 1859 del opts['format'] 1860 else: 1861 self.format = 'standalone' 1862 ProcessExporterFortran.__init__(self, *args, **opts)
1863
    def copy_template(self, model):
        """Create and populate the StandAlone output directory structure.
        Does nothing if the target directory already exists.
        """

        #First copy the full template tree if dir_path doesn't exit
        if os.path.isdir(self.dir_path):
            return

        logger.info('initialize a new standalone directory: %s' % \
                        os.path.basename(self.dir_path))
        temp_dir = pjoin(self.mgme_dir, 'Template/LO')

        # Create the directory structure
        os.mkdir(self.dir_path)
        os.mkdir(pjoin(self.dir_path, 'Source'))
        os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL'))
        os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS'))
        os.mkdir(pjoin(self.dir_path, 'SubProcesses'))
        os.mkdir(pjoin(self.dir_path, 'bin'))
        os.mkdir(pjoin(self.dir_path, 'bin', 'internal'))
        os.mkdir(pjoin(self.dir_path, 'lib'))
        os.mkdir(pjoin(self.dir_path, 'Cards'))

        # Information at top-level
        #Write version info
        shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path)
        try:
            shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path)
        except IOError:
            # no MGMEVersion.txt shipped: synthesize one from the MG5 version
            MG5_version = misc.get_pkg_info()
            open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \
                "5." + MG5_version['version'])

        # Add file in SubProcesses
        shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'),
                    pjoin(self.dir_path, 'SubProcesses', 'makefile'))

        # the check_sa.f driver is only relevant for the plain standalone mode
        if self.format == 'standalone':
            shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'),
                    pjoin(self.dir_path, 'SubProcesses', 'check_sa.f'))

        # Add file in Source
        shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'),
                    pjoin(self.dir_path, 'Source'))
        # add the makefile
        filename = pjoin(self.dir_path,'Source','makefile')
        self.write_source_makefile(writers.FileWriter(filename))
1912 1913 #=========================================================================== 1914 # export model files 1915 #===========================================================================
    def export_model_files(self, model_path):
        """export the model dependent files for V4 model.

        On top of the parent behaviour this appends an update_as_param()
        wrapper to couplings.f and patches check_sa.f so that setpara is
        called with the read-parameter flag."""

        super(ProcessExporterFortranSA,self).export_model_files(model_path)
        # Add the routine update_as_param in v4 model
        # This is a function created in the UFO
        text="""
        subroutine update_as_param()
          call setpara('param_card.dat',.false.)
          return
        end
        """
        ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a')
        ff.write(text)
        ff.close()

        # patch the driver so the param_card is (re)read on startup
        text = open(pjoin(self.dir_path,'SubProcesses','check_sa.f')).read()
        text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)')
        fsock = open(pjoin(self.dir_path,'SubProcesses','check_sa.f'), 'w')
        fsock.write(text)
        fsock.close()

        self.make_model_symbolic_link()
1939 1940 #=========================================================================== 1941 # Make the Helas and Model directories for Standalone directory 1942 #===========================================================================
1943 - def make(self):
1944 """Run make in the DHELAS and MODEL directories, to set up 1945 everything for running standalone 1946 """ 1947 1948 source_dir = pjoin(self.dir_path, "Source") 1949 logger.info("Running make for Helas") 1950 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 1951 logger.info("Running make for Model") 1952 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran')
1953 1954 #=========================================================================== 1955 # Create proc_card_mg5.dat for Standalone directory 1956 #===========================================================================
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize Standalone MG4 directory:
           pick the compilers and build the libraries,
           write proc_card_mg5.dat from the command history,
           and append a global makefile covering all subprocesses.
        """

        compiler =  {'fortran': mg5options['fortran_compiler'],
                     'cpp': mg5options['cpp_compiler'],
                     'f2py': mg5options['f2py_compiler']}

        self.compiler_choice(compiler)
        self.make()

        # Write command history as proc_card_mg5
        if history and os.path.isdir(pjoin(self.dir_path, 'Cards')):
            output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        ProcessExporterFortran.finalize(self, matrix_elements,
                                             history, mg5options, flaglist)
        # make the output importable as a python package (for f2py linking)
        open(pjoin(self.dir_path,'__init__.py'),'w')
        open(pjoin(self.dir_path,'SubProcesses','__init__.py'),'w')

        if 'mode' in self.opt and self.opt['mode'] == "reweight":
            #add the module to hande the NLO weight
            files.copytree(pjoin(MG5DIR, 'Template', 'RWGTNLO'),
                          pjoin(self.dir_path, 'Source'))
            files.copytree(pjoin(MG5DIR, 'Template', 'NLO', 'Source', 'PDF'),
                           pjoin(self.dir_path, 'Source', 'PDF'))
            self.write_pdf_opendata()

        # create a single makefile to compile all the subprocesses
        text = '''\n# For python linking (require f2py part of numpy)\nifeq ($(origin MENUM),undefined)\n  MENUM=2\nendif\n'''
        deppython = ''
        # one target per Pxxxxx subdirectory
        for Pdir in os.listdir(pjoin(self.dir_path,'SubProcesses')):
            if os.path.isdir(pjoin(self.dir_path, 'SubProcesses', Pdir)):
                text += '%(0)s/matrix$(MENUM)py.so:\n\tcd %(0)s;make matrix$(MENUM)py.so\n'% {'0': Pdir}
                deppython += ' %(0)s/matrix$(MENUM)py.so ' % {'0': Pdir}

        text+='all: %s\n\techo \'done\'' % deppython

        ff = open(pjoin(self.dir_path, 'SubProcesses', 'makefile'),'a')
        ff.write(text)
        ff.close()
2001 2002 2003 2004 2005 2006
2007 - def create_MA5_cards(self,*args,**opts):
2008 """ Overload the function of the mother so as to bypass this in StandAlone.""" 2009 pass
2010
2011 - def compiler_choice(self, compiler):
2012 """ Different daughter classes might want different compilers. 2013 So this function is meant to be overloaded if desired.""" 2014 2015 self.set_compiler(compiler)
2016 2017 #=========================================================================== 2018 # generate_subprocess_directory 2019 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                         fortran_model, number):
        """Generate the Pxxxxx directory for a subprocess in MG4 standalone,
        including the necessary matrix.f and nexternal.inc files.

        Returns the number of non-comment helas calls written, or 0 when
        the directory is skipped (symmetric duplicate)."""

        cwd = os.getcwd()
        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = pjoin(self.dir_path, 'SubProcesses', \
                       "P%s" % matrix_element.get('processes')[0].shell_string())

        if self.opt['sa_symmetry']:
            # avoid symmetric output: if a directory for a mirror/permuted
            # version of this process already exists, skip this one
            for i,proc in enumerate(matrix_element.get('processes')):

                initial = []    #filled in the next line
                final = [l.get('id') for l in proc.get('legs')\
                      if l.get('state') or initial.append(l.get('id'))]
                decay_finals = proc.get_final_ids_after_decay()
                decay_finals.sort()
                tag = (tuple(initial), tuple(decay_finals))
                legs = proc.get('legs')[:]
                leg0 = proc.get('legs')[0]
                leg1 = proc.get('legs')[1]
                if not leg1.get('state'):
                    # swap the two initial-state legs and try all final-state
                    # permutations to look for an already generated directory
                    proc.get('legs')[0] = leg1
                    proc.get('legs')[1] = leg0
                    flegs = proc.get('legs')[2:]
                    for perm in itertools.permutations(flegs):
                        for i,p in enumerate(perm):
                            proc.get('legs')[i+2] = p
                        dirpath2 = pjoin(self.dir_path, 'SubProcesses', \
                               "P%s" % proc.shell_string())
                        #restore original order
                        proc.get('legs')[2:] = legs[2:]
                        if os.path.exists(dirpath2):
                            proc.get('legs')[:] = legs
                            return 0
                proc.get('legs')[:] = legs

        try:
            os.mkdir(dirpath)
        except os.error as error:
            logger.warning(error.strerror + " " + dirpath)

        #try:
        #    os.chdir(dirpath)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % dirpath)
        #    return 0

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        if self.opt['export_format']=='standalone_msP':
            filename = pjoin(dirpath, 'matrix_prod.f')
        else:
            filename = pjoin(dirpath, 'matrix.f')
        calls = self.write_matrix_element_v4(
            writers.FortranWriter(filename),
            matrix_element,
            fortran_model)

        # extra include files needed by the MadSpin production variant
        if self.opt['export_format'] == 'standalone_msP':
            filename = pjoin(dirpath,'configs_production.inc')
            mapconfigs, s_and_t_channels = self.write_configs_file(\
                writers.FortranWriter(filename),
                matrix_element)

            filename = pjoin(dirpath,'props_production.inc')
            self.write_props_file(writers.FortranWriter(filename),
                             matrix_element,
                             s_and_t_channels)

            filename = pjoin(dirpath,'nexternal_prod.inc')
            self.write_nexternal_madspin(writers.FortranWriter(filename),
                             nexternal, ninitial)

        if self.opt['export_format']=='standalone_msF':
            filename = pjoin(dirpath, 'helamp.inc')
            ncomb=matrix_element.get_helicity_combinations()
            self.write_helamp_madspin(writers.FortranWriter(filename),
                             ncomb)

        # standard include files for every export format
        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                             nexternal, ninitial)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                         matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                           len(matrix_element.get_all_amplitudes()))

        # Generate diagrams
        filename = pjoin(dirpath, "matrix.ps")
        plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                             get('diagrams'),
                                          filename,
                                          model=matrix_element.get('processes')[0].\
                                             get('model'),
                                          amplitude=True)
        logger.info("Generating Feynman diagrams for " + \
                     matrix_element.get('processes')[0].nice_string())
        plot.draw()

        # shared files are symlinked from the SubProcesses level
        linkfiles = ['check_sa.f', 'coupl.inc', 'makefile']

        for file in linkfiles:
            ln('../%s' % file, cwd=dirpath)

        # Return to original PWD
        #os.chdir(cwd)

        if not calls:
            calls = 0
        return calls
2141 2142 2143 #=========================================================================== 2144 # write_source_makefile 2145 #===========================================================================
2146 - def write_source_makefile(self, writer):
2147 """Write the nexternal.inc file for MG4""" 2148 2149 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 2150 set_of_lib = '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)' 2151 model_line='''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n''' 2152 text = open(path).read() % {'libraries': set_of_lib, 'model':model_line} 2153 writer.write(text) 2154 2155 return True
2156 2157 #=========================================================================== 2158 # write_matrix_element_v4 2159 #===========================================================================
    def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                                write=True, proc_prefix=''):
        """Export a matrix element to a matrix.f file in MG4 standalone format.
        If write is False, just return the replace_dict and do not write
        anything.  Returns 0 for an empty matrix element, otherwise the
        number of non-comment helas calls (or the replace_dict)."""


        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        if writer:
            if not isinstance(writer, writers.FortranWriter):
                raise writers.FortranWriter.FortranWriterError(\
                "writer not FortranWriter but %s" % type(writer))
            # Set lowercase/uppercase Fortran code
            writers.FortranWriter.downcase = False


        if not self.opt.has_key('sa_symmetry'):
            self.opt['sa_symmetry']=False



        # The proc_id is for MadEvent grouping which is never used in SA.
        replace_dict = {'global_variable':'', 'amp2_lines':'',
                                       'proc_prefix':proc_prefix, 'proc_id':''}

        # Extract helas calls
        helas_calls = fortran_model.get_matrix_element_calls(\
                    matrix_element)

        replace_dict['helas_calls'] = "\n".join(helas_calls)

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
        replace_dict['nexternal'] = nexternal
        replace_dict['nincoming'] = ninitial

        # Extract ncomb
        ncomb = matrix_element.get_helicity_combinations()
        replace_dict['ncomb'] = ncomb

        # Extract helicity lines
        helicity_lines = self.get_helicity_lines(matrix_element)
        replace_dict['helicity_lines'] = helicity_lines

        # Extract overall denominator
        # Averaging initial state color, spin, and identical FS particles
        replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element)

        # Extract ngraphs
        ngraphs = matrix_element.get_number_of_amplitudes()
        replace_dict['ngraphs'] = ngraphs

        # Extract nwavefuncs
        nwavefuncs = matrix_element.get_number_of_wavefunctions()
        replace_dict['nwavefuncs'] = nwavefuncs

        # Extract ncolor (at least 1 even for a trivial color basis)
        ncolor = max(1, len(matrix_element.get('color_basis')))
        replace_dict['ncolor'] = ncolor

        replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor()
        replace_dict['beamone_helavgfactor'], replace_dict['beamtwo_helavgfactor'] =\
                                       matrix_element.get_beams_hel_avg_factor()

        # Extract color data lines
        color_data_lines = self.get_color_data_lines(matrix_element)
        replace_dict['color_data_lines'] = "\n".join(color_data_lines)

        if self.opt['export_format']=='standalone_msP':
            # For MadSpin need to return the AMP2
            amp2_lines = self.get_amp2_lines(matrix_element, [] )
            replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
            replace_dict['global_variable'] = \
         "       Double Precision amp2(NGRAPHS)\n       common/to_amps/  amp2\n"

        # JAMP definition, depends on the number of independent split orders
        split_orders=matrix_element.get('processes')[0].get('split_orders')

        if len(split_orders)==0:
            replace_dict['nSplitOrders']=''
            # Extract JAMP lines
            jamp_lines = self.get_JAMP_lines(matrix_element)
            # Consider the output of a dummy order 'ALL_ORDERS' for which we
            # set all amplitude order to weight 1 and only one squared order
            # contribution which is of course ALL_ORDERS=2.
            squared_orders = [(2,),]
            amp_orders = [((1,),tuple(range(1,ngraphs+1)))]
            replace_dict['chosen_so_configs'] = '.TRUE.'
            replace_dict['nSqAmpSplitOrders']=1
            replace_dict['split_order_str_list']=''
        else:
            # real split orders: build the amplitude/squared-order mapping
            squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
            replace_dict['nAmpSplitOrders']=len(amp_orders)
            replace_dict['nSqAmpSplitOrders']=len(squared_orders)
            replace_dict['nSplitOrders']=len(split_orders)
            replace_dict['split_order_str_list']=str(split_orders)
            amp_so = self.get_split_orders_lines(
                    [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS')
            sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS')
            replace_dict['ampsplitorders']='\n'.join(amp_so)
            replace_dict['sqsplitorders']='\n'.join(sqamp_so)
            jamp_lines = self.get_JAMP_lines_split_order(\
                       matrix_element,amp_orders,split_order_names=split_orders)

            # Now setup the array specifying what squared split order is chosen
            replace_dict['chosen_so_configs']=self.set_chosen_SO_index(
                              matrix_element.get('processes')[0],squared_orders)

            # For convenience we also write the driver check_sa_splitOrders.f
            # that explicitely writes out the contribution from each squared order.
            # The original driver still works and is compiled with 'make' while
            # the splitOrders one is compiled with 'make check_sa_born_splitOrders'
            check_sa_writer=writers.FortranWriter('check_sa_born_splitOrders.f')
            self.write_check_sa_splitOrders(squared_orders,split_orders,
              nexternal,ninitial,proc_prefix,check_sa_writer)

        if write:
            writers.FortranWriter('nsqso_born.inc').writelines(
                """INTEGER NSQSO_BORN
                   PARAMETER (NSQSO_BORN=%d)"""%replace_dict['nSqAmpSplitOrders'])

        replace_dict['jamp_lines'] = '\n'.join(jamp_lines)

        # choose the matrix.f template according to the export format
        matrix_template = self.matrix_template
        if self.opt['export_format']=='standalone_msP' :
            matrix_template = 'matrix_standalone_msP_v4.inc'
        elif self.opt['export_format']=='standalone_msF':
            matrix_template = 'matrix_standalone_msF_v4.inc'
        elif self.opt['export_format']=='matchbox':
            replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id')
            replace_dict["color_information"] = self.get_color_string_lines(matrix_element)

        if len(split_orders)>0:
            if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']:
                logger.debug("Warning: The export format %s is not "+\
                  " available for individual ME evaluation of given coupl. orders."+\
                  " Only the total ME will be computed.", self.opt['export_format'])
            elif  self.opt['export_format'] in ['madloop_matchbox']:
                replace_dict["color_information"] = self.get_color_string_lines(matrix_element)
                matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc"
            else:
                matrix_template = "matrix_standalone_splitOrders_v4.inc"

        replace_dict['template_file'] = pjoin(_file_path, 'iolibs', 'template_files', matrix_template)
        replace_dict['template_file2'] = pjoin(_file_path, \
                                  'iolibs/template_files/split_orders_helping_functions.inc')
        if write and writer:
            path = replace_dict['template_file']
            content = open(path).read()
            content = content % replace_dict
            # Write the file
            writer.writelines(content)
            # Add the helper functions.
            if len(split_orders)>0:
                content = '\n' + open(replace_dict['template_file2'])\
                                   .read()%replace_dict
                writer.writelines(content)
            return len(filter(lambda call: call.find('#') != 0, helas_calls))
        else:
            replace_dict['return_value'] = len(filter(lambda call: call.find('#') != 0, helas_calls))
            return replace_dict # for subclass update
2331
2332 - def write_check_sa_splitOrders(self,squared_orders, split_orders, nexternal, 2333 nincoming, proc_prefix, writer):
2334 """ Write out a more advanced version of the check_sa drivers that 2335 individually returns the matrix element for each contributing squared 2336 order.""" 2337 2338 check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \ 2339 'template_files', 'check_sa_splitOrders.f')).read() 2340 printout_sq_orders=[] 2341 for i, squared_order in enumerate(squared_orders): 2342 sq_orders=[] 2343 for j, sqo in enumerate(squared_order): 2344 sq_orders.append('%s=%d'%(split_orders[j],sqo)) 2345 printout_sq_orders.append(\ 2346 "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\ 2347 %(i+1,' '.join(sq_orders),i+1)) 2348 printout_sq_orders='\n'.join(printout_sq_orders) 2349 replace_dict = {'printout_sqorders':printout_sq_orders, 2350 'nSplitOrders':len(squared_orders), 2351 'nexternal':nexternal, 2352 'nincoming':nincoming, 2353 'proc_prefix':proc_prefix} 2354 2355 if writer: 2356 writer.writelines(check_sa_content % replace_dict) 2357 else: 2358 return replace_dict
2359
class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
    """class to take care of exporting a set of matrix element for the Matchbox
    code in the case of Born only routine"""

    # MatchBox defaults: symmetric outputs are merged ('sa_symmetry': True)
    default_opt = {'clean': False, 'complex_mass':False,
                   'export_format':'matchbox', 'mp': False,
                   'sa_symmetry': True}

    #specific template of the born
    matrix_template = "matrix_standalone_matchbox.inc"

    @staticmethod
    def get_color_string_lines(matrix_element):
        """Return the Fortran lines encoding the color-string indices of this
        matrix element as a chain of if/elseif blocks on (in1, in2).
        Returns 'out = 1' when there is no color matrix and 'out = - 1' as
        the fallback value."""

        if not matrix_element.get('color_matrix'):
            return "\n".join(["out = 1"])

        #start the real work
        color_denominators = matrix_element.get('color_matrix').\
                                                         get_line_denominators()
        matrix_strings = []
        my_cs = color.ColorString()
        for i_color in xrange(len(color_denominators)):
            # Then write the numerators for the matrix elements
            my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color])
            t_str=repr(my_cs)
            t_match=re.compile(r"(\w+)\(([\s\d+\,]*)\)")
            # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall:
            # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')]
            all_matches = t_match.findall(t_str)
            output = {}
            arg=[]
            for match in all_matches:
                ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')]
                if ctype in ['ColorOne' ]:
                    continue
                # only fundamental T chains and traces are supported
                if ctype not in ['T', 'Tr' ]:
                    raise MadGraph5Error, 'Color Structure not handled by Matchbox: %s' % ctype
                # '0' marks the end of each color chain
                tmparg += ['0']
                arg +=tmparg
            for j, v in enumerate(arg):
                output[(i_color,j)] = v

            # NOTE(review): iteration over a plain dict here relies on the
            # dict's ordering; on Python 2 this is arbitrary — presumably
            # harmless because each key produces an independent branch, but
            # worth confirming.
            for key in output:
                if matrix_strings == []:
                    #first entry
                    matrix_strings.append("""
          if (in1.eq.%s.and.in2.eq.%s)then
          out = %s
          """ % (key[0], key[1], output[key]))
                else:
                    #not first entry
                    matrix_strings.append("""
          elseif (in1.eq.%s.and.in2.eq.%s)then
          out = %s
          """ % (key[0], key[1], output[key]))
        if len(matrix_strings):
            matrix_strings.append(" else \n out = - 1 \n endif")
        else:
            return "\n out = - 1 \n "
        return "\n".join(matrix_strings)
2425
2426 - def make(self,*args,**opts):
2427 pass
2428
    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1,
                       JAMP_formatLC=None):

        """Return the standard JAMP lines plus the leading-color part of the
        colorflow (written with the JAMP_formatLC format, default 'LN'+JAMP).
        Raises MadGraph5Error on a malformed col_amps argument."""

        if not JAMP_formatLC:
            JAMP_formatLC= "LN%s" % JAMP_format

        error_msg="Malformed '%s' argument passed to the get_JAMP_lines"
        # accept either a HelasMatrixElement or a list of coefficient lists
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            col_amps=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                col_amps=col_amps
            else:
                raise MadGraph5Error, error_msg % 'col_amps'
        else:
            raise MadGraph5Error, error_msg % 'col_amps'

        # full-color JAMPs from the parent implementation
        text = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps,
                                                  JAMP_format=JAMP_format,
                                                  AMP_format=AMP_format,
                                                  split=-1)


        # Filter the col_ampls to generate only those without any 1/NC terms
        # (coefficient[3] is the power of 1/Nc)
        LC_col_amps = []
        for coeff_list in col_amps:
            to_add = []
            for (coefficient, amp_number) in coeff_list:
                if coefficient[3]==0:
                    to_add.append( (coefficient, amp_number) )
            LC_col_amps.append(to_add)

        text += super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps,
                                                  JAMP_format=JAMP_formatLC,
                                                  AMP_format=AMP_format,
                                                  split=-1)

        return text
2470
#===============================================================================
# ProcessExporterFortranMW
#===============================================================================
class ProcessExporterFortranMW(ProcessExporterFortran):
    """Class to take care of exporting a set of matrix elements to
    MadGraph v4 - MadWeight format."""

    # Fortran template used when writing the matrix element files
    matrix_file="matrix_standalone_v4.inc"
2483 - def copy_template(self, model):
2484 """Additional actions needed for setup of Template 2485 """ 2486 2487 super(ProcessExporterFortranMW, self).copy_template(model) 2488 2489 # Add the MW specific file 2490 shutil.copytree(pjoin(MG5DIR,'Template','MadWeight'), 2491 pjoin(self.dir_path, 'Source','MadWeight'), True) 2492 shutil.copytree(pjoin(MG5DIR,'madgraph','madweight'), 2493 pjoin(self.dir_path, 'bin','internal','madweight'), True) 2494 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','setrun.f'), 2495 pjoin(self.dir_path, 'Source','setrun.f')) 2496 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','run.inc'), 2497 pjoin(self.dir_path, 'Source','run.inc')) 2498 # File created from Template (Different in some child class) 2499 filename = os.path.join(self.dir_path,'Source','run_config.inc') 2500 self.write_run_config_file(writers.FortranWriter(filename)) 2501 2502 try: 2503 subprocess.call([os.path.join(self.dir_path, 'Source','MadWeight','bin','internal','pass_to_madweight')], 2504 stdout = os.open(os.devnull, os.O_RDWR), 2505 stderr = os.open(os.devnull, os.O_RDWR), 2506 cwd=self.dir_path) 2507 except OSError: 2508 # Probably madweight already called 2509 pass 2510 2511 # Copy the different python file in the Template 2512 self.copy_python_file() 2513 # create the appropriate cuts.f 2514 self.get_mw_cuts_version() 2515 2516 # add the makefile in Source directory 2517 filename = os.path.join(self.dir_path,'Source','makefile') 2518 self.write_source_makefile(writers.FortranWriter(filename))
2519 2520 2521 2522 2523 #=========================================================================== 2524 # convert_model 2525 #===========================================================================
    def convert_model(self, model, wanted_lorentz = [],
                      wanted_couplings = []):
        """Convert the model and additionally install a fresh copy of the UFO
        model (plus its restriction card) under bin/internal/ufomodel.
        Note: the mutable default arguments are safe here since they are only
        forwarded, never mutated."""

        super(ProcessExporterFortranMW,self).convert_model(model,
                                               wanted_lorentz, wanted_couplings)

        IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~')
        try:
            # drop any stale copy before re-copying
            shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel'))
        except OSError as error:
            pass
        model_path = model.get('modelpath')
        # This is not safe if there is a '##' or '-' in the path.
        shutil.copytree(model_path,
                               pjoin(self.dir_path,'bin','internal','ufomodel'),
                               ignore=shutil.ignore_patterns(*IGNORE_PATTERNS))
        if hasattr(model, 'restrict_card'):
            out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel',
                                                         'restrict_default.dat')
            # restrict_card may be a ParamCard object or a plain file path
            if isinstance(model.restrict_card, check_param_card.ParamCard):
                model.restrict_card.write(out_path)
            else:
                files.cp(model.restrict_card, out_path)
2549 2550 #=========================================================================== 2551 # generate_subprocess_directory 2552 #===========================================================================
2553 - def copy_python_file(self):
2554 """copy the python file require for the Template""" 2555 2556 # madevent interface 2557 cp(_file_path+'/interface/madweight_interface.py', 2558 self.dir_path+'/bin/internal/madweight_interface.py') 2559 cp(_file_path+'/interface/extended_cmd.py', 2560 self.dir_path+'/bin/internal/extended_cmd.py') 2561 cp(_file_path+'/interface/common_run_interface.py', 2562 self.dir_path+'/bin/internal/common_run_interface.py') 2563 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 2564 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 2565 cp(_file_path+'/iolibs/save_load_object.py', 2566 self.dir_path+'/bin/internal/save_load_object.py') 2567 cp(_file_path+'/madevent/gen_crossxhtml.py', 2568 self.dir_path+'/bin/internal/gen_crossxhtml.py') 2569 cp(_file_path+'/various/FO_analyse_card.py', 2570 self.dir_path+'/bin/internal/FO_analyse_card.py') 2571 cp(_file_path+'/iolibs/file_writers.py', 2572 self.dir_path+'/bin/internal/file_writers.py') 2573 #model file 2574 cp(_file_path+'../models/check_param_card.py', 2575 self.dir_path+'/bin/internal/check_param_card.py') 2576 2577 #madevent file 2578 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 2579 cp(_file_path+'/various/lhe_parser.py', 2580 self.dir_path+'/bin/internal/lhe_parser.py') 2581 2582 cp(_file_path+'/various/banner.py', 2583 self.dir_path+'/bin/internal/banner.py') 2584 cp(_file_path+'/various/shower_card.py', 2585 self.dir_path+'/bin/internal/shower_card.py') 2586 cp(_file_path+'/various/cluster.py', 2587 self.dir_path+'/bin/internal/cluster.py') 2588 2589 # logging configuration 2590 cp(_file_path+'/interface/.mg5_logging.conf', 2591 self.dir_path+'/bin/internal/me5_logging.conf') 2592 cp(_file_path+'/interface/coloring_logging.py', 2593 self.dir_path+'/bin/internal/coloring_logging.py')
2594 2595 2596 #=========================================================================== 2597 # Change the version of cuts.f to the one compatible with MW 2598 #===========================================================================
2599 - def get_mw_cuts_version(self, outpath=None):
2600 """create the appropriate cuts.f 2601 This is based on the one associated to ME output but: 2602 1) No clustering (=> remove initcluster/setclscales) 2603 2) Adding the definition of cut_bw at the file. 2604 """ 2605 2606 template = open(pjoin(MG5DIR,'Template','LO','SubProcesses','cuts.f')) 2607 2608 text = StringIO() 2609 #1) remove all dependencies in ickkw >1: 2610 nb_if = 0 2611 for line in template: 2612 if 'if(xqcut.gt.0d0' in line: 2613 nb_if = 1 2614 if nb_if == 0: 2615 text.write(line) 2616 continue 2617 if re.search(r'if\(.*\)\s*then', line): 2618 nb_if += 1 2619 elif 'endif' in line: 2620 nb_if -= 1 2621 2622 #2) add fake cut_bw (have to put the true one later) 2623 text.write(""" 2624 logical function cut_bw(p) 2625 include 'madweight_param.inc' 2626 double precision p(*) 2627 if (bw_cut) then 2628 cut_bw = .true. 2629 else 2630 stop 1 2631 endif 2632 return 2633 end 2634 """) 2635 2636 final = text.getvalue() 2637 #3) remove the call to initcluster: 2638 template = final.replace('call initcluster', '! Remove for MW!call initcluster') 2639 template = template.replace('genps.inc', 'maxparticles.inc') 2640 #Now we can write it 2641 if not outpath: 2642 fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w') 2643 elif isinstance(outpath, str): 2644 fsock = open(outpath, 'w') 2645 else: 2646 fsock = outpath 2647 fsock.write(template)
2648 2649 2650 2651 #=========================================================================== 2652 # Make the Helas and Model directories for Standalone directory 2653 #===========================================================================
2654 - def make(self):
2655 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 2656 everything for running madweight 2657 """ 2658 2659 source_dir = os.path.join(self.dir_path, "Source") 2660 logger.info("Running make for Helas") 2661 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2662 logger.info("Running make for Model") 2663 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 2664 logger.info("Running make for PDF") 2665 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 2666 logger.info("Running make for CERNLIB") 2667 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 2668 logger.info("Running make for GENERIC") 2669 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 2670 logger.info("Running make for blocks") 2671 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 2672 logger.info("Running make for tools") 2673 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
2674 2675 #=========================================================================== 2676 # Create proc_card_mg5.dat for MadWeight directory 2677 #===========================================================================
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize Standalone MG4 directory by generation proc_card_mg5.dat"""

        # Compiler choices forwarded from the mg5 options
        compiler = {'fortran': mg5options['fortran_compiler'],
                    'cpp': mg5options['cpp_compiler'],
                    'f2py': mg5options['f2py_compiler']}



        #proc_charac
        self.create_proc_charac()

        # Write maxparticles.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path,'Source','maxparticles.inc')
        self.write_maxparticles_file(writers.FortranWriter(filename),
                                     matrix_elements)
        # blocks/ and tools/ need their own link to the include file
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source','MadWeight','blocks'))
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source','MadWeight','tools'))

        # configure the compilers before running make
        self.set_compiler(compiler)
        self.make()

        # Write command history as proc_card_mg5
        if os.path.isdir(os.path.join(self.dir_path, 'Cards')):
            output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        # delegate the generic finalization steps to the parent class
        ProcessExporterFortran.finalize(self, matrix_elements,
                                        history, mg5options, flaglist)
2709 2710 2711 2712 #=========================================================================== 2713 # create the run_card for MW 2714 #===========================================================================
2715 - def create_run_card(self, matrix_elements, history):
2716 """ """ 2717 2718 run_card = banner_mod.RunCard() 2719 2720 # pass to default for MW 2721 run_card["run_tag"] = "\'not_use\'" 2722 run_card["fixed_ren_scale"] = "T" 2723 run_card["fixed_fac_scale"] = "T" 2724 run_card.remove_all_cut() 2725 2726 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 2727 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2728 python_template=True) 2729 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'), 2730 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2731 python_template=True)
2732 2733 #=========================================================================== 2734 # export model files 2735 #===========================================================================
2736 - def export_model_files(self, model_path):
2737 """export the model dependent files for V4 model""" 2738 2739 super(ProcessExporterFortranMW,self).export_model_files(model_path) 2740 # Add the routine update_as_param in v4 model 2741 # This is a function created in the UFO 2742 text=""" 2743 subroutine update_as_param() 2744 call setpara('param_card.dat',.false.) 2745 return 2746 end 2747 """ 2748 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 2749 ff.write(text) 2750 ff.close() 2751 2752 # Modify setrun.f 2753 text = open(os.path.join(self.dir_path,'Source','setrun.f')).read() 2754 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 2755 fsock = open(os.path.join(self.dir_path,'Source','setrun.f'), 'w') 2756 fsock.write(text) 2757 fsock.close() 2758 2759 # Modify initialization.f 2760 text = open(os.path.join(self.dir_path,'SubProcesses','initialization.f')).read() 2761 text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)') 2762 fsock = open(os.path.join(self.dir_path,'SubProcesses','initialization.f'), 'w') 2763 fsock.write(text) 2764 fsock.close() 2765 2766 2767 self.make_model_symbolic_link()
2768 2769 #=========================================================================== 2770 # generate_subprocess_directory 2771 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model, number):
        """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format,
        including the necessary matrix.f and nexternal.inc files"""

        cwd = os.getcwd()
        # NOTE(review): debug print left in the code — consider removing
        misc.sprint(type(matrix_element))
        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = os.path.join(self.dir_path, 'SubProcesses', \
                       "P%s" % matrix_element.get('processes')[0].shell_string())

        try:
            os.mkdir(dirpath)
        except os.error as error:
            # directory may already exist (re-generation): warn and reuse it
            logger.warning(error.strerror + " " + dirpath)

        #try:
        #    os.chdir(dirpath)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % dirpath)
        #    return 0

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        filename = pjoin(dirpath,'matrix.f')
        calls,ncolor = self.write_matrix_element_v4(
                                writers.FortranWriter(filename),
                                matrix_element,
                                fortran_model)

        # differential cross-section driver
        filename = pjoin(dirpath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        # phase-space channel configurations (also needed below for props.inc)
        filename = pjoin(dirpath, 'configs.inc')
        mapconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        # Les Houches flavour/colour-flow information
        filename = pjoin(dirpath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  matrix_element)

        # propagator masses/widths for the channels found above
        filename = pjoin(dirpath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(matrix_element.get_all_amplitudes()))

        filename = pjoin(dirpath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                ncolor,
                                len(matrix_element.get('processes')),
                                1)

        # MadWeight-specific include with max_configs/channel position
        filename = pjoin(dirpath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                   len(matrix_element.get('diagrams')),
                                   )

        # Generate diagrams
        filename = pjoin(dirpath, "matrix.ps")
        plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                             get('diagrams'),
                                          filename,
                                          model=matrix_element.get('processes')[0].\
                                             get('model'),
                                          amplitude='')
        logger.info("Generating Feynman diagrams for " + \
                    matrix_element.get('processes')[0].nice_string())
        plot.draw()

        #import genps.inc and maxconfigs.inc into Subprocesses
        ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False)
        #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False)

        # shared files linked from the SubProcesses level into this Pxxxxx dir
        linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f', 'genps.inc']

        # NOTE(review): links are created relative to the current working
        # directory (starting_dir=cwd), not to dirpath — verify intended
        for file in linkfiles:
            ln('../%s' % file, starting_dir=cwd)

        # make some of the generated includes visible to Source / parent dir
        ln('nexternal.inc', '../../Source', log=False, cwd=dirpath)
        ln('leshouche.inc', '../../Source', log=False, cwd=dirpath)
        ln('maxamps.inc', '../../Source', log=False, cwd=dirpath)
        ln('phasespace.inc', '../', log=True, cwd=dirpath)
        # Return to original PWD
        #os.chdir(cwd)

        # number of (non-comment) helas calls, used for bookkeeping upstream
        if not calls:
            calls = 0
        return calls
2879 2880 #=========================================================================== 2881 # write_matrix_element_v4 2882 #===========================================================================
2883 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
2884 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 2885 2886 if not matrix_element.get('processes') or \ 2887 not matrix_element.get('diagrams'): 2888 return 0 2889 2890 if writer: 2891 if not isinstance(writer, writers.FortranWriter): 2892 raise writers.FortranWriter.FortranWriterError(\ 2893 "writer not FortranWriter") 2894 2895 # Set lowercase/uppercase Fortran code 2896 writers.FortranWriter.downcase = False 2897 2898 replace_dict = {} 2899 2900 # Extract version number and date from VERSION file 2901 info_lines = self.get_mg5_info_lines() 2902 replace_dict['info_lines'] = info_lines 2903 2904 # Extract process info lines 2905 process_lines = self.get_process_info_lines(matrix_element) 2906 replace_dict['process_lines'] = process_lines 2907 2908 # Set proc_id 2909 replace_dict['proc_id'] = proc_id 2910 2911 # Extract number of external particles 2912 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2913 replace_dict['nexternal'] = nexternal 2914 2915 # Extract ncomb 2916 ncomb = matrix_element.get_helicity_combinations() 2917 replace_dict['ncomb'] = ncomb 2918 2919 # Extract helicity lines 2920 helicity_lines = self.get_helicity_lines(matrix_element) 2921 replace_dict['helicity_lines'] = helicity_lines 2922 2923 # Extract overall denominator 2924 # Averaging initial state color, spin, and identical FS particles 2925 den_factor_line = self.get_den_factor_line(matrix_element) 2926 replace_dict['den_factor_line'] = den_factor_line 2927 2928 # Extract ngraphs 2929 ngraphs = matrix_element.get_number_of_amplitudes() 2930 replace_dict['ngraphs'] = ngraphs 2931 2932 # Extract nwavefuncs 2933 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2934 replace_dict['nwavefuncs'] = nwavefuncs 2935 2936 # Extract ncolor 2937 ncolor = max(1, len(matrix_element.get('color_basis'))) 2938 replace_dict['ncolor'] = ncolor 2939 2940 # Extract color data lines 2941 color_data_lines = self.get_color_data_lines(matrix_element) 2942 
replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2943 2944 # Extract helas calls 2945 helas_calls = fortran_model.get_matrix_element_calls(\ 2946 matrix_element) 2947 2948 replace_dict['helas_calls'] = "\n".join(helas_calls) 2949 2950 # Extract JAMP lines 2951 jamp_lines = self.get_JAMP_lines(matrix_element) 2952 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2953 2954 replace_dict['template_file'] = os.path.join(_file_path, \ 2955 'iolibs/template_files/%s' % self.matrix_file) 2956 replace_dict['template_file2'] = '' 2957 2958 if writer: 2959 file = open(replace_dict['template_file']).read() 2960 file = file % replace_dict 2961 # Write the file 2962 writer.writelines(file) 2963 return len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor 2964 else: 2965 replace_dict['return_value'] = (len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor)
2966 2967 #=========================================================================== 2968 # write_source_makefile 2969 #===========================================================================
2970 - def write_source_makefile(self, writer):
2971 """Write the nexternal.inc file for madweight""" 2972 2973 2974 path = os.path.join(_file_path,'iolibs','template_files','madweight_makefile_source') 2975 set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)' 2976 text = open(path).read() % {'libraries': set_of_lib} 2977 writer.write(text) 2978 2979 return True
2980
2981 - def write_phasespace_file(self, writer, nb_diag):
2982 """ """ 2983 2984 template = """ include 'maxparticles.inc' 2985 integer max_branches 2986 parameter (max_branches=max_particles-1) 2987 integer max_configs 2988 parameter (max_configs=%(nb_diag)s) 2989 2990 c channel position 2991 integer config_pos,perm_pos 2992 common /to_config/config_pos,perm_pos 2993 2994 """ 2995 2996 writer.write(template % {'nb_diag': nb_diag})
2997 2998 2999 #=========================================================================== 3000 # write_auto_dsig_file 3001 #===========================================================================
3002 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
3003 """Write the auto_dsig.f file for the differential cross section 3004 calculation, includes pdf call information (MadWeight format)""" 3005 3006 if not matrix_element.get('processes') or \ 3007 not matrix_element.get('diagrams'): 3008 return 0 3009 3010 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 3011 3012 if ninitial < 1 or ninitial > 2: 3013 raise writers.FortranWriter.FortranWriterError, \ 3014 """Need ninitial = 1 or 2 to write auto_dsig file""" 3015 3016 replace_dict = {} 3017 3018 # Extract version number and date from VERSION file 3019 info_lines = self.get_mg5_info_lines() 3020 replace_dict['info_lines'] = info_lines 3021 3022 # Extract process info lines 3023 process_lines = self.get_process_info_lines(matrix_element) 3024 replace_dict['process_lines'] = process_lines 3025 3026 # Set proc_id 3027 replace_dict['proc_id'] = proc_id 3028 replace_dict['numproc'] = 1 3029 3030 # Set dsig_line 3031 if ninitial == 1: 3032 # No conversion, since result of decay should be given in GeV 3033 dsig_line = "pd(0)*dsiguu" 3034 else: 3035 # Convert result (in GeV) to pb 3036 dsig_line = "pd(0)*conv*dsiguu" 3037 3038 replace_dict['dsig_line'] = dsig_line 3039 3040 # Extract pdf lines 3041 pdf_vars, pdf_data, pdf_lines = \ 3042 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 3043 replace_dict['pdf_vars'] = pdf_vars 3044 replace_dict['pdf_data'] = pdf_data 3045 replace_dict['pdf_lines'] = pdf_lines 3046 3047 # Lines that differ between subprocess group and regular 3048 if proc_id: 3049 replace_dict['numproc'] = int(proc_id) 3050 replace_dict['passcuts_begin'] = "" 3051 replace_dict['passcuts_end'] = "" 3052 # Set lines for subprocess group version 3053 # Set define_iconfigs_lines 3054 replace_dict['define_subdiag_lines'] = \ 3055 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 3056 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 3057 else: 3058 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 3059 replace_dict['passcuts_end'] = "ENDIF" 3060 
replace_dict['define_subdiag_lines'] = "" 3061 3062 if writer: 3063 file = open(os.path.join(_file_path, \ 3064 'iolibs/template_files/auto_dsig_mw.inc')).read() 3065 3066 file = file % replace_dict 3067 # Write the file 3068 writer.writelines(file) 3069 else: 3070 return replace_dict
3071 #=========================================================================== 3072 # write_configs_file 3073 #===========================================================================
3074 - def write_configs_file(self, writer, matrix_element):
3075 """Write the configs.inc file for MadEvent""" 3076 3077 # Extract number of external particles 3078 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3079 3080 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 3081 mapconfigs = [c[0] for c in configs] 3082 model = matrix_element.get('processes')[0].get('model') 3083 return mapconfigs, self.write_configs_file_from_diagrams(writer, 3084 [[c[1]] for c in configs], 3085 mapconfigs, 3086 nexternal, ninitial,matrix_element, model)
3087 3088 #=========================================================================== 3089 # write_run_configs_file 3090 #===========================================================================
3091 - def write_run_config_file(self, writer):
3092 """Write the run_configs.inc file for MadWeight""" 3093 3094 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 3095 text = open(path).read() % {'chanperjob':'5'} 3096 writer.write(text) 3097 return True
3098 3099 #=========================================================================== 3100 # write_configs_file_from_diagrams 3101 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, matrix_element, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns the list of (s-channels, t-channels) per written config,
        reused later (e.g. for props.inc)."""

        lines = []

        # PDG -> particle lookup for masses/widths of propagators
        particle_dict = matrix_element.get('processes')[0].get('model').\
                        get('particle_dict')

        s_and_t_channels = []

        # Largest vertex multiplicity appearing in each config; configs with
        # vertices larger than the minimum are skipped below
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]

        minvert = min(vert_list) if vert_list!=[] else 0
        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # fake PDG code available for multiparticle vertices
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any([vert > minvert for vert in
                    [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
                # Only 3-vertices allowed in configs.inc
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial,model,new_pdg))
                else:
                    # placeholder shared with empty_verts, resized below
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = zip(*[s for s,t in stchannels])
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            #lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("* %d %d " % (nconfigs,
                                       mapconfigs[iconfig]))

            for verts in allchannels:
                if verts in schannels:
                    # s-channel: one vertex per subprocess, take first real one
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                # "<propagator number> <daughter1> <daughter2>"
                line=str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1])
                # lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                #             (last_leg.get('number'), nconfigs, len(daughters),
                #              ",".join([str(d) for d in daughters])))

                if last_leg.get('id') == 21 and 21 not in particle_dict:
                    # Fake propagator used in multiparticle vertices
                    mass = 'zero'
                    width = 'zero'
                    pow_part = 0
                else:
                    if (last_leg.get('id')!=7):
                        particle = particle_dict[last_leg.get('id')]
                        # Get mass
                        mass = particle.get('mass')
                        # Get width
                        width = particle.get('width')
                    else : # fake propagator used in multiparticle vertices
                        mass= 'zero'
                        width= 'zero'

                line=line+" "+mass+" "+width+" "

                # tag the propagator as s- or t-channel and give its PDG id
                if verts in schannels:
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append(line+" S "+str(last_leg.get('id')))
                    # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                    #             (last_leg.get('number'), nconfigs, nsubprocs,
                    #              ",".join([str(d) for d in pdgs])))
                    # lines.append("data tprid(%d,%d)/0/" % \
                    #             (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append(line+" T "+str(last_leg.get('id')))
                    # lines.append("data tprid(%d,%d)/%d/" % \
                    #             (last_leg.get('number'), nconfigs,
                    #              abs(last_leg.get('id'))))
                    # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                    #             (last_leg.get('number'), nconfigs, nsubprocs,
                    #              ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        # lines.append("# Number of configs")
        # lines.append("data mapconfig(0)/%d/" % nconfigs)
        lines.append(" * ") # a line with just a star indicates this is the end of file
        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
3244
3245 3246 #=============================================================================== 3247 # ProcessExporterFortranME 3248 #=============================================================================== 3249 -class ProcessExporterFortranME(ProcessExporterFortran):
3250 """Class to take care of exporting a set of matrix elements to 3251 MadEvent format.""" 3252 3253 matrix_file = "matrix_madevent_v4.inc" 3254
3255 - def copy_template(self, model):
3256 """Additional actions needed for setup of Template 3257 """ 3258 3259 super(ProcessExporterFortranME, self).copy_template(model) 3260 3261 # File created from Template (Different in some child class) 3262 filename = pjoin(self.dir_path,'Source','run_config.inc') 3263 self.write_run_config_file(writers.FortranWriter(filename)) 3264 3265 # The next file are model dependant (due to SLAH convention) 3266 self.model_name = model.get('name') 3267 # Add the symmetry.f 3268 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3269 self.write_symmetry(writers.FortranWriter(filename)) 3270 # 3271 filename = pjoin(self.dir_path,'SubProcesses','addmothers.f') 3272 self.write_addmothers(writers.FortranWriter(filename)) 3273 # Copy the different python file in the Template 3274 self.copy_python_file()
3275 3276 3277 3278 3279 3280 #=========================================================================== 3281 # generate_subprocess_directory 3282 #===========================================================================
3283 - def copy_python_file(self):
3284 """copy the python file require for the Template""" 3285 3286 # madevent interface 3287 cp(_file_path+'/interface/madevent_interface.py', 3288 self.dir_path+'/bin/internal/madevent_interface.py') 3289 cp(_file_path+'/interface/extended_cmd.py', 3290 self.dir_path+'/bin/internal/extended_cmd.py') 3291 cp(_file_path+'/interface/common_run_interface.py', 3292 self.dir_path+'/bin/internal/common_run_interface.py') 3293 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3294 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3295 cp(_file_path+'/iolibs/save_load_object.py', 3296 self.dir_path+'/bin/internal/save_load_object.py') 3297 cp(_file_path+'/iolibs/file_writers.py', 3298 self.dir_path+'/bin/internal/file_writers.py') 3299 #model file 3300 cp(_file_path+'../models/check_param_card.py', 3301 self.dir_path+'/bin/internal/check_param_card.py') 3302 3303 #copy all the file present in madevent directory 3304 for name in os.listdir(pjoin(_file_path, 'madevent')): 3305 if name not in ['__init__.py'] and name.endswith('.py'): 3306 cp(_file_path+'/madevent/'+name, self.dir_path+'/bin/internal/') 3307 3308 #madevent file 3309 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3310 cp(_file_path+'/various/lhe_parser.py', 3311 self.dir_path+'/bin/internal/lhe_parser.py') 3312 cp(_file_path+'/various/banner.py', 3313 self.dir_path+'/bin/internal/banner.py') 3314 cp(_file_path+'/various/histograms.py', 3315 self.dir_path+'/bin/internal/histograms.py') 3316 cp(_file_path+'/various/plot_djrs.py', 3317 self.dir_path+'/bin/internal/plot_djrs.py') 3318 cp(_file_path+'/various/systematics.py', self.dir_path+'/bin/internal/systematics.py') 3319 3320 cp(_file_path+'/various/cluster.py', 3321 self.dir_path+'/bin/internal/cluster.py') 3322 cp(_file_path+'/madevent/combine_runs.py', 3323 self.dir_path+'/bin/internal/combine_runs.py') 3324 # logging configuration 3325 cp(_file_path+'/interface/.mg5_logging.conf', 3326 
self.dir_path+'/bin/internal/me5_logging.conf') 3327 cp(_file_path+'/interface/coloring_logging.py', 3328 self.dir_path+'/bin/internal/coloring_logging.py') 3329 # shower card and FO_analyse_card. 3330 # Although not needed, it is imported by banner.py 3331 cp(_file_path+'/various/shower_card.py', 3332 self.dir_path+'/bin/internal/shower_card.py') 3333 cp(_file_path+'/various/FO_analyse_card.py', 3334 self.dir_path+'/bin/internal/FO_analyse_card.py')
3335 3336
    def convert_model(self, model, wanted_lorentz = [],
                      wanted_couplings = []):
        """Write the model files and embed a copy of the UFO model inside
        the process directory (bin/internal/ufomodel), so the output stays
        usable without the original model installation."""

        super(ProcessExporterFortranME,self).convert_model(model,
                                               wanted_lorentz, wanted_couplings)

        IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~')
        try:
            # remove any copy left over from a previous export
            shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel'))
        except OSError as error:
            # directory did not exist yet: nothing to clean up
            pass
        model_path = model.get('modelpath')
        # This is not safe if there is a '##' or '-' in the path.
        shutil.copytree(model_path,
                        pjoin(self.dir_path,'bin','internal','ufomodel'),
                        ignore=shutil.ignore_patterns(*IGNORE_PATTERNS))
        if hasattr(model, 'restrict_card'):
            out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel',
                             'restrict_default.dat')
            if isinstance(model.restrict_card, check_param_card.ParamCard):
                # restriction already loaded in memory: dump it to disk
                model.restrict_card.write(out_path)
            else:
                # restriction given as a path: copy the file over
                files.cp(model.restrict_card, out_path)
3360 3361 #=========================================================================== 3362 # export model files 3363 #===========================================================================
3364 - def export_model_files(self, model_path):
3365 """export the model dependent files""" 3366 3367 super(ProcessExporterFortranME,self).export_model_files(model_path) 3368 3369 # Add the routine update_as_param in v4 model 3370 # This is a function created in the UFO 3371 text=""" 3372 subroutine update_as_param() 3373 call setpara('param_card.dat',.false.) 3374 return 3375 end 3376 """ 3377 ff = open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3378 ff.write(text) 3379 ff.close() 3380 3381 # Add the symmetry.f 3382 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3383 self.write_symmetry(writers.FortranWriter(filename), v5=False) 3384 3385 # Modify setrun.f 3386 text = open(pjoin(self.dir_path,'Source','setrun.f')).read() 3387 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3388 fsock = open(pjoin(self.dir_path,'Source','setrun.f'), 'w') 3389 fsock.write(text) 3390 fsock.close() 3391 3392 self.make_model_symbolic_link()
3393 3394 #=========================================================================== 3395 # generate_subprocess_directory 3396 #===========================================================================
def generate_subprocess_directory(self, matrix_element,
                                  fortran_model,
                                  me_number):
    """Generate the Pxxxxx directory for a subprocess in MG4 madevent,
    including the necessary matrix.f and various helper files.

    matrix_element: the HelasMatrixElement to export.
    fortran_model: helas call writer used to produce the HELAS calls.
    me_number: sequential number of this matrix element (used for
        iproc.dat and as subproc_number in matrix.f).
    Returns the number of generated helas calls (0 if none).
    """

    cwd = os.getcwd()  # NOTE(review): unused since the os.chdir calls below were commented out
    path = pjoin(self.dir_path, 'SubProcesses')

    # Lazily bind the model from the first process if not already set
    if not self.model:
        self.model = matrix_element.get('processes')[0].get('model')

    #os.chdir(path)
    # Create the directory PN_xx_xxxxx in the specified path
    subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string()
    try:
        os.mkdir(pjoin(path, subprocdir))
    except os.error as error:
        # Directory may already exist; warn and continue so the files
        # inside get regenerated
        logger.warning(error.strerror + " " + subprocdir)

    #try:
    #    os.chdir(subprocdir)
    #except os.error:
    #    logger.error('Could not cd to directory %s' % subprocdir)
    #    return 0

    logger.info('Creating files in directory %s' % subprocdir)
    Ppath = pjoin(path, subprocdir)

    # Extract number of external particles
    (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

    # Add the driver.f
    ncomb = matrix_element.get_helicity_combinations()
    filename = pjoin(Ppath, 'driver.f')
    self.write_driver(writers.FortranWriter(filename), ncomb,
                      n_grouped_proc=1,
                      v5=self.opt['v5_model'])

    # Create the matrix.f file, auto_dsig.f file and all inc files
    filename = pjoin(Ppath, 'matrix.f')
    calls, ncolor = \
        self.write_matrix_element_v4(writers.FortranWriter(filename),
                                     matrix_element, fortran_model,
                                     subproc_number=me_number)

    filename = pjoin(Ppath, 'auto_dsig.f')
    self.write_auto_dsig_file(writers.FortranWriter(filename),
                              matrix_element)

    # configs.inc also yields the s/t channel decomposition and the QCD
    # order per config, both reused by several of the files below
    filename = pjoin(Ppath, 'configs.inc')
    mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(
        writers.FortranWriter(filename),
        matrix_element)

    filename = pjoin(Ppath, 'config_nqcd.inc')
    self.write_config_nqcd_file(writers.FortranWriter(filename),
                                nqcd_list)

    filename = pjoin(Ppath, 'config_subproc_map.inc')
    self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                       s_and_t_channels)

    filename = pjoin(Ppath, 'coloramps.inc')
    self.write_coloramps_file(writers.FortranWriter(filename),
                              mapconfigs,
                              matrix_element)

    filename = pjoin(Ppath, 'get_color.f')
    self.write_colors_file(writers.FortranWriter(filename),
                           matrix_element)

    filename = pjoin(Ppath, 'decayBW.inc')
    self.write_decayBW_file(writers.FortranWriter(filename),
                            s_and_t_channels)

    filename = pjoin(Ppath, 'dname.mg')
    self.write_dname_file(writers.FileWriter(filename),
                          "P" + matrix_element.get('processes')[0].shell_string())

    filename = pjoin(Ppath, 'iproc.dat')
    self.write_iproc_file(writers.FortranWriter(filename),
                          me_number)

    filename = pjoin(Ppath, 'leshouche.inc')
    self.write_leshouche_file(writers.FortranWriter(filename),
                              matrix_element)

    filename = pjoin(Ppath, 'maxamps.inc')
    self.write_maxamps_file(writers.FortranWriter(filename),
                            len(matrix_element.get('diagrams')),
                            ncolor,
                            len(matrix_element.get('processes')),
                            1)

    filename = pjoin(Ppath, 'mg.sym')
    self.write_mg_sym_file(writers.FortranWriter(filename),
                           matrix_element)

    filename = pjoin(Ppath, 'ncombs.inc')
    self.write_ncombs_file(writers.FortranWriter(filename),
                           nexternal)

    filename = pjoin(Ppath, 'nexternal.inc')
    self.write_nexternal_file(writers.FortranWriter(filename),
                              nexternal, ninitial)

    filename = pjoin(Ppath, 'ngraphs.inc')
    self.write_ngraphs_file(writers.FortranWriter(filename),
                            len(mapconfigs))

    filename = pjoin(Ppath, 'pmass.inc')
    self.write_pmass_file(writers.FortranWriter(filename),
                          matrix_element)

    filename = pjoin(Ppath, 'props.inc')
    self.write_props_file(writers.FortranWriter(filename),
                          matrix_element,
                          s_and_t_channels)

    # Find config symmetries and permutations
    symmetry, perms, ident_perms = \
        diagram_symmetry.find_symmetry(matrix_element)

    filename = pjoin(Ppath, 'symswap.inc')
    self.write_symswap_file(writers.FortranWriter(filename),
                            ident_perms)

    filename = pjoin(Ppath, 'symfact_orig.dat')
    self.write_symfact_file(open(filename, 'w'), symmetry)

    # Generate diagrams
    filename = pjoin(Ppath, "matrix.ps")
    plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').
                                      get('diagrams'),
                                      filename,
                                      model=matrix_element.get('processes')[0].
                                      get('model'),
                                      amplitude=True)
    logger.info("Generating Feynman diagrams for " +
                matrix_element.get('processes')[0].nice_string())
    plot.draw()

    self.link_files_in_SubProcess(Ppath)

    # import nexternal/leshouche in Source
    ln(pjoin(Ppath, 'nexternal.inc'), pjoin(self.dir_path, 'Source'), log=False)
    ln(pjoin(Ppath, 'leshouche.inc'), pjoin(self.dir_path, 'Source'), log=False)
    ln(pjoin(Ppath, 'maxamps.inc'), pjoin(self.dir_path, 'Source'), log=False)
    # Return to SubProcesses dir
    #os.chdir(os.path.pardir)

    # Add subprocess to subproc.mg
    filename = pjoin(path, 'subproc.mg')
    files.append_to_file(filename,
                         self.write_subproc,
                         subprocdir)

    # Return to original dir
    #os.chdir(cwd)

    # Generate info page
    gen_infohtml.make_info_html(self.dir_path)

    # Normalize a falsy call count to 0 before returning
    if not calls:
        calls = 0
    return calls
3567 3604 3605
def finalize(self, matrix_elements, history, mg5options, flaglist):
    """Finalize ME v4 directory by creating jpeg diagrams, html
    pages, proc_card_mg5.dat and madevent.tar.gz.

    matrix_elements: the generated matrix elements (multiprocess object
        or list of them, see the try/except below).
    history: command history, written out as proc_card_mg5.dat.
    mg5options: dict of MG5 options (compilers, complex_mass_scheme, ...).
    flaglist: list of flags; 'nojpeg' disables diagram jpegs, 'online'
        writes the Online marker file.
    """

    # Decode the option flags
    if 'nojpeg' in flaglist:
        makejpg = False
    else:
        makejpg = True
    if 'online' in flaglist:
        online = True
    else:
        online = False

    compiler = {'fortran': mg5options['fortran_compiler'],
                'cpp': mg5options['cpp_compiler'],
                'f2py': mg5options['f2py_compiler']}

    # indicate that the output type is not grouped
    if not isinstance(self, ProcessExporterFortranMEGroup):
        self.proc_characteristic['grouped_matrix'] = False
    self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme']

    # For the MSSM, convert the param_card to the MG5 format first
    modelname = self.opt['model']
    if modelname == 'mssm' or modelname.startswith('mssm-'):
        param_card = pjoin(self.dir_path, 'Cards', 'param_card.dat')
        mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat')
        check_param_card.convert_to_mg5card(param_card, mg5_param)
        check_param_card.check_valid_param_card(mg5_param)

    # Add the combine_events.f modify param_card path/number of @X
    filename = pjoin(self.dir_path, 'Source', 'combine_events.f')
    try:
        # matrix_elements is a list of multiprocesses
        nb_proc = [p.get('id') for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')]
    except AttributeError:
        # matrix_elements is a single multiprocess object
        nb_proc = [p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')]
    nb_proc = len(set(nb_proc))
    self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted
    # Write maxconfigs.inc based on max of ME's/subprocess groups
    filename = pjoin(self.dir_path, 'Source', 'maxconfigs.inc')
    self.write_maxconfigs_file(writers.FortranWriter(filename),
                               matrix_elements)

    # Write maxparticles.inc based on max of ME's/subprocess groups
    filename = pjoin(self.dir_path, 'Source', 'maxparticles.inc')
    self.write_maxparticles_file(writers.FortranWriter(filename),
                                 matrix_elements)

    # Touch "done" file
    os.system('touch %s/done' % pjoin(self.dir_path, 'SubProcesses'))

    # Check for compiler
    self.set_compiler(compiler)
    self.set_cpp_compiler(compiler['cpp'])

    old_pos = os.getcwd()  # NOTE(review): unused since the os.chdir calls were commented out
    subpath = pjoin(self.dir_path, 'SubProcesses')

    # All generated Pxxxxx subprocess directories
    P_dir_list = [proc for proc in os.listdir(subpath)
                  if os.path.isdir(pjoin(subpath, proc)) and proc[0] == 'P']

    devnull = os.open(os.devnull, os.O_RDWR)
    # Convert the poscript in jpg files (if authorize)
    if makejpg:
        try:
            os.remove(pjoin(self.dir_path, 'HTML', 'card.jpg'))
        except Exception, error:
            # best effort: the file may simply not exist yet
            pass

        # ghostscript is required for the conversion
        if misc.which('gs'):
            logger.info("Generate jpeg diagrams")
            for Pdir in P_dir_list:
                misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')],
                          stdout=devnull, cwd=pjoin(subpath, Pdir))

    logger.info("Generate web pages")
    # Create the WebPage using perl script

    misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
              stdout=devnull, cwd=pjoin(self.dir_path))

    #os.chdir(os.path.pardir)

    obj = gen_infohtml.make_info_html(self.dir_path)

    if online:
        nb_channel = obj.rep_rule['nb_gen_diag']
        open(pjoin(self.dir_path, 'Online'), 'w').write(str(nb_channel))
    #add the information to proc_charac
    self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag']

    # Write command history as proc_card_mg5
    if os.path.isdir(pjoin(self.dir_path, 'Cards')):
        output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat')
        history.write(output_file)

    misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
              stdout=devnull)

    #crate the proc_characteristic file
    self.create_proc_charac(matrix_elements, history)

    # create the run_card
    ProcessExporterFortran.finalize(self, matrix_elements, history, mg5options, flaglist)

    # Run "make" to generate madevent.tar.gz file
    if os.path.exists(pjoin(self.dir_path, 'SubProcesses', 'subproc.mg')):
        if os.path.exists(pjoin(self.dir_path, 'madevent.tar.gz')):
            os.remove(pjoin(self.dir_path, 'madevent.tar.gz'))
        misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')],
                  stdout=devnull, cwd=self.dir_path)

    misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
              stdout=devnull, cwd=self.dir_path)
3720 3721 3722 3723 3724 3725 3726 #return to the initial dir 3727 #os.chdir(old_pos) 3728 3729 #=========================================================================== 3730 # write_matrix_element_v4 3731 #===========================================================================
def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                            proc_id="", config_map=None, subproc_number=""):
    """Export a matrix element to a matrix.f file in MG4 madevent format.

    writer: FortranWriter for matrix.f, or a false value to get the
        template replacement dictionary back instead.
    matrix_element: the HelasMatrixElement to export.
    fortran_model: helas call writer producing the HELAS calls.
    proc_id: subprocess-group id (empty string for standard running).
    config_map: optional config mapping forwarded to get_amp2_lines.
    subproc_number: subprocess number (kept for interface compatibility).
    Returns (ncalls, ncolor) when writer is given, otherwise the
    replacement dictionary; returns 0 for an empty matrix element.
    """

    # Fix: the original used the mutable default `config_map=[]`,
    # which is shared between calls; normalize a None default instead.
    if config_map is None:
        config_map = []

    if not matrix_element.get('processes') or \
           not matrix_element.get('diagrams'):
        return 0

    if writer:
        if not isinstance(writer, writers.FortranWriter):
            raise writers.FortranWriter.FortranWriterError(
                "writer not FortranWriter")
        # Set lowercase/uppercase Fortran code
        writers.FortranWriter.downcase = False

    # The proc prefix is not used for MadEvent output so it can safely be set
    # to an empty string.
    replace_dict = {'proc_prefix': ''}

    # Extract helas calls
    helas_calls = fortran_model.get_matrix_element_calls(
        matrix_element)

    replace_dict['helas_calls'] = "\n".join(helas_calls)

    # Extract version number and date from VERSION file
    info_lines = self.get_mg5_info_lines()
    replace_dict['info_lines'] = info_lines

    # Extract process info lines
    process_lines = self.get_process_info_lines(matrix_element)
    replace_dict['process_lines'] = process_lines

    # Set proc_id
    replace_dict['proc_id'] = proc_id

    # Extract ncomb
    ncomb = matrix_element.get_helicity_combinations()
    replace_dict['ncomb'] = ncomb

    # Extract helicity lines
    helicity_lines = self.get_helicity_lines(matrix_element)
    replace_dict['helicity_lines'] = helicity_lines

    # Extract IC line
    ic_line = self.get_ic_line(matrix_element)
    replace_dict['ic_line'] = ic_line

    # Extract overall denominator
    # Averaging initial state color, spin, and identical FS particles
    den_factor_line = self.get_den_factor_line(matrix_element)
    replace_dict['den_factor_line'] = den_factor_line

    # Extract ngraphs
    ngraphs = matrix_element.get_number_of_amplitudes()
    replace_dict['ngraphs'] = ngraphs

    # Extract ndiags
    ndiags = len(matrix_element.get('diagrams'))
    replace_dict['ndiags'] = ndiags

    # Set define_iconfigs_lines
    replace_dict['define_iconfigs_lines'] = \
        """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG
COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG"""

    if proc_id:
        # Set lines for subprocess group version
        # Set define_iconfigs_lines
        replace_dict['define_iconfigs_lines'] += \
            """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
        # Set set_amp2_line
        replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \
                                        proc_id
    else:
        # Standard running
        # Set set_amp2_line
        replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT"

    # Extract nwavefuncs
    nwavefuncs = matrix_element.get_number_of_wavefunctions()
    replace_dict['nwavefuncs'] = nwavefuncs

    # Extract ncolor
    ncolor = max(1, len(matrix_element.get('color_basis')))
    replace_dict['ncolor'] = ncolor

    # Extract color data lines
    color_data_lines = self.get_color_data_lines(matrix_element)
    replace_dict['color_data_lines'] = "\n".join(color_data_lines)

    # Set the size of Wavefunction: spin codes 4 and 5 in the model
    # require the larger (18-component) wavefunction array
    if not self.model or any([p.get('spin') in [4, 5]
                              for p in self.model.get('particles') if p]):
        replace_dict['wavefunctionsize'] = 18
    else:
        replace_dict['wavefunctionsize'] = 6

    # Extract amp2 lines
    amp2_lines = self.get_amp2_lines(matrix_element, config_map)
    replace_dict['amp2_lines'] = '\n'.join(amp2_lines)

    # The JAMP definition depends on the splitting order
    split_orders = matrix_element.get('processes')[0].get('split_orders')
    if len(split_orders) > 0:
        squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
        replace_dict['chosen_so_configs'] = self.set_chosen_SO_index(
            matrix_element.get('processes')[0], squared_orders)
    else:
        # Consider the output of a dummy order 'ALL_ORDERS' for which we
        # set all amplitude order to weight 1 and only one squared order
        # contribution which is of course ALL_ORDERS=2.
        squared_orders = [(2,), ]
        amp_orders = [((1,), tuple(range(1, ngraphs + 1)))]
        replace_dict['chosen_so_configs'] = '.TRUE.'

    replace_dict['nAmpSplitOrders'] = len(amp_orders)
    replace_dict['nSqAmpSplitOrders'] = len(squared_orders)
    replace_dict['split_order_str_list'] = str(split_orders)
    replace_dict['nSplitOrders'] = max(len(split_orders), 1)
    amp_so = self.get_split_orders_lines(
        [amp_order[0] for amp_order in amp_orders], 'AMPSPLITORDERS')
    sqamp_so = self.get_split_orders_lines(squared_orders, 'SQSPLITORDERS')
    replace_dict['ampsplitorders'] = '\n'.join(amp_so)
    replace_dict['sqsplitorders'] = '\n'.join(sqamp_so)

    # Extract JAMP lines
    # If no split_orders then artificially add one entry called 'ALL_ORDERS'
    jamp_lines = self.get_JAMP_lines_split_order(
        matrix_element, amp_orders, split_order_names=
        split_orders if len(split_orders) > 0 else ['ALL_ORDERS'])
    replace_dict['jamp_lines'] = '\n'.join(jamp_lines)

    replace_dict['template_file'] = pjoin(_file_path,
        'iolibs/template_files/%s' % self.matrix_file)
    replace_dict['template_file2'] = pjoin(_file_path,
        'iolibs/template_files/split_orders_helping_functions.inc')

    # Count the real helas calls; lines starting with '#' are comments.
    # (A list comprehension instead of len(filter(...)), which only
    # works on Python 2 where filter returns a list.)
    ncalls = len([call for call in helas_calls if call.find('#') != 0])

    if writer:
        # 'text' instead of 'file' to avoid shadowing the py2 builtin
        text = open(replace_dict['template_file']).read()
        text = text % replace_dict
        # Add the split orders helper functions.
        text = text + '\n' + open(replace_dict['template_file2'])\
               .read() % replace_dict
        # Write the file
        writer.writelines(text)
        return ncalls, ncolor
    else:
        replace_dict['return_value'] = (ncalls, ncolor)
        return replace_dict
3884 3885 #=========================================================================== 3886 # write_auto_dsig_file 3887 #===========================================================================
def write_auto_dsig_file(self, writer, matrix_element, proc_id=""):
    """Write the auto_dsig.f file for the differential cross section
    calculation, includes pdf call information.

    writer: FortranWriter for auto_dsig.f, or a false value to get back
        (replace_dict, context) instead of writing.
    matrix_element: the HelasMatrixElement to export.
    proc_id: subprocess-group id (empty string for standard running).
    Returns 0 for an empty matrix element; otherwise writes the file
    (writer given) or returns (replace_dict, context).
    """

    if not matrix_element.get('processes') or \
           not matrix_element.get('diagrams'):
        return 0

    nexternal, ninitial = matrix_element.get_nexternal_ninitial()
    # Track global process characteristics across all exported MEs
    self.proc_characteristic['ninitial'] = ninitial
    self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal)

    # Add information relevant for MLM matching:
    # Maximum QCD power in all the contributions
    max_qcd_order = 0
    for diag in matrix_element.get('diagrams'):
        orders = diag.calculate_orders()
        if 'QCD' in orders:
            max_qcd_order = max(max_qcd_order, orders['QCD'])
    # Count massless colored final-state legs, maximised over processes
    max_n_light_final_partons = max(len([1 for id in proc.get_final_ids()
                                         if proc.get('model').get_particle(id).get('mass') == 'ZERO' and
                                         proc.get('model').get_particle(id).get('color') > 1])
                                    for proc in matrix_element.get('processes'))
    # Maximum number of final state light jets to be matched
    self.proc_characteristic['max_n_matched_jets'] = max(
        self.proc_characteristic['max_n_matched_jets'],
        min(max_qcd_order, max_n_light_final_partons))

    # List of default pdgs to be considered for the CKKWl merging cut
    self.proc_characteristic['colored_pdgs'] = \
        sorted(list(set([abs(p.get('pdg_code')) for p in
                         matrix_element.get('processes')[0].get('model').get('particles') if
                         p.get('color') > 1])))

    if ninitial < 1 or ninitial > 2:
        raise writers.FortranWriter.FortranWriterError, \
              """Need ninitial = 1 or 2 to write auto_dsig file"""

    replace_dict = {}

    # Extract version number and date from VERSION file
    info_lines = self.get_mg5_info_lines()
    replace_dict['info_lines'] = info_lines

    # Extract process info lines
    process_lines = self.get_process_info_lines(matrix_element)
    replace_dict['process_lines'] = process_lines

    # Set proc_id
    replace_dict['proc_id'] = proc_id
    replace_dict['numproc'] = 1

    # Set dsig_line
    if ninitial == 1:
        # No conversion, since result of decay should be given in GeV
        dsig_line = "pd(0)*dsiguu"
    else:
        # Convert result (in GeV) to pb
        dsig_line = "pd(0)*conv*dsiguu"

    replace_dict['dsig_line'] = dsig_line

    # Extract pdf lines
    pdf_vars, pdf_data, pdf_lines = \
        self.get_pdf_lines(matrix_element, ninitial, proc_id != "")
    replace_dict['pdf_vars'] = pdf_vars
    replace_dict['pdf_data'] = pdf_data
    replace_dict['pdf_lines'] = pdf_lines

    # Lines that differ between subprocess group and regular
    if proc_id:
        replace_dict['numproc'] = int(proc_id)
        replace_dict['passcuts_begin'] = ""
        replace_dict['passcuts_end'] = ""
        # Set lines for subprocess group version
        # Set define_iconfigs_lines
        replace_dict['define_subdiag_lines'] = \
            """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
        replace_dict['cutsdone'] = ""
    else:
        replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN"
        replace_dict['passcuts_end'] = "ENDIF"
        replace_dict['define_subdiag_lines'] = ""
        replace_dict['cutsdone'] = " cutsdone=.false.\n cutspassed=.false."

    # Grouped output reads/writes good helicities elsewhere
    if not isinstance(self, ProcessExporterFortranMEGroup):
        ncomb = matrix_element.get_helicity_combinations()
        replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb)
    else:
        replace_dict['read_write_good_hel'] = ""

    context = {'read_write_good_hel': True}

    if writer:
        file = open(pjoin(_file_path,
                          'iolibs/template_files/auto_dsig_v4.inc')).read()
        file = file % replace_dict

        # Write the file
        writer.writelines(file, context=context)
    else:
        return replace_dict, context
3991 #=========================================================================== 3992 # write_coloramps_file 3993 #===========================================================================
def write_coloramps_file(self, writer, mapconfigs, matrix_element):
    """Write the coloramps.inc file for MadEvent."""

    # Dimension of the icolamp array: (number of color flows, number of
    # configs, 1); at least one color flow even for a trivial basis
    n_flows = max(len(matrix_element.get('color_basis').keys()), 1)
    header = "logical icolamp(%d,%d,1)" % (n_flows, len(mapconfigs))

    body = self.get_icolamp_lines(mapconfigs, matrix_element, 1)

    # Write the file: declaration first, then the data lines
    writer.writelines([header] + body)

    return True
4007 4008 #=========================================================================== 4009 # write_colors_file 4010 #===========================================================================
def write_colors_file(self, writer, matrix_elements):
    """Write the get_color.f file for MadEvent, which returns color
    for all particles used in the matrix element.

    writer: writer for get_color.f.
    matrix_elements: a HelasMatrixElement or a list of them.
    Returns True.
    """

    # Accept a single matrix element as well as a list of them
    if isinstance(matrix_elements, helas_objects.HelasMatrixElement):
        matrix_elements = [matrix_elements]

    model = matrix_elements[0].get('processes')[0].get('model')

    # We need the both particle and antiparticle wf_ids, since the identity
    # depends on the direction of the wf.
    wf_ids = set(sum([sum([sum([[wf.get_pdg_code(), wf.get_anti_pdg_code()] \
                                for wf in d.get('wavefunctions')], []) \
                           for d in me.get('diagrams')], []) \
                      for me in matrix_elements], []))

    # Same for external legs, including those produced in decay chains
    leg_ids = set(sum([sum([sum([[l.get('id'),
                                  model.get_particle(l.get('id')).get_anti_pdg_code()] \
                                 for l in p.get_legs_with_decays()], []) \
                            for p in me.get('processes')], []) \
                       for me in matrix_elements], []))
    particle_ids = sorted(list(wf_ids.union(leg_ids)))

    # Build the Fortran get_color function as one big if/else-if chain,
    # one branch per PDG id appearing in the process
    lines = """function get_color(ipdg)
implicit none
integer get_color, ipdg

if(ipdg.eq.%d)then
get_color=%d
return
""" % (particle_ids[0], model.get_particle(particle_ids[0]).get_color())

    for part_id in particle_ids[1:]:
        lines += """else if(ipdg.eq.%d)then
get_color=%d
return
""" % (part_id, model.get_particle(part_id).get_color())
    # Dummy particle for multiparticle vertices with pdg given by
    # first code not in the model
    lines += """else if(ipdg.eq.%d)then
c This is dummy particle used in multiparticle vertices
get_color=2
return
""" % model.get_first_non_pdg()
    lines += """else
write(*,*)'Error: No color given for pdg ',ipdg
get_color=0
return
endif
end
"""

    # Write the file
    writer.writelines(lines)

    return True
4067 4068 #=========================================================================== 4069 # write_config_nqcd_file 4070 #===========================================================================
def write_config_nqcd_file(self, writer, nqcd_list):
    """Write the config_nqcd.inc file: one data statement per config
    giving its number of QCD couplings."""

    data_lines = ["data nqcd(%d)/%d/" % (pos + 1, count)
                  for pos, count in enumerate(nqcd_list)]

    # Write the file
    writer.writelines(data_lines)

    return True
4083 4084 #=========================================================================== 4085 # write_maxconfigs_file 4086 #===========================================================================
def write_maxconfigs_file(self, writer, matrix_elements):
    """Write the maxconfigs.inc file for MadEvent, declaring the
    lmaxconfigs parameter as the largest config count of any ME."""

    # Accept either a HelasMultiProcess or a plain list of MEs
    if isinstance(matrix_elements, helas_objects.HelasMultiProcess):
        me_list = matrix_elements.get('matrix_elements')
    else:
        me_list = matrix_elements
    maxconfigs = max([me.get_num_configs() for me in me_list])

    text = "integer lmaxconfigs\n" \
           "parameter(lmaxconfigs=%d)" % maxconfigs

    # Write the file
    writer.writelines(text)

    return True
4103 4104 #=========================================================================== 4105 # read_write_good_hel 4106 #===========================================================================
def read_write_good_hel(self, ncomb):
    """return the code to read/write the good_hel common_block

    ncomb: number of helicity combinations (sets the NCOMB parameter
        in the generated Fortran routines).
    Returns the Fortran source as a string.
    """

    convert = {'ncomb': ncomb}
    # Fortran helpers that persist/restore/reset the set of contributing
    # helicity combinations (GOODHEL) between runs
    output = """
subroutine write_good_hel(stream_id)
implicit none
integer stream_id
INTEGER NCOMB
PARAMETER ( NCOMB=%(ncomb)d)
LOGICAL GOODHEL(NCOMB)
INTEGER NTRY
common/BLOCK_GOODHEL/NTRY,GOODHEL
write(stream_id,*) GOODHEL
return
end


subroutine read_good_hel(stream_id)
implicit none
include 'genps.inc'
integer stream_id
INTEGER NCOMB
PARAMETER ( NCOMB=%(ncomb)d)
LOGICAL GOODHEL(NCOMB)
INTEGER NTRY
common/BLOCK_GOODHEL/NTRY,GOODHEL
read(stream_id,*) GOODHEL
NTRY = MAXTRIES + 1
return
end

subroutine init_good_hel()
implicit none
INTEGER NCOMB
PARAMETER ( NCOMB=%(ncomb)d)
LOGICAL GOODHEL(NCOMB)
INTEGER NTRY
INTEGER I

do i=1,NCOMB
GOODHEL(I) = .false.
enddo
NTRY = 0
end

integer function get_maxsproc()
implicit none
get_maxsproc = 1
return
end

""" % convert

    return output
4162 4163 #=========================================================================== 4164 # write_config_subproc_map_file 4165 #===========================================================================
def write_config_subproc_map_file(self, writer, s_and_t_channels):
    """Write a dummy config_subproc.inc file for MadEvent: every
    config is mapped onto subprocess number 1."""

    entries = ["DATA CONFSUB(1,%d)/1/" % (idx + 1)
               for idx in range(len(s_and_t_channels))]

    # Write the file
    writer.writelines(entries)

    return True
4179 4180 #=========================================================================== 4181 # write_configs_file 4182 #===========================================================================
def write_configs_file(self, writer, matrix_element):
    """Write the configs.inc file for MadEvent."""

    # Extract number of external particles
    nexternal, ninitial = matrix_element.get_nexternal_ninitial()
    model = matrix_element.get('processes')[0].get('model')

    # One config per diagram; configs are numbered from 1
    diagrams = matrix_element.get('diagrams')
    mapconfigs = list(range(1, len(diagrams) + 1))
    wrapped_diagrams = [[diag] for diag in diagrams]

    return mapconfigs, self.write_configs_file_from_diagrams(
        writer, wrapped_diagrams, mapconfigs,
        nexternal, ninitial, model)
4198 4199 #=========================================================================== 4200 # write_run_configs_file 4201 #===========================================================================
def write_run_config_file(self, writer):
    """Write the run_configs.inc file for MadEvent."""

    template = pjoin(_file_path, 'iolibs', 'template_files',
                     'madevent_run_config.inc')

    # Loop-induced processes are heavier: run a single channel per job
    chan_per_job = 1 if self.proc_characteristic['loop_induced'] else 5
    subst = {'chanperjob': chan_per_job}

    if not writer:
        return subst

    writer.write(open(template).read() % subst)
    return True
4218 4219 #=========================================================================== 4220 # write_configs_file_from_diagrams 4221 #===========================================================================
def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                     nexternal, ninitial, model):
    """Write the actual configs.inc file.

    configs is the diagrams corresponding to configs (each
    diagrams is a list of corresponding diagrams for all
    subprocesses, with None if there is no corresponding diagrams
    for a given process).
    mapconfigs gives the diagram number for each config.

    For s-channels, we need to output one PDG for each subprocess in
    the subprocess group, in order to be able to pick the right
    one for multiprocesses.

    Returns (s_and_t_channels, nqcd_list): per written config, the
    [s-channels, t-channels] decomposition and the QCD order.
    """

    lines = []

    s_and_t_channels = []

    nqcd_list = []

    # Smallest maximal vertex size over configs that report any vertex
    # leg numbers; configs containing larger vertices are skipped below
    vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                 for config in configs if [d for d in config if d][0].\
                 get_vertex_leg_numbers() != []]
    minvert = min(vert_list) if vert_list != [] else 0

    # Number of subprocesses
    nsubprocs = len(configs[0])

    nconfigs = 0

    # PDG code used for the dummy particle in multiparticle vertices
    new_pdg = model.get_first_non_pdg()

    for iconfig, helas_diags in enumerate(configs):
        if any([vert > minvert for vert in
                [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
            # Only 3-vertices allowed in configs.inc
            continue
        nconfigs += 1

        # Need s- and t-channels for all subprocesses, including
        # those that don't contribute to this config
        empty_verts = []
        stchannels = []
        for h in helas_diags:
            if h:
                # get_s_and_t_channels gives vertices starting from
                # final state external particles and working inwards
                stchannels.append(h.get('amplitudes')[0].\
                                  get_s_and_t_channels(ninitial, model,
                                                       new_pdg))
            else:
                # Placeholder sharing the SAME list object as
                # empty_verts; it is filled in-place below so every
                # placeholder gets the right length
                stchannels.append((empty_verts, None))

        # For t-channels, just need the first non-empty one
        tchannels = [t for s, t in stchannels if t != None][0]

        # For s_and_t_channels (to be used later) use only first config
        s_and_t_channels.append([[s for s, t in stchannels if t != None][0],
                                 tchannels])

        # Make sure empty_verts is same length as real vertices
        if any([s for s, t in stchannels]):
            # In-place slice assignment so the shared placeholders above
            # see the padding too
            empty_verts[:] = [None] * max([len(s) for s, t in stchannels])

            # Reorganize s-channel vertices to get a list of all
            # subprocesses for each vertex
            schannels = zip(*[s for s, t in stchannels])
        else:
            schannels = []

        allchannels = schannels
        if len(tchannels) > 1:
            # Write out tchannels only if there are any non-trivial ones
            allchannels = schannels + tchannels

        # Write out propagators for s-channel and t-channel vertices

        lines.append("# Diagram %d" % (mapconfigs[iconfig]))
        # Correspondance between the config and the diagram = amp2
        lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                 mapconfigs[iconfig]))
        # Number of QCD couplings in this diagram, taken from the first
        # subprocess that actually has a diagram for this config
        # (for/else: the else runs only if no break occurred)
        nqcd = 0
        for h in helas_diags:
            if h:
                try:
                    nqcd = h.calculate_orders()['QCD']
                except KeyError:
                    # diagram carries no QCD order; keep 0
                    pass
                break
            else:
                continue

        nqcd_list.append(nqcd)

        for verts in allchannels:
            if verts in schannels:
                # tuple over subprocesses: pick any non-None vertex
                vert = [v for v in verts if v][0]
            else:
                vert = verts
            daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
            last_leg = vert.get('legs')[-1]
            lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                         (last_leg.get('number'), nconfigs, len(daughters),
                          ",".join([str(d) for d in daughters])))
            if verts in schannels:
                # One propagator PDG per subprocess (0 when the
                # subprocess has no diagram for this config)
                pdgs = []
                for v in verts:
                    if v:
                        pdgs.append(v.get('legs')[-1].get('id'))
                    else:
                        pdgs.append(0)
                lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, nsubprocs,
                              ",".join([str(d) for d in pdgs])))
                lines.append("data tprid(%d,%d)/0/" % \
                             (last_leg.get('number'), nconfigs))
            elif verts in tchannels[:-1]:
                lines.append("data tprid(%d,%d)/%d/" % \
                             (last_leg.get('number'), nconfigs,
                              abs(last_leg.get('id'))))
                lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, nsubprocs,
                              ",".join(['0'] * nsubprocs)))

    # Write out number of configs
    lines.append("# Number of configs")
    lines.append("data mapconfig(0)/%d/" % nconfigs)

    # Write the file
    writer.writelines(lines)

    return s_and_t_channels, nqcd_list
4355 4356 #=========================================================================== 4357 # write_decayBW_file 4358 #===========================================================================
def write_decayBW_file(self, writer, s_and_t_channels):
    """Write the decayBW.inc file for MadEvent."""

    # Encode the onshell flag of a leg as the gForceBW code:
    # None -> "0", True -> "1", False -> "2"
    onshell_code = {None: "0", True: "1", False: "2"}

    entries = []
    for iconf, config in enumerate(s_and_t_channels):
        # config[0] holds the s-channel vertices of this configuration
        for vertex in config[0]:
            # The produced (last) leg of the vertex carries the flag
            produced = vertex.get('legs')[-1]
            entries.append("data gForceBW(%d,%d)/%s/"
                           % (produced.get('number'), iconf + 1,
                              onshell_code[produced.get('onshell')]))

    # Write the file
    writer.writelines(entries)

    return True
4380 4381 #=========================================================================== 4382 # write_dname_file 4383 #===========================================================================
def write_dname_file(self, writer, dir_name):
    """Write the dname.mg file for MG4: a single DIRNAME= assignment."""

    writer.write("DIRNAME=%s\n" % dir_name)

    return True
4393 4394 #=========================================================================== 4395 # write_driver 4396 #===========================================================================
def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
    """Write the SubProcess/driver.f file for MG4."""

    template = pjoin(_file_path, 'iolibs', 'template_files',
                     'madevent_driver.f')

    # The MSSM relies on the converted MG5 parameter card
    if self.model_name == 'mssm' or self.model_name.startswith('mssm-'):
        card = 'Source/MODEL/MG5_param.dat'
    else:
        card = 'param_card.dat'

    # Requiring each helicity configuration to be probed by 10 points for
    # matrix element before using the resulting grid for MC over helicity
    # sampling.
    # We multiply this by 2 because each grouped subprocess is called at most
    # twice for each IMIRROR.
    replace_dict = {'param_card_name': card,
                    'ncomb': ncomb,
                    'hel_init_points': n_grouped_proc * 10 * 2}
    replace_dict['secondparam'] = '' if v5 else ',.true.'

    if not writer:
        return replace_dict

    writer.write(open(template).read() % replace_dict)
    return True
4425 4426 #=========================================================================== 4427 # write_addmothers 4428 #===========================================================================
4429 - def write_addmothers(self, writer):
4430 """Write the SubProcess/addmothers.f""" 4431 4432 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 4433 4434 text = open(path).read() % {'iconfig': 'diag_number'} 4435 writer.write(text) 4436 4437 return True
4438 4439 4440 #=========================================================================== 4441 # write_combine_events 4442 #===========================================================================
4443 - def write_combine_events(self, writer, nb_proc=100):
4444 """Write the SubProcess/driver.f file for MG4""" 4445 4446 path = pjoin(_file_path,'iolibs','template_files','madevent_combine_events.f') 4447 4448 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4449 card = 'Source/MODEL/MG5_param.dat' 4450 else: 4451 card = 'param_card.dat' 4452 4453 #set maxpup (number of @X in the process card) 4454 4455 text = open(path).read() % {'param_card_name':card, 'maxpup':nb_proc+1} 4456 #the +1 is just a security. This is not needed but I feel(OM) safer with it. 4457 writer.write(text) 4458 4459 return True
4460 4461 4462 #=========================================================================== 4463 # write_symmetry 4464 #===========================================================================
4465 - def write_symmetry(self, writer, v5=True):
4466 """Write the SubProcess/driver.f file for ME""" 4467 4468 path = pjoin(_file_path,'iolibs','template_files','madevent_symmetry.f') 4469 4470 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4471 card = 'Source/MODEL/MG5_param.dat' 4472 else: 4473 card = 'param_card.dat' 4474 4475 if v5: 4476 replace_dict = {'param_card_name':card, 'setparasecondarg':''} 4477 else: 4478 replace_dict= {'param_card_name':card, 'setparasecondarg':',.true.'} 4479 4480 if writer: 4481 text = open(path).read() 4482 text = text % replace_dict 4483 writer.write(text) 4484 return True 4485 else: 4486 return replace_dict
4487 4488 4489 4490 #=========================================================================== 4491 # write_iproc_file 4492 #===========================================================================
4493 - def write_iproc_file(self, writer, me_number):
4494 """Write the iproc.dat file for MG4""" 4495 line = "%d" % (me_number + 1) 4496 4497 # Write the file 4498 for line_to_write in writer.write_line(line): 4499 writer.write(line_to_write) 4500 return True
4501 4502 #=========================================================================== 4503 # write_mg_sym_file 4504 #===========================================================================
4505 - def write_mg_sym_file(self, writer, matrix_element):
4506 """Write the mg.sym file for MadEvent.""" 4507 4508 lines = [] 4509 4510 # Extract process with all decays included 4511 final_legs = filter(lambda leg: leg.get('state') == True, 4512 matrix_element.get('processes')[0].get_legs_with_decays()) 4513 4514 ninitial = len(filter(lambda leg: leg.get('state') == False, 4515 matrix_element.get('processes')[0].get('legs'))) 4516 4517 identical_indices = {} 4518 4519 # Extract identical particle info 4520 for i, leg in enumerate(final_legs): 4521 if leg.get('id') in identical_indices: 4522 identical_indices[leg.get('id')].append(\ 4523 i + ninitial + 1) 4524 else: 4525 identical_indices[leg.get('id')] = [i + ninitial + 1] 4526 4527 # Remove keys which have only one particle 4528 for key in identical_indices.keys(): 4529 if len(identical_indices[key]) < 2: 4530 del identical_indices[key] 4531 4532 # Write mg.sym file 4533 lines.append(str(len(identical_indices.keys()))) 4534 for key in identical_indices.keys(): 4535 lines.append(str(len(identical_indices[key]))) 4536 for number in identical_indices[key]: 4537 lines.append(str(number)) 4538 4539 # Write the file 4540 writer.writelines(lines) 4541 4542 return True
4543 4544 #=========================================================================== 4545 # write_mg_sym_file 4546 #===========================================================================
4547 - def write_default_mg_sym_file(self, writer):
4548 """Write the mg.sym file for MadEvent.""" 4549 4550 lines = "0" 4551 4552 # Write the file 4553 writer.writelines(lines) 4554 4555 return True
4556 4557 #=========================================================================== 4558 # write_ncombs_file 4559 #===========================================================================
4560 - def write_ncombs_file(self, writer, nexternal):
4561 """Write the ncombs.inc file for MadEvent.""" 4562 4563 # ncomb (used for clustering) is 2^nexternal 4564 file = " integer n_max_cl\n" 4565 file = file + "parameter (n_max_cl=%d)" % (2 ** nexternal) 4566 4567 # Write the file 4568 writer.writelines(file) 4569 4570 return True
4571 4572 #=========================================================================== 4573 # write_processes_file 4574 #===========================================================================
4575 - def write_processes_file(self, writer, subproc_group):
4576 """Write the processes.dat file with info about the subprocesses 4577 in this group.""" 4578 4579 lines = [] 4580 4581 for ime, me in \ 4582 enumerate(subproc_group.get('matrix_elements')): 4583 lines.append("%s %s" % (str(ime+1) + " " * (7-len(str(ime+1))), 4584 ",".join(p.base_string() for p in \ 4585 me.get('processes')))) 4586 if me.get('has_mirror_process'): 4587 mirror_procs = [copy.copy(p) for p in me.get('processes')] 4588 for proc in mirror_procs: 4589 legs = copy.copy(proc.get('legs_with_decays')) 4590 legs.insert(0, legs.pop(1)) 4591 proc.set("legs_with_decays", legs) 4592 lines.append("mirror %s" % ",".join(p.base_string() for p in \ 4593 mirror_procs)) 4594 else: 4595 lines.append("mirror none") 4596 4597 # Write the file 4598 writer.write("\n".join(lines)) 4599 4600 return True
4601 4602 #=========================================================================== 4603 # write_symswap_file 4604 #===========================================================================
4605 - def write_symswap_file(self, writer, ident_perms):
4606 """Write the file symswap.inc for MG4 by comparing diagrams using 4607 the internal matrix element value functionality.""" 4608 4609 lines = [] 4610 4611 # Write out lines for symswap.inc file (used to permute the 4612 # external leg momenta 4613 for iperm, perm in enumerate(ident_perms): 4614 lines.append("data (isym(i,%d),i=1,nexternal)/%s/" % \ 4615 (iperm+1, ",".join([str(i+1) for i in perm]))) 4616 lines.append("data nsym/%d/" % len(ident_perms)) 4617 4618 # Write the file 4619 writer.writelines(lines) 4620 4621 return True
4622 4623 #=========================================================================== 4624 # write_symfact_file 4625 #===========================================================================
4626 - def write_symfact_file(self, writer, symmetry):
4627 """Write the files symfact.dat for MG4 by comparing diagrams using 4628 the internal matrix element value functionality.""" 4629 4630 pos = max(2, int(math.ceil(math.log10(len(symmetry))))) 4631 form = "%"+str(pos)+"r %"+str(pos+1)+"r" 4632 # Write out lines for symswap.inc file (used to permute the 4633 # external leg momenta 4634 lines = [ form %(i+1, s) for i,s in enumerate(symmetry) if s != 0] 4635 # Write the file 4636 writer.write('\n'.join(lines)) 4637 writer.write('\n') 4638 4639 return True
4640 4641 #=========================================================================== 4642 # write_symperms_file 4643 #===========================================================================
4644 - def write_symperms_file(self, writer, perms):
4645 """Write the symperms.inc file for subprocess group, used for 4646 symmetric configurations""" 4647 4648 lines = [] 4649 for iperm, perm in enumerate(perms): 4650 lines.append("data (perms(i,%d),i=1,nexternal)/%s/" % \ 4651 (iperm+1, ",".join([str(i+1) for i in perm]))) 4652 4653 # Write the file 4654 writer.writelines(lines) 4655 4656 return True
4657 4658 #=========================================================================== 4659 # write_subproc 4660 #===========================================================================
4661 - def write_subproc(self, writer, subprocdir):
4662 """Append this subprocess to the subproc.mg file for MG4""" 4663 4664 # Write line to file 4665 writer.write(subprocdir + "\n") 4666 4667 return True
4668
#===============================================================================
# ProcessExporterFortranMEGroup
#===============================================================================
class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
    """Class to take care of exporting a set of matrix elements to
    MadEvent subprocess group format."""

    # Template used to generate the matrix<i>.f files in grouped mode
    matrix_file = "matrix_madevent_group_v4.inc"
    # Marks this exporter as producing grouped MadEvent output
    grouped_mode = 'madevent'
    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                         fortran_model,
                                         group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files.

        Returns the total number of helas calls of all matrix elements
        in the group, or 0 if the subprocess directory could not be
        entered.
        """

        assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \
                                      "subproc_group object not SubProcessGroup"

        # Lazily bind the model from the first process of the group
        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        cwd = os.getcwd()
        path = pjoin(self.dir_path, 'SubProcesses')

        os.chdir(path)
        pathdir = os.getcwd()

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(subprocdir)
        except os.error as error:
            # The directory may already exist; warn and keep going
            logger.warning(error.strerror + " " + subprocdir)

        try:
            os.chdir(subprocdir)
        except os.error:
            logger.error('Could not cd to directory %s' % subprocdir)
            return 0

        logger.info('Creating files in directory %s' % subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0
        maxflows = 0
        tot_calls = 0

        matrix_elements = subproc_group.get('matrix_elements')

        # Add the driver.f, all grouped ME's must share the same number of
        # helicity configuration
        ncomb = matrix_elements[0].get_helicity_combinations()
        for me in matrix_elements[1:]:
            if ncomb!=me.get_helicity_combinations():
                raise MadGraph5Error, "All grouped processes must share the "+\
                                      "same number of helicity configurations."

        filename = 'driver.f'
        self.write_driver(writers.FortranWriter(filename),ncomb,
                          n_grouped_proc=len(matrix_elements), v5=self.opt['v5_model'])

        # One matrix<i>.f / auto_dsig<i>.f / diagram plot per matrix element
        for ime, matrix_element in \
                enumerate(matrix_elements):
            filename = 'matrix%d.f' % (ime+1)
            calls, ncolor = \
                self.write_matrix_element_v4(writers.FortranWriter(filename),
                                             matrix_element,
                                             fortran_model,
                                             proc_id=str(ime+1),
                                             config_map=subproc_group.get('diagram_maps')[ime],
                                             subproc_number=group_number)

            filename = 'auto_dsig%d.f' % (ime+1)
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime+1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams
            filename = "matrix%d.ps" % (ime+1)
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                    get('diagrams'),
                                              filename,
                                              model = \
                                                matrix_element.get('processes')[0].\
                                                                   get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        # Extract number of external particles
        # (taken from the last matrix_element of the loop above --
        # presumably identical across the group; TODO confirm)
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
        # If a subprocess has no diagrams for this config, the number is 0
        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = 'auto_dsig.f'
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = 'coloramps.inc'
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  subproc_diagrams_for_config,
                                  maxflows,
                                  matrix_elements)

        filename = 'get_color.f'
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_elements)

        filename = 'config_subproc_map.inc'
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           subproc_diagrams_for_config)

        filename = 'configs.inc'
        nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = 'config_nqcd.inc'
        self.write_config_nqcd_file(writers.FortranWriter(filename),
                                    nqcd_list)

        filename = 'decayBW.inc'
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        filename = 'dname.mg'
        self.write_dname_file(writers.FortranWriter(filename),
                              subprocdir)

        filename = 'iproc.dat'
        self.write_iproc_file(writers.FortranWriter(filename),
                              group_number)

        filename = 'leshouche.inc'
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = 'maxamps.inc'
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        # Note that mg.sym is not relevant for this case
        filename = 'mg.sym'
        self.write_default_mg_sym_file(writers.FortranWriter(filename))

        filename = 'mirrorprocs.inc'
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = 'ncombs.inc'
        self.write_ncombs_file(writers.FortranWriter(filename),
                               nexternal)

        filename = 'nexternal.inc'
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = 'ngraphs.inc'
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                nconfigs)

        filename = 'pmass.inc'
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = 'props.inc'
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = 'processes.dat'
        files.write_to_file(filename,
                            self.write_processes_file,
                            subproc_group)

        # Find config symmetries and permutations
        symmetry, perms, ident_perms = \
                  diagram_symmetry.find_symmetry(subproc_group)

        filename = 'symswap.inc'
        self.write_symswap_file(writers.FortranWriter(filename),
                                ident_perms)

        filename = 'symfact_orig.dat'
        self.write_symfact_file(open(filename, 'w'), symmetry)

        filename = 'symperms.inc'
        self.write_symperms_file(writers.FortranWriter(filename),
                                 perms)

        # Generate jpgs -> pass in make_html
        #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl'))

        self.link_files_in_SubProcess(pjoin(pathdir,subprocdir))

        #import nexternal/leshouch in Source
        ln('nexternal.inc', '../../Source', log=False)
        ln('leshouche.inc', '../../Source', log=False)
        ln('maxamps.inc', '../../Source', log=False)

        # Return to SubProcesses dir
        os.chdir(pathdir)

        # Add subprocess to subproc.mg
        filename = 'subproc.mg'
        files.append_to_file(filename,
                             self.write_subproc,
                             subprocdir)

        # Return to original dir
        os.chdir(cwd)

        if not tot_calls:
            tot_calls = 0
        return tot_calls
4906 4907 #=========================================================================== 4908 # write_super_auto_dsig_file 4909 #===========================================================================
    def write_super_auto_dsig_file(self, writer, subproc_group):
        """Write the auto_dsig.f file selecting between the subprocesses
        in subprocess group mode.

        When *writer* is None, the template substitution dictionary is
        returned instead of being written out.
        """

        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        matrix_elements = subproc_group.get('matrix_elements')

        # Extract process info lines
        process_lines = '\n'.join([self.get_process_info_lines(me) for me in \
                                   matrix_elements])
        replace_dict['process_lines'] = process_lines

        nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial()
        replace_dict['nexternal'] = nexternal

        # nsprocs is twice the number of matrix elements (presumably one
        # entry per IMIRROR state -- TODO confirm against the template)
        replace_dict['nsprocs'] = 2*len(matrix_elements)

        # Generate dsig definition line
        dsig_def_line = "DOUBLE PRECISION " + \
                        ",".join(["DSIG%d" % (iproc + 1) for iproc in \
                                  range(len(matrix_elements))])
        replace_dict["dsig_def_line"] = dsig_def_line

        # Generate dsig process lines: one IF per subprocess, dispatching
        # to the corresponding DSIG<i> function
        call_dsig_proc_lines = []
        for iproc in range(len(matrix_elements)):
            call_dsig_proc_lines.append(\
                "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \
                {"num": iproc + 1,
                 "proc": matrix_elements[iproc].get('processes')[0].base_string()})
        replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines)

        # Helpers to persist the list of contributing helicities
        ncomb=matrix_elements[0].get_helicity_combinations()
        replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb)

        if writer:
            file = open(pjoin(_file_path, \
                         'iolibs/template_files/super_auto_dsig_group_v4.inc')).read()
            file = file % replace_dict

            # Write the file
            writer.writelines(file)
        else:
            return replace_dict
4959 4960 #=========================================================================== 4961 # write_mirrorprocs 4962 #===========================================================================
4963 - def write_mirrorprocs(self, writer, subproc_group):
4964 """Write the mirrorprocs.inc file determining which processes have 4965 IS mirror process in subprocess group mode.""" 4966 4967 lines = [] 4968 bool_dict = {True: '.true.', False: '.false.'} 4969 matrix_elements = subproc_group.get('matrix_elements') 4970 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 4971 (len(matrix_elements), 4972 ",".join([bool_dict[me.get('has_mirror_process')] for \ 4973 me in matrix_elements]))) 4974 # Write the file 4975 writer.writelines(lines)
4976 4977 #=========================================================================== 4978 # write_addmothers 4979 #===========================================================================
4980 - def write_addmothers(self, writer):
4981 """Write the SubProcess/addmothers.f""" 4982 4983 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 4984 4985 text = open(path).read() % {'iconfig': 'lconfig'} 4986 writer.write(text) 4987 4988 return True
4989 4990 4991 #=========================================================================== 4992 # write_coloramps_file 4993 #===========================================================================
4994 - def write_coloramps_file(self, writer, diagrams_for_config, maxflows, 4995 matrix_elements):
4996 """Write the coloramps.inc file for MadEvent in Subprocess group mode""" 4997 4998 # Create a map from subprocess (matrix element) to a list of 4999 # the diagrams corresponding to each config 5000 5001 lines = [] 5002 5003 subproc_to_confdiag = {} 5004 for config in diagrams_for_config: 5005 for subproc, diag in enumerate(config): 5006 try: 5007 subproc_to_confdiag[subproc].append(diag) 5008 except KeyError: 5009 subproc_to_confdiag[subproc] = [diag] 5010 5011 for subproc in sorted(subproc_to_confdiag.keys()): 5012 lines.extend(self.get_icolamp_lines(subproc_to_confdiag[subproc], 5013 matrix_elements[subproc], 5014 subproc + 1)) 5015 5016 lines.insert(0, "logical icolamp(%d,%d,%d)" % \ 5017 (maxflows, 5018 len(diagrams_for_config), 5019 len(matrix_elements))) 5020 5021 # Write the file 5022 writer.writelines(lines) 5023 5024 return True
5025 5026 #=========================================================================== 5027 # write_config_subproc_map_file 5028 #===========================================================================
5029 - def write_config_subproc_map_file(self, writer, config_subproc_map):
5030 """Write the config_subproc_map.inc file for subprocess groups""" 5031 5032 lines = [] 5033 # Output only configs that have some corresponding diagrams 5034 iconfig = 0 5035 for config in config_subproc_map: 5036 if set(config) == set([0]): 5037 continue 5038 lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" % \ 5039 (iconfig + 1, len(config), 5040 ",".join([str(i) for i in config]))) 5041 iconfig += 1 5042 # Write the file 5043 writer.writelines(lines) 5044 5045 return True
5046 5047 #=========================================================================== 5048 # read_write_good_hel 5049 #===========================================================================
    def read_write_good_hel(self, ncomb):
        """Return the Fortran source (as a string) of the helpers that
        read, write and initialize the good_hel common block, for *ncomb*
        helicity combinations.

        NOTE(review): the layout of this snippet is presumably
        re-formatted downstream by FortranWriter when the enclosing
        template is written out -- confirm before relying on exact
        whitespace here.
        """

        convert = {'ncomb' : ncomb}

        output = """
        subroutine write_good_hel(stream_id)
        implicit none
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        write(stream_id,*) GOODHEL
        return
        end


        subroutine read_good_hel(stream_id)
        implicit none
        include 'genps.inc'
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        read(stream_id,*) GOODHEL
        NTRY(1) = MAXTRIES + 1
        NTRY(2) = MAXTRIES + 1
        return
        end

        subroutine init_good_hel()
        implicit none
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        INTEGER I

        do i=1,NCOMB
        GOODHEL(I,1) = .false.
        GOODHEL(I,2) = .false.
        enddo
        NTRY(1) = 0
        NTRY(2) = 0
        end

        integer function get_maxsproc()
        implicit none
        include 'maxamps.inc'

        get_maxsproc = maxsproc
        return
        end

        """ % convert

        return output
5111 5112 5113 5114 #=========================================================================== 5115 # write_configs_file 5116 #===========================================================================
5117 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
5118 """Write the configs.inc file with topology information for a 5119 subprocess group. Use the first subprocess with a diagram for each 5120 configuration.""" 5121 5122 matrix_elements = subproc_group.get('matrix_elements') 5123 model = matrix_elements[0].get('processes')[0].get('model') 5124 5125 diagrams = [] 5126 config_numbers = [] 5127 for iconfig, config in enumerate(diagrams_for_config): 5128 # Check if any diagrams correspond to this config 5129 if set(config) == set([0]): 5130 continue 5131 subproc_diags = [] 5132 for s,d in enumerate(config): 5133 if d: 5134 subproc_diags.append(matrix_elements[s].\ 5135 get('diagrams')[d-1]) 5136 else: 5137 subproc_diags.append(None) 5138 diagrams.append(subproc_diags) 5139 config_numbers.append(iconfig + 1) 5140 5141 # Extract number of external particles 5142 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 5143 5144 return len(diagrams), \ 5145 self.write_configs_file_from_diagrams(writer, diagrams, 5146 config_numbers, 5147 nexternal, ninitial, 5148 model)
5149 5150 #=========================================================================== 5151 # write_run_configs_file 5152 #===========================================================================
5153 - def write_run_config_file(self, writer):
5154 """Write the run_configs.inc file for MadEvent""" 5155 5156 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 5157 if self.proc_characteristic['loop_induced']: 5158 job_per_chan = 1 5159 else: 5160 job_per_chan = 2 5161 text = open(path).read() % {'chanperjob':job_per_chan} 5162 writer.write(text) 5163 return True
5164 5165 5166 #=========================================================================== 5167 # write_leshouche_file 5168 #===========================================================================
5169 - def write_leshouche_file(self, writer, subproc_group):
5170 """Write the leshouche.inc file for MG4""" 5171 5172 all_lines = [] 5173 5174 for iproc, matrix_element in \ 5175 enumerate(subproc_group.get('matrix_elements')): 5176 all_lines.extend(self.get_leshouche_lines(matrix_element, 5177 iproc)) 5178 # Write the file 5179 writer.writelines(all_lines) 5180 return True
5181 5182
    def finalize(self,*args, **opts):
        """Finalize the export: run the parent finalization, then record
        in the process characteristics that this output uses grouped
        matrix elements."""

        super(ProcessExporterFortranMEGroup, self).finalize(*args, **opts)
        # Ensure that the grouping information is set to the correct value
        self.proc_characteristic['grouped_matrix'] = True


#===============================================================================
# UFO_model_to_mg4
#===============================================================================

def python_to_fortran(x):
    """Convert a UFO/python expression string into its Fortran form.

    Defined as a plain function (rather than the previous lambda
    assignment, discouraged by PEP 8) so it carries a proper name and
    docstring; the call interface is unchanged.
    """
    return parsers.UFOExpressionParserFortran().parse(x)

5195 5196 -class UFO_model_to_mg4(object):
5197 """ A converter of the UFO-MG5 Model to the MG4 format """ 5198 5199 # The list below shows the only variables the user is allowed to change by 5200 # himself for each PS point. If he changes any other, then calling 5201 # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not 5202 # correctly account for the change. 5203 PS_dependent_key = ['aS','MU_R'] 5204 mp_complex_format = 'complex*32' 5205 mp_real_format = 'real*16' 5206 # Warning, it is crucial none of the couplings/parameters of the model 5207 # starts with this prefix. I should add a check for this. 5208 # You can change it as the global variable to check_param_card.ParamCard 5209 mp_prefix = check_param_card.ParamCard.mp_prefix 5210
5211 - def __init__(self, model, output_path, opt=None):
5212 """ initialization of the objects """ 5213 5214 self.model = model 5215 self.model_name = model['name'] 5216 self.dir_path = output_path 5217 5218 self.opt = {'complex_mass': False, 'export_format': 'madevent', 'mp':True, 5219 'loop_induced': False} 5220 if opt: 5221 self.opt.update(opt) 5222 5223 self.coups_dep = [] # (name, expression, type) 5224 self.coups_indep = [] # (name, expression, type) 5225 self.params_dep = [] # (name, expression, type) 5226 self.params_indep = [] # (name, expression, type) 5227 self.params_ext = [] # external parameter 5228 self.p_to_f = parsers.UFOExpressionParserFortran() 5229 self.mp_p_to_f = parsers.UFOExpressionParserMPFortran()
5230
5232 """modify the parameter if some of them are identical up to the case""" 5233 5234 lower_dict={} 5235 duplicate = set() 5236 keys = self.model['parameters'].keys() 5237 for key in keys: 5238 for param in self.model['parameters'][key]: 5239 lower_name = param.name.lower() 5240 if not lower_name: 5241 continue 5242 try: 5243 lower_dict[lower_name].append(param) 5244 except KeyError,error: 5245 lower_dict[lower_name] = [param] 5246 else: 5247 duplicate.add(lower_name) 5248 logger.debug('%s is define both as lower case and upper case.' 5249 % lower_name) 5250 if not duplicate: 5251 return 5252 5253 re_expr = r'''\b(%s)\b''' 5254 to_change = [] 5255 change={} 5256 for value in duplicate: 5257 for i, var in enumerate(lower_dict[value]): 5258 to_change.append(var.name) 5259 new_name = '%s%s' % (var.name.lower(), 5260 ('__%d'%(i+1) if i>0 else '')) 5261 change[var.name] = new_name 5262 var.name = new_name 5263 5264 # Apply the modification to the map_CTcoup_CTparam of the model 5265 # if it has one (giving for each coupling the CT parameters whcih 5266 # are necessary and which should be exported to the model. 
5267 if hasattr(self.model,'map_CTcoup_CTparam'): 5268 for coup, ctparams in self.model.map_CTcoup_CTparam: 5269 for i, ctparam in enumerate(ctparams): 5270 try: 5271 self.model.map_CTcoup_CTparam[coup][i] = change[ctparam] 5272 except KeyError: 5273 pass 5274 5275 replace = lambda match_pattern: change[match_pattern.groups()[0]] 5276 rep_pattern = re.compile(re_expr % '|'.join(to_change)) 5277 5278 # change parameters 5279 for key in keys: 5280 if key == ('external',): 5281 continue 5282 for param in self.model['parameters'][key]: 5283 param.expr = rep_pattern.sub(replace, param.expr) 5284 5285 # change couplings 5286 for key in self.model['couplings'].keys(): 5287 for coup in self.model['couplings'][key]: 5288 coup.expr = rep_pattern.sub(replace, coup.expr) 5289 5290 # change mass/width 5291 for part in self.model['particles']: 5292 if str(part.get('mass')) in to_change: 5293 part.set('mass', rep_pattern.sub(replace, str(part.get('mass')))) 5294 if str(part.get('width')) in to_change: 5295 part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
5296
5297 - def refactorize(self, wanted_couplings = []):
5298 """modify the couplings to fit with MG4 convention """ 5299 5300 # Keep only separation in alphaS 5301 keys = self.model['parameters'].keys() 5302 keys.sort(key=len) 5303 for key in keys: 5304 to_add = [o for o in self.model['parameters'][key] if o.name] 5305 5306 if key == ('external',): 5307 self.params_ext += to_add 5308 elif any([(k in key) for k in self.PS_dependent_key]): 5309 self.params_dep += to_add 5310 else: 5311 self.params_indep += to_add 5312 # same for couplings 5313 keys = self.model['couplings'].keys() 5314 keys.sort(key=len) 5315 for key, coup_list in self.model['couplings'].items(): 5316 if any([(k in key) for k in self.PS_dependent_key]): 5317 self.coups_dep += [c for c in coup_list if 5318 (not wanted_couplings or c.name in \ 5319 wanted_couplings)] 5320 else: 5321 self.coups_indep += [c for c in coup_list if 5322 (not wanted_couplings or c.name in \ 5323 wanted_couplings)] 5324 5325 # MG4 use G and not aS as it basic object for alphas related computation 5326 #Pass G in the independant list 5327 if 'G' in self.params_dep: 5328 index = self.params_dep.index('G') 5329 G = self.params_dep.pop(index) 5330 # G.expr = '2*cmath.sqrt(as*pi)' 5331 # self.params_indep.insert(0, self.params_dep.pop(index)) 5332 # No need to add it if not defined 5333 5334 if 'aS' not in self.params_ext: 5335 logger.critical('aS not define as external parameter adding it!') 5336 #self.model['parameters']['aS'] = base_objects.ParamCardVariable('aS', 0.138,'DUMMY',(1,)) 5337 self.params_indep.append( base_objects. ModelVariable('aS', '0.138','real')) 5338 self.params_indep.append( base_objects. ModelVariable('G', '4.1643','real'))
5339 - def build(self, wanted_couplings = [], full=True):
5340 """modify the couplings to fit with MG4 convention and creates all the 5341 different files""" 5342 5343 self.pass_parameter_to_case_insensitive() 5344 self.refactorize(wanted_couplings) 5345 5346 # write the files 5347 if full: 5348 if wanted_couplings: 5349 # extract the wanted ct parameters 5350 self.extract_needed_CTparam(wanted_couplings=wanted_couplings) 5351 self.write_all()
5352 5353
    def open(self, name, comment='c', format='default'):
        """Open the file `name` inside self.dir_path and write a standard
        banner header into it.

        :param name:    file name relative to the output directory
        :param comment: comment character used to draw the banner ('c' for
                        fortran sources, '#' for makefiles, ...)
        :param format:  'fortran' returns a writers.FortranWriter (which
                        enforces fortran line conventions); anything else a
                        plain file object
        :return: the opened file/writer object
        """

        file_path = pjoin(self.dir_path, name)

        if format == 'fortran':
            fsock = writers.FortranWriter(file_path, 'w')
        else:
            fsock = open(file_path, 'w')

        # Calling the *unbound* builtin file.writelines on purpose (python2
        # idiom): this bypasses FortranWriter's own writelines so the banner
        # is written verbatim instead of being re-formatted as fortran code.
        # Assumes FortranWriter derives from the builtin file type -- TODO
        # confirm in iolibs.file_writers.
        file.writelines(fsock, comment * 77 + '\n')
        file.writelines(fsock,'%(comment)s written by the UFO converter\n' % \
                        {'comment': comment + (6 - len(comment)) *  ' '})
        file.writelines(fsock, comment * 77 + '\n\n')
        return fsock
5370 5371
5372 - def write_all(self):
5373 """ write all the files """ 5374 #write the part related to the external parameter 5375 self.create_ident_card() 5376 self.create_param_read() 5377 5378 #write the definition of the parameter 5379 self.create_input() 5380 self.create_intparam_def(dp=True,mp=False) 5381 if self.opt['mp']: 5382 self.create_intparam_def(dp=False,mp=True) 5383 5384 # definition of the coupling. 5385 self.create_actualize_mp_ext_param_inc() 5386 self.create_coupl_inc() 5387 self.create_write_couplings() 5388 self.create_couplings() 5389 5390 # the makefile 5391 self.create_makeinc() 5392 self.create_param_write() 5393 5394 # The model functions 5395 self.create_model_functions_inc() 5396 self.create_model_functions_def() 5397 5398 # The param_card.dat 5399 self.create_param_card() 5400 5401 5402 # All the standard files 5403 self.copy_standard_file()
5404 5405 ############################################################################ 5406 ## ROUTINE CREATING THE FILES ############################################ 5407 ############################################################################ 5408
    def copy_standard_file(self):
        """Copy the standard files for the fortran model.

        Links/copies the static helper sources, specialises rw_para.f (which
        includes and optionally loads the param_card) and installs the
        appropriate makefile for the chosen export format.
        """

        #copy the library files
        file_to_link = ['formats.inc','printout.f', \
                        'rw_para.f', 'testprog.f']

        for filename in file_to_link:
            cp( MG5DIR + '/models/template_files/fortran/' + filename, \
                self.dir_path)

        # rw_para.f is a template: read it now, substitute below.
        # (local name `file` shadows the builtin -- kept as-is)
        file = open(os.path.join(MG5DIR,\
                              'models/template_files/fortran/rw_para.f')).read()

        # include statements injected into the rw_para.f template
        includes=["include \'coupl.inc\'","include \'input.inc\'",
                  "include \'model_functions.inc\'"]
        if self.opt['mp']:
            includes.extend(["include \'mp_coupl.inc\'","include \'mp_input.inc\'"])
        # In standalone and madloop we do no use the compiled param card but
        # still parse the .dat one so we must load it.
        if self.opt['loop_induced']:
            #loop induced follow MadEvent way to handle the card.
            load_card = ''
            lha_read_filename='lha_read.f'
        elif self.opt['export_format'] in ['madloop','madloop_optimized', 'madloop_matchbox']:
            load_card = 'call LHA_loadcard(param_name,npara,param,value)'
            lha_read_filename='lha_read_mp.f'
        elif self.opt['export_format'].startswith('standalone') \
            or self.opt['export_format'] in ['madweight', 'plugin']\
            or self.opt['export_format'].startswith('matchbox'):
            load_card = 'call LHA_loadcard(param_name,npara,param,value)'
            lha_read_filename='lha_read.f'
        else:
            load_card = ''
            lha_read_filename='lha_read.f'
        # whichever lha reader was picked is always installed as lha_read.f
        cp( MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \
            os.path.join(self.dir_path,'lha_read.f'))

        file=file%{'includes':'\n '.join(includes),
                   'load_card':load_card}
        writer=open(os.path.join(self.dir_path,'rw_para.f'),'w')
        writer.writelines(file)
        writer.close()

        # makefile selection per export format
        if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \
            or self.opt['loop_induced']:
            cp( MG5DIR + '/models/template_files/fortran/makefile_madevent',
                self.dir_path + '/makefile')
            if self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']:
                # aMC@NLO reuses the madevent makefile with renamed targets
                path = pjoin(self.dir_path, 'makefile')
                text = open(path).read()
                text = text.replace('madevent','aMCatNLO')
                open(path, 'w').writelines(text)
        elif self.opt['export_format'] in ['standalone', 'standalone_msP','standalone_msF',
                                  'madloop','madloop_optimized', 'standalone_rw',
                                  'madweight','matchbox','madloop_matchbox', 'plugin']:
            cp( MG5DIR + '/models/template_files/fortran/makefile_standalone',
                self.dir_path + '/makefile')
        #elif self.opt['export_format'] in []:
        #pass
        else:
            raise MadGraph5Error('Unknown format')
5471
    def create_coupl_inc(self):
        """Write coupl.inc: declarations + common blocks for G, gal, MU_R,
        Nf, all non-zero masses/widths, every exported coupling and (in the
        complex mass scheme) the CMASS_* variables.

        When self.opt['mp'] is set, two quadruple-precision variants are
        written as well: mp_coupl.inc (names carrying self.mp_prefix) and
        mp_coupl_same_name.inc (same names as the dp version).
        """

        fsock = self.open('coupl.inc', format='fortran')
        if self.opt['mp']:
            mp_fsock = self.open('mp_coupl.inc', format='fortran')
            mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\
                                            format='fortran')

        # Write header: strong coupling G, electroweak gal pair, the
        # renormalisation scale and the number of light flavours.
        header = """double precision G
                common/strong/ G

                double complex gal(2)
                common/weak/ gal

                double precision MU_R
                common/rscale/ MU_R

                double precision Nf
                parameter(Nf=%d)
                """ % self.model.get_nflav()

        fsock.writelines(header)

        if self.opt['mp']:
            header = """%(real_mp_format)s %(mp_prefix)sG
                common/MP_strong/ %(mp_prefix)sG

                %(complex_mp_format)s %(mp_prefix)sgal(2)
                common/MP_weak/ %(mp_prefix)sgal

                %(complex_mp_format)s %(mp_prefix)sMU_R
                common/MP_rscale/ %(mp_prefix)sMU_R

                """

            mp_fsock.writelines(header%{'real_mp_format':self.mp_real_format,
                                        'complex_mp_format':self.mp_complex_format,
                                        'mp_prefix':self.mp_prefix})
            mp_fsock_same_name.writelines(header%{'real_mp_format':self.mp_real_format,
                                        'complex_mp_format':self.mp_complex_format,
                                        'mp_prefix':''})

        # Write the Mass definition/ common block.
        # NOTE(review): these are sets, so the declaration order inside the
        # common blocks is not deterministic between runs -- presumably fine
        # since every file sharing the block is generated in the same run;
        # confirm nothing diffs the generated sources across runs.
        masses = set()
        widths = set()
        if self.opt['complex_mass']:
            complex_mass = set()

        for particle in self.model.get('particles'):
            #find masses
            one_mass = particle.get('mass')
            if one_mass.lower() != 'zero':
                masses.add(one_mass)

            # find width
            one_width = particle.get('width')
            if one_width.lower() != 'zero':
                widths.add(one_width)
                # a complex mass is only defined for massive unstable particles
                if self.opt['complex_mass'] and one_mass.lower() != 'zero':
                    complex_mass.add('CMASS_%s' % one_mass)

        if masses:
            fsock.writelines('double precision '+','.join(masses)+'\n')
            fsock.writelines('common/masses/ '+','.join(masses)+'\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                              ','.join(masses)+'\n')
                mp_fsock_same_name.writelines('common/MP_masses/ '+\
                                              ','.join(masses)+'\n\n')
                mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                    self.mp_prefix+m for m in masses])+'\n')
                mp_fsock.writelines('common/MP_masses/ '+\
                                    ','.join([self.mp_prefix+m for m in masses])+'\n\n')

        if widths:
            fsock.writelines('double precision '+','.join(widths)+'\n')
            fsock.writelines('common/widths/ '+','.join(widths)+'\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                              ','.join(widths)+'\n')
                mp_fsock_same_name.writelines('common/MP_widths/ '+\
                                              ','.join(widths)+'\n\n')
                mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                    self.mp_prefix+w for w in widths])+'\n')
                mp_fsock.writelines('common/MP_widths/ '+\
                                    ','.join([self.mp_prefix+w for w in widths])+'\n\n')

        # Write the Couplings (PS-dependent first, matching the ordering used
        # by the other generated files)
        coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep]
        fsock.writelines('double complex '+', '.join(coupling_list)+'\n')
        fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n')
        if self.opt['mp']:
            mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                          ','.join(coupling_list)+'\n')
            mp_fsock_same_name.writelines('common/MP_couplings/ '+\
                                          ','.join(coupling_list)+'\n\n')
            mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                self.mp_prefix+c for c in coupling_list])+'\n')
            mp_fsock.writelines('common/MP_couplings/ '+\
                                ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n')

        # Write complex mass for complex mass scheme (if activated)
        if self.opt['complex_mass'] and complex_mass:
            fsock.writelines('double complex '+', '.join(complex_mass)+'\n')
            fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                              ','.join(complex_mass)+'\n')
                mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\
                                              ','.join(complex_mass)+'\n\n')
                mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                    self.mp_prefix+cm for cm in complex_mass])+'\n')
                mp_fsock.writelines('common/MP_complex_mass/ '+\
                                    ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
5591
5592 - def create_write_couplings(self):
5593 """ write the file coupl_write.inc """ 5594 5595 fsock = self.open('coupl_write.inc', format='fortran') 5596 5597 fsock.writelines("""write(*,*) ' Couplings of %s' 5598 write(*,*) ' ---------------------------------' 5599 write(*,*) ' '""" % self.model_name) 5600 def format(coupl): 5601 return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}
5602 5603 # Write the Couplings 5604 lines = [format(coupl) for coupl in self.coups_dep + self.coups_indep] 5605 fsock.writelines('\n'.join(lines)) 5606 5607
    def create_input(self):
        """create input.inc containing the definition of the parameters

        Declares the real and complex model parameters (and their common
        blocks params_R / params_C), skipping names already declared in
        coupl.inc (masses, widths, G, MU_R, ZERO) and CT parameters that
        are not needed for this computation.  mp_input.inc mirrors the
        declarations in quadruple precision when self.opt['mp'] is set.
        """

        fsock = self.open('input.inc', format='fortran')
        if self.opt['mp']:
            mp_fsock = self.open('mp_input.inc', format='fortran')

        #find mass/ width since they are already define
        already_def = set()
        for particle in self.model.get('particles'):
            already_def.add(particle.get('mass').lower())
            already_def.add(particle.get('width').lower())
            if self.opt['complex_mass']:
                already_def.add('cmass_%s' % particle.get('mass').lower())

        # a name is exportable when not handled elsewhere (case insensitive)
        is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \
                                name.lower() not in already_def

        real_parameters = [param.name for param in self.params_dep +
                           self.params_indep if param.type == 'real'
                           and is_valid(param.name)]

        real_parameters += [param.name for param in self.params_ext
                            if param.type == 'real' and
                            is_valid(param.name)]

        # check the parameter is a CT parameter or not
        # if yes, just use the needed ones
        real_parameters = [param for param in real_parameters \
                           if self.check_needed_param(param)]

        # NOTE(review): unlike the complex block below, this write is not
        # guarded by `if real_parameters:` -- an empty list would emit a bare
        # 'double precision' line; presumably models always have at least one
        # real parameter, confirm.
        fsock.writelines('double precision '+','.join(real_parameters)+'\n')
        fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n')
        if self.opt['mp']:
            mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                self.mp_prefix+p for p in real_parameters])+'\n')
            mp_fsock.writelines('common/MP_params_R/ '+','.join([\
                                self.mp_prefix+p for p in real_parameters])+'\n\n')

        complex_parameters = [param.name for param in self.params_dep +
                              self.params_indep if param.type == 'complex' and
                              is_valid(param.name)]

        # check the parameter is a CT parameter or not
        # if yes, just use the needed ones
        complex_parameters = [param for param in complex_parameters \
                              if self.check_needed_param(param)]

        if complex_parameters:
            fsock.writelines('double complex '+','.join(complex_parameters)+'\n')
            fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n')
            if self.opt['mp']:
                mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                    self.mp_prefix+p for p in complex_parameters])+'\n')
                mp_fsock.writelines('common/MP_params_C/ '+','.join([\
                                    self.mp_prefix+p for p in complex_parameters])+'\n\n')
5665 - def check_needed_param(self, param):
5666 """ Returns whether the parameter in argument is needed for this 5667 specific computation or not.""" 5668 5669 # If this is a leading order model or if there was no CT parameter 5670 # employed in this NLO model, one can directly return that the 5671 # parameter is needed since only CTParameters are filtered. 5672 if not hasattr(self, 'allCTparameters') or \ 5673 self.allCTparameters is None or self.usedCTparameters is None or \ 5674 len(self.allCTparameters)==0: 5675 return True 5676 5677 # We must allow the conjugate shorthand for the complex parameter as 5678 # well so we check wether either the parameter name or its name with 5679 # 'conjg__' substituted with '' is present in the list. 5680 # This is acceptable even if some parameter had an original name 5681 # including 'conjg__' in it, because at worst we export a parameter 5682 # was not needed. 5683 param = param.lower() 5684 cjg_param = param.replace('conjg__','',1) 5685 5686 # First make sure it is a CTparameter 5687 if param not in self.allCTparameters and \ 5688 cjg_param not in self.allCTparameters: 5689 return True 5690 5691 # Now check if it is in the list of CTparameters actually used 5692 return (param in self.usedCTparameters or \ 5693 cjg_param in self.usedCTparameters)
5694
5695 - def extract_needed_CTparam(self,wanted_couplings=[]):
5696 """ Extract what are the needed CT parameters given the wanted_couplings""" 5697 5698 if not hasattr(self.model,'map_CTcoup_CTparam') or not wanted_couplings: 5699 # Setting these lists to none wil disable the filtering in 5700 # check_needed_param 5701 self.allCTparameters = None 5702 self.usedCTparameters = None 5703 return 5704 5705 # All CTparameters appearin in all CT couplings 5706 allCTparameters=self.model.map_CTcoup_CTparam.values() 5707 # Define in this class the list of all CT parameters 5708 self.allCTparameters=list(\ 5709 set(itertools.chain.from_iterable(allCTparameters))) 5710 5711 # All used CT couplings 5712 w_coupls = [coupl.lower() for coupl in wanted_couplings] 5713 allUsedCTCouplings = [coupl for coupl in 5714 self.model.map_CTcoup_CTparam.keys() if coupl.lower() in w_coupls] 5715 5716 # Now define the list of all CT parameters that are actually used 5717 self.usedCTparameters=list(\ 5718 set(itertools.chain.from_iterable([ 5719 self.model.map_CTcoup_CTparam[coupl] for coupl in allUsedCTCouplings 5720 ]))) 5721 5722 # Now at last, make these list case insensitive 5723 self.allCTparameters = [ct.lower() for ct in self.allCTparameters] 5724 self.usedCTparameters = [ct.lower() for ct in self.usedCTparameters]
5725
    def create_intparam_def(self, dp=True, mp=False):
        """ create intparam_definition.inc setting the internal parameters.
        Output the double precision and/or the multiple precision parameters
        depending on the parameters dp and mp. If mp only, then the file names
        get the 'mp_' prefix.
        """

        fsock = self.open('%sintparam_definition.inc'%
                          ('mp_' if mp and not dp else ''), format='fortran')

        # Block recomputed only when a new param_card is read (readlha guard)
        fsock.write_comments(\
                "Parameters that should not be recomputed event by event.\n")
        fsock.writelines("if(readlha) then\n")
        if dp:
            fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n")
        if mp:
            fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n")

        for param in self.params_indep:
            if param.name == 'ZERO':
                continue
            # check whether the parameter is a CT parameter
            # if yes,just used the needed ones
            if not self.check_needed_param(param.name):
                continue
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                                self.p_to_f.parse(param.expr)))
            if mp:
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                                  self.mp_p_to_f.parse(param.expr)))

        fsock.writelines('endif')

        # Block recomputed for every phase-space point (alphaS running)
        fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n')
        if dp:
            fsock.writelines("aS = G**2/4/pi\n")
        if mp:
            fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n")
        for param in self.params_dep:
            # check whether the parameter is a CT parameter
            # if yes,just used the needed ones
            if not self.check_needed_param(param.name):
                continue
            # NOTE(review): `elif mp` here (vs `if mp` in the indep loop) means
            # the mp lines are skipped when dp and mp are both True.  The two
            # call sites only use (dp=True,mp=False) and (dp=False,mp=True),
            # so this is presumably harmless -- confirm before calling with
            # both flags set.
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                                self.p_to_f.parse(param.expr)))
            elif mp:
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                                  self.mp_p_to_f.parse(param.expr)))

        # Electroweak coupling used when writing aqed in the LHE file: the
        # available scheme depends on which external parameter exists.
        fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n")
        if ('aEWM1',) in self.model['parameters']:
            if dp:
                fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1)
                gal(2) = 1d0
                """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1)
                %(mp_prefix)sgal(2) = 1d0
                """ %{'mp_prefix':self.mp_prefix})
                pass
        # in Gmu scheme, aEWM1 is not external but Gf is an exteranl variable
        elif ('Gf',) in self.model['parameters']:
            if dp:
                fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*DSQRT(1D0-MDL_MW**2/MDL_MZ**2)*DSQRT(MDL_Gf)
                gal(2) = 1d0
                """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*SQRT(1e0_16-MP__MDL_MW**2/MP__MDL_MZ**2)*SQRT(SQRT(2e0_16)*MP__MDL_Gf)
                %(mp_prefix)sgal(2) = 1d0
                """ %{'mp_prefix':self.mp_prefix})
                pass
        else:
            # neither scheme available: fall back to 1 and warn (dp pass only)
            if dp:
                logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE')
                fsock.writelines(""" gal(1) = 1d0
                gal(2) = 1d0
                """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16
                %(mp_prefix)sgal(2) = 1e0_16
                """%{'mp_prefix':self.mp_prefix})
5809 5810
5811 - def create_couplings(self):
5812 """ create couplings.f and all couplingsX.f """ 5813 5814 nb_def_by_file = 25 5815 5816 self.create_couplings_main(nb_def_by_file) 5817 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 5818 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 5819 5820 for i in range(nb_coup_indep): 5821 # For the independent couplings, we compute the double and multiple 5822 # precision ones together 5823 data = self.coups_indep[nb_def_by_file * i: 5824 min(len(self.coups_indep), nb_def_by_file * (i+1))] 5825 self.create_couplings_part(i + 1, data, dp=True, mp=self.opt['mp']) 5826 5827 for i in range(nb_coup_dep): 5828 # For the dependent couplings, we compute the double and multiple 5829 # precision ones in separate subroutines. 5830 data = self.coups_dep[nb_def_by_file * i: 5831 min(len(self.coups_dep), nb_def_by_file * (i+1))] 5832 self.create_couplings_part( i + 1 + nb_coup_indep , data, 5833 dp=True,mp=False) 5834 if self.opt['mp']: 5835 self.create_couplings_part( i + 1 + nb_coup_indep , data, 5836 dp=False,mp=True)
5837 5838
    def create_couplings_main(self, nb_def_by_file=25):
        """Write couplings.f, the driver subroutines that (re)compute the
        couplings by delegating to the numbered coupN/mp_coupN routines.

        Generated entry points:
          coup()               -- full initialisation (READLHA = .true.)
          update_as_param()    -- refresh only the alphaS-dependent couplings
          update_as_param2()   -- same, after setting MU_R/G/AS from arguments
          mp_update_as_param() -- quadruple-precision refresh (mp mode only)
        """

        fsock = self.open('couplings.f', format='fortran')

        fsock.writelines("""subroutine coup()

            implicit none
            double precision PI, ZERO
            logical READLHA
            parameter (PI=3.141592653589793d0)
            parameter (ZERO=0d0)
            include \'model_functions.inc\'""")
        if self.opt['mp']:
            fsock.writelines("""%s MP__PI, MP__ZERO
                parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                parameter (MP__ZERO=0e0_16)
                include \'mp_input.inc\'
                include \'mp_coupl.inc\'
                """%self.mp_real_format)
        fsock.writelines("""include \'input.inc\'
            include \'coupl.inc\'
            READLHA = .true.
            include \'intparam_definition.inc\'""")
        if self.opt['mp']:
            fsock.writelines("""include \'mp_intparam_definition.inc\'\n""")

        # file numbering must match create_couplings()/create_makeinc()
        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        fsock.writelines('\n'.join(\
                    ['call coup%s()' % (i + 1) for i in range(nb_coup_indep)]))

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        fsock.writelines('\n'.join(\
                    ['call coup%s()' % (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        if self.opt['mp']:
            fsock.writelines('\n'.join(\
                    ['call mp_coup%s()' % (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        # update_as_param: recompute only the PS-dependent part (READLHA off)
        fsock.writelines("""subroutine update_as_param()

            implicit none
            double precision PI, ZERO
            logical READLHA
            parameter (PI=3.141592653589793d0)
            parameter (ZERO=0d0)
            include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
            include \'coupl.inc\'
            READLHA = .false.""")
        fsock.writelines("""
            include \'intparam_definition.inc\'\n
            """)

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        fsock.writelines('\n'.join(\
                    ['call coup%s()' % (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        # update_as_param2: set the scale and alphaS from the arguments, then
        # delegate to update_as_param (mu_r2 <= 0 keeps the current MU_R)
        fsock.writelines("""subroutine update_as_param2(mu_r2,as2)

            implicit none
            double precision PI
            parameter (PI=3.141592653589793d0)
            double precision mu_r2, as2
            include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
            include \'coupl.inc\'""")
        fsock.writelines("""
            if (mu_r2.gt.0d0) MU_R = mu_r2
            G = SQRT(4.0d0*PI*AS2)
            AS = as2

            CALL UPDATE_AS_PARAM()
            """)
        fsock.writelines('''\n return \n end\n''')

        if self.opt['mp']:
            # quadruple-precision refresh: pull the external parameters from
            # their dp values first (actualize_mp_ext_params.inc)
            fsock.writelines("""subroutine mp_update_as_param()

                implicit none
                logical READLHA
                include \'model_functions.inc\'""")
            fsock.writelines("""%s MP__PI, MP__ZERO
                parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                parameter (MP__ZERO=0e0_16)
                include \'mp_input.inc\'
                include \'mp_coupl.inc\'
                """%self.mp_real_format)
            fsock.writelines("""include \'input.inc\'
                include \'coupl.inc\'
                include \'actualize_mp_ext_params.inc\'
                READLHA = .false.
                include \'mp_intparam_definition.inc\'\n
                """)

            nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
            nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

            fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

            fsock.writelines('\n'.join(\
                    ['call mp_coup%s()' % (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
            fsock.writelines('''\n return \n end\n''')
5954
    def create_couplings_part(self, nb_file, data, dp=True, mp=False):
        """ create couplings[nb_file].f containing information coming from data.
        Outputs the computation of the double precision and/or the multiple
        precision couplings depending on the parameters dp and mp.
        If mp is True and dp is False, then the prefix 'MP_' is appended to the
        filename and subroutine name.

        :param nb_file: integer suffix of the generated file/subroutine
        :param data:    list of coupling objects (each with .name and .expr)
        """

        fsock = self.open('%scouplings%s.f' %('mp_' if mp and not dp else '',
                                              nb_file), format='fortran')
        fsock.writelines("""subroutine %scoup%s()

          implicit none
          include \'model_functions.inc\'"""%('mp_' if mp and not dp else '',nb_file))
        if dp:
            fsock.writelines("""
              double precision PI, ZERO
              parameter (PI=3.141592653589793d0)
              parameter (ZERO=0d0)
              include 'input.inc'
              include 'coupl.inc'""")
        if mp:
            fsock.writelines("""%s MP__PI, MP__ZERO
                parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                parameter (MP__ZERO=0e0_16)
                include \'mp_input.inc\'
                include \'mp_coupl.inc\'
                """%self.mp_real_format)

        # one assignment per coupling, translated from the UFO python
        # expression by the (mp_)p_to_f parser
        for coupling in data:
            if dp:
                fsock.writelines('%s = %s' % (coupling.name,
                                              self.p_to_f.parse(coupling.expr)))
            if mp:
                fsock.writelines('%s%s = %s' % (self.mp_prefix,coupling.name,
                                                self.mp_p_to_f.parse(coupling.expr)))
        fsock.writelines('end')
5992
5993 - def create_model_functions_inc(self):
5994 """ Create model_functions.inc which contains the various declarations 5995 of auxiliary functions which might be used in the couplings expressions 5996 """ 5997 5998 additional_fct = [] 5999 # check for functions define in the UFO model 6000 ufo_fct = self.model.get('functions') 6001 if ufo_fct: 6002 for fct in ufo_fct: 6003 # already handle by default 6004 if fct.name not in ["complexconjugate", "re", "im", "sec", 6005 "csc", "asec", "acsc", "theta_function", "cond", 6006 "condif", "reglogp", "reglogm", "reglog", "recms", "arg", "cot"]: 6007 additional_fct.append(fct.name) 6008 6009 6010 fsock = self.open('model_functions.inc', format='fortran') 6011 fsock.writelines("""double complex cond 6012 double complex condif 6013 double complex reglog 6014 double complex reglogp 6015 double complex reglogm 6016 double complex recms 6017 double complex arg 6018 %s 6019 """ % "\n".join([" double complex %s" % i for i in additional_fct])) 6020 6021 6022 if self.opt['mp']: 6023 fsock.writelines("""%(complex_mp_format)s mp_cond 6024 %(complex_mp_format)s mp_condif 6025 %(complex_mp_format)s mp_reglog 6026 %(complex_mp_format)s mp_reglogp 6027 %(complex_mp_format)s mp_reglogm 6028 %(complex_mp_format)s mp_recms 6029 %(complex_mp_format)s mp_arg 6030 %(additional)s 6031 """ %\ 6032 {"additional": "\n".join([" %s %s" % (self.mp_complex_format, i) for i in additional_fct]), 6033 'complex_mp_format':self.mp_complex_format 6034 })
6035
    def create_model_functions_def(self):
        """ Create model_functions.f which contains the various definitions
        of auxiliary functions which might be used in the couplings expressions
        Add the functions.f functions for formfactors support
        """

        fsock = self.open('model_functions.f', format='fortran')
        # Default auxiliary functions in double precision.  Note the cond()
        # convention: the *truecase* branch is taken when condition == 0.
        fsock.writelines("""double complex function cond(condition,truecase,falsecase)
            implicit none
            double complex condition,truecase,falsecase
            if(condition.eq.(0.0d0,0.0d0)) then
            cond=truecase
            else
            cond=falsecase
            endif
            end

            double complex function condif(condition,truecase,falsecase)
            implicit none
            logical condition
            double complex truecase,falsecase
            if(condition) then
            condif=truecase
            else
            condif=falsecase
            endif
            end

            double complex function recms(condition,expr)
            implicit none
            logical condition
            double complex expr
            if(condition)then
            recms=expr
            else
            recms=dcmplx(dble(expr))
            endif
            end

            double complex function reglog(arg)
            implicit none
            double complex TWOPII
            parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
            double complex arg
            if(arg.eq.(0.0d0,0.0d0)) then
            reglog=(0.0d0,0.0d0)
            else
            reglog=log(arg)
            endif
            end

            double complex function reglogp(arg)
            implicit none
            double complex TWOPII
            parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
            double complex arg
            if(arg.eq.(0.0d0,0.0d0))then
            reglogp=(0.0d0,0.0d0)
            else
            if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then
            reglogp=log(arg) + TWOPII
            else
            reglogp=log(arg)
            endif
            endif
            end

            double complex function reglogm(arg)
            implicit none
            double complex TWOPII
            parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
            double complex arg
            if(arg.eq.(0.0d0,0.0d0))then
            reglogm=(0.0d0,0.0d0)
            else
            if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then
            reglogm=log(arg) - TWOPII
            else
            reglogm=log(arg)
            endif
            endif
            end

            double complex function arg(comnum)
            implicit none
            double complex comnum
            double complex iim
            iim = (0.0d0,1.0d0)
            if(comnum.eq.(0.0d0,0.0d0)) then
            arg=(0.0d0,0.0d0)
            else
            arg=log(comnum/abs(comnum))/iim
            endif
            end""")
        if self.opt['mp']:
            # Quadruple-precision counterparts of the functions above.
            # NOTE(review): the quad-precision TWOPII uses the literal
            # 3.14169258478796... which differs from pi (3.14159265...) in the
            # fifth significant digit -- looks like a typo'd constant; confirm
            # against upstream before relying on the mp branch corrections.
            fsock.writelines("""

            %(complex_mp_format)s function mp_cond(condition,truecase,falsecase)
            implicit none
            %(complex_mp_format)s condition,truecase,falsecase
            if(condition.eq.(0.0e0_16,0.0e0_16)) then
            mp_cond=truecase
            else
            mp_cond=falsecase
            endif
            end

            %(complex_mp_format)s function mp_condif(condition,truecase,falsecase)
            implicit none
            logical condition
            %(complex_mp_format)s truecase,falsecase
            if(condition) then
            mp_condif=truecase
            else
            mp_condif=falsecase
            endif
            end

            %(complex_mp_format)s function mp_recms(condition,expr)
            implicit none
            logical condition
            %(complex_mp_format)s expr
            if(condition)then
            mp_recms=expr
            else
            mp_recms=cmplx(real(expr),kind=16)
            endif
            end

            %(complex_mp_format)s function mp_reglog(arg)
            implicit none
            %(complex_mp_format)s TWOPII
            parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16))
            %(complex_mp_format)s arg
            if(arg.eq.(0.0e0_16,0.0e0_16)) then
            mp_reglog=(0.0e0_16,0.0e0_16)
            else
            mp_reglog=log(arg)
            endif
            end

            %(complex_mp_format)s function mp_reglogp(arg)
            implicit none
            %(complex_mp_format)s TWOPII
            parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16))
            %(complex_mp_format)s arg
            if(arg.eq.(0.0e0_16,0.0e0_16))then
            mp_reglogp=(0.0e0_16,0.0e0_16)
            else
            if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then
            mp_reglogp=log(arg) + TWOPII
            else
            mp_reglogp=log(arg)
            endif
            endif
            end

            %(complex_mp_format)s function mp_reglogm(arg)
            implicit none
            %(complex_mp_format)s TWOPII
            parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16))
            %(complex_mp_format)s arg
            if(arg.eq.(0.0e0_16,0.0e0_16))then
            mp_reglogm=(0.0e0_16,0.0e0_16)
            else
            if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then
            mp_reglogm=log(arg) - TWOPII
            else
            mp_reglogm=log(arg)
            endif
            endif
            end

            %(complex_mp_format)s function mp_arg(comnum)
            implicit none
            %(complex_mp_format)s comnum
            %(complex_mp_format)s imm
            imm = (0.0e0_16,1.0e0_16)
            if(comnum.eq.(0.0e0_16,0.0e0_16)) then
            mp_arg=(0.0e0_16,0.0e0_16)
            else
            mp_arg=log(comnum/abs(comnum))/imm
            endif
            end"""%{'complex_mp_format':self.mp_complex_format})


        #check for the file functions.f
        model_path = self.model.get('modelpath')
        if os.path.exists(pjoin(model_path,'Fortran','functions.f')):
            fsock.write_comment_line(' USER DEFINE FUNCTIONS ')
            input = pjoin(model_path,'Fortran','functions.f')
            # raw copy: the unbound file.writelines call bypasses the fortran
            # re-formatting of the writer (python2 idiom; assumes the writer
            # derives from the builtin file type -- TODO confirm)
            file.writelines(fsock, open(input).read())
            fsock.write_comment_line(' END USER DEFINE FUNCTIONS ')

        # check for functions define in the UFO model
        ufo_fct = self.model.get('functions')
        if ufo_fct:
            fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ')
            for fct in ufo_fct:
                # already handle by default
                # NOTE(review): unlike create_model_functions_inc, this
                # exclusion list does not contain "cot" -- confirm whether a
                # UFO-defined cot should be skipped here as well.
                if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif",
                                    "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms","arg"]:
                    ufo_fct_template = """
          double complex function %(name)s(%(args)s)
          implicit none
          double complex %(args)s
          %(definitions)s
          %(name)s = %(fct)s

          return
          end
          """
                    str_fct = self.p_to_f.parse(fct.expr)
                    # declare whatever helper symbols the parser reported
                    if not self.p_to_f.to_define:
                        definitions = []
                    else:
                        definitions=[]
                        for d in self.p_to_f.to_define:
                            if d == 'pi':
                                definitions.append(' double precision pi')
                                definitions.append(' data pi /3.1415926535897932d0/')
                            else:
                                definitions.append(' double complex %s' % d)

                    text = ufo_fct_template % {
                        'name': fct.name,
                        'args': ", ".join(fct.arguments),
                        'fct': str_fct,
                        'definitions': '\n'.join(definitions)
                        }

                    fsock.writelines(text)
            if self.opt['mp']:
                fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP')
                for fct in ufo_fct:
                    # already handle by default
                    if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc","condif",
                                        "theta_function", "cond", "reglog", "reglogp","reglogm", "recms","arg"]:
                        ufo_fct_template = """
          %(complex_mp_format)s function mp__%(name)s(mp__%(args)s)
          implicit none
          %(complex_mp_format)s mp__%(args)s
          %(definitions)s
          mp__%(name)s = %(fct)s

          return
          end
          """

                        str_fct = self.mp_p_to_f.parse(fct.expr)
                        # NOTE(review): the declarations below are driven by
                        # self.p_to_f.to_define (the dp parser) although the
                        # expression was parsed with mp_p_to_f -- verify this
                        # is intentional and not meant to be
                        # self.mp_p_to_f.to_define.
                        if not self.p_to_f.to_define:
                            definitions = []
                        else:
                            definitions=[]
                            for d in self.p_to_f.to_define:
                                if d == 'mp_pi':
                                    definitions.append(' %s mp_pi' % self.mp_real_format)
                                    definitions.append(' data mp_pi /3.141592653589793238462643383279502884197e+00_16/')
                                else:
                                    definitions.append(' %s %s' % (self.mp_complex_format,d))
                        text = ufo_fct_template % {
                            'name': fct.name,
                            'args': ", mp__".join(fct.arguments),
                            'fct': str_fct,
                            'definitions': '\n'.join(definitions),
                            'complex_mp_format': self.mp_complex_format
                            }
                        fsock.writelines(text)

            fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
6308 6309 6310
6311 - def create_makeinc(self):
6312 """create makeinc.inc containing the file to compile """ 6313 6314 fsock = self.open('makeinc.inc', comment='#') 6315 text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o' 6316 text += ' model_functions.o ' 6317 6318 nb_coup_indep = 1 + len(self.coups_dep) // 25 6319 nb_coup_dep = 1 + len(self.coups_indep) // 25 6320 couplings_files=['couplings%s.o' % (i+1) \ 6321 for i in range(nb_coup_dep + nb_coup_indep) ] 6322 if self.opt['mp']: 6323 couplings_files+=['mp_couplings%s.o' % (i+1) for i in \ 6324 range(nb_coup_dep,nb_coup_dep + nb_coup_indep) ] 6325 text += ' '.join(couplings_files) 6326 fsock.writelines(text)
6327
6328 - def create_param_write(self):
6329 """ create param_write """ 6330 6331 fsock = self.open('param_write.inc', format='fortran') 6332 6333 fsock.writelines("""write(*,*) ' External Params' 6334 write(*,*) ' ---------------------------------' 6335 write(*,*) ' '""") 6336 def format(name): 6337 return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
6338 6339 # Write the external parameter 6340 lines = [format(param.name) for param in self.params_ext] 6341 fsock.writelines('\n'.join(lines)) 6342 6343 fsock.writelines("""write(*,*) ' Internal Params' 6344 write(*,*) ' ---------------------------------' 6345 write(*,*) ' '""") 6346 lines = [format(data.name) for data in self.params_indep 6347 if data.name != 'ZERO' and self.check_needed_param(data.name)] 6348 fsock.writelines('\n'.join(lines)) 6349 fsock.writelines("""write(*,*) ' Internal Params evaluated point by point' 6350 write(*,*) ' ----------------------------------------' 6351 write(*,*) ' '""") 6352 lines = [format(data.name) for data in self.params_dep \ 6353 if self.check_needed_param(data.name)] 6354 6355 fsock.writelines('\n'.join(lines)) 6356 6357 6358
6359 - def create_ident_card(self):
6360 """ create the ident_card.dat """ 6361 6362 def format(parameter): 6363 """return the line for the ident_card corresponding to this parameter""" 6364 colum = [parameter.lhablock.lower()] + \ 6365 [str(value) for value in parameter.lhacode] + \ 6366 [parameter.name] 6367 if not parameter.name: 6368 return '' 6369 return ' '.join(colum)+'\n'
6370 6371 fsock = self.open('ident_card.dat') 6372 6373 external_param = [format(param) for param in self.params_ext] 6374 fsock.writelines('\n'.join(external_param)) 6375
6376 - def create_actualize_mp_ext_param_inc(self):
6377 """ create the actualize_mp_ext_params.inc code """ 6378 6379 # In principle one should actualize all external, but for now, it is 6380 # hardcoded that only AS and MU_R can by dynamically changed by the user 6381 # so that we only update those ones. 6382 # Of course, to be on the safe side, one could decide to update all 6383 # external parameters. 6384 update_params_list=[p for p in self.params_ext if p.name in 6385 self.PS_dependent_key] 6386 6387 res_strings = ["%(mp_prefix)s%(name)s=%(name)s"\ 6388 %{'mp_prefix':self.mp_prefix,'name':param.name}\ 6389 for param in update_params_list] 6390 # When read_lha is false, it is G which is taken in input and not AS, so 6391 # this is what should be reset here too. 6392 if 'aS' in [param.name for param in update_params_list]: 6393 res_strings.append("%(mp_prefix)sG=G"%{'mp_prefix':self.mp_prefix}) 6394 6395 fsock = self.open('actualize_mp_ext_params.inc', format='fortran') 6396 fsock.writelines('\n'.join(res_strings))
6397
6398 - def create_param_read(self):
6399 """create param_read""" 6400 6401 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 6402 or self.opt['loop_induced']: 6403 fsock = self.open('param_read.inc', format='fortran') 6404 fsock.writelines(' include \'../param_card.inc\'') 6405 return 6406 6407 def format_line(parameter): 6408 """return the line for the ident_card corresponding to this 6409 parameter""" 6410 template = \ 6411 """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \ 6412 % {'name': parameter.name, 6413 'value': self.p_to_f.parse(str(parameter.value.real))} 6414 if self.opt['mp']: 6415 template = template+ \ 6416 ("\n call MP_LHA_get_real(npara,param,value,'%(name)s',"+ 6417 "%(mp_prefix)s%(name)s,%(value)s)") \ 6418 % {'name': parameter.name,'mp_prefix': self.mp_prefix, 6419 'value': self.mp_p_to_f.parse(str(parameter.value.real))} 6420 return template 6421 6422 fsock = self.open('param_read.inc', format='fortran') 6423 res_strings = [format_line(param) \ 6424 for param in self.params_ext] 6425 6426 # Correct width sign for Majorana particles (where the width 6427 # and mass need to have the same sign) 6428 for particle in self.model.get('particles'): 6429 if particle.is_fermion() and particle.get('self_antipart') and \ 6430 particle.get('width').lower() != 'zero': 6431 6432 res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \ 6433 {'width': particle.get('width'), 'mass': particle.get('mass')}) 6434 if self.opt['mp']: 6435 res_strings.append(\ 6436 ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,'+\ 6437 '%(mp_pref)s%(mass)s)')%{'width': particle.get('width'),\ 6438 'mass': particle.get('mass'),'mp_pref':self.mp_prefix}) 6439 6440 fsock.writelines('\n'.join(res_strings)) 6441 6442 6443 @staticmethod
6444 - def create_param_card_static(model, output_path, rule_card_path=False, 6445 mssm_convert=True):
6446 """ create the param_card.dat for a givent model --static method-- """ 6447 #1. Check if a default param_card is present: 6448 done = False 6449 if hasattr(model, 'restrict_card') and isinstance(model.restrict_card, str): 6450 restrict_name = os.path.basename(model.restrict_card)[9:-4] 6451 model_path = model.get('modelpath') 6452 if os.path.exists(pjoin(model_path,'paramcard_%s.dat' % restrict_name)): 6453 done = True 6454 files.cp(pjoin(model_path,'paramcard_%s.dat' % restrict_name), 6455 output_path) 6456 if not done: 6457 param_writer.ParamCardWriter(model, output_path) 6458 6459 if rule_card_path: 6460 if hasattr(model, 'rule_card'): 6461 model.rule_card.write_file(rule_card_path) 6462 6463 if mssm_convert: 6464 model_name = model.get('name') 6465 # IF MSSM convert the card to SLAH1 6466 if model_name == 'mssm' or model_name.startswith('mssm-'): 6467 import models.check_param_card as translator 6468 # Check the format of the param_card for Pythia and make it correct 6469 if rule_card_path: 6470 translator.make_valid_param_card(output_path, rule_card_path) 6471 translator.convert_to_slha1(output_path)
6472
6473 - def create_param_card(self):
6474 """ create the param_card.dat """ 6475 6476 rule_card = pjoin(self.dir_path, 'param_card_rule.dat') 6477 if not hasattr(self.model, 'rule_card'): 6478 rule_card=False 6479 self.create_param_card_static(self.model, 6480 output_path=pjoin(self.dir_path, 'param_card.dat'), 6481 rule_card_path=rule_card, 6482 mssm_convert=True)
6483
def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True):
    """ Determine which Export_v4 class is required. cmd is the command
    interface containing all potential usefull information.
    The output_type argument specifies from which context the output
    is called. It is 'madloop' for MadLoop5, 'amcatnlo' for FKS5 output
    and 'default' for tree-level outputs.

    Raises MadGraph5Error for an unknown output_type and Exception for an
    unknown export format."""

    opt = cmd.options

    # ==========================================================================
    # First check whether Ninja must be installed.
    # Ninja would only be required if:
    # a) Loop optimized output is selected
    # b) the process gathered from the amplitude generated use loops

    if len(cmd._curr_amps) > 0:
        try:
            curr_proc = cmd._curr_amps[0].get('process')
        except base_objects.PhysicsObject.PhysicsObjectError:
            curr_proc = None
    elif hasattr(cmd, '_fks_multi_proc') and \
             len(cmd._fks_multi_proc.get('process_definitions')) > 0:
        curr_proc = cmd._fks_multi_proc.get('process_definitions')[0]
    else:
        curr_proc = None

    requires_reduction_tool = opt['loop_optimized_output'] and \
                       (not curr_proc is None) and \
                       (curr_proc.get('perturbation_couplings') != [] and \
                        not curr_proc.get('NLO_mode') in [None, 'real', 'tree', 'LO', 'LOonly'])

    # An installation is required then, but only if the specified path is the
    # default local one and that the Ninja library appears missing.
    if requires_reduction_tool:
        cmd.install_reduction_library()

    # ==========================================================================
    # First treat the MadLoop5 standalone case
    MadLoop_SA_options = {'clean': not noclean,
      'complex_mass': cmd.options['complex_mass_scheme'],
      'export_format': 'madloop',
      'mp': True,
      'loop_dir': os.path.join(cmd._mgme_dir, 'Template', 'loop_material'),
      'cuttools_dir': cmd._cuttools_dir,
      'iregi_dir': cmd._iregi_dir,
      'pjfry_dir': cmd.options['pjfry'],
      'golem_dir': cmd.options['golem'],
      'samurai_dir': cmd.options['samurai'],
      'ninja_dir': cmd.options['ninja'],
      'collier_dir': cmd.options['collier'],
      'fortran_compiler': cmd.options['fortran_compiler'],
      'f2py_compiler': cmd.options['f2py_compiler'],
      'output_dependencies': cmd.options['output_dependencies'],
      'SubProc_prefix': 'P',
      'compute_color_flows': cmd.options['loop_color_flows'],
      'mode': 'reweight' if cmd._export_format == "standalone_rw" else '',
      'cluster_local_path': cmd.options['cluster_local_path']
      }

    if output_type.startswith('madloop'):
        import madgraph.loop.loop_exporters as loop_exporters
        if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')):
            ExporterClass = None
            if not cmd.options['loop_optimized_output']:
                ExporterClass = loop_exporters.LoopProcessExporterFortranSA
            else:
                if output_type == "madloop":
                    ExporterClass = loop_exporters.LoopProcessOptimizedExporterFortranSA
                    MadLoop_SA_options['export_format'] = 'madloop_optimized'
                elif output_type == "madloop_matchbox":
                    ExporterClass = loop_exporters.LoopProcessExporterFortranMatchBox
                    MadLoop_SA_options['export_format'] = 'madloop_matchbox'
                else:
                    # Call-form raise: valid in both Python 2 and Python 3
                    # (was the Python-2-only "raise Exception, msg" form).
                    raise Exception("output_type not recognize %s" % output_type)
            return ExporterClass(cmd._export_dir, MadLoop_SA_options)
        else:
            raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory'+\
                                 ' in %s' % str(cmd._mgme_dir))

    # Then treat the aMC@NLO output
    elif output_type == 'amcatnlo':
        import madgraph.iolibs.export_fks as export_fks
        ExporterClass = None
        amcatnlo_options = dict(opt)
        amcatnlo_options.update(MadLoop_SA_options)
        amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0
        if not cmd.options['loop_optimized_output']:
            logger.info("Writing out the aMC@NLO code")
            ExporterClass = export_fks.ProcessExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_default'
        else:
            logger.info("Writing out the aMC@NLO code, using optimized Loops")
            ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_optimized'
        return ExporterClass(cmd._export_dir, amcatnlo_options)

    # Then the default tree-level output
    elif output_type == 'default':
        assert group_subprocesses in [True, False]

        opt = dict(opt)
        opt.update({'clean': not noclean,
                    'complex_mass': cmd.options['complex_mass_scheme'],
                    'export_format': cmd._export_format,
                    'mp': False,
                    'sa_symmetry': False,
                    'model': cmd._curr_model.get('name'),
                    'v5_model': False if cmd._model_v4_path else True})

        format = cmd._export_format #shortcut

        if format in ['standalone_msP', 'standalone_msF', 'standalone_rw']:
            opt['sa_symmetry'] = True
        elif format == 'plugin':
            opt['sa_symmetry'] = cmd._export_plugin.sa_symmetry

        loop_induced_opt = dict(opt)
        loop_induced_opt.update(MadLoop_SA_options)
        loop_induced_opt['export_format'] = 'madloop_optimized'
        loop_induced_opt['SubProc_prefix'] = 'PV'
        # For loop_induced output with MadEvent, we must have access to the
        # color flows.
        loop_induced_opt['compute_color_flows'] = True
        for key in opt:
            if key not in loop_induced_opt:
                loop_induced_opt[key] = opt[key]

        # Madevent output supports MadAnalysis5
        if format in ['madevent']:
            opt['madanalysis5'] = cmd.options['madanalysis5_path']

        if format == 'matrix' or format.startswith('standalone'):
            return ProcessExporterFortranSA(cmd._export_dir, opt, format=format)

        elif format in ['madevent'] and group_subprocesses:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMEGroup(
                    cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranMEGroup(cmd._export_dir, opt)
        elif format in ['madevent']:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMENoGroup(
                    cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranME(cmd._export_dir, opt)
        elif format in ['matchbox']:
            return ProcessExporterFortranMatchBox(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight'] and group_subprocesses:
            return ProcessExporterFortranMWGroup(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight']:
            return ProcessExporterFortranMW(cmd._export_dir, opt)
        elif format == 'plugin':
            return cmd._export_plugin(cmd._export_dir, opt)
        else:
            # Call-form raise (was "raise Exception, '...'").
            raise Exception('Wrong export_v4 format')
    else:
        # Bug fix: the %s placeholder was never filled in the original.
        raise MadGraph5Error('Output type %s not reckognized in ExportV4Factory.'
                             % output_type)
6649



#===============================================================================
# ProcessExporterFortranMWGroup
#===============================================================================
class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
    """Class to take care of exporting a set of matrix elements to
    MadEvent subprocess group format."""

    # Fortran template used to generate the matrix_N.f files of this exporter.
    matrix_file = "matrix_madweight_group_v4.inc"
    # Label identifying the grouped-output mode implemented by this class.
    grouped_mode = 'madweight'
    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                         fortran_model,
                                         group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files.

        Returns the total number of helas calls of the group (used by the
        caller for bookkeeping)."""

        # Python-2 style raise kept as-is (file-wide convention).
        if not isinstance(subproc_group, group_subprocs.SubProcessGroup):
            raise base_objects.PhysicsObject.PhysicsObjectError,\
                  "subproc_group object not SubProcessGroup"

        # Lazily pick up the model from the first process of the group.
        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        pathdir = os.path.join(self.dir_path, 'SubProcesses')

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(pjoin(pathdir, subprocdir))
        except os.error as error:
            # Directory may already exist; warn and reuse it.
            logger.warning(error.strerror + " " + subprocdir)


        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(pathdir, subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0
        maxflows = 0
        tot_calls = 0

        matrix_elements = subproc_group.get('matrix_elements')

        for ime, matrix_element in \
                enumerate(matrix_elements):
            # One matrixN.f per matrix element of the group.
            filename = pjoin(Ppath, 'matrix%d.f' % (ime+1))
            calls, ncolor = \
                self.write_matrix_element_v4(writers.FortranWriter(filename),
                                             matrix_element,
                                             fortran_model,
                                             str(ime+1),
                                             subproc_group.get('diagram_maps')[\
                                                                          ime])

            filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime+1))
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime+1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams
            filename = pjoin(Ppath, "matrix%d.ps" % (ime+1))
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                 get('diagrams'),
                                              filename,
                                              model = \
                                                matrix_element.get('processes')[0].\
                                                                   get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        # Extract number of external particles
        # NOTE: uses the loop variable from the loop above — all elements of
        # a group share the same external legs, so any element works.
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
        # If a subprocess has no diagrams for this config, the number is 0

        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = pjoin(Ppath, 'configs.inc')
        nconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = pjoin(Ppath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                   nconfigs)


        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        filename = pjoin(Ppath, 'mirrorprocs.inc')
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # filename = pjoin(Ppath, 'processes.dat')
        # files.write_to_file(filename,
        #                     self.write_processes_file,
        #                     subproc_group)

        # Generate jpgs -> pass in make_html
        #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl'))

        # Symlink the shared per-process files from the parent directory.
        linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f']

        for file in linkfiles:
            ln('../%s' % file, cwd=Ppath)

        ln('nexternal.inc', '../../Source', cwd=Ppath, log=False)
        ln('leshouche.inc', '../../Source', cwd=Ppath, log=False)
        ln('maxamps.inc', '../../Source', cwd=Ppath, log=False)
        ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath)
        ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath)
        ln('phasespace.inc', '../', log=True, cwd=Ppath)
        # Normalise a falsy call count to a plain integer zero.
        if not tot_calls:
            tot_calls = 0
        return tot_calls
6812 6813 6814 #=========================================================================== 6815 # Helper functions 6816 #===========================================================================
6817 - def modify_grouping(self, matrix_element):
6818 """allow to modify the grouping (if grouping is in place) 6819 return two value: 6820 - True/False if the matrix_element was modified 6821 - the new(or old) matrix element""" 6822 6823 return True, matrix_element.split_lepton_grouping()
6824 6825 #=========================================================================== 6826 # write_super_auto_dsig_file 6827 #===========================================================================
6828 - def write_super_auto_dsig_file(self, writer, subproc_group):
6829 """Write the auto_dsig.f file selecting between the subprocesses 6830 in subprocess group mode""" 6831 6832 replace_dict = {} 6833 6834 # Extract version number and date from VERSION file 6835 info_lines = self.get_mg5_info_lines() 6836 replace_dict['info_lines'] = info_lines 6837 6838 matrix_elements = subproc_group.get('matrix_elements') 6839 6840 # Extract process info lines 6841 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 6842 matrix_elements]) 6843 replace_dict['process_lines'] = process_lines 6844 6845 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 6846 replace_dict['nexternal'] = nexternal 6847 6848 replace_dict['nsprocs'] = 2*len(matrix_elements) 6849 6850 # Generate dsig definition line 6851 dsig_def_line = "DOUBLE PRECISION " + \ 6852 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 6853 range(len(matrix_elements))]) 6854 replace_dict["dsig_def_line"] = dsig_def_line 6855 6856 # Generate dsig process lines 6857 call_dsig_proc_lines = [] 6858 for iproc in range(len(matrix_elements)): 6859 call_dsig_proc_lines.append(\ 6860 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 6861 {"num": iproc + 1, 6862 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 6863 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 6864 6865 if writer: 6866 file = open(os.path.join(_file_path, \ 6867 'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')).read() 6868 file = file % replace_dict 6869 # Write the file 6870 writer.writelines(file) 6871 else: 6872 return replace_dict
6873 6874 #=========================================================================== 6875 # write_mirrorprocs 6876 #===========================================================================
6877 - def write_mirrorprocs(self, writer, subproc_group):
6878 """Write the mirrorprocs.inc file determining which processes have 6879 IS mirror process in subprocess group mode.""" 6880 6881 lines = [] 6882 bool_dict = {True: '.true.', False: '.false.'} 6883 matrix_elements = subproc_group.get('matrix_elements') 6884 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 6885 (len(matrix_elements), 6886 ",".join([bool_dict[me.get('has_mirror_process')] for \ 6887 me in matrix_elements]))) 6888 # Write the file 6889 writer.writelines(lines)
6890 6891 #=========================================================================== 6892 # write_configs_file 6893 #===========================================================================
6894 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
6895 """Write the configs.inc file with topology information for a 6896 subprocess group. Use the first subprocess with a diagram for each 6897 configuration.""" 6898 6899 matrix_elements = subproc_group.get('matrix_elements') 6900 model = matrix_elements[0].get('processes')[0].get('model') 6901 6902 diagrams = [] 6903 config_numbers = [] 6904 for iconfig, config in enumerate(diagrams_for_config): 6905 # Check if any diagrams correspond to this config 6906 if set(config) == set([0]): 6907 continue 6908 subproc_diags = [] 6909 for s,d in enumerate(config): 6910 if d: 6911 subproc_diags.append(matrix_elements[s].\ 6912 get('diagrams')[d-1]) 6913 else: 6914 subproc_diags.append(None) 6915 diagrams.append(subproc_diags) 6916 config_numbers.append(iconfig + 1) 6917 6918 # Extract number of external particles 6919 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 6920 6921 return len(diagrams), \ 6922 self.write_configs_file_from_diagrams(writer, diagrams, 6923 config_numbers, 6924 nexternal, ninitial, 6925 matrix_elements[0],model)
6926 6927 #=========================================================================== 6928 # write_run_configs_file 6929 #===========================================================================
6930 - def write_run_config_file(self, writer):
6931 """Write the run_configs.inc file for MadEvent""" 6932 6933 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 6934 text = open(path).read() % {'chanperjob':'2'} 6935 writer.write(text) 6936 return True
6937 6938 6939 #=========================================================================== 6940 # write_leshouche_file 6941 #===========================================================================
6942 - def write_leshouche_file(self, writer, subproc_group):
6943 """Write the leshouche.inc file for MG4""" 6944 6945 all_lines = [] 6946 6947 for iproc, matrix_element in \ 6948 enumerate(subproc_group.get('matrix_elements')): 6949 all_lines.extend(self.get_leshouche_lines(matrix_element, 6950 iproc)) 6951 6952 # Write the file 6953 writer.writelines(all_lines) 6954 6955 return True
6956