
Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  from __future__ import absolute_import 
  16  from madgraph.iolibs.helas_call_writers import HelasCallWriter 
  17  from six.moves import range 
  18  from six.moves import zip 
  19  """Methods and classes to export matrix elements to v4 format.""" 
  20   
  21  import copy 
  22  from six import StringIO 
  23  from distutils import dir_util 
  24  import itertools 
  25  import fractions 
  26  import glob 
  27  import logging 
  28  import math 
  29  import os 
  30  import io 
  31  import re 
  32  import shutil 
  33  import subprocess 
  34  import sys 
  35  import time 
  36  import traceback 
  37   
  38  import aloha 
  39   
  40  import madgraph.core.base_objects as base_objects 
  41  import madgraph.core.color_algebra as color 
  42  import madgraph.core.helas_objects as helas_objects 
  43  import madgraph.iolibs.drawing_eps as draw 
  44  import madgraph.iolibs.files as files 
  45  import madgraph.iolibs.group_subprocs as group_subprocs 
  46  import madgraph.iolibs.file_writers as writers 
  47  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  48  import madgraph.iolibs.template_files as template_files 
  49  import madgraph.iolibs.ufo_expression_parsers as parsers 
  50  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  51  import madgraph.interface.common_run_interface as common_run_interface 
  52  import madgraph.various.diagram_symmetry as diagram_symmetry 
  53  import madgraph.various.misc as misc 
  54  import madgraph.various.banner as banner_mod 
  55  import madgraph.various.process_checks as process_checks 
  56  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  57  import aloha.create_aloha as create_aloha 
  58  import models.import_ufo as import_ufo 
  59  import models.write_param_card as param_writer 
  60  import models.check_param_card as check_param_card 
  61   
  62   
  63  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  64  from madgraph.iolibs.files import cp, ln, mv 
  65   
  66  from madgraph import InvalidCmd 
  67   
  68  pjoin = os.path.join 
  69   
  70  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  71  logger = logging.getLogger('madgraph.export_v4') 
  72   
  73  default_compiler= {'fortran': 'gfortran', 
  74                         'f2py': 'f2py', 
  75                         'cpp':'g++'} 
76 77 78 -class VirtualExporter(object):
79 80 # exporter variables that modify the way madgraph interacts with this class 81 82 grouped_mode = 'madevent' 83 # This variable changes the type of object passed to the 'generate_subprocess_directory' 84 # functions. 85 # False to avoid grouping (only identical matrix elements are merged) 86 # 'madevent' groups the massless quarks and the massless leptons 87 # 'madweight' groups the gluon with the massless quarks 88 sa_symmetry = False 89 # If grouped_mode=False, uu~ and u~u will be called independently. 90 # Setting sa_symmetry generates only one of the two matrix elements. 91 check = True 92 # Ask madgraph to check if the directory already exists and propose to the user to 93 # remove it first if this is the case 94 output = 'Template' 95 # [Template, None, dir] 96 # - Template, madgraph will call copy_template 97 # - dir, madgraph will just create an empty directory for initialisation 98 # - None, madgraph does nothing for initialisation 99 exporter = 'v4' 100 # language of the output: 'v4' for Fortran output 101 # 'cpp' for C++ output 102 103
104 - def __init__(self, dir_path = "", opt=None):
105 # cmd_options is a dictionary with all the optional arguments passed at output time 106 107 # Activate some monkey patching for the helas call writer. 108 helas_call_writers.HelasCallWriter.customize_argument_for_all_other_helas_object = \ 109 self.helas_call_writer_custom
110 111 112 # helper function for customising the helas writer 113 @staticmethod
114 - def custom_helas_call(call, arg):
115 """static method to customise the way aloha function call are written 116 call is the default template for the call 117 arg are the dictionary used for the call 118 """ 119 return call, arg
120 121 helas_call_writer_custom = lambda x,y,z: x.custom_helas_call(y,z) 122 123
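A daughter exporter can reroute every ALOHA call through custom_helas_call by overriding the static method above. A minimal sketch, assuming call is the plain Fortran call template string handed over by the helas call writer (the subclass name and the appended comment are purely illustrative):

class CommentedCallExporter(VirtualExporter):

    @staticmethod
    def custom_helas_call(call, arg):
        # keep the argument dictionary untouched, only tag the call template
        return call.rstrip() + '  ! tagged by the exporter', arg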
124 - def copy_template(self, model):
125 return
126
127 - def generate_subprocess_directory(self, subproc_group, helicity_model, me=None):
128 # generate_subprocess_directory(self, matrix_element, helicity_model, me_number) [for ungrouped] 129 return 0 # return an integer stating the number of calls to the helicity routines
130
131 - def convert_model(self, model, wanted_lorentz=[], wanted_couplings=[]):
132 return
133
134 - def finalize(self,matrix_element, cmdhistory, MG5options, outputflag):
135 return
136 137
138 - def pass_information_from_cmd(self, cmd):
139 """pass information from the command interface to the exporter. 140 Please do not modify any object of the interface from the exporter. 141 """ 142 return
143
144 - def modify_grouping(self, matrix_element):
145 return False, matrix_element
146
147 - def export_model_files(self, model_v4_path):
148 raise Exception("V4 model not supported by this type of exporter. Please use UFO model") 149 return
150
151 - def export_helas(self, HELAS_PATH):
152 raise Exception("V4 model not supported by this type of exporter. Please use UFO model") 153 return
154
155 #=============================================================================== 156 # ProcessExporterFortran 157 #=============================================================================== 158 -class ProcessExporterFortran(VirtualExporter):
159 """Class to take care of exporting a set of matrix elements to 160 Fortran (v4) format.""" 161 162 default_opt = {'clean': False, 'complex_mass':False, 163 'export_format':'madevent', 'mp': False, 164 'v5_model': True, 165 'output_options':{} 166 } 167 grouped_mode = False 168
169 - def __init__(self, dir_path = "", opt=None):
170 """Initiate the ProcessExporterFortran with directory information""" 171 self.mgme_dir = MG5DIR 172 self.dir_path = dir_path 173 self.model = None 174 175 self.opt = dict(self.default_opt) 176 if opt: 177 self.opt.update(opt) 178 179 self.cmd_options = self.opt['output_options'] 180 181 #place holder to pass information to the run_interface 182 self.proc_characteristic = banner_mod.ProcCharacteristic() 183 # call mother class 184 super(ProcessExporterFortran,self).__init__(dir_path, opt)
185 186 187 #=========================================================================== 188 # process exporter fortran switch between group and not grouped 189 #===========================================================================
190 - def export_processes(self, matrix_elements, fortran_model):
191 """Make the switch between grouped and not grouped output""" 192 193 calls = 0 194 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 195 for (group_number, me_group) in enumerate(matrix_elements): 196 calls = calls + self.generate_subprocess_directory(\ 197 me_group, fortran_model, group_number) 198 else: 199 for me_number, me in enumerate(matrix_elements.get_matrix_elements()): 200 calls = calls + self.generate_subprocess_directory(\ 201 me, fortran_model, me_number) 202 203 return calls
204 205 206 #=========================================================================== 207 # create the run_card 208 #===========================================================================
209 - def create_run_card(self, matrix_elements, history):
210 """ """ 211 212 213 # bypass this for the loop-check 214 import madgraph.loop.loop_helas_objects as loop_helas_objects 215 if isinstance(matrix_elements, loop_helas_objects.LoopHelasMatrixElement): 216 matrix_elements = None 217 218 run_card = banner_mod.RunCard() 219 220 221 default=True 222 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 223 processes = [me.get('processes') for megroup in matrix_elements 224 for me in megroup['matrix_elements']] 225 elif matrix_elements: 226 processes = [me.get('processes') 227 for me in matrix_elements['matrix_elements']] 228 else: 229 default =False 230 231 if default: 232 run_card.create_default_for_process(self.proc_characteristic, 233 history, 234 processes) 235 236 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat')) 237 shutil.copyfile(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 238 pjoin(self.dir_path, 'Cards', 'run_card.dat'))
239 240 241 242 #=========================================================================== 243 # copy the Template in a new directory. 244 #===========================================================================
245 - def copy_template(self, model):
246 """create the directory run_name as a copy of the MadEvent 247 Template, and clean the directory 248 """ 249 250 #First copy the full template tree if dir_path doesn't exit 251 if not os.path.isdir(self.dir_path): 252 assert self.mgme_dir, \ 253 "No valid MG_ME path given for MG4 run directory creation." 254 logger.info('initialize a new directory: %s' % \ 255 os.path.basename(self.dir_path)) 256 shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'), 257 self.dir_path, True) 258 # distutils.dir_util.copy_tree since dir_path already exists 259 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 260 self.dir_path) 261 # copy plot_card 262 for card in ['plot_card']: 263 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 264 try: 265 shutil.copy(pjoin(self.dir_path, 'Cards',card + '.dat'), 266 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 267 except IOError: 268 logger.warning("Failed to copy " + card + ".dat to default") 269 elif os.getcwd() == os.path.realpath(self.dir_path): 270 logger.info('working in local directory: %s' % \ 271 os.path.realpath(self.dir_path)) 272 # distutils.dir_util.copy_tree since dir_path already exists 273 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/LO'), 274 self.dir_path) 275 # for name in misc.glob('Template/LO/*', self.mgme_dir): 276 # name = os.path.basename(name) 277 # filname = pjoin(self.mgme_dir, 'Template','LO',name) 278 # if os.path.isfile(filename): 279 # files.cp(filename, pjoin(self.dir_path,name)) 280 # elif os.path.isdir(filename): 281 # shutil.copytree(filename, pjoin(self.dir_path,name), True) 282 # distutils.dir_util.copy_tree since dir_path already exists 283 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 284 self.dir_path) 285 # Copy plot_card 286 for card in ['plot_card']: 287 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 288 try: 289 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'), 290 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 291 except IOError: 292 logger.warning("Failed to copy " + card + ".dat to default") 293 elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')): 294 assert self.mgme_dir, \ 295 "No valid MG_ME path given for MG4 run directory creation." 
296 try: 297 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 298 except IOError: 299 MG5_version = misc.get_pkg_info() 300 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write(MG5_version['version']) 301 302 #Ensure that the Template is clean 303 if self.opt['clean']: 304 logger.info('remove old information in %s' % \ 305 os.path.basename(self.dir_path)) 306 if 'MADGRAPH_BASE' in os.environ: 307 misc.call([pjoin('bin', 'internal', 'clean_template'), 308 '--web'], cwd=self.dir_path) 309 else: 310 try: 311 misc.call([pjoin('bin', 'internal', 'clean_template')], \ 312 cwd=self.dir_path) 313 except Exception as why: 314 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 315 % (os.path.basename(self.dir_path),why)) 316 317 #Write version info 318 MG_version = misc.get_pkg_info() 319 open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 320 MG_version['version']) 321 322 # add the makefile in Source directory 323 filename = pjoin(self.dir_path,'Source','makefile') 324 self.write_source_makefile(writers.FileWriter(filename)) 325 326 # add the DiscreteSampler information 327 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'DiscreteSampler.f'), 328 pjoin(self.dir_path, 'Source')) 329 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'StringCast.f'), 330 pjoin(self.dir_path, 'Source')) 331 332 # We need to create the correct open_data for the pdf 333 self.write_pdf_opendata()
334 335 336 #=========================================================================== 337 # Call MadAnalysis5 to generate the default cards for this process 338 #===========================================================================
339 - def create_default_madanalysis5_cards(self, history, proc_defs, processes, 340 ma5_path, output_dir, levels = ['parton','hadron']):
341 """ Call MA5 so that it writes default cards for both parton and 342 post-shower levels, tailored for this particular process.""" 343 344 if len(levels)==0: 345 return 346 start = time.time() 347 logger.info('Generating MadAnalysis5 default cards tailored to this process') 348 try: 349 MA5_interpreter = common_run_interface.CommonRunCmd.\ 350 get_MadAnalysis5_interpreter(MG5DIR,ma5_path,loglevel=100) 351 except (Exception, SystemExit) as e: 352 logger.warning('Fail to create a MadAnalysis5 instance. Therefore the default analysis with MadAnalysis5 will be empty') 353 return 354 if MA5_interpreter is None: 355 return 356 357 MA5_main = MA5_interpreter.main 358 for lvl in ['parton','hadron']: 359 if lvl in levels: 360 card_to_generate = pjoin(output_dir,'madanalysis5_%s_card_default.dat'%lvl) 361 try: 362 text = MA5_main.madgraph.generate_card(history, proc_defs, processes,lvl) 363 except (Exception, SystemExit) as e: 364 # keep the default card (skip only) 365 logger.warning('MadAnalysis5 failed to write a %s-level'%lvl+ 366 ' default analysis card for this process.') 367 logger.warning('Therefore, %s-level default analysis with MadAnalysis5 will be empty.'%lvl) 368 error=StringIO() 369 traceback.print_exc(file=error) 370 logger.debug('MadAnalysis5 error was:') 371 logger.debug('-'*60) 372 logger.debug(error.getvalue()[:-1]) 373 logger.debug('-'*60) 374 else: 375 open(card_to_generate,'w').write(text) 376 stop = time.time() 377 if stop-start >1: 378 logger.info('Cards created in %.2fs' % (stop-start))
379 380 #=========================================================================== 381 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 382 #===========================================================================
383 - def write_procdef_mg5(self, file_pos, modelname, process_str):
384 """ write an equivalent of the MG4 proc_card in order that all the Madevent 385 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 386 387 proc_card_template = template_files.mg4_proc_card.mg4_template 388 process_template = template_files.mg4_proc_card.process_template 389 process_text = '' 390 coupling = '' 391 new_process_content = [] 392 393 394 # First find the coupling and suppress the coupling from process_str 395 #But first ensure that coupling are define whithout spaces: 396 process_str = process_str.replace(' =', '=') 397 process_str = process_str.replace('= ', '=') 398 process_str = process_str.replace(',',' , ') 399 #now loop on the element and treat all the coupling 400 for info in process_str.split(): 401 if '=' in info: 402 coupling += info + '\n' 403 else: 404 new_process_content.append(info) 405 # Recombine the process_str (which is the input process_str without coupling 406 #info) 407 process_str = ' '.join(new_process_content) 408 409 #format the SubProcess 410 replace_dict = {'process': process_str, 411 'coupling': coupling} 412 process_text += process_template.substitute(replace_dict) 413 414 replace_dict = {'process': process_text, 415 'model': modelname, 416 'multiparticle':''} 417 text = proc_card_template.substitute(replace_dict) 418 419 if file_pos: 420 ff = open(file_pos, 'w') 421 ff.write(text) 422 ff.close() 423 else: 424 return replace_dict
425 426
427 - def pass_information_from_cmd(self, cmd):
428 """Pass information for MA5""" 429 430 self.proc_defs = cmd._curr_proc_defs
431 432 #=========================================================================== 433 # Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz 434 #===========================================================================
435 - def finalize(self, matrix_elements, history='', mg5options={}, flaglist=[]):
436 """Function to finalize v4 directory, for inheritance.""" 437 438 self.create_run_card(matrix_elements, history) 439 self.create_MA5_cards(matrix_elements, history)
440
441 - def create_MA5_cards(self,matrix_elements,history):
442 """ A wrapper around the creation of the MA5 cards so that it can be 443 bypassed by daughter classes (i.e. in standalone).""" 444 if 'madanalysis5_path' in self.opt and not \ 445 self.opt['madanalysis5_path'] is None and not self.proc_defs is None: 446 processes = None 447 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 448 processes = [me.get('processes') for megroup in matrix_elements 449 for me in megroup['matrix_elements']] 450 elif matrix_elements: 451 processes = [me.get('processes') 452 for me in matrix_elements['matrix_elements']] 453 454 self.create_default_madanalysis5_cards( 455 history, self.proc_defs, processes, 456 self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'), 457 levels = ['hadron','parton']) 458 459 for level in ['hadron','parton']: 460 # Copying these cards turn on the use of MadAnalysis5 by default. 461 if os.path.isfile(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level)): 462 shutil.copy(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level), 463 pjoin(self.dir_path,'Cards','madanalysis5_%s_card.dat'%level))
464 465 #=========================================================================== 466 # Create the proc_characteristic file passing information to the run_interface 467 #===========================================================================
468 - def create_proc_charac(self, matrix_elements=None, history="", **opts):
469 470 self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics'))
471 472 #=========================================================================== 473 # write_matrix_element_v4 474 #===========================================================================
475 - def write_matrix_element_v4(self):
476 """Function to write a matrix.f file, for inheritance. 477 """ 478 pass
479 480 #=========================================================================== 481 # write_pdf_opendata 482 #===========================================================================
483 - def write_pdf_opendata(self):
484 """ modify the pdf opendata file, to allow direct access to cluster node 485 repository if configure""" 486 487 if not self.opt["cluster_local_path"]: 488 changer = {"pdf_systemwide": ""} 489 else: 490 to_add = """ 491 tempname='%(path)s'//Tablefile 492 open(IU,file=tempname,status='old',ERR=1) 493 return 494 1 tempname='%(path)s/Pdfdata/'//Tablefile 495 open(IU,file=tempname,status='old',ERR=2) 496 return 497 2 tempname='%(path)s/lhapdf'//Tablefile 498 open(IU,file=tempname,status='old',ERR=3) 499 return 500 3 tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile 501 open(IU,file=tempname,status='old',ERR=4) 502 return 503 4 tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile 504 open(IU,file=tempname,status='old',ERR=5) 505 return 506 """ % {"path" : self.opt["cluster_local_path"]} 507 508 changer = {"pdf_systemwide": to_add} 509 510 511 ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "opendata.f")) 512 template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"),"r").read() 513 ff.writelines(template % changer) 514 515 # Do the same for lhapdf set 516 if not self.opt["cluster_local_path"]: 517 changer = {"cluster_specific_path": ""} 518 else: 519 to_add=""" 520 LHAPath='%(path)s/PDFsets' 521 Inquire(File=LHAPath, exist=exists) 522 if(exists)return 523 LHAPath='%(path)s/../lhapdf/pdfsets/6.1/' 524 Inquire(File=LHAPath, exist=exists) 525 if(exists)return 526 LHAPath='%(path)s/../lhapdf/pdfsets/' 527 Inquire(File=LHAPath, exist=exists) 528 if(exists)return 529 LHAPath='./PDFsets' 530 """ % {"path" : self.opt["cluster_local_path"]} 531 changer = {"cluster_specific_path": to_add} 532 533 ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f")) 534 #ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w") 535 template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"),"r").read() 536 ff.writelines(template % changer) 537 538 539 return
540 541 542 543 #=========================================================================== 544 # write_maxparticles_file 545 #===========================================================================
546 - def write_maxparticles_file(self, writer, matrix_elements):
547 """Write the maxparticles.inc file for MadEvent""" 548 549 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 550 maxparticles = max([me.get_nexternal_ninitial()[0] for me in \ 551 matrix_elements.get('matrix_elements')]) 552 else: 553 maxparticles = max([me.get_nexternal_ninitial()[0] \ 554 for me in matrix_elements]) 555 556 lines = "integer max_particles\n" 557 lines += "parameter(max_particles=%d)" % maxparticles 558 559 # Write the file 560 writer.writelines(lines) 561 562 return True
563 564 565 #=========================================================================== 566 # export the model 567 #===========================================================================
568 - def export_model_files(self, model_path):
569 """Configure the files/link of the process according to the model""" 570 571 # Import the model 572 for file in os.listdir(model_path): 573 if os.path.isfile(pjoin(model_path, file)): 574 shutil.copy2(pjoin(model_path, file), \ 575 pjoin(self.dir_path, 'Source', 'MODEL'))
576 577 591 599 600 601 #=========================================================================== 602 # export the helas routine 603 #===========================================================================
604 - def export_helas(self, helas_path):
605 """Configure the files/link of the process according to the model""" 606 607 # Import helas routine 608 for filename in os.listdir(helas_path): 609 filepos = pjoin(helas_path, filename) 610 if os.path.isfile(filepos): 611 if filepos.endswith('Makefile.template'): 612 cp(filepos, self.dir_path + '/Source/DHELAS/Makefile') 613 elif filepos.endswith('Makefile'): 614 pass 615 else: 616 cp(filepos, self.dir_path + '/Source/DHELAS')
617 # following lines do the same but without symbolic link 618 # 619 #def export_helas(mgme_dir, dir_path): 620 # 621 # # Copy the HELAS directory 622 # helas_dir = pjoin(mgme_dir, 'HELAS') 623 # for filename in os.listdir(helas_dir): 624 # if os.path.isfile(pjoin(helas_dir, filename)): 625 # shutil.copy2(pjoin(helas_dir, filename), 626 # pjoin(dir_path, 'Source', 'DHELAS')) 627 # shutil.move(pjoin(dir_path, 'Source', 'DHELAS', 'Makefile.template'), 628 # pjoin(dir_path, 'Source', 'DHELAS', 'Makefile')) 629 # 630 631 #=========================================================================== 632 # generate_subprocess_directory 633 #===========================================================================
634 - def generate_subprocess_directory(self, matrix_element, 635 fortran_model, 636 me_number):
637 """Routine to generate a subprocess directory (for inheritance)""" 638 639 pass
640 641 #=========================================================================== 642 # get_source_libraries_list 643 #===========================================================================
644 - def get_source_libraries_list(self):
645 """ Returns the list of libraries to be compiling when compiling the 646 SOURCE directory. It is different for loop_induced processes and 647 also depends on the value of the 'output_dependencies' option""" 648 649 return ['$(LIBDIR)libdhelas.$(libext)', 650 '$(LIBDIR)libpdf.$(libext)', 651 '$(LIBDIR)libmodel.$(libext)', 652 '$(LIBDIR)libcernlib.$(libext)', 653 '$(LIBDIR)libbias.$(libext)']
654 655 #=========================================================================== 656 # write_source_makefile 657 #===========================================================================
658 - def write_source_makefile(self, writer):
659 """Write the nexternal.inc file for MG4""" 660 661 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 662 set_of_lib = ' '.join(['$(LIBRARIES)']+self.get_source_libraries_list()) 663 if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'): 664 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 665 MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param 666 param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n''' 667 else: 668 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 669 param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n''' 670 671 replace_dict= {'libraries': set_of_lib, 672 'model':model_line, 673 'additional_dsample': '', 674 'additional_dependencies':''} 675 676 if writer: 677 text = open(path).read() % replace_dict 678 writer.write(text) 679 680 return replace_dict
681 682 #=========================================================================== 683 # write_nexternal_madspin 684 #===========================================================================
685 - def write_nexternal_madspin(self, writer, nexternal, ninitial):
686 """Write the nexternal_prod.inc file for madspin""" 687 688 replace_dict = {} 689 690 replace_dict['nexternal'] = nexternal 691 replace_dict['ninitial'] = ninitial 692 693 file = """ \ 694 integer nexternal_prod 695 parameter (nexternal_prod=%(nexternal)d) 696 integer nincoming_prod 697 parameter (nincoming_prod=%(ninitial)d)""" % replace_dict 698 699 # Write the file 700 if writer: 701 writer.writelines(file) 702 return True 703 else: 704 return replace_dict
705 706 #=========================================================================== 707 # write_helamp_madspin 708 #===========================================================================
709 - def write_helamp_madspin(self, writer, ncomb):
710 """Write the helamp.inc file for madspin""" 711 712 replace_dict = {} 713 714 replace_dict['ncomb'] = ncomb 715 716 file = """ \ 717 integer ncomb1 718 parameter (ncomb1=%(ncomb)d) 719 double precision helamp(ncomb1) 720 common /to_helamp/helamp """ % replace_dict 721 722 # Write the file 723 if writer: 724 writer.writelines(file) 725 return True 726 else: 727 return replace_dict
728 729 730 731 #=========================================================================== 732 # write_nexternal_file 733 #===========================================================================
734 - def write_nexternal_file(self, writer, nexternal, ninitial):
735 """Write the nexternal.inc file for MG4""" 736 737 replace_dict = {} 738 739 replace_dict['nexternal'] = nexternal 740 replace_dict['ninitial'] = ninitial 741 742 file = """ \ 743 integer nexternal 744 parameter (nexternal=%(nexternal)d) 745 integer nincoming 746 parameter (nincoming=%(ninitial)d)""" % replace_dict 747 748 # Write the file 749 if writer: 750 writer.writelines(file) 751 return True 752 else: 753 return replace_dict
754 #=========================================================================== 755 # write_pmass_file 756 #===========================================================================
757 - def write_pmass_file(self, writer, matrix_element):
758 """Write the pmass.inc file for MG4""" 759 760 model = matrix_element.get('processes')[0].get('model') 761 762 lines = [] 763 for wf in matrix_element.get_external_wavefunctions(): 764 mass = model.get('particle_dict')[wf.get('pdg_code')].get('mass') 765 if mass.lower() != "zero": 766 mass = "abs(%s)" % mass 767 768 lines.append("pmass(%d)=%s" % \ 769 (wf.get('number_external'), mass)) 770 771 # Write the file 772 writer.writelines(lines) 773 774 return True
775 776 #=========================================================================== 777 # write_ngraphs_file 778 #===========================================================================
779 - def write_ngraphs_file(self, writer, nconfigs):
780 """Write the ngraphs.inc file for MG4. Needs input from 781 write_configs_file.""" 782 783 file = " integer n_max_cg\n" 784 file = file + "parameter (n_max_cg=%d)" % nconfigs 785 786 # Write the file 787 writer.writelines(file) 788 789 return True
790 791 #=========================================================================== 792 # write_leshouche_file 793 #===========================================================================
794 - def write_leshouche_file(self, writer, matrix_element):
795 """Write the leshouche.inc file for MG4""" 796 797 # Write the file 798 writer.writelines(self.get_leshouche_lines(matrix_element, 0)) 799 800 return True
801 802 #=========================================================================== 803 # get_leshouche_lines 804 #===========================================================================
805 - def get_leshouche_lines(self, matrix_element, numproc):
806 """Write the leshouche.inc file for MG4""" 807 808 # Extract number of external particles 809 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 810 811 lines = [] 812 for iproc, proc in enumerate(matrix_element.get('processes')): 813 legs = proc.get_legs_with_decays() 814 lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \ 815 (iproc + 1, numproc+1, nexternal, 816 ",".join([str(l.get('id')) for l in legs]))) 817 if iproc == 0 and numproc == 0: 818 for i in [1, 2]: 819 lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \ 820 (i, nexternal, 821 ",".join([ "%3r" % 0 ] * ninitial + \ 822 [ "%3r" % i ] * (nexternal - ninitial)))) 823 824 # Here goes the color connections corresponding to the JAMPs 825 # Only one output, for the first subproc! 826 if iproc == 0: 827 # If no color basis, just output trivial color flow 828 if not matrix_element.get('color_basis'): 829 for i in [1, 2]: 830 lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \ 831 (i, numproc+1,nexternal, 832 ",".join([ "%3r" % 0 ] * nexternal))) 833 834 else: 835 # First build a color representation dictionnary 836 repr_dict = {} 837 for l in legs: 838 repr_dict[l.get('number')] = \ 839 proc.get('model').get_particle(l.get('id')).get_color()\ 840 * (-1)**(1+l.get('state')) 841 # Get the list of color flows 842 color_flow_list = \ 843 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 844 ninitial) 845 # And output them properly 846 for cf_i, color_flow_dict in enumerate(color_flow_list): 847 for i in [0, 1]: 848 lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \ 849 (i + 1, cf_i + 1, numproc+1, nexternal, 850 ",".join(["%3r" % color_flow_dict[l.get('number')][i] \ 851 for l in legs]))) 852 853 return lines
854 855 856 857 858 #=========================================================================== 859 # write_maxamps_file 860 #===========================================================================
861 - def write_maxamps_file(self, writer, maxamps, maxflows, 862 maxproc,maxsproc):
863 """Write the maxamps.inc file for MG4.""" 864 865 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 866 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 867 (maxamps, maxflows) 868 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 869 (maxproc, maxsproc) 870 871 # Write the file 872 writer.writelines(file) 873 874 return True
875 876 877 #=========================================================================== 878 # Routines to output UFO models in MG4 format 879 #=========================================================================== 880
881 - def convert_model(self, model, wanted_lorentz = [], 882 wanted_couplings = []):
883 """ Create a full valid MG4 model from a MG5 model (coming from UFO)""" 884 885 # Make sure aloha is in quadruple precision if needed 886 old_aloha_mp=aloha.mp_precision 887 aloha.mp_precision=self.opt['mp'] 888 self.model = model 889 # create the MODEL 890 write_dir=pjoin(self.dir_path, 'Source', 'MODEL') 891 model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic) 892 model_builder.build(wanted_couplings) 893 894 # Backup the loop mode, because it can be changed in what follows. 895 old_loop_mode = aloha.loop_mode 896 897 # Create the aloha model or use the existing one (for loop exporters 898 # this is useful as the aloha model will be used again in the 899 # LoopHelasMatrixElements generated). We do not save the model generated 900 # here if it didn't exist already because it would be a waste of 901 # memory for tree level applications since aloha is only needed at the 902 # time of creating the aloha fortran subroutines. 903 if hasattr(self, 'aloha_model'): 904 aloha_model = self.aloha_model 905 else: 906 aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath'))) 907 aloha_model.add_Lorentz_object(model.get('lorentz')) 908 909 # Compute the subroutines 910 if wanted_lorentz: 911 aloha_model.compute_subset(wanted_lorentz) 912 else: 913 aloha_model.compute_all(save=False) 914 915 # Write them out 916 write_dir=pjoin(self.dir_path, 'Source', 'DHELAS') 917 aloha_model.write(write_dir, 'Fortran') 918 919 # Revert the original aloha loop mode 920 aloha.loop_mode = old_loop_mode 921 922 #copy Helas Template 923 cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile') 924 if any([any([tag.startswith('L') for tag in d[1]]) for d in wanted_lorentz]): 925 cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f', 926 write_dir+'/aloha_functions.f') 927 aloha_model.loop_mode = False 928 else: 929 cp(MG5DIR + '/aloha/template_files/aloha_functions.f', 930 write_dir+'/aloha_functions.f') 931 create_aloha.write_aloha_file_inc(write_dir, '.f', '.o') 932 933 # Make final link in the Process 934 self.make_model_symbolic_link() 935 936 # Re-establish original aloha mode 937 aloha.mp_precision=old_aloha_mp
938 939 940 #=========================================================================== 941 # Helper functions 942 #===========================================================================
943 - def modify_grouping(self, matrix_element):
944 """allow to modify the grouping (if grouping is in place) 945 return two value: 946 - True/False if the matrix_element was modified 947 - the new(or old) matrix element""" 948 949 return False, matrix_element
950 951 #=========================================================================== 952 # Helper functions 953 #===========================================================================
954 - def get_mg5_info_lines(self):
955 """Return info lines for MG5, suitable to place at beginning of 956 Fortran files""" 957 958 info = misc.get_pkg_info() 959 info_lines = "" 960 if info and 'version' in info and 'date' in info: 961 info_lines = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \ 962 (info['version'], info['date']) 963 info_lines = info_lines + \ 964 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 965 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 966 else: 967 info_lines = "# Generated by MadGraph5_aMC@NLO\n" + \ 968 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 969 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 970 971 return info_lines
972
973 - def get_process_info_lines(self, matrix_element):
974 """Return info lines describing the processes for this matrix element""" 975 976 return"\n".join([ "C " + process.nice_string().replace('\n', '\nC * ') \ 977 for process in matrix_element.get('processes')])
978 979
980 - def get_helicity_lines(self, matrix_element,array_name='NHEL'):
981 """Return the Helicity matrix definition lines for this matrix element""" 982 983 helicity_line_list = [] 984 i = 0 985 for helicities in matrix_element.get_helicity_matrix(): 986 i = i + 1 987 int_list = [i, len(helicities)] 988 int_list.extend(helicities) 989 helicity_line_list.append(\ 990 ("DATA ("+array_name+"(I,%4r),I=1,%d) /" + \ 991 ",".join(['%2r'] * len(helicities)) + "/") % tuple(int_list)) 992 993 return "\n".join(helicity_line_list)
994
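Each helicity combination becomes one DATA statement; for a four-fermion process the first returned line would look roughly like the comment below (column widths approximate, exporter assumed to be a ProcessExporterFortran instance):

print(exporter.get_helicity_lines(matrix_element).splitlines()[0])
# DATA (NHEL(I,   1),I=1,4) /-1,-1,-1,-1/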
995 - def get_ic_line(self, matrix_element):
996 """Return the IC definition line coming after helicities, required by 997 switchmom in madevent""" 998 999 nexternal = matrix_element.get_nexternal_ninitial()[0] 1000 int_list = list(range(1, nexternal + 1)) 1001 1002 return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, 1003 ",".join([str(i) for \ 1004 i in int_list]))
1005
1006 - def set_chosen_SO_index(self, process, squared_orders):
1007 """ From the squared order constraints set by the user, this function 1008 finds what indices of the squared_orders list the user intends to pick. 1009 It returns this as a string of comma-separated successive '.true.' or 1010 '.false.' for each index.""" 1011 1012 user_squared_orders = process.get('squared_orders') 1013 split_orders = process.get('split_orders') 1014 1015 if len(user_squared_orders)==0: 1016 return ','.join(['.true.']*len(squared_orders)) 1017 1018 res = [] 1019 for sqsos in squared_orders: 1020 is_a_match = True 1021 for user_sqso, value in user_squared_orders.items(): 1022 if (process.get_squared_order_type(user_sqso) =='==' and \ 1023 value!=sqsos[split_orders.index(user_sqso)]) or \ 1024 (process.get_squared_order_type(user_sqso) in ['<=','='] and \ 1025 value<sqsos[split_orders.index(user_sqso)]) or \ 1026 (process.get_squared_order_type(user_sqso) == '>' and \ 1027 value>=sqsos[split_orders.index(user_sqso)]): 1028 is_a_match = False 1029 break 1030 res.append('.true.' if is_a_match else '.false.') 1031 1032 return ','.join(res)
1033
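A worked illustration of the matching logic above, with illustrative inputs: assuming split_orders = ['QCD', 'QED'], candidate squared orders [(4,0), (2,2), (0,4)] and a user constraint QED<=2, the first two combinations satisfy the bound while the last one does not, so the routine returns the selection string shown last.

# split_orders        = ['QCD', 'QED']
# squared_orders      = [(4, 0), (2, 2), (0, 4)]
# user constraint     :  QED <= 2
# returned selection  :  '.true.,.true.,.false.'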
1034 - def get_split_orders_lines(self, orders, array_name, n=5):
1035 """ Return the split orders definition as defined in the list orders and 1036 for the name of the array 'array_name'. Split rows in chunks of size n.""" 1037 1038 ret_list = [] 1039 for index, order in enumerate(orders): 1040 for k in range(0, len(order), n): 1041 ret_list.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" % \ 1042 (array_name,index + 1, k + 1, min(k + n, len(order)), 1043 ','.join(["%5r" % i for i in order[k:k + n]]))) 1044 return ret_list
1045
1046 - def format_integer_list(self, list, name, n=5):
1047 """ Return an initialization of the python list in argument following 1048 the fortran syntax using the data keyword assignment, filling an array 1049 of name 'name'. It splits rows in chunks of size n.""" 1050 1051 ret_list = [] 1052 for k in range(0, len(list), n): 1053 ret_list.append("DATA (%s(i),i=%3r,%3r) /%s/" % \ 1054 (name, k + 1, min(k + n, len(list)), 1055 ','.join(["%5r" % i for i in list[k:k + n]]))) 1056 return ret_list
1057
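A small usage sketch for the helper above (the array name CONFSUB is illustrative and the column widths approximate; exporter is assumed to be a ProcessExporterFortran instance):

exporter.format_integer_list(list(range(1, 8)), 'CONFSUB', n=5)
# -> ['DATA (CONFSUB(i),i=  1,  5) /    1,    2,    3,    4,    5/',
#     'DATA (CONFSUB(i),i=  6,  7) /    6,    7/']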
1058 - def get_color_data_lines(self, matrix_element, n=6):
1059 """Return the color matrix definition lines for this matrix element. Split 1060 rows in chunks of size n.""" 1061 1062 if not matrix_element.get('color_matrix'): 1063 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"] 1064 else: 1065 ret_list = [] 1066 my_cs = color.ColorString() 1067 for index, denominator in \ 1068 enumerate(matrix_element.get('color_matrix').\ 1069 get_line_denominators()): 1070 # First write the common denominator for this color matrix line 1071 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator)) 1072 # Then write the numerators for the matrix elements 1073 num_list = matrix_element.get('color_matrix').\ 1074 get_line_numerators(index, denominator) 1075 1076 assert all([int(i)==i for i in num_list]) 1077 1078 for k in range(0, len(num_list), n): 1079 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \ 1080 (index + 1, k + 1, min(k + n, len(num_list)), 1081 ','.join(["%5i" % int(i) for i in num_list[k:k + n]]))) 1082 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index]) 1083 ret_list.append("C %s" % repr(my_cs)) 1084 1085 return ret_list
1086 1087
1088 - def get_den_factor_line(self, matrix_element):
1089 """Return the denominator factor line for this matrix element""" 1090 1091 return "DATA IDEN/%2r/" % \ 1092 matrix_element.get_denominator_factor()
1093
1094 - def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
1095 """Return the ICOLAMP matrix, showing which JAMPs contribute to 1096 which configs (diagrams).""" 1097 1098 ret_list = [] 1099 1100 booldict = {False: ".false.", True: ".true."} 1101 1102 if not matrix_element.get('color_basis'): 1103 # No color, so only one color factor. Simply write a ".true." 1104 # for each config (i.e., each diagram with only 3 particle 1105 # vertices 1106 configs = len(mapconfigs) 1107 ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \ 1108 (num_matrix_element, configs, 1109 ','.join([".true." for i in range(configs)]))) 1110 return ret_list 1111 1112 # There is a color basis - create a list showing which JAMPs have 1113 # contributions to which configs 1114 1115 # Only want to include leading color flows, so find max_Nc 1116 color_basis = matrix_element.get('color_basis') 1117 1118 # We don't want to include the power of Nc's which come from the potential 1119 # loop color trace (i.e. in the case of a closed fermion loop for example) 1120 # so we subtract it here when computing max_Nc 1121 max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in 1122 color_basis.values()],[])) 1123 1124 # Crate dictionary between diagram number and JAMP number 1125 diag_jamp = {} 1126 for ijamp, col_basis_elem in \ 1127 enumerate(sorted(matrix_element.get('color_basis').keys())): 1128 for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]: 1129 # Only use color flows with Nc == max_Nc. However, notice that 1130 # we don't want to include the Nc power coming from the loop 1131 # in this counting. 1132 if (diag_tuple[4]-diag_tuple[5]) == max_Nc: 1133 diag_num = diag_tuple[0] + 1 1134 # Add this JAMP number to this diag_num 1135 diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \ 1136 [ijamp+1] 1137 1138 colamps = ijamp + 1 1139 for iconfig, num_diag in enumerate(mapconfigs): 1140 if num_diag == 0: 1141 continue 1142 1143 # List of True or False 1144 bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)] 1145 # Add line 1146 ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \ 1147 (iconfig+1, num_matrix_element, colamps, 1148 ','.join(["%s" % booldict[b] for b in \ 1149 bool_list]))) 1150 1151 return ret_list
1152
1153 - def get_amp2_lines(self, matrix_element, config_map = []):
1154 """Return the amp2(i) = sum(amp for diag(i))^2 lines""" 1155 1156 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 1157 # Get minimum legs in a vertex 1158 vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \ 1159 matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]] 1160 minvert = min(vert_list) if vert_list!=[] else 0 1161 1162 ret_lines = [] 1163 if config_map: 1164 # In this case, we need to sum up all amplitudes that have 1165 # identical topologies, as given by the config_map (which 1166 # gives the topology/config for each of the diagrams 1167 diagrams = matrix_element.get('diagrams') 1168 # Combine the diagrams with identical topologies 1169 config_to_diag_dict = {} 1170 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1171 if config_map[idiag] == 0: 1172 continue 1173 try: 1174 config_to_diag_dict[config_map[idiag]].append(idiag) 1175 except KeyError: 1176 config_to_diag_dict[config_map[idiag]] = [idiag] 1177 # Write out the AMP2s summing squares of amplitudes belonging 1178 # to eiher the same diagram or different diagrams with 1179 # identical propagator properties. Note that we need to use 1180 # AMP2 number corresponding to the first diagram number used 1181 # for that AMP2. 1182 for config in sorted(config_to_diag_dict.keys()): 1183 1184 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \ 1185 {"num": (config_to_diag_dict[config][0] + 1)} 1186 1187 amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \ 1188 sum([diagrams[idiag].get('amplitudes') for \ 1189 idiag in config_to_diag_dict[config]], [])]) 1190 1191 # Not using \sum |M|^2 anymore since this creates troubles 1192 # when ckm is not diagonal due to the JIM mechanism. 1193 if '+' in amp: 1194 line += "(%s)*dconjg(%s)" % (amp, amp) 1195 else: 1196 line += "%s*dconjg(%s)" % (amp, amp) 1197 ret_lines.append(line) 1198 else: 1199 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1200 # Ignore any diagrams with 4-particle vertices. 1201 if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert: 1202 continue 1203 # Now write out the expression for AMP2, meaning the sum of 1204 # squared amplitudes belonging to the same diagram 1205 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)} 1206 line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \ 1207 {"num": a.get('number')} for a in \ 1208 diag.get('amplitudes')]) 1209 ret_lines.append(line) 1210 1211 return ret_lines
1212 1213 #=========================================================================== 1214 # Returns the data statements initializing the coefficients for the JAMP 1215 # decomposition. It is used when the JAMP initialization is decided to be 1216 # done through big arrays containing the projection coefficients. 1217 #===========================================================================
1218 - def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\ 1219 n=50, Nc_value=3):
1220 """This functions return the lines defining the DATA statement setting 1221 the coefficients building the JAMPS out of the AMPS. Split rows in 1222 bunches of size n. 1223 One can specify the color_basis from which the color amplitudes originates 1224 so that there are commentaries telling what color structure each JAMP 1225 corresponds to.""" 1226 1227 if(not isinstance(color_amplitudes,list) or 1228 not (color_amplitudes and isinstance(color_amplitudes[0],list))): 1229 raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_coefs") 1230 1231 res_list = [] 1232 my_cs = color.ColorString() 1233 for index, coeff_list in enumerate(color_amplitudes): 1234 # Create the list of the complete numerical coefficient. 1235 coefs_list=[coefficient[0][0]*coefficient[0][1]*\ 1236 (fractions.Fraction(Nc_value)**coefficient[0][3]) for \ 1237 coefficient in coeff_list] 1238 # Create the list of the numbers of the contributing amplitudes. 1239 # Mutliply by -1 for those which have an imaginary coefficient. 1240 ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \ 1241 for coefficient in coeff_list] 1242 # Find the common denominator. 1243 commondenom=abs(reduce(fractions.gcd, coefs_list).denominator) 1244 num_list=[(coefficient*commondenom).numerator \ 1245 for coefficient in coefs_list] 1246 res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\ 1247 index+1,len(num_list))) 1248 res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\ 1249 index+1,commondenom)) 1250 if color_basis: 1251 my_cs.from_immutable(sorted(color_basis.keys())[index]) 1252 res_list.append("C %s" % repr(my_cs)) 1253 for k in range(0, len(num_list), n): 1254 res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \ 1255 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1256 ','.join(["%6r" % i for i in num_list[k:k + n]]))) 1257 res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \ 1258 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1259 ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]]))) 1260 pass 1261 return res_list
1262 1263
1264 - def get_JAMP_lines_split_order(self, col_amps, split_order_amps, 1265 split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
1266 """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps 1267 defined as a matrix element or directly as a color_amplitudes dictionary. 1268 The split_order_amps specifies the group of amplitudes sharing the same 1269 amplitude orders which should be put in together in a given set of JAMPS. 1270 The split_order_amps is supposed to have the format of the second output 1271 of the function get_split_orders_mapping function in helas_objects.py. 1272 The split_order_names is optional (it should correspond to the process 1273 'split_orders' attribute) and only present to provide comments in the 1274 JAMP definitions in the code.""" 1275 1276 # Let the user call get_JAMP_lines_split_order directly from a 1277 error_msg="Malformed '%s' argument passed to the "+\ 1278 "get_JAMP_lines_split_order function: %s"%str(split_order_amps) 1279 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 1280 color_amplitudes=col_amps.get_color_amplitudes() 1281 elif(isinstance(col_amps,list)): 1282 if(col_amps and isinstance(col_amps[0],list)): 1283 color_amplitudes=col_amps 1284 else: 1285 raise MadGraph5Error(error_msg%'col_amps') 1286 else: 1287 raise MadGraph5Error(error_msg%'col_amps') 1288 1289 # Verify the sanity of the split_order_amps and split_order_names args 1290 if isinstance(split_order_amps,list): 1291 for elem in split_order_amps: 1292 if len(elem)!=2: 1293 raise MadGraph5Error(error_msg%'split_order_amps') 1294 # Check the first element of the two lists to make sure they are 1295 # integers, although in principle they should all be integers. 1296 if not isinstance(elem[0],tuple) or \ 1297 not isinstance(elem[1],tuple) or \ 1298 not isinstance(elem[0][0],int) or \ 1299 not isinstance(elem[1][0],int): 1300 raise MadGraph5Error(error_msg%'split_order_amps') 1301 else: 1302 raise MadGraph5Error(error_msg%'split_order_amps') 1303 1304 if not split_order_names is None: 1305 if isinstance(split_order_names,list): 1306 # Should specify the same number of names as there are elements 1307 # in the key of the split_order_amps. 1308 if len(split_order_names)!=len(split_order_amps[0][0]): 1309 raise MadGraph5Error(error_msg%'split_order_names') 1310 # Check the first element of the list to be a string 1311 if not isinstance(split_order_names[0],str): 1312 raise MadGraph5Error(error_msg%'split_order_names') 1313 else: 1314 raise MadGraph5Error(error_msg%'split_order_names') 1315 1316 # Now scan all contributing orders to be individually computed and 1317 # construct the list of color_amplitudes for JAMP to be constructed 1318 # accordingly. 1319 res_list=[] 1320 for i, amp_order in enumerate(split_order_amps): 1321 col_amps_order = [] 1322 for jamp in color_amplitudes: 1323 col_amps_order.append([col_amp for col_amp in jamp if col_amp[1] in amp_order[1]]) 1324 if split_order_names: 1325 res_list.append('C JAMPs contributing to orders '+' '.join( 1326 ['%s=%i'%order for order in zip(split_order_names, 1327 amp_order[0])])) 1328 if self.opt['export_format'] in ['madloop_matchbox']: 1329 res_list.extend(self.get_JAMP_lines(col_amps_order, 1330 JAMP_format="JAMP(%s,{0})".format(str(i+1)), 1331 JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1)))) 1332 else: 1333 res_list.extend(self.get_JAMP_lines(col_amps_order, 1334 JAMP_format="JAMP(%s,{0})".format(str(i+1)))) 1335 1336 return res_list
1337 1338
1339 - def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", 1340 split=-1):
1341 """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps 1342 defined as a matrix element or directly as a color_amplitudes dictionary, 1343 Jamp_formatLC should be define to allow to add LeadingColor computation 1344 (usefull for MatchBox) 1345 The split argument defines how the JAMP lines should be split in order 1346 not to be too long.""" 1347 1348 # Let the user call get_JAMP_lines directly from a MatrixElement or from 1349 # the color amplitudes lists. 1350 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 1351 color_amplitudes=col_amps.get_color_amplitudes() 1352 elif(isinstance(col_amps,list)): 1353 if(col_amps and isinstance(col_amps[0],list)): 1354 color_amplitudes=col_amps 1355 else: 1356 raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_lines") 1357 else: 1358 raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_lines") 1359 1360 1361 res_list = [] 1362 for i, coeff_list in enumerate(color_amplitudes): 1363 # It might happen that coeff_list is empty if this function was 1364 # called from get_JAMP_lines_split_order (i.e. if some color flow 1365 # does not contribute at all for a given order). 1366 # In this case we simply set it to 0. 1367 if coeff_list==[]: 1368 res_list.append(((JAMP_format+"=0D0") % str(i + 1))) 1369 continue 1370 # Break the JAMP definition into 'n=split' pieces to avoid having 1371 # arbitrarly long lines. 1372 first=True 1373 n = (len(coeff_list)+1 if split<=0 else split) 1374 while coeff_list!=[]: 1375 coefs=coeff_list[:n] 1376 coeff_list=coeff_list[n:] 1377 res = ((JAMP_format+"=") % str(i + 1)) + \ 1378 ((JAMP_format % str(i + 1)) if not first and split>0 else '') 1379 1380 first=False 1381 # Optimization: if all contributions to that color basis element have 1382 # the same coefficient (up to a sign), put it in front 1383 list_fracs = [abs(coefficient[0][1]) for coefficient in coefs] 1384 common_factor = False 1385 diff_fracs = list(set(list_fracs)) 1386 if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1: 1387 common_factor = True 1388 global_factor = diff_fracs[0] 1389 res = res + '%s(' % self.coeff(1, global_factor, False, 0) 1390 1391 # loop for JAMP 1392 for (coefficient, amp_number) in coefs: 1393 if not coefficient: 1394 continue 1395 if common_factor: 1396 res = (res + "%s" + AMP_format) % \ 1397 (self.coeff(coefficient[0], 1398 coefficient[1] / abs(coefficient[1]), 1399 coefficient[2], 1400 coefficient[3]), 1401 str(amp_number)) 1402 else: 1403 res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0], 1404 coefficient[1], 1405 coefficient[2], 1406 coefficient[3]), 1407 str(amp_number)) 1408 1409 if common_factor: 1410 res = res + ')' 1411 1412 res_list.append(res) 1413 1414 return res_list
1415
1416 - def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
1417 """Generate the PDF lines for the auto_dsig.f file""" 1418 1419 processes = matrix_element.get('processes') 1420 model = processes[0].get('model') 1421 1422 pdf_definition_lines = "" 1423 pdf_data_lines = "" 1424 pdf_lines = "" 1425 1426 if ninitial == 1: 1427 pdf_lines = "PD(0) = 0d0\nIPROC = 0\n" 1428 for i, proc in enumerate(processes): 1429 process_line = proc.base_string() 1430 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 1431 pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n" 1432 pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n" 1433 else: 1434 # Pick out all initial state particles for the two beams 1435 initial_states = [sorted(list(set([p.get_initial_pdg(1) for \ 1436 p in processes]))), 1437 sorted(list(set([p.get_initial_pdg(2) for \ 1438 p in processes])))] 1439 1440 # Prepare all variable names 1441 pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \ 1442 sum(initial_states,[])]) 1443 for key,val in pdf_codes.items(): 1444 pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m') 1445 1446 # Set conversion from PDG code to number used in PDF calls 1447 pdgtopdf = {21: 0, 22: 7} 1448 1449 # Fill in missing entries of pdgtopdf 1450 for pdg in sum(initial_states,[]): 1451 if not pdg in pdgtopdf and not pdg in list(pdgtopdf.values()): 1452 pdgtopdf[pdg] = pdg 1453 elif pdg not in pdgtopdf and pdg in list(pdgtopdf.values()): 1454 # If any particle has pdg code 7, we need to use something else 1455 pdgtopdf[pdg] = 6000000 + pdg 1456 1457 # Get PDF variable declarations for all initial states 1458 for i in [0,1]: 1459 pdf_definition_lines += "DOUBLE PRECISION " + \ 1460 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 1461 for pdg in \ 1462 initial_states[i]]) + \ 1463 "\n" 1464 1465 # Get PDF data lines for all initial states 1466 for i in [0,1]: 1467 pdf_data_lines += "DATA " + \ 1468 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 1469 for pdg in initial_states[i]]) + \ 1470 "/%d*1D0/" % len(initial_states[i]) + \ 1471 "\n" 1472 1473 # Get PDF lines for all different initial states 1474 for i, init_states in enumerate(initial_states): 1475 if subproc_group: 1476 pdf_lines = pdf_lines + \ 1477 "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \ 1478 % (i + 1, i + 1) 1479 else: 1480 pdf_lines = pdf_lines + \ 1481 "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \ 1482 % (i + 1, i + 1) 1483 1484 for nbi,initial_state in enumerate(init_states): 1485 if initial_state in list(pdf_codes.keys()): 1486 if subproc_group: 1487 pdf_lines = pdf_lines + \ 1488 ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP, 1," + \ 1489 "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \ 1490 (pdf_codes[initial_state], 1491 i + 1, i + 1, pdgtopdf[initial_state], 1492 i + 1, i + 1) 1493 else: 1494 pdf_lines = pdf_lines + \ 1495 ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP, %d," + \ 1496 "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \ 1497 (pdf_codes[initial_state], 1498 i + 1, i + 1, pdgtopdf[initial_state], 1499 i + 1, 1500 i + 1, i + 1) 1501 pdf_lines = pdf_lines + "ENDIF\n" 1502 1503 # Add up PDFs for the different initial state particles 1504 pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n" 1505 for proc in processes: 1506 process_line = proc.base_string() 1507 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! 
" + process_line 1508 pdf_lines = pdf_lines + "\nPD(IPROC)=" 1509 for ibeam in [1, 2]: 1510 initial_state = proc.get_initial_pdg(ibeam) 1511 if initial_state in list(pdf_codes.keys()): 1512 pdf_lines = pdf_lines + "%s%d*" % \ 1513 (pdf_codes[initial_state], ibeam) 1514 else: 1515 pdf_lines = pdf_lines + "1d0*" 1516 # Remove last "*" from pdf_lines 1517 pdf_lines = pdf_lines[:-1] + "\n" 1518 pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n" 1519 1520 # Remove last line break from the return variables 1521 return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1522 1523 #=========================================================================== 1524 # write_props_file 1525 #===========================================================================
1526 - def write_props_file(self, writer, matrix_element, s_and_t_channels):
1527 """Write the props.inc file for MadEvent. Needs input from 1528 write_configs_file.""" 1529 1530 lines = [] 1531 1532 particle_dict = matrix_element.get('processes')[0].get('model').\ 1533 get('particle_dict') 1534 1535 for iconf, configs in enumerate(s_and_t_channels): 1536 for vertex in configs[0] + configs[1][:-1]: 1537 leg = vertex.get('legs')[-1] 1538 if leg.get('id') not in particle_dict: 1539 # Fake propagator used in multiparticle vertices 1540 mass = 'zero' 1541 width = 'zero' 1542 pow_part = 0 1543 else: 1544 particle = particle_dict[leg.get('id')] 1545 # Get mass 1546 if particle.get('mass').lower() == 'zero': 1547 mass = particle.get('mass') 1548 else: 1549 mass = "abs(%s)" % particle.get('mass') 1550 # Get width 1551 if particle.get('width').lower() == 'zero': 1552 width = particle.get('width') 1553 else: 1554 width = "abs(%s)" % particle.get('width') 1555 1556 pow_part = 1 + int(particle.is_boson()) 1557 1558 lines.append("prmass(%d,%d) = %s" % \ 1559 (leg.get('number'), iconf + 1, mass)) 1560 lines.append("prwidth(%d,%d) = %s" % \ 1561 (leg.get('number'), iconf + 1, width)) 1562 lines.append("pow(%d,%d) = %d" % \ 1563 (leg.get('number'), iconf + 1, pow_part)) 1564 1565 # Write the file 1566 writer.writelines(lines) 1567 1568 return True
1569 1570 #=========================================================================== 1571 # write_configs_file 1572 #===========================================================================
1573 - def write_configs_file(self, writer, matrix_element):
1574 """Write the configs.inc file for MadEvent""" 1575 1576 # Extract number of external particles 1577 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1578 1579 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 1580 mapconfigs = [c[0] for c in configs] 1581 model = matrix_element.get('processes')[0].get('model') 1582 return mapconfigs, self.write_configs_file_from_diagrams(writer, 1583 [[c[1]] for c in configs], 1584 mapconfigs, 1585 nexternal, ninitial, 1586 model)
1587 1588 #=========================================================================== 1589 # write_configs_file_from_diagrams 1590 #===========================================================================
1591 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 1592 nexternal, ninitial, model):
1593 """Write the actual configs.inc file. 1594 1595 configs is the diagrams corresponding to configs (each 1596 diagrams is a list of corresponding diagrams for all 1597 subprocesses, with None if there is no corresponding diagrams 1598 for a given process). 1599 mapconfigs gives the diagram number for each config. 1600 1601 For s-channels, we need to output one PDG for each subprocess in 1602 the subprocess group, in order to be able to pick the right 1603 one for multiprocesses.""" 1604 1605 lines = [] 1606 1607 s_and_t_channels = [] 1608 1609 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 1610 for config in configs if [d for d in config if d][0].\ 1611 get_vertex_leg_numbers()!=[]] 1612 minvert = min(vert_list) if vert_list!=[] else 0 1613 1614 # Number of subprocesses 1615 nsubprocs = len(configs[0]) 1616 1617 nconfigs = 0 1618 1619 new_pdg = model.get_first_non_pdg() 1620 1621 for iconfig, helas_diags in enumerate(configs): 1622 if any(vert > minvert for vert in [d for d in helas_diags if d]\ 1623 [0].get_vertex_leg_numbers()) : 1624 # Only 3-vertices allowed in configs.inc except for vertices 1625 # which originate from a shrunk loop. 1626 continue 1627 nconfigs += 1 1628 1629 # Need s- and t-channels for all subprocesses, including 1630 # those that don't contribute to this config 1631 empty_verts = [] 1632 stchannels = [] 1633 for h in helas_diags: 1634 if h: 1635 # get_s_and_t_channels gives vertices starting from 1636 # final state external particles and working inwards 1637 stchannels.append(h.get('amplitudes')[0].\ 1638 get_s_and_t_channels(ninitial, model, new_pdg)) 1639 else: 1640 stchannels.append((empty_verts, None)) 1641 1642 # For t-channels, just need the first non-empty one 1643 tchannels = [t for s,t in stchannels if t != None][0] 1644 1645 # For s_and_t_channels (to be used later) use only first config 1646 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 1647 tchannels]) 1648 1649 # Make sure empty_verts is same length as real vertices 1650 if any([s for s,t in stchannels]): 1651 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 1652 1653 # Reorganize s-channel vertices to get a list of all 1654 # subprocesses for each vertex 1655 schannels = list(zip(*[s for s,t in stchannels])) 1656 else: 1657 schannels = [] 1658 1659 allchannels = schannels 1660 if len(tchannels) > 1: 1661 # Write out tchannels only if there are any non-trivial ones 1662 allchannels = schannels + tchannels 1663 1664 # Write out propagators for s-channel and t-channel vertices 1665 1666 lines.append("# Diagram %d" % (mapconfigs[iconfig])) 1667 # Correspondance between the config and the diagram = amp2 1668 lines.append("data mapconfig(%d)/%d/" % (nconfigs, 1669 mapconfigs[iconfig])) 1670 1671 for verts in allchannels: 1672 if verts in schannels: 1673 vert = [v for v in verts if v][0] 1674 else: 1675 vert = verts 1676 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 1677 last_leg = vert.get('legs')[-1] 1678 lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 1679 (last_leg.get('number'), nconfigs, len(daughters), 1680 ",".join([str(d) for d in daughters]))) 1681 if verts in schannels: 1682 pdgs = [] 1683 for v in verts: 1684 if v: 1685 pdgs.append(v.get('legs')[-1].get('id')) 1686 else: 1687 pdgs.append(0) 1688 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 1689 (last_leg.get('number'), nconfigs, nsubprocs, 1690 ",".join([str(d) for d in pdgs]))) 1691 lines.append("data tprid(%d,%d)/0/" % \ 1692 (last_leg.get('number'), 
nconfigs)) 1693 elif verts in tchannels[:-1]: 1694 lines.append("data tprid(%d,%d)/%d/" % \ 1695 (last_leg.get('number'), nconfigs, 1696 abs(last_leg.get('id')))) 1697 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 1698 (last_leg.get('number'), nconfigs, nsubprocs, 1699 ",".join(['0'] * nsubprocs))) 1700 1701 # Write out number of configs 1702 lines.append("# Number of configs") 1703 lines.append("data mapconfig(0)/%d/" % nconfigs) 1704 1705 # Write the file 1706 writer.writelines(lines) 1707 1708 return s_and_t_channels
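# --------------------------------------------------------------------------
# Editor's illustration (not part of the original module): the data
# statements produced by write_configs_file_from_diagrams() above follow
# the usual MadEvent configs.inc layout.  A hypothetical single-diagram
# config with one s-channel Z propagator (internal leg -1) could look like
#
#     # Diagram 1
#     data mapconfig(1)/1/
#     data (iforest(i,-1,1),i=1,2)/3,4/
#     data (sprop(i,-1,1),i=1,1)/23/
#     data tprid(-1,1)/0/
#     # Number of configs
#     data mapconfig(0)/1/
#
# where sprop lists one PDG code per subprocess of the group and tprid is
# only non-zero for t-channel propagators.
# --------------------------------------------------------------------------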
1709 1710 #=========================================================================== 1711 # Global helper methods 1712 #=========================================================================== 1713
1714 - def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1715 """Returns a nicely formatted string for the coefficients in JAMP lines""" 1716 1717 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power 1718 1719 if total_coeff == 1: 1720 if is_imaginary: 1721 return '+imag1*' 1722 else: 1723 return '+' 1724 elif total_coeff == -1: 1725 if is_imaginary: 1726 return '-imag1*' 1727 else: 1728 return '-' 1729 1730 res_str = '%+iD0' % total_coeff.numerator 1731 1732 if total_coeff.denominator != 1: 1733 # Check if total_coeff is an integer 1734 res_str = res_str + '/%iD0' % total_coeff.denominator 1735 1736 if is_imaginary: 1737 res_str = res_str + '*imag1' 1738 1739 return res_str + '*'
1740 1741
1742 - def set_fortran_compiler(self, default_compiler, force=False):
1743 """Set compiler based on what's available on the system""" 1744 1745 # Check for compiler 1746 if default_compiler['fortran'] and misc.which(default_compiler['fortran']): 1747 f77_compiler = default_compiler['fortran'] 1748 elif misc.which('gfortran'): 1749 f77_compiler = 'gfortran' 1750 elif misc.which('g77'): 1751 f77_compiler = 'g77' 1752 elif misc.which('f77'): 1753 f77_compiler = 'f77' 1754 elif default_compiler['fortran']: 1755 logger.warning('No Fortran Compiler detected! Please install one') 1756 f77_compiler = default_compiler['fortran'] # maybe misc fail so try with it 1757 else: 1758 raise MadGraph5Error('No Fortran Compiler detected! Please install one') 1759 logger.info('Use Fortran compiler ' + f77_compiler) 1760 1761 1762 # Check for compiler. 1. set default. 1763 if default_compiler['f2py']: 1764 f2py_compiler = default_compiler['f2py'] 1765 else: 1766 f2py_compiler = '' 1767 # Try to find the correct one. 1768 if default_compiler['f2py'] and misc.which(default_compiler['f2py']): 1769 f2py_compiler = default_compiler['f2py'] 1770 elif misc.which('f2py'): 1771 f2py_compiler = 'f2py' 1772 elif sys.version_info[1] == 6: 1773 if misc.which('f2py-2.6'): 1774 f2py_compiler = 'f2py-2.6' 1775 elif misc.which('f2py2.6'): 1776 f2py_compiler = 'f2py2.6' 1777 elif sys.version_info[1] == 7: 1778 if misc.which('f2py-2.7'): 1779 f2py_compiler = 'f2py-2.7' 1780 elif misc.which('f2py2.7'): 1781 f2py_compiler = 'f2py2.7' 1782 1783 to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler} 1784 1785 1786 self.replace_make_opt_f_compiler(to_replace) 1787 # Replace also for Template but not for cluster 1788 if 'MADGRAPH_DATA' not in os.environ and ReadWrite: 1789 self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO')) 1790 1791 return f77_compiler
1792 1793 # an alias for backward compatibility 1794 set_compiler = set_fortran_compiler 1795 1796
1797 - def set_cpp_compiler(self, default_compiler, force=False):
1798 """Set compiler based on what's available on the system""" 1799 1800 # Check for compiler 1801 if default_compiler and misc.which(default_compiler): 1802 compiler = default_compiler 1803 elif misc.which('g++'): 1804 #check if clang version 1805 p = misc.Popen(['g++', '--version'], stdout=subprocess.PIPE, 1806 stderr=subprocess.PIPE) 1807 out, _ = p.communicate() 1808 out = out.decode() 1809 if 'clang' in str(out) and misc.which('clang'): 1810 compiler = 'clang' 1811 else: 1812 compiler = 'g++' 1813 elif misc.which('c++'): 1814 compiler = 'c++' 1815 elif misc.which('clang'): 1816 compiler = 'clang' 1817 elif default_compiler: 1818 logger.warning('No c++ Compiler detected! Please install one') 1819 compiler = default_compiler # maybe misc fail so try with it 1820 else: 1821 raise MadGraph5Error('No c++ Compiler detected! Please install one') 1822 logger.info('Use c++ compiler ' + compiler) 1823 self.replace_make_opt_c_compiler(compiler) 1824 # Replace also for Template but not for cluster 1825 if 'MADGRAPH_DATA' not in os.environ and ReadWrite and \ 1826 not __debug__ and not os.path.exists(pjoin(MG5DIR,'bin','create_release.py')): 1827 self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO')) 1828 1829 return compiler
1830 1831
1832 - def replace_make_opt_f_compiler(self, compilers, root_dir = ""):
1833 """Set FC=compiler in Source/make_opts""" 1834 1835 assert isinstance(compilers, dict) 1836 1837 mod = False #avoid to rewrite the file if not needed 1838 if not root_dir: 1839 root_dir = self.dir_path 1840 1841 compiler= compilers['fortran'] 1842 f2py_compiler = compilers['f2py'] 1843 if not f2py_compiler: 1844 f2py_compiler = 'f2py' 1845 for_update= {'DEFAULT_F_COMPILER':compiler, 1846 'DEFAULT_F2PY_COMPILER':f2py_compiler} 1847 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1848 1849 try: 1850 common_run_interface.CommonRunCmd.update_make_opts_full( 1851 make_opts, for_update) 1852 except IOError: 1853 if root_dir == self.dir_path: 1854 logger.info('Fail to set compiler. Trying to continue anyway.')
1855
1856 - def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
1857 """Set CXX=compiler in Source/make_opts. 1858 The version is also checked, in order to set some extra flags 1859 if the compiler is clang (on MACOS)""" 1860 1861 is_clang = misc.detect_if_cpp_compiler_is_clang(compiler) 1862 is_lc = misc.detect_cpp_std_lib_dependence(compiler) == '-lc++' 1863 1864 1865 # list of the variable to set in the make_opts file 1866 for_update= {'DEFAULT_CPP_COMPILER':compiler, 1867 'MACFLAG':'-mmacosx-version-min=10.7' if is_clang and is_lc else '', 1868 'STDLIB': '-lc++' if is_lc else '-lstdc++', 1869 'STDLIB_FLAG': '-stdlib=libc++' if is_lc and is_clang else '' 1870 } 1871 1872 # for MOJAVE remove the MACFLAG: 1873 if is_clang: 1874 import platform 1875 version, _, _ = platform.mac_ver() 1876 if not version:# not linux 1877 version = 14 # set version to remove MACFLAG 1878 else: 1879 version = int(version.split('.')[1]) 1880 if version >= 14: 1881 for_update['MACFLAG'] = '-mmacosx-version-min=10.8' if is_lc else '' 1882 1883 if not root_dir: 1884 root_dir = self.dir_path 1885 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1886 1887 try: 1888 common_run_interface.CommonRunCmd.update_make_opts_full( 1889 make_opts, for_update) 1890 except IOError: 1891 if root_dir == self.dir_path: 1892 logger.info('Fail to set compiler. Trying to continue anyway.') 1893 1894 return
1895
1896 #=============================================================================== 1897 # ProcessExporterFortranSA 1898 #=============================================================================== 1899 -class ProcessExporterFortranSA(ProcessExporterFortran):
1900 """Class to take care of exporting a set of matrix elements to 1901 MadGraph v4 StandAlone format.""" 1902 1903 matrix_template = "matrix_standalone_v4.inc" 1904
1905 - def __init__(self, *args,**opts):
1906 """add the format information compare to standard init""" 1907 1908 if 'format' in opts: 1909 self.format = opts['format'] 1910 del opts['format'] 1911 else: 1912 self.format = 'standalone' 1913 1914 self.prefix_info = {} 1915 ProcessExporterFortran.__init__(self, *args, **opts)
1916
1917 - def copy_template(self, model):
1918 """Additional actions needed for setup of Template 1919 """ 1920 1921 #First copy the full template tree if dir_path doesn't exit 1922 if os.path.isdir(self.dir_path): 1923 return 1924 1925 logger.info('initialize a new standalone directory: %s' % \ 1926 os.path.basename(self.dir_path)) 1927 temp_dir = pjoin(self.mgme_dir, 'Template/LO') 1928 1929 # Create the directory structure 1930 os.mkdir(self.dir_path) 1931 os.mkdir(pjoin(self.dir_path, 'Source')) 1932 os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL')) 1933 os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS')) 1934 os.mkdir(pjoin(self.dir_path, 'SubProcesses')) 1935 os.mkdir(pjoin(self.dir_path, 'bin')) 1936 os.mkdir(pjoin(self.dir_path, 'bin', 'internal')) 1937 os.mkdir(pjoin(self.dir_path, 'lib')) 1938 os.mkdir(pjoin(self.dir_path, 'Cards')) 1939 1940 # Information at top-level 1941 #Write version info 1942 shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path) 1943 try: 1944 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 1945 except IOError: 1946 MG5_version = misc.get_pkg_info() 1947 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \ 1948 "5." + MG5_version['version']) 1949 1950 1951 # Add file in SubProcesses 1952 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'), 1953 pjoin(self.dir_path, 'SubProcesses', 'makefileP')) 1954 1955 if self.format == 'standalone': 1956 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'), 1957 pjoin(self.dir_path, 'SubProcesses', 'check_sa.f')) 1958 1959 # Add file in Source 1960 shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'), 1961 pjoin(self.dir_path, 'Source')) 1962 # add the makefile 1963 filename = pjoin(self.dir_path,'Source','makefile') 1964 self.write_source_makefile(writers.FileWriter(filename))
1965 1966 #=========================================================================== 1967 # export model files 1968 #===========================================================================
1969 - def export_model_files(self, model_path):
1970 """export the model dependent files for V4 model""" 1971 1972 super(ProcessExporterFortranSA,self).export_model_files(model_path) 1973 # Add the routine update_as_param in v4 model 1974 # This is a function created in the UFO 1975 text=""" 1976 subroutine update_as_param() 1977 call setpara('param_card.dat',.false.) 1978 return 1979 end 1980 """ 1981 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 1982 ff.write(text) 1983 ff.close() 1984 1985 text = open(pjoin(self.dir_path,'SubProcesses','check_sa.f')).read() 1986 text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)') 1987 fsock = open(pjoin(self.dir_path,'SubProcesses','check_sa.f'), 'w') 1988 fsock.write(text) 1989 fsock.close() 1990 1991 self.make_model_symbolic_link()
1992 1993 #=========================================================================== 1994 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 1995 #===========================================================================
1996 - def write_procdef_mg5(self, file_pos, modelname, process_str):
1997 """ write an equivalent of the MG4 proc_card in order that all the Madevent 1998 Perl script of MadEvent4 are still working properly for pure MG5 run. 1999 Not needed for StandAlone so just return 2000 """ 2001 2002 return
2003 2004 2005 #=========================================================================== 2006 # Make the Helas and Model directories for Standalone directory 2007 #===========================================================================
2008 - def make(self):
2009 """Run make in the DHELAS and MODEL directories, to set up 2010 everything for running standalone 2011 """ 2012 2013 source_dir = pjoin(self.dir_path, "Source") 2014 logger.info("Running make for Helas") 2015 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2016 logger.info("Running make for Model") 2017 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran')
2018 2019 #=========================================================================== 2020 # Create proc_card_mg5.dat for Standalone directory 2021 #===========================================================================
2022 - def finalize(self, matrix_elements, history, mg5options, flaglist):
2023 """Finalize Standalone MG4 directory by 2024 generation proc_card_mg5.dat 2025 generate a global makefile 2026 """ 2027 2028 compiler = {'fortran': mg5options['fortran_compiler'], 2029 'cpp': mg5options['cpp_compiler'], 2030 'f2py': mg5options['f2py_compiler']} 2031 2032 self.compiler_choice(compiler) 2033 self.make() 2034 2035 # Write command history as proc_card_mg5 2036 if history and os.path.isdir(pjoin(self.dir_path, 'Cards')): 2037 output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat') 2038 history.write(output_file) 2039 2040 ProcessExporterFortran.finalize(self, matrix_elements, 2041 history, mg5options, flaglist) 2042 open(pjoin(self.dir_path,'__init__.py'),'w') 2043 open(pjoin(self.dir_path,'SubProcesses','__init__.py'),'w') 2044 2045 if False:#'mode' in self.opt and self.opt['mode'] == "reweight": 2046 #add the module to hande the NLO weight 2047 files.copytree(pjoin(MG5DIR, 'Template', 'RWGTNLO'), 2048 pjoin(self.dir_path, 'Source')) 2049 files.copytree(pjoin(MG5DIR, 'Template', 'NLO', 'Source', 'PDF'), 2050 pjoin(self.dir_path, 'Source', 'PDF')) 2051 self.write_pdf_opendata() 2052 2053 if self.prefix_info: 2054 self.write_f2py_splitter() 2055 self.write_f2py_makefile() 2056 self.write_f2py_check_sa(matrix_elements, 2057 pjoin(self.dir_path,'SubProcesses','check_sa.py')) 2058 else: 2059 # create a single makefile to compile all the subprocesses 2060 text = '''\n# For python linking (require f2py part of numpy)\nifeq ($(origin MENUM),undefined)\n MENUM=2\nendif\n''' 2061 deppython = '' 2062 for Pdir in os.listdir(pjoin(self.dir_path,'SubProcesses')): 2063 if os.path.isdir(pjoin(self.dir_path, 'SubProcesses', Pdir)): 2064 text += '%(0)s/matrix$(MENUM)py.so:\n\tcd %(0)s;make matrix$(MENUM)py.so\n'% {'0': Pdir} 2065 deppython += ' %(0)s/matrix$(MENUM)py.so ' % {'0': Pdir} 2066 text+='all: %s\n\techo \'done\'' % deppython 2067 2068 ff = open(pjoin(self.dir_path, 'SubProcesses', 'makefile'),'a') 2069 ff.write(text) 2070 ff.close()
2071
2072 - def write_f2py_splitter(self):
2073 """write a function to call the correct matrix element""" 2074 2075 template = """ 2076 %(python_information)s 2077 subroutine smatrixhel(pdgs, procid, npdg, p, ALPHAS, SCALE2, nhel, ANS) 2078 IMPLICIT NONE 2079 2080 CF2PY double precision, intent(in), dimension(0:3,npdg) :: p 2081 CF2PY integer, intent(in), dimension(npdg) :: pdgs 2082 CF2PY integer, intent(in):: procid 2083 CF2PY integer, intent(in) :: npdg 2084 CF2PY double precision, intent(out) :: ANS 2085 CF2PY double precision, intent(in) :: ALPHAS 2086 CF2PY double precision, intent(in) :: SCALE2 2087 integer pdgs(*) 2088 integer npdg, nhel, procid 2089 double precision p(*) 2090 double precision ANS, ALPHAS, PI,SCALE2 2091 include 'coupl.inc' 2092 2093 PI = 3.141592653589793D0 2094 G = 2* DSQRT(ALPHAS*PI) 2095 CALL UPDATE_AS_PARAM() 2096 if (scale2.ne.0d0) stop 1 2097 2098 %(smatrixhel)s 2099 2100 return 2101 end 2102 2103 SUBROUTINE INITIALISE(PATH) 2104 C ROUTINE FOR F2PY to read the benchmark point. 2105 IMPLICIT NONE 2106 CHARACTER*512 PATH 2107 CF2PY INTENT(IN) :: PATH 2108 CALL SETPARA(PATH) !first call to setup the paramaters 2109 RETURN 2110 END 2111 2112 2113 subroutine CHANGE_PARA(name, value) 2114 implicit none 2115 CF2PY intent(in) :: name 2116 CF2PY intent(in) :: value 2117 2118 character*512 name 2119 double precision value 2120 2121 include '../Source/MODEL/input.inc' 2122 include '../Source/MODEL/coupl.inc' 2123 2124 SELECT CASE (name) 2125 %(parameter_setup)s 2126 CASE DEFAULT 2127 write(*,*) 'no parameter matching', name, value 2128 END SELECT 2129 2130 return 2131 end 2132 2133 subroutine update_all_coup() 2134 implicit none 2135 call coup() 2136 return 2137 end 2138 2139 2140 subroutine get_pdg_order(PDG, ALLPROC) 2141 IMPLICIT NONE 2142 CF2PY INTEGER, intent(out) :: PDG(%(nb_me)i,%(maxpart)i) 2143 CF2PY INTEGER, intent(out) :: ALLPROC(%(nb_me)i) 2144 INTEGER PDG(%(nb_me)i,%(maxpart)i), PDGS(%(nb_me)i,%(maxpart)i) 2145 INTEGER ALLPROC(%(nb_me)i),PIDs(%(nb_me)i) 2146 DATA PDGS/ %(pdgs)s / 2147 DATA PIDS/ %(pids)s / 2148 PDG = PDGS 2149 ALLPROC = PIDS 2150 RETURN 2151 END 2152 2153 subroutine get_prefix(PREFIX) 2154 IMPLICIT NONE 2155 CF2PY CHARACTER*20, intent(out) :: PREFIX(%(nb_me)i) 2156 character*20 PREFIX(%(nb_me)i),PREF(%(nb_me)i) 2157 DATA PREF / '%(prefix)s'/ 2158 PREFIX = PREF 2159 RETURN 2160 END 2161 2162 2163 """ 2164 2165 allids = list(self.prefix_info.keys()) 2166 allprefix = [self.prefix_info[key][0] for key in allids] 2167 min_nexternal = min([len(ids[0]) for ids in allids]) 2168 max_nexternal = max([len(ids[0]) for ids in allids]) 2169 2170 info = [] 2171 for (key, pid), (prefix, tag) in self.prefix_info.items(): 2172 info.append('#PY %s : %s # %s %s' % (tag, key, prefix, pid)) 2173 2174 2175 text = [] 2176 for n_ext in range(min_nexternal, max_nexternal+1): 2177 current_id = [ids[0] for ids in allids if len(ids[0])==n_ext] 2178 current_pid = [ids[1] for ids in allids if len(ids[0])==n_ext] 2179 if not current_id: 2180 continue 2181 if min_nexternal != max_nexternal: 2182 if n_ext == min_nexternal: 2183 text.append(' if (npdg.eq.%i)then' % n_ext) 2184 else: 2185 text.append(' else if (npdg.eq.%i)then' % n_ext) 2186 for ii,pdgs in enumerate(current_id): 2187 pid = current_pid[ii] 2188 condition = '.and.'.join(['%i.eq.pdgs(%i)' %(pdg, i+1) for i, pdg in enumerate(pdgs)]) 2189 if ii==0: 2190 text.append( ' if(%s.and.(procid.le.0.or.procid.eq.%d)) then ! %i' % (condition, pid, ii)) 2191 else: 2192 text.append( ' else if(%s.and.(procid.le.0.or.procid.eq.%d)) then ! 
%i' % (condition,pid,ii)) 2193 text.append(' call %ssmatrixhel(p, nhel, ans)' % self.prefix_info[(pdgs,pid)][0]) 2194 text.append(' endif') 2195 #close the function 2196 if min_nexternal != max_nexternal: 2197 text.append('endif') 2198 2199 params = self.get_model_parameter(self.model) 2200 parameter_setup =[] 2201 for key, var in params.items(): 2202 parameter_setup.append(' CASE ("%s")\n %s = value' 2203 % (key, var)) 2204 2205 formatting = {'python_information':'\n'.join(info), 2206 'smatrixhel': '\n'.join(text), 2207 'maxpart': max_nexternal, 2208 'nb_me': len(allids), 2209 'pdgs': ','.join(str(pdg[i]) if i<len(pdg) else '0' 2210 for i in range(max_nexternal) for (pdg,pid) in allids), 2211 'prefix':'\',\''.join(allprefix), 2212 'pids': ','.join(str(pid) for (pdg,pid) in allids), 2213 'parameter_setup': '\n'.join(parameter_setup), 2214 } 2215 formatting['lenprefix'] = len(formatting['prefix']) 2216 text = template % formatting 2217 fsock = writers.FortranWriter(pjoin(self.dir_path, 'SubProcesses', 'all_matrix.f'),'w') 2218 fsock.writelines(text) 2219 fsock.close()
2220
2221 - def get_model_parameter(self, model):
2222 """ returns all the model parameter 2223 """ 2224 params = {} 2225 for p in model.get('parameters')[('external',)]: 2226 name = p.name 2227 nopref = name[4:] if name.startswith('mdl_') else name 2228 params[nopref] = name 2229 2230 block = p.lhablock 2231 lha = '_'.join([str(i) for i in p.lhacode]) 2232 params['%s_%s' % (block.upper(), lha)] = name 2233 2234 return params
2235 2236 2237 2238 2239
2240 - def write_f2py_check_sa(self, matrix_element, writer):
2241 """ Write the general check_sa.py in SubProcesses that calls all processes successively.""" 2242 # To be implemented. It is just an example file, i.e. not crucial. 2243 return
2244
2245 - def write_f2py_makefile(self):
2246 """ """ 2247 # Add file in SubProcesses 2248 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f2py'), 2249 pjoin(self.dir_path, 'SubProcesses', 'makefile'))
2250
2251 - def create_MA5_cards(self,*args,**opts):
2252 """ Overload the function of the mother so as to bypass this in StandAlone.""" 2253 pass
2254
2255 - def compiler_choice(self, compiler):
2256 """ Different daughter classes might want different compilers. 2257 So this function is meant to be overloaded if desired.""" 2258 2259 self.set_compiler(compiler)
2260 2261 #=========================================================================== 2262 # generate_subprocess_directory 2263 #===========================================================================
2264 - def generate_subprocess_directory(self, matrix_element, 2265 fortran_model, number):
2266 """Generate the Pxxxxx directory for a subprocess in MG4 standalone, 2267 including the necessary matrix.f and nexternal.inc files""" 2268 2269 cwd = os.getcwd() 2270 # Create the directory PN_xx_xxxxx in the specified path 2271 dirpath = pjoin(self.dir_path, 'SubProcesses', \ 2272 "P%s" % matrix_element.get('processes')[0].shell_string()) 2273 2274 if self.opt['sa_symmetry']: 2275 # avoid symmetric output 2276 for i,proc in enumerate(matrix_element.get('processes')): 2277 2278 tag = proc.get_tag() 2279 legs = proc.get('legs')[:] 2280 leg0 = proc.get('legs')[0] 2281 leg1 = proc.get('legs')[1] 2282 if not leg1.get('state'): 2283 proc.get('legs')[0] = leg1 2284 proc.get('legs')[1] = leg0 2285 flegs = proc.get('legs')[2:] 2286 for perm in itertools.permutations(flegs): 2287 for i,p in enumerate(perm): 2288 proc.get('legs')[i+2] = p 2289 dirpath2 = pjoin(self.dir_path, 'SubProcesses', \ 2290 "P%s" % proc.shell_string()) 2291 #restore original order 2292 proc.get('legs')[2:] = legs[2:] 2293 if os.path.exists(dirpath2): 2294 proc.get('legs')[:] = legs 2295 return 0 2296 proc.get('legs')[:] = legs 2297 2298 try: 2299 os.mkdir(dirpath) 2300 except os.error as error: 2301 logger.warning(error.strerror + " " + dirpath) 2302 2303 #try: 2304 # os.chdir(dirpath) 2305 #except os.error: 2306 # logger.error('Could not cd to directory %s' % dirpath) 2307 # return 0 2308 2309 logger.info('Creating files in directory %s' % dirpath) 2310 2311 # Extract number of external particles 2312 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2313 2314 # Create the matrix.f file and the nexternal.inc file 2315 if self.opt['export_format']=='standalone_msP': 2316 filename = pjoin(dirpath, 'matrix_prod.f') 2317 else: 2318 filename = pjoin(dirpath, 'matrix.f') 2319 2320 proc_prefix = '' 2321 if 'prefix' in self.cmd_options: 2322 if self.cmd_options['prefix'] == 'int': 2323 proc_prefix = 'M%s_' % number 2324 elif self.cmd_options['prefix'] == 'proc': 2325 proc_prefix = matrix_element.get('processes')[0].shell_string().split('_',1)[1] 2326 else: 2327 raise Exception('--prefix options supports only \'int\' and \'proc\'') 2328 for proc in matrix_element.get('processes'): 2329 ids = [l.get('id') for l in proc.get('legs_with_decays')] 2330 self.prefix_info[(tuple(ids), proc.get('id'))] = [proc_prefix, proc.get_tag()] 2331 2332 calls = self.write_matrix_element_v4( 2333 writers.FortranWriter(filename), 2334 matrix_element, 2335 fortran_model, 2336 proc_prefix=proc_prefix) 2337 2338 if self.opt['export_format'] == 'standalone_msP': 2339 filename = pjoin(dirpath,'configs_production.inc') 2340 mapconfigs, s_and_t_channels = self.write_configs_file(\ 2341 writers.FortranWriter(filename), 2342 matrix_element) 2343 2344 filename = pjoin(dirpath,'props_production.inc') 2345 self.write_props_file(writers.FortranWriter(filename), 2346 matrix_element, 2347 s_and_t_channels) 2348 2349 filename = pjoin(dirpath,'nexternal_prod.inc') 2350 self.write_nexternal_madspin(writers.FortranWriter(filename), 2351 nexternal, ninitial) 2352 2353 if self.opt['export_format']=='standalone_msF': 2354 filename = pjoin(dirpath, 'helamp.inc') 2355 ncomb=matrix_element.get_helicity_combinations() 2356 self.write_helamp_madspin(writers.FortranWriter(filename), 2357 ncomb) 2358 2359 filename = pjoin(dirpath, 'nexternal.inc') 2360 self.write_nexternal_file(writers.FortranWriter(filename), 2361 nexternal, ninitial) 2362 2363 filename = pjoin(dirpath, 'pmass.inc') 2364 self.write_pmass_file(writers.FortranWriter(filename), 2365 matrix_element) 
2366 2367 filename = pjoin(dirpath, 'ngraphs.inc') 2368 self.write_ngraphs_file(writers.FortranWriter(filename), 2369 len(matrix_element.get_all_amplitudes())) 2370 2371 # Generate diagrams 2372 if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True': 2373 filename = pjoin(dirpath, "matrix.ps") 2374 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 2375 get('diagrams'), 2376 filename, 2377 model=matrix_element.get('processes')[0].\ 2378 get('model'), 2379 amplitude=True) 2380 logger.info("Generating Feynman diagrams for " + \ 2381 matrix_element.get('processes')[0].nice_string()) 2382 plot.draw() 2383 2384 linkfiles = ['check_sa.f', 'coupl.inc'] 2385 2386 if proc_prefix and os.path.exists(pjoin(dirpath, '..', 'check_sa.f')): 2387 text = open(pjoin(dirpath, '..', 'check_sa.f')).read() 2388 pat = re.compile('smatrix', re.I) 2389 new_text, n = re.subn(pat, '%ssmatrix' % proc_prefix, text) 2390 with open(pjoin(dirpath, 'check_sa.f'),'w') as f: 2391 f.write(new_text) 2392 linkfiles.pop(0) 2393 2394 for file in linkfiles: 2395 ln('../%s' % file, cwd=dirpath) 2396 ln('../makefileP', name='makefile', cwd=dirpath) 2397 # Return to original PWD 2398 #os.chdir(cwd) 2399 2400 if not calls: 2401 calls = 0 2402 return calls
2403 2404 2405 #=========================================================================== 2406 # write_source_makefile 2407 #===========================================================================
2408 - def write_source_makefile(self, writer):
2409 """Write the nexternal.inc file for MG4""" 2410 2411 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 2412 set_of_lib = '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)' 2413 model_line='''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n''' 2414 2415 replace_dict= {'libraries': set_of_lib, 2416 'model':model_line, 2417 'additional_dsample': '', 2418 'additional_dependencies':''} 2419 2420 text = open(path).read() % replace_dict 2421 2422 if writer: 2423 writer.write(text) 2424 2425 return replace_dict
2426 2427 #=========================================================================== 2428 # write_matrix_element_v4 2429 #===========================================================================
2430 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 2431 write=True, proc_prefix=''):
2432 """Export a matrix element to a matrix.f file in MG4 standalone format 2433 if write is on False, just return the replace_dict and not write anything.""" 2434 2435 2436 if not matrix_element.get('processes') or \ 2437 not matrix_element.get('diagrams'): 2438 return 0 2439 2440 if writer: 2441 if not isinstance(writer, writers.FortranWriter): 2442 raise writers.FortranWriter.FortranWriterError(\ 2443 "writer not FortranWriter but %s" % type(writer)) 2444 # Set lowercase/uppercase Fortran code 2445 writers.FortranWriter.downcase = False 2446 2447 2448 if 'sa_symmetry' not in self.opt: 2449 self.opt['sa_symmetry']=False 2450 2451 2452 # The proc_id is for MadEvent grouping which is never used in SA. 2453 replace_dict = {'global_variable':'', 'amp2_lines':'', 2454 'proc_prefix':proc_prefix, 'proc_id':''} 2455 2456 # Extract helas calls 2457 helas_calls = fortran_model.get_matrix_element_calls(\ 2458 matrix_element) 2459 2460 replace_dict['helas_calls'] = "\n".join(helas_calls) 2461 2462 # Extract version number and date from VERSION file 2463 info_lines = self.get_mg5_info_lines() 2464 replace_dict['info_lines'] = info_lines 2465 2466 # Extract process info lines 2467 process_lines = self.get_process_info_lines(matrix_element) 2468 replace_dict['process_lines'] = process_lines 2469 2470 # Extract number of external particles 2471 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2472 replace_dict['nexternal'] = nexternal 2473 replace_dict['nincoming'] = ninitial 2474 2475 # Extract ncomb 2476 ncomb = matrix_element.get_helicity_combinations() 2477 replace_dict['ncomb'] = ncomb 2478 2479 # Extract helicity lines 2480 helicity_lines = self.get_helicity_lines(matrix_element) 2481 replace_dict['helicity_lines'] = helicity_lines 2482 2483 # Extract overall denominator 2484 # Averaging initial state color, spin, and identical FS particles 2485 replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element) 2486 2487 # Extract ngraphs 2488 ngraphs = matrix_element.get_number_of_amplitudes() 2489 replace_dict['ngraphs'] = ngraphs 2490 2491 # Extract nwavefuncs 2492 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2493 replace_dict['nwavefuncs'] = nwavefuncs 2494 2495 # Extract ncolor 2496 ncolor = max(1, len(matrix_element.get('color_basis'))) 2497 replace_dict['ncolor'] = ncolor 2498 2499 replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor() 2500 replace_dict['beamone_helavgfactor'], replace_dict['beamtwo_helavgfactor'] =\ 2501 matrix_element.get_beams_hel_avg_factor() 2502 2503 # Extract color data lines 2504 color_data_lines = self.get_color_data_lines(matrix_element) 2505 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2506 2507 if self.opt['export_format']=='standalone_msP': 2508 # For MadSpin need to return the AMP2 2509 amp2_lines = self.get_amp2_lines(matrix_element, [] ) 2510 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2511 replace_dict['global_variable'] = \ 2512 " Double Precision amp2(NGRAPHS)\n common/to_amps/ amp2\n" 2513 2514 # JAMP definition, depends on the number of independent split orders 2515 split_orders=matrix_element.get('processes')[0].get('split_orders') 2516 2517 if len(split_orders)==0: 2518 replace_dict['nSplitOrders']='' 2519 # Extract JAMP lines 2520 jamp_lines = self.get_JAMP_lines(matrix_element) 2521 # Consider the output of a dummy order 'ALL_ORDERS' for which we 2522 # set all amplitude order to weight 1 and only one squared order 2523 # contribution which is of course ALL_ORDERS=2. 
2524 squared_orders = [(2,),] 2525 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 2526 replace_dict['chosen_so_configs'] = '.TRUE.' 2527 replace_dict['nSqAmpSplitOrders']=1 2528 replace_dict['split_order_str_list']='' 2529 else: 2530 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 2531 replace_dict['nAmpSplitOrders']=len(amp_orders) 2532 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 2533 replace_dict['nSplitOrders']=len(split_orders) 2534 replace_dict['split_order_str_list']=str(split_orders) 2535 amp_so = self.get_split_orders_lines( 2536 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 2537 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 2538 replace_dict['ampsplitorders']='\n'.join(amp_so) 2539 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 2540 jamp_lines = self.get_JAMP_lines_split_order(\ 2541 matrix_element,amp_orders,split_order_names=split_orders) 2542 2543 # Now setup the array specifying what squared split order is chosen 2544 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 2545 matrix_element.get('processes')[0],squared_orders) 2546 2547 # For convenience we also write the driver check_sa_splitOrders.f 2548 # that explicitely writes out the contribution from each squared order. 2549 # The original driver still works and is compiled with 'make' while 2550 # the splitOrders one is compiled with 'make check_sa_born_splitOrders' 2551 check_sa_writer=writers.FortranWriter('check_sa_born_splitOrders.f') 2552 self.write_check_sa_splitOrders(squared_orders,split_orders, 2553 nexternal,ninitial,proc_prefix,check_sa_writer) 2554 2555 if write: 2556 writers.FortranWriter('nsqso_born.inc').writelines( 2557 """INTEGER NSQSO_BORN 2558 PARAMETER (NSQSO_BORN=%d)"""%replace_dict['nSqAmpSplitOrders']) 2559 2560 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2561 2562 matrix_template = self.matrix_template 2563 if self.opt['export_format']=='standalone_msP' : 2564 matrix_template = 'matrix_standalone_msP_v4.inc' 2565 elif self.opt['export_format']=='standalone_msF': 2566 matrix_template = 'matrix_standalone_msF_v4.inc' 2567 elif self.opt['export_format']=='matchbox': 2568 replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id') 2569 replace_dict["color_information"] = self.get_color_string_lines(matrix_element) 2570 2571 if len(split_orders)>0: 2572 if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']: 2573 logger.debug("Warning: The export format %s is not "+\ 2574 " available for individual ME evaluation of given coupl. orders."+\ 2575 " Only the total ME will be computed.", self.opt['export_format']) 2576 elif self.opt['export_format'] in ['madloop_matchbox']: 2577 replace_dict["color_information"] = self.get_color_string_lines(matrix_element) 2578 matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc" 2579 else: 2580 matrix_template = "matrix_standalone_splitOrders_v4.inc" 2581 2582 replace_dict['template_file'] = pjoin(_file_path, 'iolibs', 'template_files', matrix_template) 2583 replace_dict['template_file2'] = pjoin(_file_path, \ 2584 'iolibs/template_files/split_orders_helping_functions.inc') 2585 if write and writer: 2586 path = replace_dict['template_file'] 2587 content = open(path).read() 2588 content = content % replace_dict 2589 # Write the file 2590 writer.writelines(content) 2591 # Add the helper functions. 
2592 if len(split_orders)>0: 2593 content = '\n' + open(replace_dict['template_file2'])\ 2594 .read()%replace_dict 2595 writer.writelines(content) 2596 return len([call for call in helas_calls if call.find('#') != 0]) 2597 else: 2598 replace_dict['return_value'] = len([call for call in helas_calls if call.find('#') != 0]) 2599 return replace_dict # for subclass update
2600
2601 - def write_check_sa_splitOrders(self,squared_orders, split_orders, nexternal, 2602 nincoming, proc_prefix, writer):
2603 """ Write out a more advanced version of the check_sa drivers that 2604 individually returns the matrix element for each contributing squared 2605 order.""" 2606 2607 check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \ 2608 'template_files', 'check_sa_splitOrders.f')).read() 2609 printout_sq_orders=[] 2610 for i, squared_order in enumerate(squared_orders): 2611 sq_orders=[] 2612 for j, sqo in enumerate(squared_order): 2613 sq_orders.append('%s=%d'%(split_orders[j],sqo)) 2614 printout_sq_orders.append(\ 2615 "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\ 2616 %(i+1,' '.join(sq_orders),i+1)) 2617 printout_sq_orders='\n'.join(printout_sq_orders) 2618 replace_dict = {'printout_sqorders':printout_sq_orders, 2619 'nSplitOrders':len(squared_orders), 2620 'nexternal':nexternal, 2621 'nincoming':nincoming, 2622 'proc_prefix':proc_prefix} 2623 2624 if writer: 2625 writer.writelines(check_sa_content % replace_dict) 2626 else: 2627 return replace_dict
2628
2629 -class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
2630 """class to take care of exporting a set of matrix element for the Matchbox 2631 code in the case of Born only routine""" 2632 2633 default_opt = {'clean': False, 'complex_mass':False, 2634 'export_format':'matchbox', 'mp': False, 2635 'sa_symmetry': True} 2636 2637 #specific template of the born 2638 2639 2640 matrix_template = "matrix_standalone_matchbox.inc" 2641 2642 @staticmethod
2643 - def get_color_string_lines(matrix_element):
2644 """Return the color matrix definition lines for this matrix element. Split 2645 rows in chunks of size n.""" 2646 2647 if not matrix_element.get('color_matrix'): 2648 return "\n".join(["out = 1"]) 2649 2650 #start the real work 2651 color_denominators = matrix_element.get('color_matrix').\ 2652 get_line_denominators() 2653 matrix_strings = [] 2654 my_cs = color.ColorString() 2655 for i_color in range(len(color_denominators)): 2656 # Then write the numerators for the matrix elements 2657 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color]) 2658 t_str=repr(my_cs) 2659 t_match=re.compile(r"(\w+)\(([\s\d+\,]*)\)") 2660 # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall: 2661 # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')] 2662 all_matches = t_match.findall(t_str) 2663 output = {} 2664 arg=[] 2665 for match in all_matches: 2666 ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')] 2667 if ctype in ['ColorOne' ]: 2668 continue 2669 if ctype not in ['T', 'Tr' ]: 2670 raise MadGraph5Error('Color Structure not handled by Matchbox: %s' % ctype) 2671 tmparg += ['0'] 2672 arg +=tmparg 2673 for j, v in enumerate(arg): 2674 output[(i_color,j)] = v 2675 2676 for key in output: 2677 if matrix_strings == []: 2678 #first entry 2679 matrix_strings.append(""" 2680 if (in1.eq.%s.and.in2.eq.%s)then 2681 out = %s 2682 """ % (key[0], key[1], output[key])) 2683 else: 2684 #not first entry 2685 matrix_strings.append(""" 2686 elseif (in1.eq.%s.and.in2.eq.%s)then 2687 out = %s 2688 """ % (key[0], key[1], output[key])) 2689 if len(matrix_strings): 2690 matrix_strings.append(" else \n out = - 1 \n endif") 2691 else: 2692 return "\n out = - 1 \n " 2693 return "\n".join(matrix_strings)
2694
2695 - def make(self,*args,**opts):
2696 pass
2697
2698 - def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1, 2699 JAMP_formatLC=None):
2700 2701 """Adding leading color part of the colorflow""" 2702 2703 if not JAMP_formatLC: 2704 JAMP_formatLC= "LN%s" % JAMP_format 2705 2706 error_msg="Malformed '%s' argument passed to the get_JAMP_lines" 2707 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 2708 col_amps=col_amps.get_color_amplitudes() 2709 elif(isinstance(col_amps,list)): 2710 if(col_amps and isinstance(col_amps[0],list)): 2711 col_amps=col_amps 2712 else: 2713 raise MadGraph5Error(error_msg % 'col_amps') 2714 else: 2715 raise MadGraph5Error(error_msg % 'col_amps') 2716 2717 text = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps, 2718 JAMP_format=JAMP_format, 2719 AMP_format=AMP_format, 2720 split=-1) 2721 2722 2723 # Filter the col_ampls to generate only those without any 1/NC terms 2724 2725 LC_col_amps = [] 2726 for coeff_list in col_amps: 2727 to_add = [] 2728 for (coefficient, amp_number) in coeff_list: 2729 if coefficient[3]==0: 2730 to_add.append( (coefficient, amp_number) ) 2731 LC_col_amps.append(to_add) 2732 2733 text += super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps, 2734 JAMP_format=JAMP_formatLC, 2735 AMP_format=AMP_format, 2736 split=-1) 2737 2738 return text
2739
2740 2741 2742 2743 #=============================================================================== 2744 # ProcessExporterFortranMW 2745 #=============================================================================== 2746 -class ProcessExporterFortranMW(ProcessExporterFortran):
2747 """Class to take care of exporting a set of matrix elements to 2748 MadGraph v4 - MadWeight format.""" 2749 2750 matrix_file="matrix_standalone_v4.inc" 2751
2752 - def copy_template(self, model):
2753 """Additional actions needed for setup of Template 2754 """ 2755 2756 super(ProcessExporterFortranMW, self).copy_template(model) 2757 2758 # Add the MW specific file 2759 shutil.copytree(pjoin(MG5DIR,'Template','MadWeight'), 2760 pjoin(self.dir_path, 'Source','MadWeight'), True) 2761 shutil.copytree(pjoin(MG5DIR,'madgraph','madweight'), 2762 pjoin(self.dir_path, 'bin','internal','madweight'), True) 2763 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','setrun.f'), 2764 pjoin(self.dir_path, 'Source','setrun.f')) 2765 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','run.inc'), 2766 pjoin(self.dir_path, 'Source','run.inc')) 2767 # File created from Template (Different in some child class) 2768 filename = os.path.join(self.dir_path,'Source','run_config.inc') 2769 self.write_run_config_file(writers.FortranWriter(filename)) 2770 2771 try: 2772 subprocess.call([os.path.join(self.dir_path, 'Source','MadWeight','bin','internal','pass_to_madweight')], 2773 stdout = os.open(os.devnull, os.O_RDWR), 2774 stderr = os.open(os.devnull, os.O_RDWR), 2775 cwd=self.dir_path) 2776 except OSError: 2777 # Probably madweight already called 2778 pass 2779 2780 # Copy the different python file in the Template 2781 self.copy_python_file() 2782 # create the appropriate cuts.f 2783 self.get_mw_cuts_version() 2784 2785 # add the makefile in Source directory 2786 filename = os.path.join(self.dir_path,'Source','makefile') 2787 self.write_source_makefile(writers.FortranWriter(filename))
2788 2789 2790 2791 2792 #=========================================================================== 2793 # convert_model 2794 #===========================================================================
2795 - def convert_model(self, model, wanted_lorentz = [], 2796 wanted_couplings = []):
2797 2798 super(ProcessExporterFortranMW,self).convert_model(model, 2799 wanted_lorentz, wanted_couplings) 2800 2801 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 2802 try: 2803 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 2804 except OSError as error: 2805 pass 2806 model_path = model.get('modelpath') 2807 # This is not safe if there is a '##' or '-' in the path. 2808 shutil.copytree(model_path, 2809 pjoin(self.dir_path,'bin','internal','ufomodel'), 2810 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 2811 if hasattr(model, 'restrict_card'): 2812 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 2813 'restrict_default.dat') 2814 if isinstance(model.restrict_card, check_param_card.ParamCard): 2815 model.restrict_card.write(out_path) 2816 else: 2817 files.cp(model.restrict_card, out_path)
2818 2819 #=========================================================================== 2820 # copy_python_file 2821 #===========================================================================
2822 - def copy_python_file(self):
2823 """copy the python file require for the Template""" 2824 2825 # madevent interface 2826 cp(_file_path+'/interface/madweight_interface.py', 2827 self.dir_path+'/bin/internal/madweight_interface.py') 2828 cp(_file_path+'/interface/extended_cmd.py', 2829 self.dir_path+'/bin/internal/extended_cmd.py') 2830 cp(_file_path+'/interface/common_run_interface.py', 2831 self.dir_path+'/bin/internal/common_run_interface.py') 2832 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 2833 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 2834 cp(_file_path+'/iolibs/save_load_object.py', 2835 self.dir_path+'/bin/internal/save_load_object.py') 2836 cp(_file_path+'/madevent/gen_crossxhtml.py', 2837 self.dir_path+'/bin/internal/gen_crossxhtml.py') 2838 cp(_file_path+'/madevent/sum_html.py', 2839 self.dir_path+'/bin/internal/sum_html.py') 2840 cp(_file_path+'/various/FO_analyse_card.py', 2841 self.dir_path+'/bin/internal/FO_analyse_card.py') 2842 cp(_file_path+'/iolibs/file_writers.py', 2843 self.dir_path+'/bin/internal/file_writers.py') 2844 #model file 2845 cp(_file_path+'../models/check_param_card.py', 2846 self.dir_path+'/bin/internal/check_param_card.py') 2847 2848 #madevent file 2849 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 2850 cp(_file_path+'/various/lhe_parser.py', 2851 self.dir_path+'/bin/internal/lhe_parser.py') 2852 2853 cp(_file_path+'/various/banner.py', 2854 self.dir_path+'/bin/internal/banner.py') 2855 cp(_file_path+'/various/shower_card.py', 2856 self.dir_path+'/bin/internal/shower_card.py') 2857 cp(_file_path+'/various/cluster.py', 2858 self.dir_path+'/bin/internal/cluster.py') 2859 2860 # logging configuration 2861 cp(_file_path+'/interface/.mg5_logging.conf', 2862 self.dir_path+'/bin/internal/me5_logging.conf') 2863 cp(_file_path+'/interface/coloring_logging.py', 2864 self.dir_path+'/bin/internal/coloring_logging.py')
2865 2866 2867 #=========================================================================== 2868 # Change the version of cuts.f to the one compatible with MW 2869 #===========================================================================
2870 - def get_mw_cuts_version(self, outpath=None):
2871 """create the appropriate cuts.f 2872 This is based on the one associated to ME output but: 2873 1) No clustering (=> remove initcluster/setclscales) 2874 2) Adding the definition of cut_bw at the file. 2875 """ 2876 2877 template = open(pjoin(MG5DIR,'Template','LO','SubProcesses','cuts.f')) 2878 2879 text = StringIO() 2880 #1) remove all dependencies in ickkw >1: 2881 nb_if = 0 2882 for line in template: 2883 if 'if(xqcut.gt.0d0' in line: 2884 nb_if = 1 2885 if nb_if == 0: 2886 text.write(line) 2887 continue 2888 if re.search(r'if\(.*\)\s*then', line): 2889 nb_if += 1 2890 elif 'endif' in line: 2891 nb_if -= 1 2892 2893 #2) add fake cut_bw (have to put the true one later) 2894 text.write(""" 2895 logical function cut_bw(p) 2896 include 'madweight_param.inc' 2897 double precision p(*) 2898 if (bw_cut) then 2899 cut_bw = .true. 2900 else 2901 stop 1 2902 endif 2903 return 2904 end 2905 """) 2906 2907 final = text.getvalue() 2908 #3) remove the call to initcluster: 2909 template = final.replace('call initcluster', '! Remove for MW!call initcluster') 2910 template = template.replace('genps.inc', 'maxparticles.inc') 2911 #Now we can write it 2912 if not outpath: 2913 fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w') 2914 elif isinstance(outpath, str): 2915 fsock = open(outpath, 'w') 2916 else: 2917 fsock = outpath 2918 fsock.write(template)
2919 2920 2921 2922 #=========================================================================== 2923 # Make the Helas and Model directories for Standalone directory 2924 #===========================================================================
2925 - def make(self):
2926 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 2927 everything for running madweight 2928 """ 2929 2930 source_dir = os.path.join(self.dir_path, "Source") 2931 logger.info("Running make for Helas") 2932 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2933 logger.info("Running make for Model") 2934 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 2935 logger.info("Running make for PDF") 2936 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 2937 logger.info("Running make for CERNLIB") 2938 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 2939 logger.info("Running make for GENERIC") 2940 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 2941 logger.info("Running make for blocks") 2942 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 2943 logger.info("Running make for tools") 2944 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
2945 2946 #=========================================================================== 2947 # Create proc_card_mg5.dat for MadWeight directory 2948 #===========================================================================
2949 - def finalize(self, matrix_elements, history, mg5options, flaglist):
2950 """Finalize Standalone MG4 directory by generation proc_card_mg5.dat""" 2951 2952 compiler = {'fortran': mg5options['fortran_compiler'], 2953 'cpp': mg5options['cpp_compiler'], 2954 'f2py': mg5options['f2py_compiler']} 2955 2956 2957 2958 #proc_charac 2959 self.create_proc_charac() 2960 2961 # Write maxparticles.inc based on max of ME's/subprocess groups 2962 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 2963 self.write_maxparticles_file(writers.FortranWriter(filename), 2964 matrix_elements) 2965 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 2966 pjoin(self.dir_path, 'Source','MadWeight','blocks')) 2967 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 2968 pjoin(self.dir_path, 'Source','MadWeight','tools')) 2969 2970 self.set_compiler(compiler) 2971 self.make() 2972 2973 # Write command history as proc_card_mg5 2974 if os.path.isdir(os.path.join(self.dir_path, 'Cards')): 2975 output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat') 2976 history.write(output_file) 2977 2978 ProcessExporterFortran.finalize(self, matrix_elements, 2979 history, mg5options, flaglist)
2980 2981 2982 2983 #=========================================================================== 2984 # create the run_card for MW 2985 #===========================================================================
2986 - def create_run_card(self, matrix_elements, history):
2987 """ """ 2988 2989 run_card = banner_mod.RunCard() 2990 2991 # pass to default for MW 2992 run_card["run_tag"] = "\'not_use\'" 2993 run_card["fixed_ren_scale"] = "T" 2994 run_card["fixed_fac_scale"] = "T" 2995 run_card.remove_all_cut() 2996 2997 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 2998 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2999 python_template=True) 3000 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'), 3001 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 3002 python_template=True)
3003 3004 #=========================================================================== 3005 # export model files 3006 #===========================================================================
3007 - def export_model_files(self, model_path):
3008 """export the model dependent files for V4 model""" 3009 3010 super(ProcessExporterFortranMW,self).export_model_files(model_path) 3011 # Add the routine update_as_param in v4 model 3012 # This is a function created in the UFO 3013 text=""" 3014 subroutine update_as_param() 3015 call setpara('param_card.dat',.false.) 3016 return 3017 end 3018 """ 3019 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3020 ff.write(text) 3021 ff.close() 3022 3023 # Modify setrun.f 3024 text = open(os.path.join(self.dir_path,'Source','setrun.f')).read() 3025 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3026 fsock = open(os.path.join(self.dir_path,'Source','setrun.f'), 'w') 3027 fsock.write(text) 3028 fsock.close() 3029 3030 # Modify initialization.f 3031 text = open(os.path.join(self.dir_path,'SubProcesses','initialization.f')).read() 3032 text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)') 3033 fsock = open(os.path.join(self.dir_path,'SubProcesses','initialization.f'), 'w') 3034 fsock.write(text) 3035 fsock.close() 3036 3037 3038 self.make_model_symbolic_link()
3039 3040 #=========================================================================== 3041 # generate_subprocess_directory 3042 #===========================================================================
3043 - def generate_subprocess_directory(self, matrix_element, 3044 fortran_model,number):
3045 """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format, 3046 including the necessary matrix.f and nexternal.inc files""" 3047 3048 cwd = os.getcwd() 3049 # Create the directory PN_xx_xxxxx in the specified path 3050 dirpath = os.path.join(self.dir_path, 'SubProcesses', \ 3051 "P%s" % matrix_element.get('processes')[0].shell_string()) 3052 3053 try: 3054 os.mkdir(dirpath) 3055 except os.error as error: 3056 logger.warning(error.strerror + " " + dirpath) 3057 3058 #try: 3059 # os.chdir(dirpath) 3060 #except os.error: 3061 # logger.error('Could not cd to directory %s' % dirpath) 3062 # return 0 3063 3064 logger.info('Creating files in directory %s' % dirpath) 3065 3066 # Extract number of external particles 3067 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3068 3069 # Create the matrix.f file and the nexternal.inc file 3070 filename = pjoin(dirpath,'matrix.f') 3071 calls,ncolor = self.write_matrix_element_v4( 3072 writers.FortranWriter(filename), 3073 matrix_element, 3074 fortran_model) 3075 3076 filename = pjoin(dirpath, 'auto_dsig.f') 3077 self.write_auto_dsig_file(writers.FortranWriter(filename), 3078 matrix_element) 3079 3080 filename = pjoin(dirpath, 'configs.inc') 3081 mapconfigs, s_and_t_channels = self.write_configs_file(\ 3082 writers.FortranWriter(filename), 3083 matrix_element) 3084 3085 filename = pjoin(dirpath, 'nexternal.inc') 3086 self.write_nexternal_file(writers.FortranWriter(filename), 3087 nexternal, ninitial) 3088 3089 filename = pjoin(dirpath, 'leshouche.inc') 3090 self.write_leshouche_file(writers.FortranWriter(filename), 3091 matrix_element) 3092 3093 filename = pjoin(dirpath, 'props.inc') 3094 self.write_props_file(writers.FortranWriter(filename), 3095 matrix_element, 3096 s_and_t_channels) 3097 3098 filename = pjoin(dirpath, 'pmass.inc') 3099 self.write_pmass_file(writers.FortranWriter(filename), 3100 matrix_element) 3101 3102 filename = pjoin(dirpath, 'ngraphs.inc') 3103 self.write_ngraphs_file(writers.FortranWriter(filename), 3104 len(matrix_element.get_all_amplitudes())) 3105 3106 filename = pjoin(dirpath, 'maxamps.inc') 3107 self.write_maxamps_file(writers.FortranWriter(filename), 3108 len(matrix_element.get('diagrams')), 3109 ncolor, 3110 len(matrix_element.get('processes')), 3111 1) 3112 3113 filename = pjoin(dirpath, 'phasespace.inc') 3114 self.write_phasespace_file(writers.FortranWriter(filename), 3115 len(matrix_element.get('diagrams')), 3116 ) 3117 3118 # Generate diagrams 3119 if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True': 3120 filename = pjoin(dirpath, "matrix.ps") 3121 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 3122 get('diagrams'), 3123 filename, 3124 model=matrix_element.get('processes')[0].\ 3125 get('model'), 3126 amplitude='') 3127 logger.info("Generating Feynman diagrams for " + \ 3128 matrix_element.get('processes')[0].nice_string()) 3129 plot.draw() 3130 3131 #import genps.inc and maxconfigs.inc into Subprocesses 3132 ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False) 3133 #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False) 3134 3135 linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f', 'genps.inc'] 3136 3137 for file in linkfiles: 3138 ln('../%s' % file, starting_dir=cwd) 3139 3140 ln('nexternal.inc', '../../Source', log=False, cwd=dirpath) 3141 ln('leshouche.inc', 
'../../Source', log=False, cwd=dirpath) 3142 ln('maxamps.inc', '../../Source', log=False, cwd=dirpath) 3143 ln('phasespace.inc', '../', log=True, cwd=dirpath) 3144 # Return to original PWD 3145 #os.chdir(cwd) 3146 3147 if not calls: 3148 calls = 0 3149 return calls
3150 3151 #=========================================================================== 3152 # write_matrix_element_v4 3153 #===========================================================================
3154 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
3155 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 3156 3157 if not matrix_element.get('processes') or \ 3158 not matrix_element.get('diagrams'): 3159 return 0 3160 3161 if writer: 3162 if not isinstance(writer, writers.FortranWriter): 3163 raise writers.FortranWriter.FortranWriterError(\ 3164 "writer not FortranWriter") 3165 3166 # Set lowercase/uppercase Fortran code 3167 writers.FortranWriter.downcase = False 3168 3169 replace_dict = {} 3170 3171 # Extract version number and date from VERSION file 3172 info_lines = self.get_mg5_info_lines() 3173 replace_dict['info_lines'] = info_lines 3174 3175 # Extract process info lines 3176 process_lines = self.get_process_info_lines(matrix_element) 3177 replace_dict['process_lines'] = process_lines 3178 3179 # Set proc_id 3180 replace_dict['proc_id'] = proc_id 3181 3182 # Extract number of external particles 3183 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3184 replace_dict['nexternal'] = nexternal 3185 3186 # Extract ncomb 3187 ncomb = matrix_element.get_helicity_combinations() 3188 replace_dict['ncomb'] = ncomb 3189 3190 # Extract helicity lines 3191 helicity_lines = self.get_helicity_lines(matrix_element) 3192 replace_dict['helicity_lines'] = helicity_lines 3193 3194 # Extract overall denominator 3195 # Averaging initial state color, spin, and identical FS particles 3196 den_factor_line = self.get_den_factor_line(matrix_element) 3197 replace_dict['den_factor_line'] = den_factor_line 3198 3199 # Extract ngraphs 3200 ngraphs = matrix_element.get_number_of_amplitudes() 3201 replace_dict['ngraphs'] = ngraphs 3202 3203 # Extract nwavefuncs 3204 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3205 replace_dict['nwavefuncs'] = nwavefuncs 3206 3207 # Extract ncolor 3208 ncolor = max(1, len(matrix_element.get('color_basis'))) 3209 replace_dict['ncolor'] = ncolor 3210 3211 # Extract color data lines 3212 color_data_lines = self.get_color_data_lines(matrix_element) 3213 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3214 3215 # Extract helas calls 3216 helas_calls = fortran_model.get_matrix_element_calls(\ 3217 matrix_element) 3218 3219 replace_dict['helas_calls'] = "\n".join(helas_calls) 3220 3221 # Extract JAMP lines 3222 jamp_lines = self.get_JAMP_lines(matrix_element) 3223 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 3224 3225 replace_dict['template_file'] = os.path.join(_file_path, \ 3226 'iolibs/template_files/%s' % self.matrix_file) 3227 replace_dict['template_file2'] = '' 3228 3229 if writer: 3230 file = open(replace_dict['template_file']).read() 3231 file = file % replace_dict 3232 # Write the file 3233 writer.writelines(file) 3234 return len([call for call in helas_calls if call.find('#') != 0]),ncolor 3235 else: 3236 replace_dict['return_value'] = (len([call for call in helas_calls if call.find('#') != 0]),ncolor)
3237 3238 #=========================================================================== 3239 # write_source_makefile 3240 #===========================================================================
3241 - def write_source_makefile(self, writer):
3242 """Write the nexternal.inc file for madweight""" 3243 3244 3245 path = os.path.join(_file_path,'iolibs','template_files','madweight_makefile_source') 3246 set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)' 3247 text = open(path).read() % {'libraries': set_of_lib} 3248 writer.write(text) 3249 3250 return True
3251
3252 - def write_phasespace_file(self, writer, nb_diag):
3253 """ """ 3254 3255 template = """ include 'maxparticles.inc' 3256 integer max_branches 3257 parameter (max_branches=max_particles-1) 3258 integer max_configs 3259 parameter (max_configs=%(nb_diag)s) 3260 3261 c channel position 3262 integer config_pos,perm_pos 3263 common /to_config/config_pos,perm_pos 3264 3265 """ 3266 3267 writer.write(template % {'nb_diag': nb_diag})
3268 3269 3270 #=========================================================================== 3271 # write_auto_dsig_file 3272 #===========================================================================
3273 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
3274 """Write the auto_dsig.f file for the differential cross section 3275 calculation, includes pdf call information (MadWeight format)""" 3276 3277 if not matrix_element.get('processes') or \ 3278 not matrix_element.get('diagrams'): 3279 return 0 3280 3281 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 3282 3283 if ninitial < 1 or ninitial > 2: 3284 raise writers.FortranWriter.FortranWriterError("""Need ninitial = 1 or 2 to write auto_dsig file""") 3285 3286 replace_dict = {} 3287 3288 # Extract version number and date from VERSION file 3289 info_lines = self.get_mg5_info_lines() 3290 replace_dict['info_lines'] = info_lines 3291 3292 # Extract process info lines 3293 process_lines = self.get_process_info_lines(matrix_element) 3294 replace_dict['process_lines'] = process_lines 3295 3296 # Set proc_id 3297 replace_dict['proc_id'] = proc_id 3298 replace_dict['numproc'] = 1 3299 3300 # Set dsig_line 3301 if ninitial == 1: 3302 # No conversion, since result of decay should be given in GeV 3303 dsig_line = "pd(0)*dsiguu" 3304 else: 3305 # Convert result (in GeV) to pb 3306 dsig_line = "pd(0)*conv*dsiguu" 3307 3308 replace_dict['dsig_line'] = dsig_line 3309 3310 # Extract pdf lines 3311 pdf_vars, pdf_data, pdf_lines = \ 3312 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 3313 replace_dict['pdf_vars'] = pdf_vars 3314 replace_dict['pdf_data'] = pdf_data 3315 replace_dict['pdf_lines'] = pdf_lines 3316 3317 # Lines that differ between subprocess group and regular 3318 if proc_id: 3319 replace_dict['numproc'] = int(proc_id) 3320 replace_dict['passcuts_begin'] = "" 3321 replace_dict['passcuts_end'] = "" 3322 # Set lines for subprocess group version 3323 # Set define_iconfigs_lines 3324 replace_dict['define_subdiag_lines'] = \ 3325 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 3326 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 3327 else: 3328 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 3329 replace_dict['passcuts_end'] = "ENDIF" 3330 replace_dict['define_subdiag_lines'] = "" 3331 3332 if writer: 3333 file = open(os.path.join(_file_path, \ 3334 'iolibs/template_files/auto_dsig_mw.inc')).read() 3335 3336 file = file % replace_dict 3337 # Write the file 3338 writer.writelines(file) 3339 else: 3340 return replace_dict
3341 #=========================================================================== 3342 # write_configs_file 3343 #===========================================================================
3344 - def write_configs_file(self, writer, matrix_element):
3345 """Write the configs.inc file for MadEvent""" 3346 3347 # Extract number of external particles 3348 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3349 3350 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 3351 mapconfigs = [c[0] for c in configs] 3352 model = matrix_element.get('processes')[0].get('model') 3353 return mapconfigs, self.write_configs_file_from_diagrams(writer, 3354 [[c[1]] for c in configs], 3355 mapconfigs, 3356 nexternal, ninitial,matrix_element, model)
3357 3358 #=========================================================================== 3359 # write_run_configs_file 3360 #===========================================================================
3361 - def write_run_config_file(self, writer):
3362 """Write the run_configs.inc file for MadWeight""" 3363 3364 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 3365 text = open(path).read() % {'chanperjob':'5'} 3366 writer.write(text) 3367 return True
3368 3369 #=========================================================================== 3370 # write_configs_file_from_diagrams 3371 #===========================================================================
3372 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 3373 nexternal, ninitial, matrix_element, model):
3374 """Write the actual configs.inc file. 3375 3376 configs is the diagrams corresponding to configs (each 3377 diagrams is a list of corresponding diagrams for all 3378 subprocesses, with None if there is no corresponding diagrams 3379 for a given process). 3380 mapconfigs gives the diagram number for each config. 3381 3382 For s-channels, we need to output one PDG for each subprocess in 3383 the subprocess group, in order to be able to pick the right 3384 one for multiprocesses.""" 3385 3386 lines = [] 3387 3388 particle_dict = matrix_element.get('processes')[0].get('model').\ 3389 get('particle_dict') 3390 3391 s_and_t_channels = [] 3392 3393 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 3394 for config in configs if [d for d in config if d][0].\ 3395 get_vertex_leg_numbers()!=[]] 3396 3397 minvert = min(vert_list) if vert_list!=[] else 0 3398 # Number of subprocesses 3399 nsubprocs = len(configs[0]) 3400 3401 nconfigs = 0 3402 3403 new_pdg = model.get_first_non_pdg() 3404 3405 for iconfig, helas_diags in enumerate(configs): 3406 if any([vert > minvert for vert in 3407 [d for d in helas_diags if d][0].get_vertex_leg_numbers()]): 3408 # Only 3-vertices allowed in configs.inc 3409 continue 3410 nconfigs += 1 3411 3412 # Need s- and t-channels for all subprocesses, including 3413 # those that don't contribute to this config 3414 empty_verts = [] 3415 stchannels = [] 3416 for h in helas_diags: 3417 if h: 3418 # get_s_and_t_channels gives vertices starting from 3419 # final state external particles and working inwards 3420 stchannels.append(h.get('amplitudes')[0].\ 3421 get_s_and_t_channels(ninitial,model,new_pdg)) 3422 else: 3423 stchannels.append((empty_verts, None)) 3424 3425 # For t-channels, just need the first non-empty one 3426 tchannels = [t for s,t in stchannels if t != None][0] 3427 3428 # For s_and_t_channels (to be used later) use only first config 3429 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 3430 tchannels]) 3431 3432 # Make sure empty_verts is same length as real vertices 3433 if any([s for s,t in stchannels]): 3434 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 3435 3436 # Reorganize s-channel vertices to get a list of all 3437 # subprocesses for each vertex 3438 schannels = list(zip(*[s for s,t in stchannels])) 3439 else: 3440 schannels = [] 3441 3442 allchannels = schannels 3443 if len(tchannels) > 1: 3444 # Write out tchannels only if there are any non-trivial ones 3445 allchannels = schannels + tchannels 3446 3447 # Write out propagators for s-channel and t-channel vertices 3448 3449 #lines.append("# Diagram %d" % (mapconfigs[iconfig])) 3450 # Correspondance between the config and the diagram = amp2 3451 lines.append("* %d %d " % (nconfigs, 3452 mapconfigs[iconfig])) 3453 3454 for verts in allchannels: 3455 if verts in schannels: 3456 vert = [v for v in verts if v][0] 3457 else: 3458 vert = verts 3459 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 3460 last_leg = vert.get('legs')[-1] 3461 line=str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1]) 3462 # lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 3463 # (last_leg.get('number'), nconfigs, len(daughters), 3464 # ",".join([str(d) for d in daughters]))) 3465 3466 if last_leg.get('id') == 21 and 21 not in particle_dict: 3467 # Fake propagator used in multiparticle vertices 3468 mass = 'zero' 3469 width = 'zero' 3470 pow_part = 0 3471 else: 3472 if (last_leg.get('id')!=7): 3473 particle = 
particle_dict[last_leg.get('id')] 3474 # Get mass 3475 mass = particle.get('mass') 3476 # Get width 3477 width = particle.get('width') 3478 else : # fake propagator used in multiparticle vertices 3479 mass= 'zero' 3480 width= 'zero' 3481 3482 line=line+" "+mass+" "+width+" " 3483 3484 if verts in schannels: 3485 pdgs = [] 3486 for v in verts: 3487 if v: 3488 pdgs.append(v.get('legs')[-1].get('id')) 3489 else: 3490 pdgs.append(0) 3491 lines.append(line+" S "+str(last_leg.get('id'))) 3492 # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 3493 # (last_leg.get('number'), nconfigs, nsubprocs, 3494 # ",".join([str(d) for d in pdgs]))) 3495 # lines.append("data tprid(%d,%d)/0/" % \ 3496 # (last_leg.get('number'), nconfigs)) 3497 elif verts in tchannels[:-1]: 3498 lines.append(line+" T "+str(last_leg.get('id'))) 3499 # lines.append("data tprid(%d,%d)/%d/" % \ 3500 # (last_leg.get('number'), nconfigs, 3501 # abs(last_leg.get('id')))) 3502 # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 3503 # (last_leg.get('number'), nconfigs, nsubprocs, 3504 # ",".join(['0'] * nsubprocs))) 3505 3506 # Write out number of configs 3507 # lines.append("# Number of configs") 3508 # lines.append("data mapconfig(0)/%d/" % nconfigs) 3509 lines.append(" * ") # a line with just a star indicates this is the end of file 3510 # Write the file 3511 writer.writelines(lines) 3512 3513 return s_and_t_channels
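# A sketch of the plain-text layout produced above, for a hypothetical single
# s-channel 2->2 topology (propagator number, parameter names and PDG code are
# made up for illustration): one "* config diagram" header per kept channel,
# one "prop daughter1 daughter2 mass width {S|T} pdg" line per vertex, and a
# lone star line closing the file.
example = []
nconfig, idiag = 1, 1
example.append("* %d %d " % (nconfig, idiag))
prop, d1, d2 = -1, 3, 4
mass, width, pdg = "wmass", "wwidth", 24
example.append("%d %d %d %s %s  S %d" % (prop, d1, d2, mass, width, pdg))
example.append(" * ")
print("\n".join(example))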
3514
3515 3516 3517 #=============================================================================== 3518 # ProcessExporterFortranME 3519 #=============================================================================== 3520 -class ProcessExporterFortranME(ProcessExporterFortran):
3521 """Class to take care of exporting a set of matrix elements to 3522 MadEvent format.""" 3523 3524 matrix_file = "matrix_madevent_v4.inc" 3525 done_warning_tchannel = False 3526 3527 # helper function for customise helas writter 3528 @staticmethod
3529 - def custom_helas_call(call, arg):
3530 if arg['mass'] == '%(M)s,%(W)s,': 3531 arg['mass'] = '%(M)s, fk_%(W)s,' 3532 elif '%(W)s' in arg['mass']: 3533 raise Exception 3534 return call, arg
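# A small illustration of the hook above (callable once the class below is
# fully defined): when the helas call writer passes a call whose mass slot is
# the plain '%(M)s,%(W)s,' pattern, the width is replaced by its regulated
# counterpart fk_W (see the fake-width code in write_matrix_element_v4 below).
# The call string here is only a placeholder.
example_call = 'CALL VXXXXX(P(0,%(out)s),%(mass)s NHEL(%(out)s),...)'
example_arg = {'mass': '%(M)s,%(W)s,'}
example_call, example_arg = ProcessExporterFortranME.custom_helas_call(
                                                   example_call, example_arg)
assert example_arg['mass'] == '%(M)s, fk_%(W)s,'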
3535
3536 - def copy_template(self, model):
3537 """Additional actions needed for setup of Template 3538 """ 3539 3540 super(ProcessExporterFortranME, self).copy_template(model) 3541 3542 # File created from Template (Different in some child class) 3543 filename = pjoin(self.dir_path,'Source','run_config.inc') 3544 self.write_run_config_file(writers.FortranWriter(filename)) 3545 3546 # The next file are model dependant (due to SLAH convention) 3547 self.model_name = model.get('name') 3548 # Add the symmetry.f 3549 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3550 self.write_symmetry(writers.FortranWriter(filename)) 3551 # 3552 filename = pjoin(self.dir_path,'SubProcesses','addmothers.f') 3553 self.write_addmothers(writers.FortranWriter(filename)) 3554 # Copy the different python file in the Template 3555 self.copy_python_file()
3556 3557 3558 3559 3560 3561 3562 #=========================================================================== 3563 # generate_subprocess_directory 3564 #===========================================================================
3565 - def copy_python_file(self):
3566 """copy the python file require for the Template""" 3567 3568 # madevent interface 3569 cp(_file_path+'/interface/madevent_interface.py', 3570 self.dir_path+'/bin/internal/madevent_interface.py') 3571 cp(_file_path+'/interface/extended_cmd.py', 3572 self.dir_path+'/bin/internal/extended_cmd.py') 3573 cp(_file_path+'/interface/common_run_interface.py', 3574 self.dir_path+'/bin/internal/common_run_interface.py') 3575 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3576 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3577 cp(_file_path+'/iolibs/save_load_object.py', 3578 self.dir_path+'/bin/internal/save_load_object.py') 3579 cp(_file_path+'/iolibs/file_writers.py', 3580 self.dir_path+'/bin/internal/file_writers.py') 3581 #model file 3582 cp(_file_path+'../models/check_param_card.py', 3583 self.dir_path+'/bin/internal/check_param_card.py') 3584 3585 #copy all the file present in madevent directory 3586 for name in os.listdir(pjoin(_file_path, 'madevent')): 3587 if name not in ['__init__.py'] and name.endswith('.py'): 3588 cp(_file_path+'/madevent/'+name, self.dir_path+'/bin/internal/') 3589 3590 #madevent file 3591 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3592 cp(_file_path+'/various/lhe_parser.py', 3593 self.dir_path+'/bin/internal/lhe_parser.py') 3594 cp(_file_path+'/various/banner.py', 3595 self.dir_path+'/bin/internal/banner.py') 3596 cp(_file_path+'/various/histograms.py', 3597 self.dir_path+'/bin/internal/histograms.py') 3598 cp(_file_path+'/various/plot_djrs.py', 3599 self.dir_path+'/bin/internal/plot_djrs.py') 3600 cp(_file_path+'/various/systematics.py', self.dir_path+'/bin/internal/systematics.py') 3601 3602 cp(_file_path+'/various/cluster.py', 3603 self.dir_path+'/bin/internal/cluster.py') 3604 cp(_file_path+'/madevent/combine_runs.py', 3605 self.dir_path+'/bin/internal/combine_runs.py') 3606 # logging configuration 3607 cp(_file_path+'/interface/.mg5_logging.conf', 3608 self.dir_path+'/bin/internal/me5_logging.conf') 3609 cp(_file_path+'/interface/coloring_logging.py', 3610 self.dir_path+'/bin/internal/coloring_logging.py') 3611 # shower card and FO_analyse_card. 3612 # Although not needed, it is imported by banner.py 3613 cp(_file_path+'/various/shower_card.py', 3614 self.dir_path+'/bin/internal/shower_card.py') 3615 cp(_file_path+'/various/FO_analyse_card.py', 3616 self.dir_path+'/bin/internal/FO_analyse_card.py')
3617 3618
3619 - def convert_model(self, model, wanted_lorentz = [], 3620 wanted_couplings = []):
3621 3622 super(ProcessExporterFortranME,self).convert_model(model, 3623 wanted_lorentz, wanted_couplings) 3624 3625 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 3626 try: 3627 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 3628 except OSError as error: 3629 pass 3630 model_path = model.get('modelpath') 3631 # This is not safe if there is a '##' or '-' in the path. 3632 shutil.copytree(model_path, 3633 pjoin(self.dir_path,'bin','internal','ufomodel'), 3634 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 3635 if hasattr(model, 'restrict_card'): 3636 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 3637 'restrict_default.dat') 3638 if isinstance(model.restrict_card, check_param_card.ParamCard): 3639 model.restrict_card.write(out_path) 3640 else: 3641 files.cp(model.restrict_card, out_path)
3642 3643 #=========================================================================== 3644 # export model files 3645 #===========================================================================
3646 - def export_model_files(self, model_path):
3647 """export the model dependent files""" 3648 3649 super(ProcessExporterFortranME,self).export_model_files(model_path) 3650 3651 # Add the routine update_as_param in v4 model 3652 # This is a function created in the UFO 3653 text=""" 3654 subroutine update_as_param() 3655 call setpara('param_card.dat',.false.) 3656 return 3657 end 3658 """ 3659 ff = open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3660 ff.write(text) 3661 ff.close() 3662 3663 # Add the symmetry.f 3664 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3665 self.write_symmetry(writers.FortranWriter(filename), v5=False) 3666 3667 # Modify setrun.f 3668 text = open(pjoin(self.dir_path,'Source','setrun.f')).read() 3669 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3670 fsock = open(pjoin(self.dir_path,'Source','setrun.f'), 'w') 3671 fsock.write(text) 3672 fsock.close() 3673 3674 self.make_model_symbolic_link()
3675 3676 #=========================================================================== 3677 # generate_subprocess_directory 3678 #===========================================================================
3679 - def generate_subprocess_directory(self, matrix_element, 3680 fortran_model, 3681 me_number):
3682 """Generate the Pxxxxx directory for a subprocess in MG4 madevent, 3683 including the necessary matrix.f and various helper files""" 3684 3685 cwd = os.getcwd() 3686 path = pjoin(self.dir_path, 'SubProcesses') 3687 3688 3689 if not self.model: 3690 self.model = matrix_element.get('processes')[0].get('model') 3691 3692 3693 3694 #os.chdir(path) 3695 # Create the directory PN_xx_xxxxx in the specified path 3696 subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string() 3697 try: 3698 os.mkdir(pjoin(path,subprocdir)) 3699 except os.error as error: 3700 logger.warning(error.strerror + " " + subprocdir) 3701 3702 #try: 3703 # os.chdir(subprocdir) 3704 #except os.error: 3705 # logger.error('Could not cd to directory %s' % subprocdir) 3706 # return 0 3707 3708 logger.info('Creating files in directory %s' % subprocdir) 3709 Ppath = pjoin(path, subprocdir) 3710 3711 # Extract number of external particles 3712 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3713 3714 # Add the driver.f 3715 ncomb = matrix_element.get_helicity_combinations() 3716 filename = pjoin(Ppath,'driver.f') 3717 self.write_driver(writers.FortranWriter(filename),ncomb,n_grouped_proc=1, 3718 v5=self.opt['v5_model']) 3719 3720 # Create the matrix.f file, auto_dsig.f file and all inc files 3721 filename = pjoin(Ppath, 'matrix.f') 3722 calls, ncolor = \ 3723 self.write_matrix_element_v4(writers.FortranWriter(filename), 3724 matrix_element, fortran_model, subproc_number = me_number) 3725 3726 filename = pjoin(Ppath, 'auto_dsig.f') 3727 self.write_auto_dsig_file(writers.FortranWriter(filename), 3728 matrix_element) 3729 3730 filename = pjoin(Ppath, 'configs.inc') 3731 mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\ 3732 writers.FortranWriter(filename), 3733 matrix_element) 3734 3735 filename = pjoin(Ppath, 'config_nqcd.inc') 3736 self.write_config_nqcd_file(writers.FortranWriter(filename), 3737 nqcd_list) 3738 3739 filename = pjoin(Ppath, 'config_subproc_map.inc') 3740 self.write_config_subproc_map_file(writers.FortranWriter(filename), 3741 s_and_t_channels) 3742 3743 filename = pjoin(Ppath, 'coloramps.inc') 3744 self.write_coloramps_file(writers.FortranWriter(filename), 3745 mapconfigs, 3746 matrix_element) 3747 3748 filename = pjoin(Ppath, 'get_color.f') 3749 self.write_colors_file(writers.FortranWriter(filename), 3750 matrix_element) 3751 3752 filename = pjoin(Ppath, 'decayBW.inc') 3753 self.write_decayBW_file(writers.FortranWriter(filename), 3754 s_and_t_channels) 3755 3756 filename = pjoin(Ppath, 'dname.mg') 3757 self.write_dname_file(writers.FileWriter(filename), 3758 "P"+matrix_element.get('processes')[0].shell_string()) 3759 3760 filename = pjoin(Ppath, 'iproc.dat') 3761 self.write_iproc_file(writers.FortranWriter(filename), 3762 me_number) 3763 3764 filename = pjoin(Ppath, 'leshouche.inc') 3765 self.write_leshouche_file(writers.FortranWriter(filename), 3766 matrix_element) 3767 3768 filename = pjoin(Ppath, 'maxamps.inc') 3769 self.write_maxamps_file(writers.FortranWriter(filename), 3770 len(matrix_element.get('diagrams')), 3771 ncolor, 3772 len(matrix_element.get('processes')), 3773 1) 3774 3775 filename = pjoin(Ppath, 'mg.sym') 3776 self.write_mg_sym_file(writers.FortranWriter(filename), 3777 matrix_element) 3778 3779 filename = pjoin(Ppath, 'ncombs.inc') 3780 self.write_ncombs_file(writers.FortranWriter(filename), 3781 nexternal) 3782 3783 filename = pjoin(Ppath, 'nexternal.inc') 3784 self.write_nexternal_file(writers.FortranWriter(filename), 3785 nexternal, ninitial) 
3786 3787 filename = pjoin(Ppath, 'ngraphs.inc') 3788 self.write_ngraphs_file(writers.FortranWriter(filename), 3789 len(mapconfigs)) 3790 3791 3792 filename = pjoin(Ppath, 'pmass.inc') 3793 self.write_pmass_file(writers.FortranWriter(filename), 3794 matrix_element) 3795 3796 filename = pjoin(Ppath, 'props.inc') 3797 self.write_props_file(writers.FortranWriter(filename), 3798 matrix_element, 3799 s_and_t_channels) 3800 3801 # Find config symmetries and permutations 3802 symmetry, perms, ident_perms = \ 3803 diagram_symmetry.find_symmetry(matrix_element) 3804 3805 filename = pjoin(Ppath, 'symswap.inc') 3806 self.write_symswap_file(writers.FortranWriter(filename), 3807 ident_perms) 3808 3809 filename = pjoin(Ppath, 'symfact_orig.dat') 3810 self.write_symfact_file(open(filename, 'w'), symmetry) 3811 3812 # Generate diagrams 3813 if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True': 3814 filename = pjoin(Ppath, "matrix.ps") 3815 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 3816 get('diagrams'), 3817 filename, 3818 model=matrix_element.get('processes')[0].\ 3819 get('model'), 3820 amplitude=True) 3821 logger.info("Generating Feynman diagrams for " + \ 3822 matrix_element.get('processes')[0].nice_string()) 3823 plot.draw() 3824 3825 self.link_files_in_SubProcess(Ppath) 3826 3827 #import nexternal/leshouche in Source 3828 ln(pjoin(Ppath,'nexternal.inc'), pjoin(self.dir_path,'Source'), log=False) 3829 ln(pjoin(Ppath,'leshouche.inc'), pjoin(self.dir_path,'Source'), log=False) 3830 ln(pjoin(Ppath,'maxamps.inc'), pjoin(self.dir_path,'Source'), log=False) 3831 # Return to SubProcesses dir 3832 #os.chdir(os.path.pardir) 3833 3834 # Add subprocess to subproc.mg 3835 filename = pjoin(path, 'subproc.mg') 3836 files.append_to_file(filename, 3837 self.write_subproc, 3838 subprocdir) 3839 3840 # Return to original dir 3841 #os.chdir(cwd) 3842 3843 # Generate info page 3844 gen_infohtml.make_info_html(self.dir_path) 3845 3846 3847 if not calls: 3848 calls = 0 3849 return calls
3850 3851 link_Sub_files = ['addmothers.f', 3852 'cluster.f', 3853 'cluster.inc', 3854 'coupl.inc', 3855 'cuts.f', 3856 'cuts.inc', 3857 'genps.f', 3858 'genps.inc', 3859 'idenparts.f', 3860 'initcluster.f', 3861 'makefile', 3862 'message.inc', 3863 'myamp.f', 3864 'reweight.f', 3865 'run.inc', 3866 'maxconfigs.inc', 3867 'maxparticles.inc', 3868 'run_config.inc', 3869 'lhe_event_infos.inc', 3870 'setcuts.f', 3871 'setscales.f', 3872 'sudakov.inc', 3873 'symmetry.f', 3874 'unwgt.f', 3875 'dummy_fct.f' 3876 ] 3877 3891 3892
3893 - def finalize(self, matrix_elements, history, mg5options, flaglist):
3894 """Finalize ME v4 directory by creating jpeg diagrams, html 3895 pages,proc_card_mg5.dat and madevent.tar.gz.""" 3896 3897 if 'nojpeg' in flaglist: 3898 makejpg = False 3899 else: 3900 makejpg = True 3901 if 'online' in flaglist: 3902 online = True 3903 else: 3904 online = False 3905 3906 compiler = {'fortran': mg5options['fortran_compiler'], 3907 'cpp': mg5options['cpp_compiler'], 3908 'f2py': mg5options['f2py_compiler']} 3909 3910 # indicate that the output type is not grouped 3911 if not isinstance(self, ProcessExporterFortranMEGroup): 3912 self.proc_characteristic['grouped_matrix'] = False 3913 3914 self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme'] 3915 3916 # set limitation linked to the model 3917 3918 3919 # indicate the PDG of all initial particle 3920 try: 3921 pdgs1 = [p.get_initial_pdg(1) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)] 3922 pdgs2 = [p.get_initial_pdg(2) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)] 3923 except AttributeError: 3924 pdgs1 = [p.get_initial_pdg(1) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)] 3925 pdgs2 = [p.get_initial_pdg(2) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)] 3926 self.proc_characteristic['pdg_initial1'] = pdgs1 3927 self.proc_characteristic['pdg_initial2'] = pdgs2 3928 3929 3930 modelname = self.opt['model'] 3931 if modelname == 'mssm' or modelname.startswith('mssm-'): 3932 param_card = pjoin(self.dir_path, 'Cards','param_card.dat') 3933 mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 3934 check_param_card.convert_to_mg5card(param_card, mg5_param) 3935 check_param_card.check_valid_param_card(mg5_param) 3936 3937 # Add the combine_events.f modify param_card path/number of @X 3938 filename = pjoin(self.dir_path,'Source','combine_events.f') 3939 try: 3940 nb_proc =[p.get('id') for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')] 3941 except AttributeError: 3942 nb_proc =[p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')] 3943 nb_proc = len(set(nb_proc)) 3944 self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted 3945 # Write maxconfigs.inc based on max of ME's/subprocess groups 3946 filename = pjoin(self.dir_path,'Source','maxconfigs.inc') 3947 self.write_maxconfigs_file(writers.FortranWriter(filename), 3948 matrix_elements) 3949 3950 # Write maxparticles.inc based on max of ME's/subprocess groups 3951 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 3952 self.write_maxparticles_file(writers.FortranWriter(filename), 3953 matrix_elements) 3954 3955 # Touch "done" file 3956 os.system('touch %s/done' % pjoin(self.dir_path,'SubProcesses')) 3957 3958 # Check for compiler 3959 self.set_compiler(compiler) 3960 self.set_cpp_compiler(compiler['cpp']) 3961 3962 3963 old_pos = os.getcwd() 3964 subpath = pjoin(self.dir_path, 'SubProcesses') 3965 3966 P_dir_list = [proc for proc in os.listdir(subpath) 3967 if os.path.isdir(pjoin(subpath,proc)) and proc[0] == 'P'] 3968 3969 devnull = os.open(os.devnull, os.O_RDWR) 3970 # Convert the poscript in jpg files (if authorize) 3971 if makejpg: 3972 try: 3973 os.remove(pjoin(self.dir_path,'HTML','card.jpg')) 3974 except Exception as error: 3975 pass 3976 3977 if misc.which('gs'): 3978 
logger.info("Generate jpeg diagrams") 3979 for Pdir in P_dir_list: 3980 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 3981 stdout = devnull, cwd=pjoin(subpath, Pdir)) 3982 3983 logger.info("Generate web pages") 3984 # Create the WebPage using perl script 3985 3986 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \ 3987 stdout = devnull,cwd=pjoin(self.dir_path)) 3988 3989 #os.chdir(os.path.pardir) 3990 3991 obj = gen_infohtml.make_info_html(self.dir_path) 3992 3993 if online: 3994 nb_channel = obj.rep_rule['nb_gen_diag'] 3995 open(pjoin(self.dir_path, 'Online'),'w').write(str(nb_channel)) 3996 #add the information to proc_charac 3997 self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag'] 3998 3999 # Write command history as proc_card_mg5 4000 if os.path.isdir(pjoin(self.dir_path,'Cards')): 4001 output_file = pjoin(self.dir_path,'Cards', 'proc_card_mg5.dat') 4002 history.write(output_file) 4003 4004 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 4005 stdout = devnull) 4006 4007 #crate the proc_characteristic file 4008 self.create_proc_charac(matrix_elements, history) 4009 4010 # create the run_card 4011 ProcessExporterFortran.finalize(self, matrix_elements, history, mg5options, flaglist) 4012 4013 # Run "make" to generate madevent.tar.gz file 4014 if os.path.exists(pjoin(self.dir_path,'SubProcesses', 'subproc.mg')): 4015 if os.path.exists(pjoin(self.dir_path,'madevent.tar.gz')): 4016 os.remove(pjoin(self.dir_path,'madevent.tar.gz')) 4017 misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')], 4018 stdout = devnull, cwd=self.dir_path) 4019 4020 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 4021 stdout = devnull, cwd=self.dir_path)
4022 4023 4024 4025 4026 4027 4028 #return to the initial dir 4029 #os.chdir(old_pos) 4030 4031 #=========================================================================== 4032 # write_matrix_element_v4 4033 #===========================================================================
4034 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 4035 proc_id = "", config_map = [], subproc_number = ""):
4036 """Export a matrix element to a matrix.f file in MG4 madevent format""" 4037 4038 if not matrix_element.get('processes') or \ 4039 not matrix_element.get('diagrams'): 4040 return 0 4041 4042 if writer: 4043 if not isinstance(writer, writers.FortranWriter): 4044 raise writers.FortranWriter.FortranWriterError(\ 4045 "writer not FortranWriter") 4046 # Set lowercase/uppercase Fortran code 4047 writers.FortranWriter.downcase = False 4048 4049 # check if MLM/.../ is supported for this matrix-element and update associate flag 4050 if self.model and 'MLM' in self.model["limitations"]: 4051 if 'MLM' not in self.proc_characteristic["limitations"]: 4052 used_couplings = matrix_element.get_used_couplings(output="set") 4053 for vertex in self.model.get('interactions'): 4054 particles = [p for p in vertex.get('particles')] 4055 if 21 in [p.get('pdg_code') for p in particles]: 4056 colors = [par.get('color') for par in particles] 4057 if 1 in colors: 4058 continue 4059 elif 'QCD' not in vertex.get('orders'): 4060 for bad_coup in vertex.get('couplings').values(): 4061 if bad_coup in used_couplings: 4062 self.proc_characteristic["limitations"].append('MLM') 4063 break 4064 4065 # The proc prefix is not used for MadEvent output so it can safely be set 4066 # to an empty string. 4067 replace_dict = {'proc_prefix':''} 4068 4069 # Extract helas calls 4070 helas_calls = fortran_model.get_matrix_element_calls(\ 4071 matrix_element) 4072 if fortran_model.width_tchannel_set_tozero and not ProcessExporterFortranME.done_warning_tchannel: 4073 logger.warning("Some T-channel width have been set to zero [new since 2.8.0]\n if you want to keep this width please set \"zerowidth_tchannel\" to False") 4074 ProcessExporterFortranME.done_warning_tchannel = True 4075 4076 replace_dict['helas_calls'] = "\n".join(helas_calls) 4077 4078 4079 #adding the support for the fake width (forbidding too small width) 4080 mass_width = matrix_element.get_all_mass_widths() 4081 mass_width = sorted(list(mass_width)) 4082 width_list = set([e[1] for e in mass_width]) 4083 4084 replace_dict['fake_width_declaration'] = \ 4085 (' double precision fk_%s \n' * len(width_list)) % tuple(width_list) 4086 replace_dict['fake_width_declaration'] += \ 4087 (' save fk_%s \n' * len(width_list)) % tuple(width_list) 4088 fk_w_defs = [] 4089 one_def = ' fk_%(w)s = SIGN(MAX(ABS(%(w)s), ABS(%(m)s*small_width_treatment)), %(w)s)' 4090 4091 for m, w in mass_width: 4092 if w == 'zero': 4093 if ' fk_zero = 0d0' not in fk_w_defs: 4094 fk_w_defs.append(' fk_zero = 0d0') 4095 continue 4096 fk_w_defs.append(one_def %{'m':m, 'w':w}) 4097 replace_dict['fake_width_definitions'] = '\n'.join(fk_w_defs) 4098 4099 # Extract version number and date from VERSION file 4100 info_lines = self.get_mg5_info_lines() 4101 replace_dict['info_lines'] = info_lines 4102 4103 # Extract process info lines 4104 process_lines = self.get_process_info_lines(matrix_element) 4105 replace_dict['process_lines'] = process_lines 4106 4107 # Set proc_id 4108 replace_dict['proc_id'] = proc_id 4109 4110 # Extract ncomb 4111 ncomb = matrix_element.get_helicity_combinations() 4112 replace_dict['ncomb'] = ncomb 4113 4114 # Extract helicity lines 4115 helicity_lines = self.get_helicity_lines(matrix_element) 4116 replace_dict['helicity_lines'] = helicity_lines 4117 4118 # Extract IC line 4119 ic_line = self.get_ic_line(matrix_element) 4120 replace_dict['ic_line'] = ic_line 4121 4122 # Extract overall denominator 4123 # Averaging initial state color, spin, and identical FS particles 4124 den_factor_line 
= self.get_den_factor_line(matrix_element) 4125 replace_dict['den_factor_line'] = den_factor_line 4126 4127 # Extract ngraphs 4128 ngraphs = matrix_element.get_number_of_amplitudes() 4129 replace_dict['ngraphs'] = ngraphs 4130 4131 # Extract ndiags 4132 ndiags = len(matrix_element.get('diagrams')) 4133 replace_dict['ndiags'] = ndiags 4134 4135 # Set define_iconfigs_lines 4136 replace_dict['define_iconfigs_lines'] = \ 4137 """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG 4138 COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG""" 4139 4140 if proc_id: 4141 # Set lines for subprocess group version 4142 # Set define_iconfigs_lines 4143 replace_dict['define_iconfigs_lines'] += \ 4144 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 4145 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 4146 # Set set_amp2_line 4147 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \ 4148 proc_id 4149 else: 4150 # Standard running 4151 # Set set_amp2_line 4152 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT" 4153 4154 # Extract nwavefuncs 4155 nwavefuncs = matrix_element.get_number_of_wavefunctions() 4156 replace_dict['nwavefuncs'] = nwavefuncs 4157 4158 # Extract ncolor 4159 ncolor = max(1, len(matrix_element.get('color_basis'))) 4160 replace_dict['ncolor'] = ncolor 4161 4162 # Extract color data lines 4163 color_data_lines = self.get_color_data_lines(matrix_element) 4164 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 4165 4166 4167 # Set the size of Wavefunction 4168 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 4169 replace_dict['wavefunctionsize'] = 18 4170 else: 4171 replace_dict['wavefunctionsize'] = 6 4172 4173 # Extract amp2 lines 4174 amp2_lines = self.get_amp2_lines(matrix_element, config_map) 4175 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 4176 4177 # The JAMP definition depends on the splitting order 4178 split_orders=matrix_element.get('processes')[0].get('split_orders') 4179 if len(split_orders)>0: 4180 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 4181 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 4182 matrix_element.get('processes')[0],squared_orders) 4183 else: 4184 # Consider the output of a dummy order 'ALL_ORDERS' for which we 4185 # set all amplitude order to weight 1 and only one squared order 4186 # contribution which is of course ALL_ORDERS=2. 4187 squared_orders = [(2,),] 4188 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 4189 replace_dict['chosen_so_configs'] = '.TRUE.' 
4190 4191 replace_dict['nAmpSplitOrders']=len(amp_orders) 4192 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 4193 replace_dict['split_order_str_list']=str(split_orders) 4194 replace_dict['nSplitOrders']=max(len(split_orders),1) 4195 amp_so = self.get_split_orders_lines( 4196 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 4197 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 4198 replace_dict['ampsplitorders']='\n'.join(amp_so) 4199 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 4200 4201 4202 # Extract JAMP lines 4203 # If no split_orders then artificiall add one entry called 'ALL_ORDERS' 4204 jamp_lines = self.get_JAMP_lines_split_order(\ 4205 matrix_element,amp_orders,split_order_names= 4206 split_orders if len(split_orders)>0 else ['ALL_ORDERS']) 4207 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 4208 4209 replace_dict['template_file'] = pjoin(_file_path, \ 4210 'iolibs/template_files/%s' % self.matrix_file) 4211 replace_dict['template_file2'] = pjoin(_file_path, \ 4212 'iolibs/template_files/split_orders_helping_functions.inc') 4213 4214 s1,s2 = matrix_element.get_spin_state_initial() 4215 replace_dict['nb_spin_state1'] = s1 4216 replace_dict['nb_spin_state2'] = s2 4217 4218 if writer: 4219 file = open(replace_dict['template_file']).read() 4220 file = file % replace_dict 4221 # Add the split orders helper functions. 4222 file = file + '\n' + open(replace_dict['template_file2'])\ 4223 .read()%replace_dict 4224 # Write the file 4225 writer.writelines(file) 4226 return len([call for call in helas_calls if call.find('#') != 0]), ncolor 4227 else: 4228 replace_dict['return_value'] = (len([call for call in helas_calls if call.find('#') != 0]), ncolor) 4229 return replace_dict
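# A python transcription of the Fortran fake-width definition generated above,
# fk_W = SIGN(MAX(ABS(W), ABS(M*small_width_treatment)), W): the width used in
# the propagators is floored at a small fraction of the corresponding mass,
# keeping its sign, while widths that are exactly 'zero' stay zero (fk_zero).
# The sample numbers below are illustrative only.
import math

def fake_width(mass, width, small_width_treatment):
    return math.copysign(max(abs(width), abs(mass * small_width_treatment)), width)

print(fake_width(173.0, 1.5, 1e-6))      # ordinary width: unchanged -> 1.5
print(fake_width(173.0, 1e-30, 1e-6))    # tiny width: floored to 1.73e-4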
4230 4231 #=========================================================================== 4232 # write_auto_dsig_file 4233 #===========================================================================
4234 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
4235 """Write the auto_dsig.f file for the differential cross section 4236 calculation, includes pdf call information""" 4237 4238 if not matrix_element.get('processes') or \ 4239 not matrix_element.get('diagrams'): 4240 return 0 4241 4242 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 4243 self.proc_characteristic['ninitial'] = ninitial 4244 self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal) 4245 4246 # Add information relevant for MLM matching: 4247 # Maximum QCD power in all the contributions 4248 max_qcd_order = 0 4249 for diag in matrix_element.get('diagrams'): 4250 orders = diag.calculate_orders() 4251 if 'QCD' in orders: 4252 max_qcd_order = max(max_qcd_order,orders['QCD']) 4253 max_n_light_final_partons = max(len([1 for id in proc.get_final_ids() 4254 if proc.get('model').get_particle(id).get('mass')=='ZERO' and 4255 proc.get('model').get_particle(id).get('color')>1]) 4256 for proc in matrix_element.get('processes')) 4257 # Maximum number of final state light jets to be matched 4258 self.proc_characteristic['max_n_matched_jets'] = max( 4259 self.proc_characteristic['max_n_matched_jets'], 4260 min(max_qcd_order,max_n_light_final_partons)) 4261 4262 # List of default pdgs to be considered for the CKKWl merging cut 4263 self.proc_characteristic['colored_pdgs'] = \ 4264 sorted(list(set([abs(p.get('pdg_code')) for p in 4265 matrix_element.get('processes')[0].get('model').get('particles') if 4266 p.get('color')>1]))) 4267 4268 if ninitial < 1 or ninitial > 2: 4269 raise writers.FortranWriter.FortranWriterError("""Need ninitial = 1 or 2 to write auto_dsig file""") 4270 4271 replace_dict = {} 4272 4273 # Extract version number and date from VERSION file 4274 info_lines = self.get_mg5_info_lines() 4275 replace_dict['info_lines'] = info_lines 4276 4277 # Extract process info lines 4278 process_lines = self.get_process_info_lines(matrix_element) 4279 replace_dict['process_lines'] = process_lines 4280 4281 # Set proc_id 4282 replace_dict['proc_id'] = proc_id 4283 replace_dict['numproc'] = 1 4284 4285 # Set dsig_line 4286 if ninitial == 1: 4287 # No conversion, since result of decay should be given in GeV 4288 dsig_line = "pd(0)*dsiguu" 4289 else: 4290 # Convert result (in GeV) to pb 4291 dsig_line = "pd(0)*conv*dsiguu" 4292 4293 replace_dict['dsig_line'] = dsig_line 4294 4295 # Extract pdf lines 4296 pdf_vars, pdf_data, pdf_lines = \ 4297 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 4298 replace_dict['pdf_vars'] = pdf_vars 4299 replace_dict['pdf_data'] = pdf_data 4300 replace_dict['pdf_lines'] = pdf_lines 4301 4302 # Lines that differ between subprocess group and regular 4303 if proc_id: 4304 replace_dict['numproc'] = int(proc_id) 4305 replace_dict['passcuts_begin'] = "" 4306 replace_dict['passcuts_end'] = "" 4307 # Set lines for subprocess group version 4308 # Set define_iconfigs_lines 4309 replace_dict['define_subdiag_lines'] = \ 4310 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 4311 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 4312 replace_dict['cutsdone'] = "" 4313 else: 4314 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 4315 replace_dict['passcuts_end'] = "ENDIF" 4316 replace_dict['define_subdiag_lines'] = "" 4317 replace_dict['cutsdone'] = " cutsdone=.false.\n cutspassed=.false." 
4318 4319 if not isinstance(self, ProcessExporterFortranMEGroup): 4320 ncomb=matrix_element.get_helicity_combinations() 4321 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 4322 else: 4323 replace_dict['read_write_good_hel'] = "" 4324 4325 context = {'read_write_good_hel':True} 4326 4327 if writer: 4328 file = open(pjoin(_file_path, \ 4329 'iolibs/template_files/auto_dsig_v4.inc')).read() 4330 file = file % replace_dict 4331 4332 # Write the file 4333 writer.writelines(file, context=context) 4334 else: 4335 return replace_dict, context
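# A sketch of the MLM matching bookkeeping done above: the number of jets that
# can be matched for this matrix element is the smaller of the highest QCD
# order of any diagram and the number of massless coloured final-state
# particles, accumulated as a maximum over all exported processes.  The sample
# numbers are invented.
qcd_order_per_diagram = [2, 3, 3]     # e.g. diag.calculate_orders()['QCD']
n_light_final_partons = 2             # massless final-state legs with colour > 1
print(min(max(qcd_order_per_diagram), n_light_final_partons))   # -> 2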
4336 #=========================================================================== 4337 # write_coloramps_file 4338 #===========================================================================
4339 - def write_coloramps_file(self, writer, mapconfigs, matrix_element):
4340 """Write the coloramps.inc file for MadEvent""" 4341 4342 lines = self.get_icolamp_lines(mapconfigs, matrix_element, 1) 4343 lines.insert(0, "logical icolamp(%d,%d,1)" % \ 4344 (max(len(list(matrix_element.get('color_basis').keys())), 1), 4345 len(mapconfigs))) 4346 4347 4348 # Write the file 4349 writer.writelines(lines) 4350 4351 return True
4352 4353 #=========================================================================== 4354 # write_colors_file 4355 #===========================================================================
4356 - def write_colors_file(self, writer, matrix_elements):
4357 """Write the get_color.f file for MadEvent, which returns color 4358 for all particles used in the matrix element.""" 4359 4360 if isinstance(matrix_elements, helas_objects.HelasMatrixElement): 4361 matrix_elements = [matrix_elements] 4362 4363 model = matrix_elements[0].get('processes')[0].get('model') 4364 4365 # We need the both particle and antiparticle wf_ids, since the identity 4366 # depends on the direction of the wf. 4367 wf_ids = set(sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 4368 for wf in d.get('wavefunctions')],[]) \ 4369 for d in me.get('diagrams')], []) \ 4370 for me in matrix_elements], [])) 4371 4372 leg_ids = set(sum([sum([sum([[l.get('id'), 4373 model.get_particle(l.get('id')).get_anti_pdg_code()] \ 4374 for l in p.get_legs_with_decays()], []) \ 4375 for p in me.get('processes')], []) \ 4376 for me in matrix_elements], [])) 4377 particle_ids = sorted(list(wf_ids.union(leg_ids))) 4378 4379 lines = """function get_color(ipdg) 4380 implicit none 4381 integer get_color, ipdg 4382 4383 if(ipdg.eq.%d)then 4384 get_color=%d 4385 return 4386 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color()) 4387 4388 for part_id in particle_ids[1:]: 4389 lines += """else if(ipdg.eq.%d)then 4390 get_color=%d 4391 return 4392 """ % (part_id, model.get_particle(part_id).get_color()) 4393 # Dummy particle for multiparticle vertices with pdg given by 4394 # first code not in the model 4395 lines += """else if(ipdg.eq.%d)then 4396 c This is dummy particle used in multiparticle vertices 4397 get_color=2 4398 return 4399 """ % model.get_first_non_pdg() 4400 lines += """else 4401 write(*,*)'Error: No color given for pdg ',ipdg 4402 get_color=0 4403 return 4404 endif 4405 end 4406 """ 4407 4408 # Write the file 4409 writer.writelines(lines) 4410 4411 return True
4412 4413 #=========================================================================== 4414 # write_config_nqcd_file 4415 #===========================================================================
4416 - def write_config_nqcd_file(self, writer, nqcd_list):
4417 """Write the config_nqcd.inc with the number of QCD couplings 4418 for each config""" 4419 4420 lines = [] 4421 for iconf, n in enumerate(nqcd_list): 4422 lines.append("data nqcd(%d)/%d/" % (iconf+1, n)) 4423 4424 # Write the file 4425 writer.writelines(lines) 4426 4427 return True
4428 4429 #=========================================================================== 4430 # write_maxconfigs_file 4431 #===========================================================================
4432 - def write_maxconfigs_file(self, writer, matrix_elements):
4433 """Write the maxconfigs.inc file for MadEvent""" 4434 4435 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 4436 maxconfigs = max([me.get_num_configs() for me in \ 4437 matrix_elements.get('matrix_elements')]) 4438 else: 4439 maxconfigs = max([me.get_num_configs() for me in matrix_elements]) 4440 4441 lines = "integer lmaxconfigs\n" 4442 lines += "parameter(lmaxconfigs=%d)" % maxconfigs 4443 4444 # Write the file 4445 writer.writelines(lines) 4446 4447 return True
4448 4449 #=========================================================================== 4450 # read_write_good_hel 4451 #===========================================================================
4452 - def read_write_good_hel(self, ncomb):
4453 """return the code to read/write the good_hel common_block""" 4454 4455 convert = {'ncomb' : ncomb} 4456 output = """ 4457 subroutine write_good_hel(stream_id) 4458 implicit none 4459 integer stream_id 4460 INTEGER NCOMB 4461 PARAMETER ( NCOMB=%(ncomb)d) 4462 LOGICAL GOODHEL(NCOMB) 4463 INTEGER NTRY 4464 common/BLOCK_GOODHEL/NTRY,GOODHEL 4465 write(stream_id,*) GOODHEL 4466 return 4467 end 4468 4469 4470 subroutine read_good_hel(stream_id) 4471 implicit none 4472 include 'genps.inc' 4473 integer stream_id 4474 INTEGER NCOMB 4475 PARAMETER ( NCOMB=%(ncomb)d) 4476 LOGICAL GOODHEL(NCOMB) 4477 INTEGER NTRY 4478 common/BLOCK_GOODHEL/NTRY,GOODHEL 4479 read(stream_id,*) GOODHEL 4480 NTRY = MAXTRIES + 1 4481 return 4482 end 4483 4484 subroutine init_good_hel() 4485 implicit none 4486 INTEGER NCOMB 4487 PARAMETER ( NCOMB=%(ncomb)d) 4488 LOGICAL GOODHEL(NCOMB) 4489 INTEGER NTRY 4490 INTEGER I 4491 4492 do i=1,NCOMB 4493 GOODHEL(I) = .false. 4494 enddo 4495 NTRY = 0 4496 end 4497 4498 integer function get_maxsproc() 4499 implicit none 4500 get_maxsproc = 1 4501 return 4502 end 4503 4504 """ % convert 4505 4506 return output
4507 4508 #=========================================================================== 4509 # write_config_subproc_map_file 4510 #===========================================================================
4511 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
4512 """Write a dummy config_subproc.inc file for MadEvent""" 4513 4514 lines = [] 4515 4516 for iconfig in range(len(s_and_t_channels)): 4517 lines.append("DATA CONFSUB(1,%d)/1/" % \ 4518 (iconfig + 1)) 4519 4520 # Write the file 4521 writer.writelines(lines) 4522 4523 return True
4524 4525 #=========================================================================== 4526 # write_configs_file 4527 #===========================================================================
4528 - def write_configs_file(self, writer, matrix_element):
4529 """Write the configs.inc file for MadEvent""" 4530 4531 # Extract number of external particles 4532 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 4533 4534 model = matrix_element.get('processes')[0].get('model') 4535 configs = [(i+1, d) for (i, d) in \ 4536 enumerate(matrix_element.get('diagrams'))] 4537 mapconfigs = [c[0] for c in configs] 4538 return mapconfigs, self.write_configs_file_from_diagrams(writer, 4539 [[c[1]] for c in configs], 4540 mapconfigs, 4541 nexternal, ninitial, 4542 model)
4543 4544 #=========================================================================== 4545 # write_run_configs_file 4546 #===========================================================================
4547 - def write_run_config_file(self, writer):
4548 """Write the run_configs.inc file for MadEvent""" 4549 4550 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 4551 4552 if self.proc_characteristic['loop_induced']: 4553 job_per_chan = 1 4554 else: 4555 job_per_chan = 5 4556 4557 if writer: 4558 text = open(path).read() % {'chanperjob': job_per_chan} 4559 writer.write(text) 4560 return True 4561 else: 4562 return {'chanperjob': job_per_chan}
4563 4564 #=========================================================================== 4565 # write_configs_file_from_diagrams 4566 #===========================================================================
4567 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 4568 nexternal, ninitial, model):
4569 """Write the actual configs.inc file. 4570 4571 configs is the diagrams corresponding to configs (each 4572 diagrams is a list of corresponding diagrams for all 4573 subprocesses, with None if there is no corresponding diagrams 4574 for a given process). 4575 mapconfigs gives the diagram number for each config. 4576 4577 For s-channels, we need to output one PDG for each subprocess in 4578 the subprocess group, in order to be able to pick the right 4579 one for multiprocesses.""" 4580 4581 lines = [] 4582 4583 s_and_t_channels = [] 4584 4585 nqcd_list = [] 4586 4587 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 4588 for config in configs if [d for d in config if d][0].\ 4589 get_vertex_leg_numbers()!=[]] 4590 minvert = min(vert_list) if vert_list!=[] else 0 4591 4592 # Number of subprocesses 4593 nsubprocs = len(configs[0]) 4594 4595 nconfigs = 0 4596 4597 new_pdg = model.get_first_non_pdg() 4598 4599 for iconfig, helas_diags in enumerate(configs): 4600 if any([vert > minvert for vert in 4601 [d for d in helas_diags if d][0].get_vertex_leg_numbers()]): 4602 # Only 3-vertices allowed in configs.inc 4603 continue 4604 nconfigs += 1 4605 4606 # Need s- and t-channels for all subprocesses, including 4607 # those that don't contribute to this config 4608 empty_verts = [] 4609 stchannels = [] 4610 for h in helas_diags: 4611 if h: 4612 # get_s_and_t_channels gives vertices starting from 4613 # final state external particles and working inwards 4614 stchannels.append(h.get('amplitudes')[0].\ 4615 get_s_and_t_channels(ninitial, model, 4616 new_pdg)) 4617 else: 4618 stchannels.append((empty_verts, None)) 4619 4620 # For t-channels, just need the first non-empty one 4621 tchannels = [t for s,t in stchannels if t != None][0] 4622 4623 # For s_and_t_channels (to be used later) use only first config 4624 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 4625 tchannels]) 4626 4627 # Make sure empty_verts is same length as real vertices 4628 if any([s for s,t in stchannels]): 4629 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 4630 4631 # Reorganize s-channel vertices to get a list of all 4632 # subprocesses for each vertex 4633 schannels = list(zip(*[s for s,t in stchannels])) 4634 else: 4635 schannels = [] 4636 4637 allchannels = schannels 4638 if len(tchannels) > 1: 4639 # Write out tchannels only if there are any non-trivial ones 4640 allchannels = schannels + tchannels 4641 4642 # Write out propagators for s-channel and t-channel vertices 4643 4644 lines.append("# Diagram %d" % (mapconfigs[iconfig])) 4645 # Correspondance between the config and the diagram = amp2 4646 lines.append("data mapconfig(%d)/%d/" % (nconfigs, 4647 mapconfigs[iconfig])) 4648 # Number of QCD couplings in this diagram 4649 nqcd = 0 4650 for h in helas_diags: 4651 if h: 4652 try: 4653 nqcd = h.calculate_orders()['QCD'] 4654 except KeyError: 4655 pass 4656 break 4657 else: 4658 continue 4659 4660 nqcd_list.append(nqcd) 4661 4662 for verts in allchannels: 4663 if verts in schannels: 4664 vert = [v for v in verts if v][0] 4665 else: 4666 vert = verts 4667 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 4668 last_leg = vert.get('legs')[-1] 4669 lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 4670 (last_leg.get('number'), nconfigs, len(daughters), 4671 ",".join([str(d) for d in daughters]))) 4672 if verts in schannels: 4673 pdgs = [] 4674 for v in verts: 4675 if v: 4676 pdgs.append(v.get('legs')[-1].get('id')) 4677 else: 4678 pdgs.append(0) 4679 
lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 4680 (last_leg.get('number'), nconfigs, nsubprocs, 4681 ",".join([str(d) for d in pdgs]))) 4682 lines.append("data tprid(%d,%d)/0/" % \ 4683 (last_leg.get('number'), nconfigs)) 4684 elif verts in tchannels[:-1]: 4685 lines.append("data tprid(%d,%d)/%d/" % \ 4686 (last_leg.get('number'), nconfigs, 4687 abs(last_leg.get('id')))) 4688 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 4689 (last_leg.get('number'), nconfigs, nsubprocs, 4690 ",".join(['0'] * nsubprocs))) 4691 4692 # Write out number of configs 4693 lines.append("# Number of configs") 4694 lines.append("data mapconfig(0)/%d/" % nconfigs) 4695 4696 # Write the file 4697 writer.writelines(lines) 4698 4699 return s_and_t_channels, nqcd_list
4700 4701 #=========================================================================== 4702 # write_decayBW_file 4703 #===========================================================================
4704 - def write_decayBW_file(self, writer, s_and_t_channels):
4705 """Write the decayBW.inc file for MadEvent""" 4706 4707 lines = [] 4708 4709 booldict = {None: "0", True: "1", False: "2"} 4710 4711 for iconf, config in enumerate(s_and_t_channels): 4712 schannels = config[0] 4713 for vertex in schannels: 4714 # For the resulting leg, pick out whether it comes from 4715 # decay or not, as given by the onshell flag 4716 leg = vertex.get('legs')[-1] 4717 lines.append("data gForceBW(%d,%d)/%s/" % \ 4718 (leg.get('number'), iconf + 1, 4719 booldict[leg.get('onshell')])) 4720 4721 # Write the file 4722 writer.writelines(lines) 4723 4724 return True
4725 4726 #=========================================================================== 4727 # write_dname_file 4728 #===========================================================================
4729 - def write_dname_file(self, writer, dir_name):
4730 """Write the dname.mg file for MG4""" 4731 4732 line = "DIRNAME=%s" % dir_name 4733 4734 # Write the file 4735 writer.write(line + "\n") 4736 4737 return True
4738 4739 #=========================================================================== 4740 # write_driver 4741 #===========================================================================
4742 - def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
4743 """Write the SubProcess/driver.f file for MG4""" 4744 4745 path = pjoin(_file_path,'iolibs','template_files','madevent_driver.f') 4746 4747 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4748 card = 'Source/MODEL/MG5_param.dat' 4749 else: 4750 card = 'param_card.dat' 4751 # Requiring each helicity configuration to be probed by 10 points for 4752 # matrix element before using the resulting grid for MC over helicity 4753 # sampling. 4754 # We multiply this by 2 because each grouped subprocess is called at most 4755 # twice for each IMIRROR. 4756 replace_dict = {'param_card_name':card, 4757 'ncomb':ncomb, 4758 'hel_init_points':n_grouped_proc*10*2} 4759 if not v5: 4760 replace_dict['secondparam']=',.true.' 4761 else: 4762 replace_dict['secondparam']='' 4763 4764 if writer: 4765 text = open(path).read() % replace_dict 4766 writer.write(text) 4767 return True 4768 else: 4769 return replace_dict
4770 4771 #=========================================================================== 4772 # write_addmothers 4773 #===========================================================================
4774 - def write_addmothers(self, writer):
4775 """Write the SubProcess/addmothers.f""" 4776 4777 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 4778 4779 text = open(path).read() % {'iconfig': 'diag_number'} 4780 writer.write(text) 4781 4782 return True
4783 4784 4785 #=========================================================================== 4786 # write_combine_events 4787 #===========================================================================
4788 - def write_combine_events(self, writer, nb_proc=100):
4789 """Write the SubProcess/driver.f file for MG4""" 4790 4791 path = pjoin(_file_path,'iolibs','template_files','madevent_combine_events.f') 4792 4793 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4794 card = 'Source/MODEL/MG5_param.dat' 4795 else: 4796 card = 'param_card.dat' 4797 4798 #set maxpup (number of @X in the process card) 4799 4800 text = open(path).read() % {'param_card_name':card, 'maxpup':nb_proc+1} 4801 #the +1 is just a security. This is not needed but I feel(OM) safer with it. 4802 writer.write(text) 4803 4804 return True
4805 4806 4807 #=========================================================================== 4808 # write_symmetry 4809 #===========================================================================
4810 - def write_symmetry(self, writer, v5=True):
4811 """Write the SubProcess/driver.f file for ME""" 4812 4813 path = pjoin(_file_path,'iolibs','template_files','madevent_symmetry.f') 4814 4815 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4816 card = 'Source/MODEL/MG5_param.dat' 4817 else: 4818 card = 'param_card.dat' 4819 4820 if v5: 4821 replace_dict = {'param_card_name':card, 'setparasecondarg':''} 4822 else: 4823 replace_dict= {'param_card_name':card, 'setparasecondarg':',.true.'} 4824 4825 if writer: 4826 text = open(path).read() 4827 text = text % replace_dict 4828 writer.write(text) 4829 return True 4830 else: 4831 return replace_dict
4832 4833 4834 4835 #=========================================================================== 4836 # write_iproc_file 4837 #===========================================================================
4838 - def write_iproc_file(self, writer, me_number):
4839 """Write the iproc.dat file for MG4""" 4840 line = "%d" % (me_number + 1) 4841 4842 # Write the file 4843 for line_to_write in writer.write_line(line): 4844 writer.write(line_to_write) 4845 return True
4846 4847 #=========================================================================== 4848 # write_mg_sym_file 4849 #===========================================================================
4850 - def write_mg_sym_file(self, writer, matrix_element):
4851 """Write the mg.sym file for MadEvent.""" 4852 4853 lines = [] 4854 4855 # Extract process with all decays included 4856 final_legs = [leg for leg in matrix_element.get('processes')[0].get_legs_with_decays() if leg.get('state') == True] 4857 4858 ninitial = len([leg for leg in matrix_element.get('processes')[0].get('legs') if leg.get('state') == False]) 4859 4860 identical_indices = {} 4861 4862 # Extract identical particle info 4863 for i, leg in enumerate(final_legs): 4864 if leg.get('id') in identical_indices: 4865 identical_indices[leg.get('id')].append(\ 4866 i + ninitial + 1) 4867 else: 4868 identical_indices[leg.get('id')] = [i + ninitial + 1] 4869 4870 # Remove keys which have only one particle 4871 for key in list(identical_indices.keys()): 4872 if len(identical_indices[key]) < 2: 4873 del identical_indices[key] 4874 4875 # Write mg.sym file 4876 lines.append(str(len(list(identical_indices.keys())))) 4877 for key in identical_indices.keys(): 4878 lines.append(str(len(identical_indices[key]))) 4879 for number in identical_indices[key]: 4880 lines.append(str(number)) 4881 4882 # Write the file 4883 writer.writelines(lines) 4884 4885 return True
4886 4887 #=========================================================================== 4888 # write_default_mg_sym_file 4889 #===========================================================================
4890 - def write_default_mg_sym_file(self, writer):
4891 """Write the mg.sym file for MadEvent.""" 4892 4893 lines = "0" 4894 4895 # Write the file 4896 writer.writelines(lines) 4897 4898 return True
4899 4900 #=========================================================================== 4901 # write_ncombs_file 4902 #===========================================================================
4903 - def write_ncombs_file(self, writer, nexternal):
4904 """Write the ncombs.inc file for MadEvent.""" 4905 4906 # ncomb (used for clustering) is 2^nexternal 4907 file = " integer n_max_cl\n" 4908 file = file + "parameter (n_max_cl=%d)" % (2 ** nexternal) 4909 4910 # Write the file 4911 writer.writelines(file) 4912 4913 return True
4914 4915 #=========================================================================== 4916 # write_processes_file 4917 #===========================================================================
4918 - def write_processes_file(self, writer, subproc_group):
4919 """Write the processes.dat file with info about the subprocesses 4920 in this group.""" 4921 4922 lines = [] 4923 4924 for ime, me in \ 4925 enumerate(subproc_group.get('matrix_elements')): 4926 lines.append("%s %s" % (str(ime+1) + " " * (7-len(str(ime+1))), 4927 ",".join(p.base_string() for p in \ 4928 me.get('processes')))) 4929 if me.get('has_mirror_process'): 4930 mirror_procs = [copy.copy(p) for p in me.get('processes')] 4931 for proc in mirror_procs: 4932 legs = copy.copy(proc.get('legs_with_decays')) 4933 legs.insert(0, legs.pop(1)) 4934 proc.set("legs_with_decays", legs) 4935 lines.append("mirror %s" % ",".join(p.base_string() for p in \ 4936 mirror_procs)) 4937 else: 4938 lines.append("mirror none") 4939 4940 # Write the file 4941 writer.write("\n".join(lines)) 4942 4943 return True
4944 4945 #=========================================================================== 4946 # write_symswap_file 4947 #===========================================================================
4948 - def write_symswap_file(self, writer, ident_perms):
4949 """Write the file symswap.inc for MG4 by comparing diagrams using 4950 the internal matrix element value functionality.""" 4951 4952 lines = [] 4953 4954 # Write out lines for symswap.inc file (used to permute the 4955 # external leg momenta 4956 for iperm, perm in enumerate(ident_perms): 4957 lines.append("data (isym(i,%d),i=1,nexternal)/%s/" % \ 4958 (iperm+1, ",".join([str(i+1) for i in perm]))) 4959 lines.append("data nsym/%d/" % len(ident_perms)) 4960 4961 # Write the file 4962 writer.writelines(lines) 4963 4964 return True
4965 4966 #=========================================================================== 4967 # write_symfact_file 4968 #===========================================================================
4969 - def write_symfact_file(self, writer, symmetry):
4970 """Write the files symfact.dat for MG4 by comparing diagrams using 4971 the internal matrix element value functionality.""" 4972 4973 pos = max(2, int(math.ceil(math.log10(len(symmetry))))) 4974 form = "%"+str(pos)+"r %"+str(pos+1)+"r" 4975 # Write out lines for symswap.inc file (used to permute the 4976 # external leg momenta 4977 lines = [ form %(i+1, s) for i,s in enumerate(symmetry) if s != 0] 4978 # Write the file 4979 writer.write('\n'.join(lines)) 4980 writer.write('\n') 4981 4982 return True
4983 4984 #=========================================================================== 4985 # write_symperms_file 4986 #===========================================================================
4987 - def write_symperms_file(self, writer, perms):
4988 """Write the symperms.inc file for subprocess group, used for 4989 symmetric configurations""" 4990 4991 lines = [] 4992 for iperm, perm in enumerate(perms): 4993 lines.append("data (perms(i,%d),i=1,nexternal)/%s/" % \ 4994 (iperm+1, ",".join([str(i+1) for i in perm]))) 4995 4996 # Write the file 4997 writer.writelines(lines) 4998 4999 return True
5000 5001 #=========================================================================== 5002 # write_subproc 5003 #===========================================================================
5004 - def write_subproc(self, writer, subprocdir):
5005 """Append this subprocess to the subproc.mg file for MG4""" 5006 5007 # Write line to file 5008 writer.write(subprocdir + "\n") 5009 5010 return True
5011
5012 #=============================================================================== 5013 # ProcessExporterFortranMEGroup 5014 #=============================================================================== 5015 -class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
5016 """Class to take care of exporting a set of matrix elements to 5017 MadEvent subprocess group format.""" 5018 5019 matrix_file = "matrix_madevent_group_v4.inc" 5020 grouped_mode = 'madevent' 5021 #=========================================================================== 5022 # generate_subprocess_directory 5023 #===========================================================================
5024 - def generate_subprocess_directory(self, subproc_group, 5025 fortran_model, 5026 group_number):
5027 """Generate the Pn directory for a subprocess group in MadEvent, 5028 including the necessary matrix_N.f files, configs.inc and various 5029 other helper files.""" 5030 5031 assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \ 5032 "subproc_group object not SubProcessGroup" 5033 5034 if not self.model: 5035 self.model = subproc_group.get('matrix_elements')[0].\ 5036 get('processes')[0].get('model') 5037 5038 cwd = os.getcwd() 5039 path = pjoin(self.dir_path, 'SubProcesses') 5040 5041 os.chdir(path) 5042 pathdir = os.getcwd() 5043 5044 # Create the directory PN in the specified path 5045 subprocdir = "P%d_%s" % (subproc_group.get('number'), 5046 subproc_group.get('name')) 5047 try: 5048 os.mkdir(subprocdir) 5049 except os.error as error: 5050 logger.warning(error.strerror + " " + subprocdir) 5051 5052 try: 5053 os.chdir(subprocdir) 5054 except os.error: 5055 logger.error('Could not cd to directory %s' % subprocdir) 5056 return 0 5057 5058 logger.info('Creating files in directory %s' % subprocdir) 5059 5060 # Create the matrix.f files, auto_dsig.f files and all inc files 5061 # for all subprocesses in the group 5062 5063 maxamps = 0 5064 maxflows = 0 5065 tot_calls = 0 5066 5067 matrix_elements = subproc_group.get('matrix_elements') 5068 5069 # Add the driver.f, all grouped ME's must share the same number of 5070 # helicity configuration 5071 ncomb = matrix_elements[0].get_helicity_combinations() 5072 for me in matrix_elements[1:]: 5073 if ncomb!=me.get_helicity_combinations(): 5074 raise MadGraph5Error("All grouped processes must share the "+\ 5075 "same number of helicity configurations.") 5076 5077 filename = 'driver.f' 5078 self.write_driver(writers.FortranWriter(filename),ncomb, 5079 n_grouped_proc=len(matrix_elements), v5=self.opt['v5_model']) 5080 5081 for ime, matrix_element in \ 5082 enumerate(matrix_elements): 5083 filename = 'matrix%d.f' % (ime+1) 5084 calls, ncolor = \ 5085 self.write_matrix_element_v4(writers.FortranWriter(filename), 5086 matrix_element, 5087 fortran_model, 5088 proc_id=str(ime+1), 5089 config_map=subproc_group.get('diagram_maps')[ime], 5090 subproc_number=group_number) 5091 5092 filename = 'auto_dsig%d.f' % (ime+1) 5093 self.write_auto_dsig_file(writers.FortranWriter(filename), 5094 matrix_element, 5095 str(ime+1)) 5096 5097 # Keep track of needed quantities 5098 tot_calls += int(calls) 5099 maxflows = max(maxflows, ncolor) 5100 maxamps = max(maxamps, len(matrix_element.get('diagrams'))) 5101 5102 # Draw diagrams 5103 if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True': 5104 filename = "matrix%d.ps" % (ime+1) 5105 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 5106 get('diagrams'), 5107 filename, 5108 model = \ 5109 matrix_element.get('processes')[0].\ 5110 get('model'), 5111 amplitude=True) 5112 logger.info("Generating Feynman diagrams for " + \ 5113 matrix_element.get('processes')[0].nice_string()) 5114 plot.draw() 5115 5116 # Extract number of external particles 5117 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 5118 5119 # Generate a list of diagrams corresponding to each configuration 5120 # [[d1, d2, ...,dn],...] 
where 1,2,...,n is the subprocess number 5121 # If a subprocess has no diagrams for this config, the number is 0 5122 subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs') 5123 5124 filename = 'auto_dsig.f' 5125 self.write_super_auto_dsig_file(writers.FortranWriter(filename), 5126 subproc_group) 5127 5128 filename = 'coloramps.inc' 5129 self.write_coloramps_file(writers.FortranWriter(filename), 5130 subproc_diagrams_for_config, 5131 maxflows, 5132 matrix_elements) 5133 5134 filename = 'get_color.f' 5135 self.write_colors_file(writers.FortranWriter(filename), 5136 matrix_elements) 5137 5138 filename = 'config_subproc_map.inc' 5139 self.write_config_subproc_map_file(writers.FortranWriter(filename), 5140 subproc_diagrams_for_config) 5141 5142 filename = 'configs.inc' 5143 nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\ 5144 writers.FortranWriter(filename), 5145 subproc_group, 5146 subproc_diagrams_for_config) 5147 5148 filename = 'config_nqcd.inc' 5149 self.write_config_nqcd_file(writers.FortranWriter(filename), 5150 nqcd_list) 5151 5152 filename = 'decayBW.inc' 5153 self.write_decayBW_file(writers.FortranWriter(filename), 5154 s_and_t_channels) 5155 5156 filename = 'dname.mg' 5157 self.write_dname_file(writers.FortranWriter(filename), 5158 subprocdir) 5159 5160 filename = 'iproc.dat' 5161 self.write_iproc_file(writers.FortranWriter(filename), 5162 group_number) 5163 5164 filename = 'leshouche.inc' 5165 self.write_leshouche_file(writers.FortranWriter(filename), 5166 subproc_group) 5167 5168 filename = 'maxamps.inc' 5169 self.write_maxamps_file(writers.FortranWriter(filename), 5170 maxamps, 5171 maxflows, 5172 max([len(me.get('processes')) for me in \ 5173 matrix_elements]), 5174 len(matrix_elements)) 5175 5176 # Note that mg.sym is not relevant for this case 5177 filename = 'mg.sym' 5178 self.write_default_mg_sym_file(writers.FortranWriter(filename)) 5179 5180 filename = 'mirrorprocs.inc' 5181 self.write_mirrorprocs(writers.FortranWriter(filename), 5182 subproc_group) 5183 5184 filename = 'ncombs.inc' 5185 self.write_ncombs_file(writers.FortranWriter(filename), 5186 nexternal) 5187 5188 filename = 'nexternal.inc' 5189 self.write_nexternal_file(writers.FortranWriter(filename), 5190 nexternal, ninitial) 5191 5192 filename = 'ngraphs.inc' 5193 self.write_ngraphs_file(writers.FortranWriter(filename), 5194 nconfigs) 5195 5196 filename = 'pmass.inc' 5197 self.write_pmass_file(writers.FortranWriter(filename), 5198 matrix_element) 5199 5200 filename = 'props.inc' 5201 self.write_props_file(writers.FortranWriter(filename), 5202 matrix_element, 5203 s_and_t_channels) 5204 5205 filename = 'processes.dat' 5206 files.write_to_file(filename, 5207 self.write_processes_file, 5208 subproc_group) 5209 5210 # Find config symmetries and permutations 5211 symmetry, perms, ident_perms = \ 5212 diagram_symmetry.find_symmetry(subproc_group) 5213 5214 filename = 'symswap.inc' 5215 self.write_symswap_file(writers.FortranWriter(filename), 5216 ident_perms) 5217 5218 filename = 'symfact_orig.dat' 5219 self.write_symfact_file(open(filename, 'w'), symmetry) 5220 5221 # check consistency 5222 for i, sym_fact in enumerate(symmetry): 5223 5224 if sym_fact >= 0: 5225 continue 5226 if nqcd_list[i] != nqcd_list[abs(sym_fact)-1]: 5227 misc.sprint(i, sym_fact, nqcd_list[i], nqcd_list[abs(sym_fact)]) 5228 raise Exception("identical diagram with different QCD powwer") 5229 5230 5231 filename = 'symperms.inc' 5232 self.write_symperms_file(writers.FortranWriter(filename), 5233 perms) 5234 5235 # 
Generate jpgs -> pass in make_html 5236 #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl')) 5237 5238 self.link_files_in_SubProcess(pjoin(pathdir,subprocdir)) 5239 5240 #import nexternal/leshouch in Source 5241 ln('nexternal.inc', '../../Source', log=False) 5242 ln('leshouche.inc', '../../Source', log=False) 5243 ln('maxamps.inc', '../../Source', log=False) 5244 5245 # Return to SubProcesses dir) 5246 os.chdir(pathdir) 5247 5248 # Add subprocess to subproc.mg 5249 filename = 'subproc.mg' 5250 files.append_to_file(filename, 5251 self.write_subproc, 5252 subprocdir) 5253 5254 # Return to original dir 5255 os.chdir(cwd) 5256 5257 if not tot_calls: 5258 tot_calls = 0 5259 return tot_calls
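    # Usage sketch (assumes an already-configured exporter and a
    # group_subprocs.SubProcessGroup instance; the group number is hypothetical):
    #   calls = self.generate_subprocess_directory(subproc_group, fortran_model,
    #                                              group_number=1)
    # This creates SubProcesses/P<number>_<name>/ with matrix<i>.f, auto_dsig<i>.f,
    # configs.inc, decayBW.inc, symfact_orig.dat, ... and returns the total number
    # of helas calls.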
5260 5261 #=========================================================================== 5262 # write_super_auto_dsig_file 5263 #===========================================================================
5264 - def write_super_auto_dsig_file(self, writer, subproc_group):
5265 """Write the auto_dsig.f file selecting between the subprocesses 5266 in subprocess group mode""" 5267 5268 replace_dict = {} 5269 5270 # Extract version number and date from VERSION file 5271 info_lines = self.get_mg5_info_lines() 5272 replace_dict['info_lines'] = info_lines 5273 5274 matrix_elements = subproc_group.get('matrix_elements') 5275 5276 # Extract process info lines 5277 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 5278 matrix_elements]) 5279 replace_dict['process_lines'] = process_lines 5280 5281 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 5282 replace_dict['nexternal'] = nexternal 5283 5284 replace_dict['nsprocs'] = 2*len(matrix_elements) 5285 5286 # Generate dsig definition line 5287 dsig_def_line = "DOUBLE PRECISION " + \ 5288 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 5289 range(len(matrix_elements))]) 5290 replace_dict["dsig_def_line"] = dsig_def_line 5291 5292 # Generate dsig process lines 5293 call_dsig_proc_lines = [] 5294 for iproc in range(len(matrix_elements)): 5295 call_dsig_proc_lines.append(\ 5296 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 5297 {"num": iproc + 1, 5298 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 5299 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 5300 5301 ncomb=matrix_elements[0].get_helicity_combinations() 5302 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 5303 5304 s1,s2 = matrix_elements[0].get_spin_state_initial() 5305 replace_dict['nb_spin_state1'] = s1 5306 replace_dict['nb_spin_state2'] = s2 5307 5308 if writer: 5309 file = open(pjoin(_file_path, \ 5310 'iolibs/template_files/super_auto_dsig_group_v4.inc')).read() 5311 file = file % replace_dict 5312 5313 # Write the file 5314 writer.writelines(file) 5315 else: 5316 return replace_dict
5317 5318 #=========================================================================== 5319 # write_mirrorprocs 5320 #===========================================================================
5321 - def write_mirrorprocs(self, writer, subproc_group):
5322 """Write the mirrorprocs.inc file determining which processes have 5323 IS mirror process in subprocess group mode.""" 5324 5325 lines = [] 5326 bool_dict = {True: '.true.', False: '.false.'} 5327 matrix_elements = subproc_group.get('matrix_elements') 5328 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 5329 (len(matrix_elements), 5330 ",".join([bool_dict[me.get('has_mirror_process')] for \ 5331 me in matrix_elements]))) 5332 # Write the file 5333 writer.writelines(lines)
5334 5335 #=========================================================================== 5336 # write_addmothers 5337 #===========================================================================
5338 - def write_addmothers(self, writer):
5339 """Write the SubProcess/addmothers.f""" 5340 5341 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 5342 5343 text = open(path).read() % {'iconfig': 'lconfig'} 5344 writer.write(text) 5345 5346 return True
5347 5348 5349 #=========================================================================== 5350 # write_coloramps_file 5351 #===========================================================================
5352 - def write_coloramps_file(self, writer, diagrams_for_config, maxflows, 5353 matrix_elements):
5354 """Write the coloramps.inc file for MadEvent in Subprocess group mode""" 5355 5356 # Create a map from subprocess (matrix element) to a list of 5357 # the diagrams corresponding to each config 5358 5359 lines = [] 5360 5361 subproc_to_confdiag = {} 5362 for config in diagrams_for_config: 5363 for subproc, diag in enumerate(config): 5364 try: 5365 subproc_to_confdiag[subproc].append(diag) 5366 except KeyError: 5367 subproc_to_confdiag[subproc] = [diag] 5368 5369 for subproc in sorted(subproc_to_confdiag.keys()): 5370 lines.extend(self.get_icolamp_lines(subproc_to_confdiag[subproc], 5371 matrix_elements[subproc], 5372 subproc + 1)) 5373 5374 lines.insert(0, "logical icolamp(%d,%d,%d)" % \ 5375 (maxflows, 5376 len(diagrams_for_config), 5377 len(matrix_elements))) 5378 5379 # Write the file 5380 writer.writelines(lines) 5381 5382 return True
5383 5384 #=========================================================================== 5385 # write_config_subproc_map_file 5386 #===========================================================================
5387 - def write_config_subproc_map_file(self, writer, config_subproc_map):
5388 """Write the config_subproc_map.inc file for subprocess groups""" 5389 5390 lines = [] 5391 # Output only configs that have some corresponding diagrams 5392 iconfig = 0 5393 for config in config_subproc_map: 5394 if set(config) == set([0]): 5395 continue 5396 lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" % \ 5397 (iconfig + 1, len(config), 5398 ",".join([str(i) for i in config]))) 5399 iconfig += 1 5400 # Write the file 5401 writer.writelines(lines) 5402 5403 return True
5404 5405 #=========================================================================== 5406 # read_write_good_hel 5407 #===========================================================================
5408 - def read_write_good_hel(self, ncomb):
5409 """return the code to read/write the good_hel common_block""" 5410 5411 convert = {'ncomb' : ncomb} 5412 5413 output = """ 5414 subroutine write_good_hel(stream_id) 5415 implicit none 5416 integer stream_id 5417 INTEGER NCOMB 5418 PARAMETER ( NCOMB=%(ncomb)d) 5419 LOGICAL GOODHEL(NCOMB, 2) 5420 INTEGER NTRY(2) 5421 common/BLOCK_GOODHEL/NTRY,GOODHEL 5422 write(stream_id,*) GOODHEL 5423 return 5424 end 5425 5426 5427 subroutine read_good_hel(stream_id) 5428 implicit none 5429 include 'genps.inc' 5430 integer stream_id 5431 INTEGER NCOMB 5432 PARAMETER ( NCOMB=%(ncomb)d) 5433 LOGICAL GOODHEL(NCOMB, 2) 5434 INTEGER NTRY(2) 5435 common/BLOCK_GOODHEL/NTRY,GOODHEL 5436 read(stream_id,*) GOODHEL 5437 NTRY(1) = MAXTRIES + 1 5438 NTRY(2) = MAXTRIES + 1 5439 return 5440 end 5441 5442 subroutine init_good_hel() 5443 implicit none 5444 INTEGER NCOMB 5445 PARAMETER ( NCOMB=%(ncomb)d) 5446 LOGICAL GOODHEL(NCOMB, 2) 5447 INTEGER NTRY(2) 5448 INTEGER I 5449 5450 do i=1,NCOMB 5451 GOODHEL(I,1) = .false. 5452 GOODHEL(I,2) = .false. 5453 enddo 5454 NTRY(1) = 0 5455 NTRY(2) = 0 5456 end 5457 5458 integer function get_maxsproc() 5459 implicit none 5460 include 'maxamps.inc' 5461 5462 get_maxsproc = maxsproc 5463 return 5464 end 5465 5466 """ % convert 5467 5468 return output
5469 5470 5471 5472 #=========================================================================== 5473 # write_configs_file 5474 #===========================================================================
5475 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
5476 """Write the configs.inc file with topology information for a 5477 subprocess group. Use the first subprocess with a diagram for each 5478 configuration.""" 5479 5480 matrix_elements = subproc_group.get('matrix_elements') 5481 model = matrix_elements[0].get('processes')[0].get('model') 5482 5483 diagrams = [] 5484 config_numbers = [] 5485 for iconfig, config in enumerate(diagrams_for_config): 5486 # Check if any diagrams correspond to this config 5487 if set(config) == set([0]): 5488 continue 5489 subproc_diags = [] 5490 for s,d in enumerate(config): 5491 if d: 5492 subproc_diags.append(matrix_elements[s].\ 5493 get('diagrams')[d-1]) 5494 else: 5495 subproc_diags.append(None) 5496 diagrams.append(subproc_diags) 5497 config_numbers.append(iconfig + 1) 5498 5499 # Extract number of external particles 5500 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 5501 5502 return len(diagrams), \ 5503 self.write_configs_file_from_diagrams(writer, diagrams, 5504 config_numbers, 5505 nexternal, ninitial, 5506 model)
5507 5508 #=========================================================================== 5509 # write_run_configs_file 5510 #===========================================================================
5511 - def write_run_config_file(self, writer):
5512 """Write the run_configs.inc file for MadEvent""" 5513 5514 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 5515 if self.proc_characteristic['loop_induced']: 5516 job_per_chan = 1 5517 else: 5518 job_per_chan = 2 5519 text = open(path).read() % {'chanperjob':job_per_chan} 5520 writer.write(text) 5521 return True
5522 5523 5524 #=========================================================================== 5525 # write_leshouche_file 5526 #===========================================================================
5527 - def write_leshouche_file(self, writer, subproc_group):
5528 """Write the leshouche.inc file for MG4""" 5529 5530 all_lines = [] 5531 5532 for iproc, matrix_element in \ 5533 enumerate(subproc_group.get('matrix_elements')): 5534 all_lines.extend(self.get_leshouche_lines(matrix_element, 5535 iproc)) 5536 # Write the file 5537 writer.writelines(all_lines) 5538 return True
5539 5540
5541 - def finalize(self,*args, **opts):
5542 5543 super(ProcessExporterFortranMEGroup, self).finalize(*args, **opts) 5544 # ensure that the grouping information is set to the correct value 5545 self.proc_characteristic['grouped_matrix'] = True
5546 5547 5548 #=============================================================================== 5549 # UFO_model_to_mg4 5550 #=============================================================================== 5551 5552 python_to_fortran = lambda x: parsers.UFOExpressionParserFortran().parse(x)
5553 5554 -class UFO_model_to_mg4(object):
5555 """ A converter of the UFO-MG5 Model to the MG4 format """ 5556 5557 # The list below shows the only variables the user is allowed to change by 5558 # himself for each PS point. If he changes any other, then calling 5559 # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not 5560 # correctly account for the change. 5561 PS_dependent_key = ['aS','MU_R'] 5562 mp_complex_format = 'complex*32' 5563 mp_real_format = 'real*16' 5564 # Warning, it is crucial none of the couplings/parameters of the model 5565 # starts with this prefix. I should add a check for this. 5566 # You can change it as the global variable to check_param_card.ParamCard 5567 mp_prefix = check_param_card.ParamCard.mp_prefix 5568
5569 - def __init__(self, model, output_path, opt=None):
5570 """ initialization of the objects """ 5571 5572 self.model = model 5573 self.model_name = model['name'] 5574 self.dir_path = output_path 5575 5576 self.opt = {'complex_mass': False, 'export_format': 'madevent', 'mp':True, 5577 'loop_induced': False} 5578 if opt: 5579 self.opt.update(opt) 5580 5581 self.coups_dep = [] # (name, expression, type) 5582 self.coups_indep = [] # (name, expression, type) 5583 self.params_dep = [] # (name, expression, type) 5584 self.params_indep = [] # (name, expression, type) 5585 self.params_ext = [] # external parameter 5586 self.p_to_f = parsers.UFOExpressionParserFortran(self.model) 5587 self.mp_p_to_f = parsers.UFOExpressionParserMPFortran(self.model)
5588 
5589 - def pass_parameter_to_case_insensitive(self):
5590 """modify the parameter if some of them are identical up to the case""" 5591 5592 lower_dict={} 5593 duplicate = set() 5594 keys = list(self.model['parameters'].keys()) 5595 keys.sort() 5596 for key in keys: 5597 for param in self.model['parameters'][key]: 5598 lower_name = param.name.lower() 5599 if not lower_name: 5600 continue 5601 try: 5602 lower_dict[lower_name].append(param) 5603 except KeyError as error: 5604 lower_dict[lower_name] = [param] 5605 else: 5606 duplicate.add(lower_name) 5607 logger.debug('%s is define both as lower case and upper case.' 5608 % lower_name) 5609 if not duplicate: 5610 return 5611 5612 re_expr = r'''\b(%s)\b''' 5613 to_change = [] 5614 change={} 5615 for value in duplicate: 5616 for i, var in enumerate(lower_dict[value]): 5617 to_change.append(var.name) 5618 new_name = '%s%s' % (var.name.lower(), 5619 ('__%d'%(i+1) if i>0 else '')) 5620 change[var.name] = new_name 5621 var.name = new_name 5622 5623 # Apply the modification to the map_CTcoup_CTparam of the model 5624 # if it has one (giving for each coupling the CT parameters whcih 5625 # are necessary and which should be exported to the model. 5626 if hasattr(self.model,'map_CTcoup_CTparam'): 5627 for coup, ctparams in self.model.map_CTcoup_CTparam: 5628 for i, ctparam in enumerate(ctparams): 5629 try: 5630 self.model.map_CTcoup_CTparam[coup][i] = change[ctparam] 5631 except KeyError: 5632 pass 5633 5634 replace = lambda match_pattern: change[match_pattern.groups()[0]] 5635 rep_pattern = re.compile(re_expr % '|'.join(to_change)) 5636 5637 # change parameters 5638 for key in keys: 5639 if key == ('external',): 5640 continue 5641 for param in self.model['parameters'][key]: 5642 param.expr = rep_pattern.sub(replace, param.expr) 5643 5644 # change couplings 5645 for key in self.model['couplings'].keys(): 5646 for coup in self.model['couplings'][key]: 5647 coup.expr = rep_pattern.sub(replace, coup.expr) 5648 5649 # change mass/width 5650 for part in self.model['particles']: 5651 if str(part.get('mass')) in to_change: 5652 part.set('mass', rep_pattern.sub(replace, str(part.get('mass')))) 5653 if str(part.get('width')) in to_change: 5654 part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
5655
5656 - def refactorize(self, wanted_couplings = []):
5657 """modify the couplings to fit with MG4 convention """ 5658 5659 # Keep only separation in alphaS 5660 keys = list(self.model['parameters'].keys()) 5661 keys.sort(key=len) 5662 for key in keys: 5663 to_add = [o for o in self.model['parameters'][key] if o.name] 5664 5665 if key == ('external',): 5666 self.params_ext += to_add 5667 elif any([(k in key) for k in self.PS_dependent_key]): 5668 self.params_dep += to_add 5669 else: 5670 self.params_indep += to_add 5671 # same for couplings 5672 keys = list(self.model['couplings'].keys()) 5673 keys.sort(key=len) 5674 for key, coup_list in self.model['couplings'].items(): 5675 if any([(k in key) for k in self.PS_dependent_key]): 5676 self.coups_dep += [c for c in coup_list if 5677 (not wanted_couplings or c.name in \ 5678 wanted_couplings)] 5679 else: 5680 self.coups_indep += [c for c in coup_list if 5681 (not wanted_couplings or c.name in \ 5682 wanted_couplings)] 5683 5684 # MG4 use G and not aS as it basic object for alphas related computation 5685 #Pass G in the independant list 5686 if 'G' in self.params_dep: 5687 index = self.params_dep.index('G') 5688 G = self.params_dep.pop(index) 5689 # G.expr = '2*cmath.sqrt(as*pi)' 5690 # self.params_indep.insert(0, self.params_dep.pop(index)) 5691 # No need to add it if not defined 5692 5693 if 'aS' not in self.params_ext: 5694 logger.critical('aS not define as external parameter adding it!') 5695 #self.model['parameters']['aS'] = base_objects.ParamCardVariable('aS', 0.138,'DUMMY',(1,)) 5696 self.params_indep.append( base_objects. ModelVariable('aS', '0.138','real')) 5697 self.params_indep.append( base_objects. ModelVariable('G', '4.1643','real'))
5698 - def build(self, wanted_couplings = [], full=True):
5699 """modify the couplings to fit with MG4 convention and creates all the 5700 different files""" 5701 5702 self.pass_parameter_to_case_insensitive() 5703 self.refactorize(wanted_couplings) 5704 5705 # write the files 5706 if full: 5707 if wanted_couplings: 5708 # extract the wanted ct parameters 5709 self.extract_needed_CTparam(wanted_couplings=wanted_couplings) 5710 self.write_all()
5711 5712
5713 - def open(self, name, comment='c', format='default'):
5714 """ Open the file name in the correct directory and with a valid 5715 header.""" 5716 5717 file_path = pjoin(self.dir_path, name) 5718 5719 if format == 'fortran': 5720 fsock = writers.FortranWriter(file_path, 'w') 5721 write_class = io.FileIO 5722 5723 write_class.writelines(fsock, comment * 77 + '\n') 5724 write_class.writelines(fsock, '%(comment)s written by the UFO converter\n' % \ 5725 {'comment': comment + (6 - len(comment)) * ' '}) 5726 write_class.writelines(fsock, comment * 77 + '\n\n') 5727 else: 5728 fsock = open(file_path, 'w') 5729 fsock.writelines(comment * 77 + '\n') 5730 fsock.writelines('%(comment)s written by the UFO converter\n' % \ 5731 {'comment': comment + (6 - len(comment)) * ' '}) 5732 fsock.writelines(comment * 77 + '\n\n') 5733 return fsock
5734 5735
5736 - def write_all(self):
5737 """ write all the files """ 5738 #write the part related to the external parameter 5739 self.create_ident_card() 5740 self.create_param_read() 5741 5742 #write the definition of the parameter 5743 self.create_input() 5744 self.create_intparam_def(dp=True,mp=False) 5745 if self.opt['mp']: 5746 self.create_intparam_def(dp=False,mp=True) 5747 5748 # definition of the coupling. 5749 self.create_actualize_mp_ext_param_inc() 5750 self.create_coupl_inc() 5751 self.create_write_couplings() 5752 self.create_couplings() 5753 5754 # the makefile 5755 self.create_makeinc() 5756 self.create_param_write() 5757 5758 # The model functions 5759 self.create_model_functions_inc() 5760 self.create_model_functions_def() 5761 5762 # The param_card.dat 5763 self.create_param_card() 5764 5765 5766 # All the standard files 5767 self.copy_standard_file()
5768 5769 ############################################################################ 5770 ## ROUTINE CREATING THE FILES ############################################ 5771 ############################################################################ 5772
5773 - def copy_standard_file(self):
5774 """Copy the standard files for the fortran model.""" 5775 5776 #copy the library files 5777 file_to_link = ['formats.inc','printout.f', \ 5778 'rw_para.f', 'testprog.f'] 5779 5780 for filename in file_to_link: 5781 cp( MG5DIR + '/models/template_files/fortran/' + filename, \ 5782 self.dir_path) 5783 5784 file = open(os.path.join(MG5DIR,\ 5785 'models/template_files/fortran/rw_para.f')).read() 5786 5787 includes=["include \'coupl.inc\'","include \'input.inc\'", 5788 "include \'model_functions.inc\'"] 5789 if self.opt['mp']: 5790 includes.extend(["include \'mp_coupl.inc\'","include \'mp_input.inc\'"]) 5791 # In standalone and madloop we do no use the compiled param card but 5792 # still parse the .dat one so we must load it. 5793 if self.opt['loop_induced']: 5794 #loop induced follow MadEvent way to handle the card. 5795 load_card = '' 5796 lha_read_filename='lha_read.f' 5797 elif self.opt['export_format'] in ['madloop','madloop_optimized', 'madloop_matchbox']: 5798 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5799 lha_read_filename='lha_read_mp.f' 5800 elif self.opt['export_format'].startswith('standalone') \ 5801 or self.opt['export_format'] in ['madweight', 'plugin']\ 5802 or self.opt['export_format'].startswith('matchbox'): 5803 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5804 lha_read_filename='lha_read.f' 5805 else: 5806 load_card = '' 5807 lha_read_filename='lha_read.f' 5808 cp( MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \ 5809 os.path.join(self.dir_path,'lha_read.f')) 5810 5811 file=file%{'includes':'\n '.join(includes), 5812 'load_card':load_card} 5813 writer=open(os.path.join(self.dir_path,'rw_para.f'),'w') 5814 writer.writelines(file) 5815 writer.close() 5816 5817 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 5818 or self.opt['loop_induced']: 5819 cp( MG5DIR + '/models/template_files/fortran/makefile_madevent', 5820 self.dir_path + '/makefile') 5821 if self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']: 5822 path = pjoin(self.dir_path, 'makefile') 5823 text = open(path).read() 5824 text = text.replace('madevent','aMCatNLO') 5825 open(path, 'w').writelines(text) 5826 elif self.opt['export_format'] in ['standalone', 'standalone_msP','standalone_msF', 5827 'madloop','madloop_optimized', 'standalone_rw', 5828 'madweight','matchbox','madloop_matchbox', 'plugin']: 5829 cp( MG5DIR + '/models/template_files/fortran/makefile_standalone', 5830 self.dir_path + '/makefile') 5831 #elif self.opt['export_format'] in []: 5832 #pass 5833 else: 5834 raise MadGraph5Error('Unknown format')
5835
5836 - def create_coupl_inc(self):
5837 """ write coupling.inc """ 5838 5839 fsock = self.open('coupl.inc', format='fortran') 5840 if self.opt['mp']: 5841 mp_fsock = self.open('mp_coupl.inc', format='fortran') 5842 mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\ 5843 format='fortran') 5844 5845 # Write header 5846 header = """double precision G 5847 common/strong/ G 5848 5849 double complex gal(2) 5850 common/weak/ gal 5851 5852 double precision MU_R 5853 common/rscale/ MU_R 5854 5855 double precision Nf 5856 parameter(Nf=%d) 5857 """ % self.model.get_nflav() 5858 5859 fsock.writelines(header) 5860 5861 if self.opt['mp']: 5862 header = """%(real_mp_format)s %(mp_prefix)sG 5863 common/MP_strong/ %(mp_prefix)sG 5864 5865 %(complex_mp_format)s %(mp_prefix)sgal(2) 5866 common/MP_weak/ %(mp_prefix)sgal 5867 5868 %(complex_mp_format)s %(mp_prefix)sMU_R 5869 common/MP_rscale/ %(mp_prefix)sMU_R 5870 5871 """ 5872 5873 5874 5875 5876 mp_fsock.writelines(header%{'real_mp_format':self.mp_real_format, 5877 'complex_mp_format':self.mp_complex_format, 5878 'mp_prefix':self.mp_prefix}) 5879 mp_fsock_same_name.writelines(header%{'real_mp_format':self.mp_real_format, 5880 'complex_mp_format':self.mp_complex_format, 5881 'mp_prefix':''}) 5882 5883 # Write the Mass definition/ common block 5884 masses = set() 5885 widths = set() 5886 if self.opt['complex_mass']: 5887 complex_mass = set() 5888 5889 for particle in self.model.get('particles'): 5890 #find masses 5891 one_mass = particle.get('mass') 5892 if one_mass.lower() != 'zero': 5893 masses.add(one_mass) 5894 5895 # find width 5896 one_width = particle.get('width') 5897 if one_width.lower() != 'zero': 5898 widths.add(one_width) 5899 if self.opt['complex_mass'] and one_mass.lower() != 'zero': 5900 complex_mass.add('CMASS_%s' % one_mass) 5901 5902 if masses: 5903 fsock.writelines('double precision '+','.join(masses)+'\n') 5904 fsock.writelines('common/masses/ '+','.join(masses)+'\n\n') 5905 if self.opt['mp']: 5906 mp_fsock_same_name.writelines(self.mp_real_format+' '+\ 5907 ','.join(masses)+'\n') 5908 mp_fsock_same_name.writelines('common/MP_masses/ '+\ 5909 ','.join(masses)+'\n\n') 5910 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5911 self.mp_prefix+m for m in masses])+'\n') 5912 mp_fsock.writelines('common/MP_masses/ '+\ 5913 ','.join([self.mp_prefix+m for m in masses])+'\n\n') 5914 5915 if widths: 5916 fsock.writelines('double precision '+','.join(widths)+'\n') 5917 fsock.writelines('common/widths/ '+','.join(widths)+'\n\n') 5918 if self.opt['mp']: 5919 mp_fsock_same_name.writelines(self.mp_real_format+' '+\ 5920 ','.join(widths)+'\n') 5921 mp_fsock_same_name.writelines('common/MP_widths/ '+\ 5922 ','.join(widths)+'\n\n') 5923 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5924 self.mp_prefix+w for w in widths])+'\n') 5925 mp_fsock.writelines('common/MP_widths/ '+\ 5926 ','.join([self.mp_prefix+w for w in widths])+'\n\n') 5927 5928 # Write the Couplings 5929 coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep] 5930 fsock.writelines('double complex '+', '.join(coupling_list)+'\n') 5931 fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n') 5932 if self.opt['mp']: 5933 mp_fsock_same_name.writelines(self.mp_complex_format+' '+\ 5934 ','.join(coupling_list)+'\n') 5935 mp_fsock_same_name.writelines('common/MP_couplings/ '+\ 5936 ','.join(coupling_list)+'\n\n') 5937 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5938 self.mp_prefix+c for c in coupling_list])+'\n') 5939 mp_fsock.writelines('common/MP_couplings/ '+\ 
5940 ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n') 5941 5942 # Write complex mass for complex mass scheme (if activated) 5943 if self.opt['complex_mass'] and complex_mass: 5944 fsock.writelines('double complex '+', '.join(complex_mass)+'\n') 5945 fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n') 5946 if self.opt['mp']: 5947 mp_fsock_same_name.writelines(self.mp_complex_format+' '+\ 5948 ','.join(complex_mass)+'\n') 5949 mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\ 5950 ','.join(complex_mass)+'\n\n') 5951 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5952 self.mp_prefix+cm for cm in complex_mass])+'\n') 5953 mp_fsock.writelines('common/MP_complex_mass/ '+\ 5954 ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
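        # Illustrative coupl.inc excerpt written above (parameter/coupling names are
        # hypothetical; the common-block layout is the part that matters):
        #   double precision MDL_MZ,MDL_MT
        #   common/masses/ MDL_MZ,MDL_MT
        #   double precision MDL_WZ,MDL_WT
        #   common/widths/ MDL_WZ,MDL_WT
        #   double complex GC_1, GC_2
        #   common/couplings/ GC_1, GC_2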
5955
5956 - def create_write_couplings(self):
5957 """ write the file coupl_write.inc """ 5958 5959 fsock = self.open('coupl_write.inc', format='fortran') 5960 5961 fsock.writelines("""write(*,*) ' Couplings of %s' 5962 write(*,*) ' ---------------------------------' 5963 write(*,*) ' '""" % self.model_name) 5964 def format(coupl): 5965 return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}
5966 5967 # Write the Couplings 5968 lines = [format(coupl) for coupl in self.coups_dep + self.coups_indep] 5969 fsock.writelines('\n'.join(lines)) 5970 5971
5972 - def create_input(self):
5973 """create input.inc containing the definition of the parameters""" 5974 5975 fsock = self.open('input.inc', format='fortran') 5976 if self.opt['mp']: 5977 mp_fsock = self.open('mp_input.inc', format='fortran') 5978 5979 #find mass/ width since they are already define 5980 already_def = set() 5981 for particle in self.model.get('particles'): 5982 already_def.add(particle.get('mass').lower()) 5983 already_def.add(particle.get('width').lower()) 5984 if self.opt['complex_mass']: 5985 already_def.add('cmass_%s' % particle.get('mass').lower()) 5986 5987 is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \ 5988 name.lower() not in already_def 5989 5990 real_parameters = [param.name for param in self.params_dep + 5991 self.params_indep if param.type == 'real' 5992 and is_valid(param.name)] 5993 5994 real_parameters += [param.name for param in self.params_ext 5995 if param.type == 'real'and 5996 is_valid(param.name)] 5997 5998 # check the parameter is a CT parameter or not 5999 # if yes, just use the needed ones 6000 real_parameters = [param for param in real_parameters \ 6001 if self.check_needed_param(param)] 6002 6003 fsock.writelines('double precision '+','.join(real_parameters)+'\n') 6004 fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n') 6005 if self.opt['mp']: 6006 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 6007 self.mp_prefix+p for p in real_parameters])+'\n') 6008 mp_fsock.writelines('common/MP_params_R/ '+','.join([\ 6009 self.mp_prefix+p for p in real_parameters])+'\n\n') 6010 6011 complex_parameters = [param.name for param in self.params_dep + 6012 self.params_indep if param.type == 'complex' and 6013 is_valid(param.name)] 6014 6015 # check the parameter is a CT parameter or not 6016 # if yes, just use the needed ones 6017 complex_parameters = [param for param in complex_parameters \ 6018 if self.check_needed_param(param)] 6019 6020 if complex_parameters: 6021 fsock.writelines('double complex '+','.join(complex_parameters)+'\n') 6022 fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n') 6023 if self.opt['mp']: 6024 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 6025 self.mp_prefix+p for p in complex_parameters])+'\n') 6026 mp_fsock.writelines('common/MP_params_C/ '+','.join([\ 6027 self.mp_prefix+p for p in complex_parameters])+'\n\n')
6028
6029 - def check_needed_param(self, param):
6030 """ Returns whether the parameter in argument is needed for this 6031 specific computation or not.""" 6032 6033 # If this is a leading order model or if there was no CT parameter 6034 # employed in this NLO model, one can directly return that the 6035 # parameter is needed since only CTParameters are filtered. 6036 if not hasattr(self, 'allCTparameters') or \ 6037 self.allCTparameters is None or self.usedCTparameters is None or \ 6038 len(self.allCTparameters)==0: 6039 return True 6040 6041 # We must allow the conjugate shorthand for the complex parameter as 6042 # well so we check wether either the parameter name or its name with 6043 # 'conjg__' substituted with '' is present in the list. 6044 # This is acceptable even if some parameter had an original name 6045 # including 'conjg__' in it, because at worst we export a parameter 6046 # was not needed. 6047 param = param.lower() 6048 cjg_param = param.replace('conjg__','',1) 6049 6050 # First make sure it is a CTparameter 6051 if param not in self.allCTparameters and \ 6052 cjg_param not in self.allCTparameters: 6053 return True 6054 6055 # Now check if it is in the list of CTparameters actually used 6056 return (param in self.usedCTparameters or \ 6057 cjg_param in self.usedCTparameters)
6058
6059 - def extract_needed_CTparam(self,wanted_couplings=[]):
6060 """ Extract what are the needed CT parameters given the wanted_couplings""" 6061 6062 if not hasattr(self.model,'map_CTcoup_CTparam') or not wanted_couplings: 6063 # Setting these lists to none wil disable the filtering in 6064 # check_needed_param 6065 self.allCTparameters = None 6066 self.usedCTparameters = None 6067 return 6068 6069 # All CTparameters appearin in all CT couplings 6070 allCTparameters=list(self.model.map_CTcoup_CTparam.values()) 6071 # Define in this class the list of all CT parameters 6072 self.allCTparameters=list(\ 6073 set(itertools.chain.from_iterable(allCTparameters))) 6074 6075 # All used CT couplings 6076 w_coupls = [coupl.lower() for coupl in wanted_couplings] 6077 allUsedCTCouplings = [coupl for coupl in 6078 self.model.map_CTcoup_CTparam.keys() if coupl.lower() in w_coupls] 6079 6080 # Now define the list of all CT parameters that are actually used 6081 self.usedCTparameters=list(\ 6082 set(itertools.chain.from_iterable([ 6083 self.model.map_CTcoup_CTparam[coupl] for coupl in allUsedCTCouplings 6084 ]))) 6085 6086 # Now at last, make these list case insensitive 6087 self.allCTparameters = [ct.lower() for ct in self.allCTparameters] 6088 self.usedCTparameters = [ct.lower() for ct in self.usedCTparameters]
6089
6090 - def create_intparam_def(self, dp=True, mp=False):
6091 """ create intparam_definition.inc setting the internal parameters. 6092 Output the double precision and/or the multiple precision parameters 6093 depending on the parameters dp and mp. If mp only, then the file names 6094 get the 'mp_' prefix. 6095 """ 6096 6097 fsock = self.open('%sintparam_definition.inc'% 6098 ('mp_' if mp and not dp else ''), format='fortran') 6099 6100 fsock.write_comments(\ 6101 "Parameters that should not be recomputed event by event.\n") 6102 fsock.writelines("if(readlha) then\n") 6103 if dp: 6104 fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n") 6105 if mp: 6106 fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n") 6107 6108 for param in self.params_indep: 6109 if param.name == 'ZERO': 6110 continue 6111 # check whether the parameter is a CT parameter 6112 # if yes,just used the needed ones 6113 if not self.check_needed_param(param.name): 6114 continue 6115 if dp: 6116 fsock.writelines("%s = %s\n" % (param.name, 6117 self.p_to_f.parse(param.expr))) 6118 if mp: 6119 fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name, 6120 self.mp_p_to_f.parse(param.expr))) 6121 6122 fsock.writelines('endif') 6123 6124 fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n') 6125 if dp: 6126 fsock.writelines("aS = G**2/4/pi\n") 6127 if mp: 6128 fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n") 6129 for param in self.params_dep: 6130 # check whether the parameter is a CT parameter 6131 # if yes,just used the needed ones 6132 if not self.check_needed_param(param.name): 6133 continue 6134 if dp: 6135 fsock.writelines("%s = %s\n" % (param.name, 6136 self.p_to_f.parse(param.expr))) 6137 elif mp: 6138 fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name, 6139 self.mp_p_to_f.parse(param.expr))) 6140 6141 fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n") 6142 if ('aEWM1',) in self.model['parameters']: 6143 if dp: 6144 fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1) 6145 gal(2) = 1d0 6146 """) 6147 elif mp: 6148 fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1) 6149 %(mp_prefix)sgal(2) = 1d0 6150 """ %{'mp_prefix':self.mp_prefix}) 6151 pass 6152 # in Gmu scheme, aEWM1 is not external but Gf is an exteranl variable 6153 elif ('Gf',) in self.model['parameters']: 6154 if dp: 6155 fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*DSQRT(1D0-MDL_MW**2/MDL_MZ**2)*DSQRT(MDL_Gf) 6156 gal(2) = 1d0 6157 """) 6158 elif mp: 6159 fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*SQRT(1e0_16-MP__MDL_MW**2/MP__MDL_MZ**2)*SQRT(SQRT(2e0_16)*MP__MDL_Gf) 6160 %(mp_prefix)sgal(2) = 1d0 6161 """ %{'mp_prefix':self.mp_prefix}) 6162 pass 6163 else: 6164 if dp: 6165 logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE') 6166 fsock.writelines(""" gal(1) = 1d0 6167 gal(2) = 1d0 6168 """) 6169 elif mp: 6170 fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16 6171 %(mp_prefix)sgal(2) = 1e0_16 6172 """%{'mp_prefix':self.mp_prefix})
6173 6174
6175 - def create_couplings(self):
6176 """ create couplings.f and all couplingsX.f """ 6177 6178 nb_def_by_file = 25 6179 6180 self.create_couplings_main(nb_def_by_file) 6181 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6182 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6183 6184 for i in range(nb_coup_indep): 6185 # For the independent couplings, we compute the double and multiple 6186 # precision ones together 6187 data = self.coups_indep[nb_def_by_file * i: 6188 min(len(self.coups_indep), nb_def_by_file * (i+1))] 6189 self.create_couplings_part(i + 1, data, dp=True, mp=self.opt['mp']) 6190 6191 for i in range(nb_coup_dep): 6192 # For the dependent couplings, we compute the double and multiple 6193 # precision ones in separate subroutines. 6194 data = self.coups_dep[nb_def_by_file * i: 6195 min(len(self.coups_dep), nb_def_by_file * (i+1))] 6196 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6197 dp=True,mp=False) 6198 if self.opt['mp']: 6199 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6200 dp=False,mp=True)
6201 6202
6203 - def create_couplings_main(self, nb_def_by_file=25):
6204 """ create couplings.f """ 6205 6206 fsock = self.open('couplings.f', format='fortran') 6207 6208 fsock.writelines("""subroutine coup() 6209 6210 implicit none 6211 double precision PI, ZERO 6212 logical READLHA 6213 parameter (PI=3.141592653589793d0) 6214 parameter (ZERO=0d0) 6215 include \'model_functions.inc\'""") 6216 if self.opt['mp']: 6217 fsock.writelines("""%s MP__PI, MP__ZERO 6218 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 6219 parameter (MP__ZERO=0e0_16) 6220 include \'mp_input.inc\' 6221 include \'mp_coupl.inc\' 6222 """%self.mp_real_format) 6223 fsock.writelines("""include \'input.inc\' 6224 include \'coupl.inc\' 6225 READLHA = .true. 6226 include \'intparam_definition.inc\'""") 6227 if self.opt['mp']: 6228 fsock.writelines("""include \'mp_intparam_definition.inc\'\n""") 6229 6230 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6231 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6232 6233 fsock.writelines('\n'.join(\ 6234 ['call coup%s()' % (i + 1) for i in range(nb_coup_indep)])) 6235 6236 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 6237 6238 fsock.writelines('\n'.join(\ 6239 ['call coup%s()' % (nb_coup_indep + i + 1) \ 6240 for i in range(nb_coup_dep)])) 6241 if self.opt['mp']: 6242 fsock.writelines('\n'.join(\ 6243 ['call mp_coup%s()' % (nb_coup_indep + i + 1) \ 6244 for i in range(nb_coup_dep)])) 6245 fsock.writelines('''\n return \n end\n''') 6246 6247 fsock.writelines("""subroutine update_as_param() 6248 6249 implicit none 6250 double precision PI, ZERO 6251 logical READLHA 6252 parameter (PI=3.141592653589793d0) 6253 parameter (ZERO=0d0) 6254 include \'model_functions.inc\'""") 6255 fsock.writelines("""include \'input.inc\' 6256 include \'coupl.inc\' 6257 READLHA = .false.""") 6258 fsock.writelines(""" 6259 include \'intparam_definition.inc\'\n 6260 """) 6261 6262 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6263 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6264 6265 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 6266 6267 fsock.writelines('\n'.join(\ 6268 ['call coup%s()' % (nb_coup_indep + i + 1) \ 6269 for i in range(nb_coup_dep)])) 6270 fsock.writelines('''\n return \n end\n''') 6271 6272 fsock.writelines("""subroutine update_as_param2(mu_r2,as2) 6273 6274 implicit none 6275 double precision PI 6276 parameter (PI=3.141592653589793d0) 6277 double precision mu_r2, as2 6278 include \'model_functions.inc\'""") 6279 fsock.writelines("""include \'input.inc\' 6280 include \'coupl.inc\'""") 6281 fsock.writelines(""" 6282 if (mu_r2.gt.0d0) MU_R = mu_r2 6283 G = SQRT(4.0d0*PI*AS2) 6284 AS = as2 6285 6286 CALL UPDATE_AS_PARAM() 6287 """) 6288 fsock.writelines('''\n return \n end\n''') 6289 6290 if self.opt['mp']: 6291 fsock.writelines("""subroutine mp_update_as_param() 6292 6293 implicit none 6294 logical READLHA 6295 include \'model_functions.inc\'""") 6296 fsock.writelines("""%s MP__PI, MP__ZERO 6297 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 6298 parameter (MP__ZERO=0e0_16) 6299 include \'mp_input.inc\' 6300 include \'mp_coupl.inc\' 6301 """%self.mp_real_format) 6302 fsock.writelines("""include \'input.inc\' 6303 include \'coupl.inc\' 6304 include \'actualize_mp_ext_params.inc\' 6305 READLHA = .false. 
6306 include \'mp_intparam_definition.inc\'\n 6307 """) 6308 6309 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6310 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6311 6312 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 6313 6314 fsock.writelines('\n'.join(\ 6315 ['call mp_coup%s()' % (nb_coup_indep + i + 1) \ 6316 for i in range(nb_coup_dep)])) 6317 fsock.writelines('''\n return \n end\n''')
6318
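Editor's note: create_couplings_main above writes a coup() subroutine that first calls coup1()..coupN() for the independent couplings, then coupN+1().. for the scale-dependent ones, plus mp_coup calls when multiple precision is enabled. A small sketch of the call-line strings handed to fsock.writelines, with hypothetical file counts:

    nb_coup_indep, nb_coup_dep, mp = 2, 1, True

    indep_calls = '\n'.join('call coup%s()' % (i + 1) for i in range(nb_coup_indep))
    dep_calls   = '\n'.join('call coup%s()' % (nb_coup_indep + i + 1)
                            for i in range(nb_coup_dep))
    mp_calls    = '\n'.join('call mp_coup%s()' % (nb_coup_indep + i + 1)
                            for i in range(nb_coup_dep)) if mp else ''

    print(indep_calls)   # call coup1() / call coup2()
    print(dep_calls)     # call coup3()
    print(mp_calls)      # call mp_coup3()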
6319 - def create_couplings_part(self, nb_file, data, dp=True, mp=False):
6320 """ create couplings[nb_file].f containing information coming from data. 6321 Outputs the computation of the double precision and/or the multiple 6322 precision couplings depending on the parameters dp and mp. 6323 If mp is True and dp is False, then the prefix 'MP_' is appended to the 6324 filename and subroutine name. 6325 """ 6326 6327 fsock = self.open('%scouplings%s.f' %('mp_' if mp and not dp else '', 6328 nb_file), format='fortran') 6329 fsock.writelines("""subroutine %scoup%s() 6330 6331 implicit none 6332 include \'model_functions.inc\'"""%('mp_' if mp and not dp else '',nb_file)) 6333 if dp: 6334 fsock.writelines(""" 6335 double precision PI, ZERO 6336 parameter (PI=3.141592653589793d0) 6337 parameter (ZERO=0d0) 6338 include 'input.inc' 6339 include 'coupl.inc'""") 6340 if mp: 6341 fsock.writelines("""%s MP__PI, MP__ZERO 6342 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 6343 parameter (MP__ZERO=0e0_16) 6344 include \'mp_input.inc\' 6345 include \'mp_coupl.inc\' 6346 """%self.mp_real_format) 6347 6348 for coupling in data: 6349 if dp: 6350 fsock.writelines('%s = %s' % (coupling.name, 6351 self.p_to_f.parse(coupling.expr))) 6352 if mp: 6353 fsock.writelines('%s%s = %s' % (self.mp_prefix,coupling.name, 6354 self.mp_p_to_f.parse(coupling.expr))) 6355 fsock.writelines('end')
6356
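Editor's note: the naming rule in create_couplings_part is that the 'mp_' prefix is applied to both the file name and the subroutine name only when the file is multiple-precision-only (mp and not dp). A minimal sketch of that rule (helper name is hypothetical):

    def part_names(nb_file, dp=True, mp=False):
        """Mirror the prefixing used above: 'mp_' only for mp-only parts."""
        prefix = 'mp_' if mp and not dp else ''
        return '%scouplings%s.f' % (prefix, nb_file), '%scoup%s()' % (prefix, nb_file)

    print(part_names(3))                     # ('couplings3.f', 'coup3()')
    print(part_names(3, dp=False, mp=True))  # ('mp_couplings3.f', 'mp_coup3()')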
6357 - def create_model_functions_inc(self):
6358 """ Create model_functions.inc which contains the various declarations 6359 of auxiliary functions which might be used in the couplings expressions 6360 """ 6361 6362 additional_fct = [] 6363 # check for functions define in the UFO model 6364 ufo_fct = self.model.get('functions') 6365 if ufo_fct: 6366 for fct in ufo_fct: 6367 # already handle by default 6368 if str(fct.name) not in ["complexconjugate", "re", "im", "sec", 6369 "csc", "asec", "acsc", "theta_function", "cond", 6370 "condif", "reglogp", "reglogm", "reglog", "recms", "arg", "cot", 6371 "grreglog","regsqrt"]: 6372 additional_fct.append(fct.name) 6373 6374 fsock = self.open('model_functions.inc', format='fortran') 6375 fsock.writelines("""double complex cond 6376 double complex condif 6377 double complex reglog 6378 double complex reglogp 6379 double complex reglogm 6380 double complex recms 6381 double complex arg 6382 double complex grreglog 6383 double complex regsqrt 6384 %s 6385 """ % "\n".join([" double complex %s" % i for i in additional_fct])) 6386 6387 6388 if self.opt['mp']: 6389 fsock.writelines("""%(complex_mp_format)s mp_cond 6390 %(complex_mp_format)s mp_condif 6391 %(complex_mp_format)s mp_reglog 6392 %(complex_mp_format)s mp_reglogp 6393 %(complex_mp_format)s mp_reglogm 6394 %(complex_mp_format)s mp_recms 6395 %(complex_mp_format)s mp_arg 6396 %(complex_mp_format)s mp_grreglog 6397 %(complex_mp_format)s mp_regsqrt 6398 %(additional)s 6399 """ %\ 6400 {"additional": "\n".join([" %s mp_%s" % (self.mp_complex_format, i) for i in additional_fct]), 6401 'complex_mp_format':self.mp_complex_format 6402 })
6403
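Editor's note: create_model_functions_inc declares only those UFO functions that are not already provided by default. A minimal sketch of that filtering, with the default list copied from the code above and a hypothetical custom function:

    # Functions already declared by default in model_functions.inc (see above).
    DEFAULT_FCTS = {"complexconjugate", "re", "im", "sec", "csc", "asec", "acsc",
                    "theta_function", "cond", "condif", "reglogp", "reglogm",
                    "reglog", "recms", "arg", "cot", "grreglog", "regsqrt"}

    def extra_declarations(ufo_fct_names):
        """Return the extra 'double complex' declarations for user-defined functions."""
        return "\n".join(" double complex %s" % name
                         for name in ufo_fct_names if name not in DEFAULT_FCTS)

    # hypothetical UFO model with one custom form-factor function
    print(extra_declarations(["myFF", "cond", "reglog"]))   # -> ' double complex myFF'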
6404 - def create_model_functions_def(self):
6405 """ Create model_functions.f which contains the various definitions 6406 of auxiliary functions which might be used in the couplings expressions 6407 Add the functions.f functions for formfactors support 6408 """ 6409 6410 fsock = self.open('model_functions.f', format='fortran') 6411 fsock.writelines("""double complex function cond(condition,truecase,falsecase) 6412 implicit none 6413 double complex condition,truecase,falsecase 6414 if(condition.eq.(0.0d0,0.0d0)) then 6415 cond=truecase 6416 else 6417 cond=falsecase 6418 endif 6419 end 6420 6421 double complex function condif(condition,truecase,falsecase) 6422 implicit none 6423 logical condition 6424 double complex truecase,falsecase 6425 if(condition) then 6426 condif=truecase 6427 else 6428 condif=falsecase 6429 endif 6430 end 6431 6432 double complex function recms(condition,expr) 6433 implicit none 6434 logical condition 6435 double complex expr 6436 if(condition)then 6437 recms=expr 6438 else 6439 recms=dcmplx(dble(expr)) 6440 endif 6441 end 6442 6443 double complex function reglog(arg) 6444 implicit none 6445 double complex TWOPII 6446 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6447 double complex arg 6448 if(arg.eq.(0.0d0,0.0d0)) then 6449 reglog=(0.0d0,0.0d0) 6450 else 6451 reglog=log(arg) 6452 endif 6453 end 6454 6455 double complex function reglogp(arg) 6456 implicit none 6457 double complex TWOPII 6458 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6459 double complex arg 6460 if(arg.eq.(0.0d0,0.0d0))then 6461 reglogp=(0.0d0,0.0d0) 6462 else 6463 if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then 6464 reglogp=log(arg) + TWOPII 6465 else 6466 reglogp=log(arg) 6467 endif 6468 endif 6469 end 6470 6471 double complex function reglogm(arg) 6472 implicit none 6473 double complex TWOPII 6474 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6475 double complex arg 6476 if(arg.eq.(0.0d0,0.0d0))then 6477 reglogm=(0.0d0,0.0d0) 6478 else 6479 if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then 6480 reglogm=log(arg) - TWOPII 6481 else 6482 reglogm=log(arg) 6483 endif 6484 endif 6485 end 6486 6487 double complex function regsqrt(arg_in) 6488 implicit none 6489 double complex arg_in 6490 double complex arg 6491 arg=arg_in 6492 if(dabs(dimag(arg)).eq.0.0d0)then 6493 arg=dcmplx(dble(arg),0.0d0) 6494 endif 6495 if(dabs(dble(arg)).eq.0.0d0)then 6496 arg=dcmplx(0.0d0,dimag(arg)) 6497 endif 6498 regsqrt=sqrt(arg) 6499 end 6500 6501 double complex function grreglog(logsw,expr1_in,expr2_in) 6502 implicit none 6503 double complex TWOPII 6504 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 6505 double complex expr1_in,expr2_in 6506 double complex expr1,expr2 6507 double precision logsw 6508 double precision imagexpr 6509 logical firstsheet 6510 expr1=expr1_in 6511 expr2=expr2_in 6512 if(dabs(dimag(expr1)).eq.0.0d0)then 6513 expr1=dcmplx(dble(expr1),0.0d0) 6514 endif 6515 if(dabs(dble(expr1)).eq.0.0d0)then 6516 expr1=dcmplx(0.0d0,dimag(expr1)) 6517 endif 6518 if(dabs(dimag(expr2)).eq.0.0d0)then 6519 expr2=dcmplx(dble(expr2),0.0d0) 6520 endif 6521 if(dabs(dble(expr2)).eq.0.0d0)then 6522 expr2=dcmplx(0.0d0,dimag(expr2)) 6523 endif 6524 if(expr1.eq.(0.0d0,0.0d0))then 6525 grreglog=(0.0d0,0.0d0) 6526 else 6527 imagexpr=dimag(expr1)*dimag(expr2) 6528 firstsheet=imagexpr.ge.0.0d0 6529 firstsheet=firstsheet.or.dble(expr1).ge.0.0d0 6530 firstsheet=firstsheet.or.dble(expr2).ge.0.0d0 6531 if(firstsheet)then 6532 grreglog=log(expr1) 6533 else 6534 if(dimag(expr1).gt.0.0d0)then 6535 grreglog=log(expr1) - 
logsw*TWOPII 6536 else 6537 grreglog=log(expr1) + logsw*TWOPII 6538 endif 6539 endif 6540 endif 6541 end 6542 6543 double complex function arg(comnum) 6544 implicit none 6545 double complex comnum 6546 double complex iim 6547 iim = (0.0d0,1.0d0) 6548 if(comnum.eq.(0.0d0,0.0d0)) then 6549 arg=(0.0d0,0.0d0) 6550 else 6551 arg=log(comnum/abs(comnum))/iim 6552 endif 6553 end""") 6554 if self.opt['mp']: 6555 fsock.writelines(""" 6556 6557 %(complex_mp_format)s function mp_cond(condition,truecase,falsecase) 6558 implicit none 6559 %(complex_mp_format)s condition,truecase,falsecase 6560 if(condition.eq.(0.0e0_16,0.0e0_16)) then 6561 mp_cond=truecase 6562 else 6563 mp_cond=falsecase 6564 endif 6565 end 6566 6567 %(complex_mp_format)s function mp_condif(condition,truecase,falsecase) 6568 implicit none 6569 logical condition 6570 %(complex_mp_format)s truecase,falsecase 6571 if(condition) then 6572 mp_condif=truecase 6573 else 6574 mp_condif=falsecase 6575 endif 6576 end 6577 6578 %(complex_mp_format)s function mp_recms(condition,expr) 6579 implicit none 6580 logical condition 6581 %(complex_mp_format)s expr 6582 if(condition)then 6583 mp_recms=expr 6584 else 6585 mp_recms=cmplx(real(expr),kind=16) 6586 endif 6587 end 6588 6589 %(complex_mp_format)s function mp_reglog(arg) 6590 implicit none 6591 %(complex_mp_format)s TWOPII 6592 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6593 %(complex_mp_format)s arg 6594 if(arg.eq.(0.0e0_16,0.0e0_16)) then 6595 mp_reglog=(0.0e0_16,0.0e0_16) 6596 else 6597 mp_reglog=log(arg) 6598 endif 6599 end 6600 6601 %(complex_mp_format)s function mp_reglogp(arg) 6602 implicit none 6603 %(complex_mp_format)s TWOPII 6604 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6605 %(complex_mp_format)s arg 6606 if(arg.eq.(0.0e0_16,0.0e0_16))then 6607 mp_reglogp=(0.0e0_16,0.0e0_16) 6608 else 6609 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then 6610 mp_reglogp=log(arg) + TWOPII 6611 else 6612 mp_reglogp=log(arg) 6613 endif 6614 endif 6615 end 6616 6617 %(complex_mp_format)s function mp_reglogm(arg) 6618 implicit none 6619 %(complex_mp_format)s TWOPII 6620 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6621 %(complex_mp_format)s arg 6622 if(arg.eq.(0.0e0_16,0.0e0_16))then 6623 mp_reglogm=(0.0e0_16,0.0e0_16) 6624 else 6625 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then 6626 mp_reglogm=log(arg) - TWOPII 6627 else 6628 mp_reglogm=log(arg) 6629 endif 6630 endif 6631 end 6632 6633 %(complex_mp_format)s function mp_regsqrt(arg_in) 6634 implicit none 6635 %(complex_mp_format)s arg_in 6636 %(complex_mp_format)s arg 6637 arg=arg_in 6638 if(abs(imagpart(arg)).eq.0.0e0_16)then 6639 arg=cmplx(real(arg,kind=16),0.0e0_16) 6640 endif 6641 if(abs(real(arg,kind=16)).eq.0.0e0_16)then 6642 arg=cmplx(0.0e0_16,imagpart(arg)) 6643 endif 6644 mp_regsqrt=sqrt(arg) 6645 end 6646 6647 6648 %(complex_mp_format)s function mp_grreglog(logsw,expr1_in,expr2_in) 6649 implicit none 6650 %(complex_mp_format)s TWOPII 6651 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 6652 %(complex_mp_format)s expr1_in,expr2_in 6653 %(complex_mp_format)s expr1,expr2 6654 %(real_mp_format)s logsw 6655 %(real_mp_format)s imagexpr 6656 logical firstsheet 6657 expr1=expr1_in 6658 expr2=expr2_in 6659 if(abs(imagpart(expr1)).eq.0.0e0_16)then 6660 expr1=cmplx(real(expr1,kind=16),0.0e0_16) 6661 endif 6662 
if(abs(real(expr1,kind=16)).eq.0.0e0_16)then 6663 expr1=cmplx(0.0e0_16,imagpart(expr1)) 6664 endif 6665 if(abs(imagpart(expr2)).eq.0.0e0_16)then 6666 expr2=cmplx(real(expr2,kind=16),0.0e0_16) 6667 endif 6668 if(abs(real(expr2,kind=16)).eq.0.0e0_16)then 6669 expr2=cmplx(0.0e0_16,imagpart(expr2)) 6670 endif 6671 if(expr1.eq.(0.0e0_16,0.0e0_16))then 6672 mp_grreglog=(0.0e0_16,0.0e0_16) 6673 else 6674 imagexpr=imagpart(expr1)*imagpart(expr2) 6675 firstsheet=imagexpr.ge.0.0e0_16 6676 firstsheet=firstsheet.or.real(expr1,kind=16).ge.0.0e0_16 6677 firstsheet=firstsheet.or.real(expr2,kind=16).ge.0.0e0_16 6678 if(firstsheet)then 6679 mp_grreglog=log(expr1) 6680 else 6681 if(imagpart(expr1).gt.0.0e0_16)then 6682 mp_grreglog=log(expr1) - logsw*TWOPII 6683 else 6684 mp_grreglog=log(expr1) + logsw*TWOPII 6685 endif 6686 endif 6687 endif 6688 end 6689 6690 %(complex_mp_format)s function mp_arg(comnum) 6691 implicit none 6692 %(complex_mp_format)s comnum 6693 %(complex_mp_format)s imm 6694 imm = (0.0e0_16,1.0e0_16) 6695 if(comnum.eq.(0.0e0_16,0.0e0_16)) then 6696 mp_arg=(0.0e0_16,0.0e0_16) 6697 else 6698 mp_arg=log(comnum/abs(comnum))/imm 6699 endif 6700 end"""%{'complex_mp_format':self.mp_complex_format,'real_mp_format':self.mp_real_format}) 6701 6702 6703 #check for the file functions.f 6704 model_path = self.model.get('modelpath') 6705 if os.path.exists(pjoin(model_path,'Fortran','functions.f')): 6706 fsock.write_comment_line(' USER DEFINE FUNCTIONS ') 6707 input = pjoin(model_path,'Fortran','functions.f') 6708 file.writelines(fsock, open(input).read()) 6709 fsock.write_comment_line(' END USER DEFINE FUNCTIONS ') 6710 6711 # check for functions define in the UFO model 6712 ufo_fct = self.model.get('functions') 6713 if ufo_fct: 6714 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ') 6715 done = [] 6716 for fct in ufo_fct: 6717 # already handle by default 6718 if str(fct.name.lower()) not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif", 6719 "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms","arg", 6720 "grreglog","regsqrt"] + done: 6721 done.append(str(fct.name.lower())) 6722 ufo_fct_template = """ 6723 double complex function %(name)s(%(args)s) 6724 implicit none 6725 double complex %(args)s 6726 %(definitions)s 6727 %(name)s = %(fct)s 6728 6729 return 6730 end 6731 """ 6732 str_fct = self.p_to_f.parse(fct.expr) 6733 if not self.p_to_f.to_define: 6734 definitions = [] 6735 else: 6736 definitions=[] 6737 for d in self.p_to_f.to_define: 6738 if d == 'pi': 6739 definitions.append(' double precision pi') 6740 definitions.append(' data pi /3.1415926535897932d0/') 6741 else: 6742 definitions.append(' double complex %s' % d) 6743 6744 text = ufo_fct_template % { 6745 'name': fct.name, 6746 'args': ", ".join(fct.arguments), 6747 'fct': str_fct, 6748 'definitions': '\n'.join(definitions) 6749 } 6750 6751 fsock.writelines(text) 6752 if self.opt['mp']: 6753 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP') 6754 for fct in ufo_fct: 6755 # already handle by default 6756 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc","condif", 6757 "theta_function", "cond", "reglog", "reglogp","reglogm", "recms","arg", 6758 "grreglog","regsqrt"]: 6759 ufo_fct_template = """ 6760 %(complex_mp_format)s function mp_%(name)s(mp__%(args)s) 6761 implicit none 6762 %(complex_mp_format)s mp__%(args)s 6763 %(definitions)s 6764 mp_%(name)s = %(fct)s 6765 6766 return 6767 end 6768 """ 6769 str_fct = self.mp_p_to_f.parse(fct.expr) 6770 if not 
self.mp_p_to_f.to_define: 6771 definitions = [] 6772 else: 6773 definitions=[] 6774 for d in self.mp_p_to_f.to_define: 6775 if d == 'pi': 6776 definitions.append(' %s mp__pi' % self.mp_real_format) 6777 definitions.append(' data mp__pi /3.141592653589793238462643383279502884197e+00_16/') 6778 else: 6779 definitions.append(' %s mp_%s' % (self.mp_complex_format,d)) 6780 text = ufo_fct_template % { 6781 'name': fct.name, 6782 'args': ", mp__".join(fct.arguments), 6783 'fct': str_fct, 6784 'definitions': '\n'.join(definitions), 6785 'complex_mp_format': self.mp_complex_format 6786 } 6787 fsock.writelines(text) 6788 6789 6790 6791 fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
6792 6793 6794
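Editor's note: the UFO functions not handled by default are turned into Fortran functions by filling the ufo_fct_template shown above. A self-contained sketch of that rendering; 'myFF' and the translated expression stand in for fct.name and self.p_to_f.parse(fct.expr):

    ufo_fct_template = """
              double complex function %(name)s(%(args)s)
              implicit none
              double complex %(args)s
              %(definitions)s
              %(name)s = %(fct)s

              return
              end
    """

    text = ufo_fct_template % {
        'name': 'myFF',                      # stand-in for fct.name
        'args': ", ".join(['q2', 'm2']),     # stand-in for fct.arguments
        'fct': 'm2/(q2-m2)',                 # stand-in for the parsed expression
        'definitions': '',
    }
    print(text)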
6795 - def create_makeinc(self):
6796 """create makeinc.inc containing the files to compile """ 6797 6798 fsock = self.open('makeinc.inc', comment='#') 6799 text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o' 6800 text += ' model_functions.o ' 6801 6802 nb_coup_indep = 1 + len(self.coups_dep) // 25 6803 nb_coup_dep = 1 + len(self.coups_indep) // 25 6804 couplings_files=['couplings%s.o' % (i+1) \ 6805 for i in range(nb_coup_dep + nb_coup_indep) ] 6806 if self.opt['mp']: 6807 couplings_files+=['mp_couplings%s.o' % (i+1) for i in \ 6808 range(nb_coup_dep,nb_coup_dep + nb_coup_indep) ] 6809 text += ' '.join(couplings_files) 6810 fsock.writelines(text)
6811
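Editor's note: create_makeinc lists one object file per couplingsN.f produced above, and adds mp_ object files only for the second (scale-dependent) block of files. A sketch of the list building in terms of the two block sizes (helper name is hypothetical; no claim about which count comes from which list):

    def coupling_objects(nb_files_first_block, nb_files_second_block, mp=False):
        """Reproduce the couplings object-file list assembled above (names only)."""
        total = nb_files_first_block + nb_files_second_block
        objs = ['couplings%s.o' % (i + 1) for i in range(total)]
        if mp:
            objs += ['mp_couplings%s.o' % (i + 1)
                     for i in range(nb_files_first_block, total)]
        return objs

    print(coupling_objects(2, 1, mp=True))
    # ['couplings1.o', 'couplings2.o', 'couplings3.o', 'mp_couplings3.o']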
6812 - def create_param_write(self):
6813 """ create param_write """ 6814 6815 fsock = self.open('param_write.inc', format='fortran') 6816 6817 fsock.writelines("""write(*,*) ' External Params' 6818 write(*,*) ' ---------------------------------' 6819 write(*,*) ' '""") 6820 def format(name): 6821 return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
6822 6823 # Write the external parameter 6824 lines = [format(param.name) for param in self.params_ext] 6825 fsock.writelines('\n'.join(lines)) 6826 6827 fsock.writelines("""write(*,*) ' Internal Params' 6828 write(*,*) ' ---------------------------------' 6829 write(*,*) ' '""") 6830 lines = [format(data.name) for data in self.params_indep 6831 if data.name != 'ZERO' and self.check_needed_param(data.name)] 6832 fsock.writelines('\n'.join(lines)) 6833 fsock.writelines("""write(*,*) ' Internal Params evaluated point by point' 6834 write(*,*) ' ----------------------------------------' 6835 write(*,*) ' '""") 6836 lines = [format(data.name) for data in self.params_dep \ 6837 if self.check_needed_param(data.name)] 6838 6839 fsock.writelines('\n'.join(lines)) 6840 6841 6842
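Editor's note: the local format() helper in create_param_write produces one Fortran write statement per parameter. A tiny sketch of the generated line (parameter names are examples only):

    def fortran_print_line(name):
        """Same formatting as the local format() helper above."""
        return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}

    print(fortran_print_line('MT'))      # write(*,*) 'MT = ', MT
    print(fortran_print_line('aEWM1'))   # write(*,*) 'aEWM1 = ', aEWM1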
6843 - def create_ident_card(self):
6844 """ create the ident_card.dat """ 6845 6846 def format(parameter): 6847 """return the line for the ident_card corresponding to this parameter""" 6848 colum = [parameter.lhablock.lower()] + \ 6849 [str(value) for value in parameter.lhacode] + \ 6850 [parameter.name] 6851 if not parameter.name: 6852 return '' 6853 return ' '.join(colum)+'\n'
6854 6855 fsock = self.open('ident_card.dat') 6856 6857 external_param = [format(param) for param in self.params_ext] 6858 fsock.writelines('\n'.join(external_param)) 6859
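Editor's note: each ident_card.dat line produced above is the lowercased LHA block name, the LHA code entries, and the parameter name, space separated. A self-contained sketch using a namedtuple as a stand-in for the external parameter object:

    from collections import namedtuple

    # stand-in for a model external parameter as used by format() above
    Param = namedtuple('Param', ['lhablock', 'lhacode', 'name'])

    def ident_line(parameter):
        """lhablock, lhacode entries and name, space separated, as above."""
        if not parameter.name:
            return ''
        colum = [parameter.lhablock.lower()] + \
                [str(value) for value in parameter.lhacode] + [parameter.name]
        return ' '.join(colum) + '\n'

    print(ident_line(Param('MASS', [6], 'MT')), end='')   # mass 6 MT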
6860 - def create_actualize_mp_ext_param_inc(self):
6861 """ create the actualize_mp_ext_params.inc code """ 6862 6863 # In principle one should actualize all external parameters, but for now, it is 6864 # hardcoded that only AS and MU_R can be dynamically changed by the user 6865 # so we only update those. 6866 # Of course, to be on the safe side, one could decide to update all 6867 # external parameters. 6868 update_params_list=[p for p in self.params_ext if p.name in 6869 self.PS_dependent_key] 6870 6871 res_strings = ["%(mp_prefix)s%(name)s=%(name)s"\ 6872 %{'mp_prefix':self.mp_prefix,'name':param.name}\ 6873 for param in update_params_list] 6874 # When read_lha is false, it is G which is taken as input and not AS, so 6875 # this is what should be reset here too. 6876 if 'aS' in [param.name for param in update_params_list]: 6877 res_strings.append("%(mp_prefix)sG=G"%{'mp_prefix':self.mp_prefix}) 6878 6879 fsock = self.open('actualize_mp_ext_params.inc', format='fortran') 6880 fsock.writelines('\n'.join(res_strings))
6881
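Editor's note: the include file written above simply copies each scale-dependent external parameter into its multiple-precision counterpart, refreshing G as well when aS is updated. A minimal sketch, assuming the 'MP__' prefix used elsewhere in this module for self.mp_prefix (helper name is hypothetical):

    def mp_update_lines(param_names, mp_prefix='MP__'):
        """Copy each scale-dependent external parameter into its mp counterpart;
        when aS is updated, G is refreshed as well (as in the method above)."""
        lines = ['%s%s=%s' % (mp_prefix, name, name) for name in param_names]
        if 'aS' in param_names:
            lines.append('%sG=G' % mp_prefix)
        return '\n'.join(lines)

    print(mp_update_lines(['aS', 'MU_R']))
    # MP__aS=aS
    # MP__MU_R=MU_R
    # MP__G=G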
6882 - def create_param_read(self):
6883 """create param_read""" 6884 6885 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 6886 or self.opt['loop_induced']: 6887 fsock = self.open('param_read.inc', format='fortran') 6888 fsock.writelines(' include \'../param_card.inc\'') 6889 return 6890 6891 def format_line(parameter): 6892 """return the line for the ident_card corresponding to this 6893 parameter""" 6894 template = \ 6895 """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \ 6896 % {'name': parameter.name, 6897 'value': self.p_to_f.parse(str(parameter.value.real))} 6898 if self.opt['mp']: 6899 template = template+ \ 6900 ("\n call MP_LHA_get_real(npara,param,value,'%(name)s',"+ 6901 "%(mp_prefix)s%(name)s,%(value)s)") \ 6902 % {'name': parameter.name,'mp_prefix': self.mp_prefix, 6903 'value': self.mp_p_to_f.parse(str(parameter.value.real))} 6904 return template 6905 6906 fsock = self.open('param_read.inc', format='fortran') 6907 res_strings = [format_line(param) \ 6908 for param in self.params_ext] 6909 6910 # Correct width sign for Majorana particles (where the width 6911 # and mass need to have the same sign) 6912 for particle in self.model.get('particles'): 6913 if particle.is_fermion() and particle.get('self_antipart') and \ 6914 particle.get('width').lower() != 'zero': 6915 6916 res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \ 6917 {'width': particle.get('width'), 'mass': particle.get('mass')}) 6918 if self.opt['mp']: 6919 res_strings.append(\ 6920 ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,'+\ 6921 '%(mp_pref)s%(mass)s)')%{'width': particle.get('width'),\ 6922 'mass': particle.get('mass'),'mp_pref':self.mp_prefix}) 6923 6924 fsock.writelines('\n'.join(res_strings)) 6925 6926 6927 @staticmethod
6928 - def create_param_card_static(model, output_path, rule_card_path=False, 6929 mssm_convert=True):
6930 """ create the param_card.dat for a given model --static method-- """ 6931 #1. Check if a default param_card is present: 6932 done = False 6933 if hasattr(model, 'restrict_card') and isinstance(model.restrict_card, str): 6934 restrict_name = os.path.basename(model.restrict_card)[9:-4] 6935 model_path = model.get('modelpath') 6936 if os.path.exists(pjoin(model_path,'paramcard_%s.dat' % restrict_name)): 6937 done = True 6938 files.cp(pjoin(model_path,'paramcard_%s.dat' % restrict_name), 6939 output_path) 6940 if not done: 6941 param_writer.ParamCardWriter(model, output_path) 6942 6943 if rule_card_path: 6944 if hasattr(model, 'rule_card'): 6945 model.rule_card.write_file(rule_card_path) 6946 6947 if mssm_convert: 6948 model_name = model.get('name') 6949 # If MSSM, convert the card to SLHA1 6950 if model_name == 'mssm' or model_name.startswith('mssm-'): 6951 import models.check_param_card as translator 6952 # Check the format of the param_card for Pythia and make it correct 6953 if rule_card_path: 6954 translator.make_valid_param_card(output_path, rule_card_path) 6955 translator.convert_to_slha1(output_path)
6956
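Editor's note: in create_param_card_static the slice [9:-4] strips the leading 'restrict_' and the trailing '.dat' from the restriction card name in order to look up a matching paramcard_<tag>.dat. A minimal sketch of that naming convention (helper name and example path are hypothetical):

    import os

    def default_card_name(restrict_card_path):
        """Mirror the convention above: 'restrict_<tag>.dat' -> 'paramcard_<tag>.dat'."""
        restrict_name = os.path.basename(restrict_card_path)[9:-4]  # drop 'restrict_' and '.dat'
        return 'paramcard_%s.dat' % restrict_name

    print(default_card_name('models/sm/restrict_default.dat'))  # paramcard_default.dat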
6957 - def create_param_card(self):
6958 """ create the param_card.dat """ 6959 6960 rule_card = pjoin(self.dir_path, 'param_card_rule.dat') 6961 if not hasattr(self.model, 'rule_card'): 6962 rule_card=False 6963 self.create_param_card_static(self.model, 6964 output_path=pjoin(self.dir_path, 'param_card.dat'), 6965 rule_card_path=rule_card, 6966 mssm_convert=True)
6967
6968 -def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True, cmd_options={}):
6969 """ Determine which Export_v4 class is required. cmd is the command 6970 interface containing all potential usefull information. 6971 The output_type argument specifies from which context the output 6972 is called. It is 'madloop' for MadLoop5, 'amcatnlo' for FKS5 output 6973 and 'default' for tree-level outputs.""" 6974 6975 opt = dict(cmd.options) 6976 opt['output_options'] = cmd_options 6977 6978 # ========================================================================== 6979 # First check whether Ninja must be installed. 6980 # Ninja would only be required if: 6981 # a) Loop optimized output is selected 6982 # b) the process gathered from the amplitude generated use loops 6983 6984 if len(cmd._curr_amps)>0: 6985 try: 6986 curr_proc = cmd._curr_amps[0].get('process') 6987 except base_objects.PhysicsObject.PhysicsObjectError: 6988 curr_proc = None 6989 elif hasattr(cmd,'_fks_multi_proc') and \ 6990 len(cmd._fks_multi_proc.get('process_definitions'))>0: 6991 curr_proc = cmd._fks_multi_proc.get('process_definitions')[0] 6992 else: 6993 curr_proc = None 6994 6995 requires_reduction_tool = opt['loop_optimized_output'] and \ 6996 (not curr_proc is None) and \ 6997 (curr_proc.get('perturbation_couplings') != [] and \ 6998 not curr_proc.get('NLO_mode') in [None,'real','tree','LO','LOonly']) 6999 7000 # An installation is required then, but only if the specified path is the 7001 # default local one and that the Ninja library appears missing. 7002 if requires_reduction_tool: 7003 cmd.install_reduction_library() 7004 7005 # ========================================================================== 7006 # First treat the MadLoop5 standalone case 7007 MadLoop_SA_options = {'clean': not noclean, 7008 'complex_mass':cmd.options['complex_mass_scheme'], 7009 'export_format':'madloop', 7010 'mp':True, 7011 'loop_dir': os.path.join(cmd._mgme_dir,'Template','loop_material'), 7012 'cuttools_dir': cmd._cuttools_dir, 7013 'iregi_dir':cmd._iregi_dir, 7014 'golem_dir':cmd.options['golem'], 7015 'samurai_dir':cmd.options['samurai'], 7016 'ninja_dir':cmd.options['ninja'], 7017 'collier_dir':cmd.options['collier'], 7018 'fortran_compiler':cmd.options['fortran_compiler'], 7019 'f2py_compiler':cmd.options['f2py_compiler'], 7020 'output_dependencies':cmd.options['output_dependencies'], 7021 'SubProc_prefix':'P', 7022 'compute_color_flows':cmd.options['loop_color_flows'], 7023 'mode': 'reweight' if cmd._export_format == "standalone_rw" else '', 7024 'cluster_local_path': cmd.options['cluster_local_path'], 7025 'output_options': cmd_options 7026 } 7027 7028 if output_type.startswith('madloop'): 7029 import madgraph.loop.loop_exporters as loop_exporters 7030 if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')): 7031 ExporterClass=None 7032 if not cmd.options['loop_optimized_output']: 7033 ExporterClass=loop_exporters.LoopProcessExporterFortranSA 7034 else: 7035 if output_type == "madloop": 7036 ExporterClass=loop_exporters.LoopProcessOptimizedExporterFortranSA 7037 MadLoop_SA_options['export_format'] = 'madloop_optimized' 7038 elif output_type == "madloop_matchbox": 7039 ExporterClass=loop_exporters.LoopProcessExporterFortranMatchBox 7040 MadLoop_SA_options['export_format'] = 'madloop_matchbox' 7041 else: 7042 raise Exception("output_type not recognize %s" % output_type) 7043 return ExporterClass(cmd._export_dir, MadLoop_SA_options) 7044 else: 7045 raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory'+\ 7046 ' in %s'%str(cmd._mgme_dir)) 7047 7048 # Then treat the aMC@NLO 
output 7049 elif output_type=='amcatnlo': 7050 import madgraph.iolibs.export_fks as export_fks 7051 ExporterClass=None 7052 amcatnlo_options = dict(opt) 7053 amcatnlo_options.update(MadLoop_SA_options) 7054 amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0 7055 if not cmd.options['loop_optimized_output']: 7056 logger.info("Writing out the aMC@NLO code") 7057 ExporterClass = export_fks.ProcessExporterFortranFKS 7058 amcatnlo_options['export_format']='FKS5_default' 7059 else: 7060 logger.info("Writing out the aMC@NLO code, using optimized Loops") 7061 ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS 7062 amcatnlo_options['export_format']='FKS5_optimized' 7063 return ExporterClass(cmd._export_dir, amcatnlo_options) 7064 7065 7066 # Then the default tree-level output 7067 elif output_type=='default': 7068 assert group_subprocesses in [True, False] 7069 7070 opt = dict(opt) 7071 opt.update({'clean': not noclean, 7072 'complex_mass': cmd.options['complex_mass_scheme'], 7073 'export_format':cmd._export_format, 7074 'mp': False, 7075 'sa_symmetry':False, 7076 'model': cmd._curr_model.get('name'), 7077 'v5_model': False if cmd._model_v4_path else True }) 7078 7079 format = cmd._export_format #shortcut 7080 7081 if format in ['standalone_msP', 'standalone_msF', 'standalone_rw']: 7082 opt['sa_symmetry'] = True 7083 elif format == 'plugin': 7084 opt['sa_symmetry'] = cmd._export_plugin.sa_symmetry 7085 7086 loop_induced_opt = dict(opt) 7087 loop_induced_opt.update(MadLoop_SA_options) 7088 loop_induced_opt['export_format'] = 'madloop_optimized' 7089 loop_induced_opt['SubProc_prefix'] = 'PV' 7090 # For loop_induced output with MadEvent, we must have access to the 7091 # color flows. 7092 loop_induced_opt['compute_color_flows'] = True 7093 for key in opt: 7094 if key not in loop_induced_opt: 7095 loop_induced_opt[key] = opt[key] 7096 7097 # Madevent output supports MadAnalysis5 7098 if format in ['madevent']: 7099 opt['madanalysis5'] = cmd.options['madanalysis5_path'] 7100 7101 if format == 'matrix' or format.startswith('standalone'): 7102 return ProcessExporterFortranSA(cmd._export_dir, opt, format=format) 7103 7104 elif format in ['madevent'] and group_subprocesses: 7105 if isinstance(cmd._curr_amps[0], 7106 loop_diagram_generation.LoopAmplitude): 7107 import madgraph.loop.loop_exporters as loop_exporters 7108 return loop_exporters.LoopInducedExporterMEGroup( 7109 cmd._export_dir,loop_induced_opt) 7110 else: 7111 return ProcessExporterFortranMEGroup(cmd._export_dir,opt) 7112 elif format in ['madevent']: 7113 if isinstance(cmd._curr_amps[0], 7114 loop_diagram_generation.LoopAmplitude): 7115 import madgraph.loop.loop_exporters as loop_exporters 7116 return loop_exporters.LoopInducedExporterMENoGroup( 7117 cmd._export_dir,loop_induced_opt) 7118 else: 7119 return ProcessExporterFortranME(cmd._export_dir,opt) 7120 elif format in ['matchbox']: 7121 return ProcessExporterFortranMatchBox(cmd._export_dir,opt) 7122 elif cmd._export_format in ['madweight'] and group_subprocesses: 7123 7124 return ProcessExporterFortranMWGroup(cmd._export_dir, opt) 7125 elif cmd._export_format in ['madweight']: 7126 return ProcessExporterFortranMW(cmd._export_dir, opt) 7127 elif format == 'plugin': 7128 if isinstance(cmd._curr_amps[0], 7129 loop_diagram_generation.LoopAmplitude): 7130 return cmd._export_plugin(cmd._export_dir, loop_induced_opt) 7131 else: 7132 return cmd._export_plugin(cmd._export_dir, opt) 7133 7134 else: 7135 raise Exception('Wrong export_v4 format') 7136 else: 7137 raise 
MadGraph5Error('Output type %s not recognized in ExportV4Factory.' % output_type)
7138
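Editor's note: the 'default' branch of ExportV4Factory above selects the tree-level exporter class from the export format and the grouping/loop-induced flags. A condensed, names-only sketch of that dispatch (the helper is hypothetical and ignores the 'plugin' case and option dictionaries):

    def pick_tree_level_exporter(export_format, group_subprocesses, loop_induced):
        """Condensed view of the 'default' branch above (class names only)."""
        if export_format == 'matrix' or export_format.startswith('standalone'):
            return 'ProcessExporterFortranSA'
        if export_format == 'madevent':
            if loop_induced:
                return ('LoopInducedExporterMEGroup' if group_subprocesses
                        else 'LoopInducedExporterMENoGroup')
            return ('ProcessExporterFortranMEGroup' if group_subprocesses
                    else 'ProcessExporterFortranME')
        if export_format == 'matchbox':
            return 'ProcessExporterFortranMatchBox'
        if export_format == 'madweight':
            return ('ProcessExporterFortranMWGroup' if group_subprocesses
                    else 'ProcessExporterFortranMW')
        raise ValueError('Wrong export_v4 format')

    print(pick_tree_level_exporter('madevent', True, False))  # ProcessExporterFortranMEGroup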
7139 7140 7141 7142 #=============================================================================== 7143 # ProcessExporterFortranMWGroup 7144 #=============================================================================== 7145 -class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
7146 """Class to take care of exporting a set of matrix elements to 7147 MadEvent subprocess group format.""" 7148 7149 matrix_file = "matrix_madweight_group_v4.inc" 7150 grouped_mode = 'madweight' 7151 #=========================================================================== 7152 # generate_subprocess_directory 7153 #===========================================================================
7154 - def generate_subprocess_directory(self, subproc_group, 7155 fortran_model, 7156 group_number):
7157 """Generate the Pn directory for a subprocess group in MadEvent, 7158 including the necessary matrix_N.f files, configs.inc and various 7159 other helper files.""" 7160 7161 if not isinstance(subproc_group, group_subprocs.SubProcessGroup): 7162 raise base_objects.PhysicsObject.PhysicsObjectError("subproc_group object not SubProcessGroup") 7163 7164 if not self.model: 7165 self.model = subproc_group.get('matrix_elements')[0].\ 7166 get('processes')[0].get('model') 7167 7168 pathdir = os.path.join(self.dir_path, 'SubProcesses') 7169 7170 # Create the directory PN in the specified path 7171 subprocdir = "P%d_%s" % (subproc_group.get('number'), 7172 subproc_group.get('name')) 7173 try: 7174 os.mkdir(pjoin(pathdir, subprocdir)) 7175 except os.error as error: 7176 logger.warning(error.strerror + " " + subprocdir) 7177 7178 7179 logger.info('Creating files in directory %s' % subprocdir) 7180 Ppath = pjoin(pathdir, subprocdir) 7181 7182 # Create the matrix.f files, auto_dsig.f files and all inc files 7183 # for all subprocesses in the group 7184 7185 maxamps = 0 7186 maxflows = 0 7187 tot_calls = 0 7188 7189 matrix_elements = subproc_group.get('matrix_elements') 7190 7191 for ime, matrix_element in \ 7192 enumerate(matrix_elements): 7193 filename = pjoin(Ppath, 'matrix%d.f' % (ime+1)) 7194 calls, ncolor = \ 7195 self.write_matrix_element_v4(writers.FortranWriter(filename), 7196 matrix_element, 7197 fortran_model, 7198 str(ime+1), 7199 subproc_group.get('diagram_maps')[\ 7200 ime]) 7201 7202 filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime+1)) 7203 self.write_auto_dsig_file(writers.FortranWriter(filename), 7204 matrix_element, 7205 str(ime+1)) 7206 7207 # Keep track of needed quantities 7208 tot_calls += int(calls) 7209 maxflows = max(maxflows, ncolor) 7210 maxamps = max(maxamps, len(matrix_element.get('diagrams'))) 7211 7212 # Draw diagrams 7213 filename = pjoin(Ppath, "matrix%d.ps" % (ime+1)) 7214 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 7215 get('diagrams'), 7216 filename, 7217 model = \ 7218 matrix_element.get('processes')[0].\ 7219 get('model'), 7220 amplitude=True) 7221 logger.info("Generating Feynman diagrams for " + \ 7222 matrix_element.get('processes')[0].nice_string()) 7223 plot.draw() 7224 7225 # Extract number of external particles 7226 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 7227 7228 # Generate a list of diagrams corresponding to each configuration 7229 # [[d1, d2, ...,dn],...] 
where 1,2,...,n is the subprocess number 7230 # If a subprocess has no diagrams for this config, the number is 0 7231 7232 subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs') 7233 7234 filename = pjoin(Ppath, 'auto_dsig.f') 7235 self.write_super_auto_dsig_file(writers.FortranWriter(filename), 7236 subproc_group) 7237 7238 filename = pjoin(Ppath,'configs.inc') 7239 nconfigs, s_and_t_channels = self.write_configs_file(\ 7240 writers.FortranWriter(filename), 7241 subproc_group, 7242 subproc_diagrams_for_config) 7243 7244 filename = pjoin(Ppath, 'leshouche.inc') 7245 self.write_leshouche_file(writers.FortranWriter(filename), 7246 subproc_group) 7247 7248 filename = pjoin(Ppath, 'phasespace.inc') 7249 self.write_phasespace_file(writers.FortranWriter(filename), 7250 nconfigs) 7251 7252 7253 filename = pjoin(Ppath, 'maxamps.inc') 7254 self.write_maxamps_file(writers.FortranWriter(filename), 7255 maxamps, 7256 maxflows, 7257 max([len(me.get('processes')) for me in \ 7258 matrix_elements]), 7259 len(matrix_elements)) 7260 7261 filename = pjoin(Ppath, 'mirrorprocs.inc') 7262 self.write_mirrorprocs(writers.FortranWriter(filename), 7263 subproc_group) 7264 7265 filename = pjoin(Ppath, 'nexternal.inc') 7266 self.write_nexternal_file(writers.FortranWriter(filename), 7267 nexternal, ninitial) 7268 7269 filename = pjoin(Ppath, 'pmass.inc') 7270 self.write_pmass_file(writers.FortranWriter(filename), 7271 matrix_element) 7272 7273 filename = pjoin(Ppath, 'props.inc') 7274 self.write_props_file(writers.FortranWriter(filename), 7275 matrix_element, 7276 s_and_t_channels) 7277 7278 # filename = pjoin(Ppath, 'processes.dat') 7279 # files.write_to_file(filename, 7280 # self.write_processes_file, 7281 # subproc_group) 7282 7283 # Generate jpgs -> pass in make_html 7284 #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl')) 7285 7286 linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f'] 7287 7288 for file in linkfiles: 7289 ln('../%s' % file, cwd=Ppath) 7290 7291 ln('nexternal.inc', '../../Source', cwd=Ppath, log=False) 7292 ln('leshouche.inc', '../../Source', cwd=Ppath, log=False) 7293 ln('maxamps.inc', '../../Source', cwd=Ppath, log=False) 7294 ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath) 7295 ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath) 7296 ln('phasespace.inc', '../', log=True, cwd=Ppath) 7297 if not tot_calls: 7298 tot_calls = 0 7299 return tot_calls
7300 7301 7302 #=========================================================================== 7303 # Helper functions 7304 #===========================================================================
7305 - def modify_grouping(self, matrix_element):
7306 """Allow the grouping to be modified (if grouping is in place). 7307 Returns two values: 7308 - True/False if the matrix_element was modified 7309 - the new (or old) matrix element""" 7310 7311 return True, matrix_element.split_lepton_grouping()
7312 7313 #=========================================================================== 7314 # write_super_auto_dsig_file 7315 #===========================================================================
7316 - def write_super_auto_dsig_file(self, writer, subproc_group):
7317 """Write the auto_dsig.f file selecting between the subprocesses 7318 in subprocess group mode""" 7319 7320 replace_dict = {} 7321 7322 # Extract version number and date from VERSION file 7323 info_lines = self.get_mg5_info_lines() 7324 replace_dict['info_lines'] = info_lines 7325 7326 matrix_elements = subproc_group.get('matrix_elements') 7327 7328 # Extract process info lines 7329 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 7330 matrix_elements]) 7331 replace_dict['process_lines'] = process_lines 7332 7333 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 7334 replace_dict['nexternal'] = nexternal 7335 7336 replace_dict['nsprocs'] = 2*len(matrix_elements) 7337 7338 # Generate dsig definition line 7339 dsig_def_line = "DOUBLE PRECISION " + \ 7340 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 7341 range(len(matrix_elements))]) 7342 replace_dict["dsig_def_line"] = dsig_def_line 7343 7344 # Generate dsig process lines 7345 call_dsig_proc_lines = [] 7346 for iproc in range(len(matrix_elements)): 7347 call_dsig_proc_lines.append(\ 7348 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 7349 {"num": iproc + 1, 7350 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 7351 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 7352 7353 if writer: 7354 file = open(os.path.join(_file_path, \ 7355 'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')).read() 7356 file = file % replace_dict 7357 # Write the file 7358 writer.writelines(file) 7359 else: 7360 return replace_dict
7361 7362 #=========================================================================== 7363 # write_mirrorprocs 7364 #===========================================================================
7365 - def write_mirrorprocs(self, writer, subproc_group):
7366 """Write the mirrorprocs.inc file determining which processes have 7367 IS mirror process in subprocess group mode.""" 7368 7369 lines = [] 7370 bool_dict = {True: '.true.', False: '.false.'} 7371 matrix_elements = subproc_group.get('matrix_elements') 7372 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 7373 (len(matrix_elements), 7374 ",".join([bool_dict[me.get('has_mirror_process')] for \ 7375 me in matrix_elements]))) 7376 # Write the file 7377 writer.writelines(lines)
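Editor's note: write_mirrorprocs above emits a single Fortran DATA statement listing, for each matrix element in the group, whether it has an initial-state mirror process. A minimal sketch of that line construction (helper name is hypothetical):

    def mirrorprocs_line(has_mirror_flags):
        """Build the Fortran DATA line as in write_mirrorprocs above."""
        bool_dict = {True: '.true.', False: '.false.'}
        return "DATA (MIRRORPROCS(I),I=1,%d)/%s/" % (
            len(has_mirror_flags),
            ",".join(bool_dict[flag] for flag in has_mirror_flags))

    print(mirrorprocs_line([True, False, True]))
    # DATA (MIRRORPROCS(I),I=1,3)/.true.,.false.,.true./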
7378 7379 #=========================================================================== 7380 # write_configs_file 7381 #===========================================================================
7382 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
7383 """Write the configs.inc file with topology information for a 7384 subprocess group. Use the first subprocess with a diagram for each 7385 configuration.""" 7386 7387 matrix_elements = subproc_group.get('matrix_elements') 7388 model = matrix_elements[0].get('processes')[0].get('model') 7389 7390 diagrams = [] 7391 config_numbers = [] 7392 for iconfig, config in enumerate(diagrams_for_config): 7393 # Check if any diagrams correspond to this config 7394 if set(config) == set([0]): 7395 continue 7396 subproc_diags = [] 7397 for s,d in enumerate(config): 7398 if d: 7399 subproc_diags.append(matrix_elements[s].\ 7400 get('diagrams')[d-1]) 7401 else: 7402 subproc_diags.append(None) 7403 diagrams.append(subproc_diags) 7404 config_numbers.append(iconfig + 1) 7405 7406 # Extract number of external particles 7407 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 7408 7409 return len(diagrams), \ 7410 self.write_configs_file_from_diagrams(writer, diagrams, 7411 config_numbers, 7412 nexternal, ninitial, 7413 matrix_elements[0],model)
7414 7415 #=========================================================================== 7416 # write_run_configs_file 7417 #===========================================================================
7418 - def write_run_config_file(self, writer):
7419 """Write the run_configs.inc file for MadWeight""" 7420 7421 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 7422 text = open(path).read() % {'chanperjob':'2'} 7423 writer.write(text) 7424 return True
7425 7426 7427 #=========================================================================== 7428 # write_leshouche_file 7429 #===========================================================================
7430 - def write_leshouche_file(self, writer, subproc_group):
7431 """Write the leshouche.inc file for MG4""" 7432 7433 all_lines = [] 7434 7435 for iproc, matrix_element in \ 7436 enumerate(subproc_group.get('matrix_elements')): 7437 all_lines.extend(self.get_leshouche_lines(matrix_element, 7438 iproc)) 7439 7440 # Write the file 7441 writer.writelines(all_lines) 7442 7443 return True
7444