Package madgraph :: Package iolibs :: Module export_fks

Source Code for Module madgraph.iolibs.export_fks

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Methods and classes to export matrix elements to fks format.""" 
  16   
  17  from distutils import dir_util 
  18  import glob 
  19  import logging 
  20  import os 
  21  import re 
  22  import shutil 
  23  import subprocess 
  24  import string 
  25  import copy 
  26   
  27  import madgraph.core.color_algebra as color 
  28  import madgraph.core.helas_objects as helas_objects 
  29  import madgraph.core.base_objects as base_objects 
  30  import madgraph.fks.fks_helas_objects as fks_helas_objects 
  31  import madgraph.fks.fks_base as fks 
  32  import madgraph.fks.fks_common as fks_common 
  33  import madgraph.iolibs.drawing_eps as draw 
  34  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  35  import madgraph.iolibs.files as files 
  36  import madgraph.various.misc as misc 
  37  import madgraph.iolibs.file_writers as writers 
  38  import madgraph.iolibs.template_files as template_files 
  39  import madgraph.iolibs.ufo_expression_parsers as parsers 
  40  import madgraph.iolibs.export_v4 as export_v4 
  41  import madgraph.loop.loop_exporters as loop_exporters 
  42  import madgraph.various.q_polynomial as q_polynomial 
  43   
  44  import aloha.create_aloha as create_aloha 
  45   
  46  import models.write_param_card as write_param_card 
  47  import models.check_param_card as check_param_card 
  48  from madgraph import MadGraph5Error, MG5DIR, InvalidCmd 
  49  from madgraph.iolibs.files import cp, ln, mv 
  50   
  51  pjoin = os.path.join 
  52   
  53  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  54  logger = logging.getLogger('madgraph.export_fks') 
  55   
  56  #================================================================================= 
   57  # Class used for the (non-optimized) loop process 
  58  #================================================================================= 
   59  class ProcessExporterFortranFKS(loop_exporters.LoopProcessExporterFortranSA):
60 """Class to take care of exporting a set of matrix elements to 61 Fortran (v4) format.""" 62 63 #=============================================================================== 64 # copy the Template in a new directory. 65 #===============================================================================
66 - def copy_fkstemplate(self):
67 """create the directory run_name as a copy of the MadEvent 68 Template, and clean the directory 69 For now it is just the same as copy_v4template, but it will be modified 70 """ 71 mgme_dir = self.mgme_dir 72 dir_path = self.dir_path 73 clean =self.opt['clean'] 74 75 #First copy the full template tree if dir_path doesn't exit 76 if not os.path.isdir(dir_path): 77 if not mgme_dir: 78 raise MadGraph5Error, \ 79 "No valid MG_ME path given for MG4 run directory creation." 80 logger.info('initialize a new directory: %s' % \ 81 os.path.basename(dir_path)) 82 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True) 83 # distutils.dir_util.copy_tree since dir_path already exists 84 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template', 'Common'), 85 dir_path) 86 elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')): 87 if not mgme_dir: 88 raise MadGraph5Error, \ 89 "No valid MG_ME path given for MG4 run directory creation." 90 try: 91 shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path) 92 except IOError: 93 MG5_version = misc.get_pkg_info() 94 open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \ 95 "5." + MG5_version['version']) 96 97 #Ensure that the Template is clean 98 if clean: 99 logger.info('remove old information in %s' % os.path.basename(dir_path)) 100 if os.environ.has_key('MADGRAPH_BASE'): 101 subprocess.call([os.path.join('bin', 'internal', 'clean_template'), 102 '--web'],cwd=dir_path) 103 else: 104 try: 105 subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \ 106 cwd=dir_path) 107 except Exception, why: 108 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 109 % (os.path.basename(dir_path),why)) 110 #Write version info 111 MG_version = misc.get_pkg_info() 112 open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 113 MG_version['version']) 114 115 # We must link the CutTools to the Library folder of the active Template 116 self.link_CutTools(dir_path) 117 118 link_tir_libs=[] 119 tir_libs=[] 120 os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc')) 121 dirpath = os.path.join(self.dir_path, 'SubProcesses') 122 filename = pjoin(self.dir_path, 'SubProcesses','makefile_loop') 123 calls = self.write_makefile_TIR(writers.MakefileWriter(filename), 124 link_tir_libs,tir_libs) 125 os.remove(os.path.join(self.dir_path,'Source','make_opts.inc')) 126 filename = pjoin(self.dir_path, 'Source','make_opts') 127 calls = self.write_make_opts(writers.MakefileWriter(filename), 128 link_tir_libs,tir_libs) 129 130 # Duplicate run_card and FO_analyse_card 131 for card in ['run_card', 'FO_analyse_card', 'shower_card']: 132 try: 133 shutil.copy(pjoin(self.dir_path, 'Cards', 134 card + '.dat'), 135 pjoin(self.dir_path, 'Cards', 136 card + '_default.dat')) 137 except IOError: 138 logger.warning("Failed to copy " + card + ".dat to default") 139 140 cwd = os.getcwd() 141 dirpath = os.path.join(self.dir_path, 'SubProcesses') 142 try: 143 os.chdir(dirpath) 144 except os.error: 145 logger.error('Could not cd to directory %s' % dirpath) 146 return 0 147 148 # We add here the user-friendly MadLoop option setter. 
149 cpfiles= ["SubProcesses/MadLoopParamReader.f", 150 "Cards/MadLoopParams.dat", 151 "SubProcesses/MadLoopParams.inc"] 152 153 for file in cpfiles: 154 shutil.copy(os.path.join(self.loop_dir,'StandAlone/', file), 155 os.path.join(self.dir_path, file)) 156 157 # We need minimal editing of MadLoopCommons.f 158 MadLoopCommon = open(os.path.join(self.loop_dir,'StandAlone', 159 "SubProcesses","MadLoopCommons.inc")).read() 160 writer = writers.FortranWriter(os.path.join(self.dir_path, 161 "SubProcesses","MadLoopCommons.f")) 162 writer.writelines(MadLoopCommon%{ 163 'print_banner_commands':self.MadLoop_banner}) 164 writer.close() 165 166 # Write the cts_mpc.h and cts_mprec.h files imported from CutTools 167 self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\ 168 writers.FortranWriter('cts_mpc.h')) 169 170 171 # Finally make sure to turn off MC over Hel for the default mode. 172 FKS_card_path = pjoin(self.dir_path,'Cards','FKS_params.dat') 173 FKS_card_file = open(FKS_card_path,'r') 174 FKS_card = FKS_card_file.read() 175 FKS_card_file.close() 176 FKS_card = re.sub(r"#NHelForMCoverHels\n-?\d+", 177 "#NHelForMCoverHels\n-1", FKS_card) 178 FKS_card_file = open(FKS_card_path,'w') 179 FKS_card_file.write(FKS_card) 180 FKS_card_file.close() 181 182 # Return to original PWD 183 os.chdir(cwd) 184 # Copy the different python files in the Template 185 self.copy_python_files()
  186   
  187      # Kept here rather than in the optimized exporter so that the same
  188      # makefile_loop.inc is used. We also overload this function (it is already
  189      # defined in LoopProcessExporterFortranSA) because the path of the template
  190      # makefile is different.
  191      def write_makefile_TIR(self, writer, link_tir_libs,tir_libs,tir_include=[]):
192 """ Create the file makefile_loop which links to the TIR libraries.""" 193 194 file = open(os.path.join(self.mgme_dir,'Template','NLO', 195 'SubProcesses','makefile_loop.inc')).read() 196 replace_dict={} 197 replace_dict['link_tir_libs']=' '.join(link_tir_libs) 198 replace_dict['tir_libs']=' '.join(tir_libs) 199 replace_dict['dotf']='%.f' 200 replace_dict['doto']='%.o' 201 replace_dict['tir_include']=' '.join(tir_include) 202 file=file%replace_dict 203 if writer: 204 writer.writelines(file) 205 else: 206 return file
  207   
  208      # Kept here rather than in the optimized exporter so that the same make_opts.inc is used.
  209      def write_make_opts(self, writer, link_tir_libs,tir_libs):
210 """ Create the file make_opts which links to the TIR libraries.""" 211 file = open(os.path.join(self.mgme_dir,'Template','NLO', 212 'Source','make_opts.inc')).read() 213 replace_dict={} 214 replace_dict['link_tir_libs']=' '.join(link_tir_libs) 215 replace_dict['tir_libs']=' '.join(tir_libs) 216 replace_dict['dotf']='%.f' 217 replace_dict['doto']='%.o' 218 file=file%replace_dict 219 if writer: 220 writer.writelines(file) 221 else: 222 return file
  223   
  224      #===========================================================================
  225      # copy_python_files
  226      #===========================================================================
227 - def copy_python_files(self):
228 """copy python files required for the Template""" 229 230 cp(_file_path+'/interface/amcatnlo_run_interface.py', 231 self.dir_path+'/bin/internal/amcatnlo_run_interface.py') 232 cp(_file_path+'/interface/extended_cmd.py', 233 self.dir_path+'/bin/internal/extended_cmd.py') 234 cp(_file_path+'/interface/common_run_interface.py', 235 self.dir_path+'/bin/internal/common_run_interface.py') 236 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 237 cp(_file_path+'/various/shower_card.py', self.dir_path+'/bin/internal/shower_card.py') 238 cp(_file_path+'/various/FO_analyse_card.py', self.dir_path+'/bin/internal/FO_analyse_card.py') 239 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 240 cp(_file_path+'/iolibs/save_load_object.py', 241 self.dir_path+'/bin/internal/save_load_object.py') 242 cp(_file_path+'/iolibs/file_writers.py', 243 self.dir_path+'/bin/internal/file_writers.py') 244 cp(_file_path+'../models/check_param_card.py', 245 self.dir_path+'/bin/internal/check_param_card.py') 246 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 247 cp(_file_path+'/various/gen_crossxhtml.py', 248 self.dir_path+'/bin/internal/gen_crossxhtml.py') 249 cp(_file_path+'/various/banner.py', 250 self.dir_path+'/bin/internal/banner.py') 251 cp(_file_path+'/various/cluster.py', 252 self.dir_path+'/bin/internal/cluster.py') 253 cp(_file_path+'/various/sum_html.py', 254 self.dir_path+'/bin/internal/sum_html.py') 255 cp(_file_path+'/various/lhe_parser.py', 256 self.dir_path+'/bin/internal/lhe_parser.py') 257 cp(_file_path+'/interface/.mg5_logging.conf', 258 self.dir_path+'/bin/internal/me5_logging.conf') 259 cp(_file_path+'/interface/coloring_logging.py', 260 self.dir_path+'/bin/internal/coloring_logging.py')
261 262
263 - def convert_model_to_mg4(self, model, wanted_lorentz = [], 264 wanted_couplings = []):
265 266 super(ProcessExporterFortranFKS,self).convert_model_to_mg4(model, 267 wanted_lorentz, wanted_couplings) 268 269 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 270 try: 271 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 272 except OSError as error: 273 pass 274 model_path = model.get('modelpath') 275 shutil.copytree(model_path, 276 pjoin(self.dir_path,'bin','internal','ufomodel'), 277 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 278 if hasattr(model, 'restrict_card'): 279 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 280 'restrict_default.dat') 281 if isinstance(model.restrict_card, check_param_card.ParamCard): 282 model.restrict_card.write(out_path) 283 else: 284 files.cp(model.restrict_card, out_path)
  285   
  286   
  287   
  288      #===========================================================================
  289      # write_maxparticles_file
  290      #===========================================================================
  291      def write_maxparticles_file(self, writer, matrix_elements):
292 """Write the maxparticles.inc file for MadEvent""" 293 294 maxparticles = max([me.get_nexternal_ninitial()[0] \ 295 for me in matrix_elements]) 296 297 lines = "integer max_particles, max_branch\n" 298 lines += "parameter (max_particles=%d) \n" % maxparticles 299 lines += "parameter (max_branch=max_particles-1)" 300 301 # Write the file 302 writer.writelines(lines) 303 304 return True
  305   
  306   
  307      #===========================================================================
  308      # write_maxconfigs_file
  309      #===========================================================================
  310      def write_maxconfigs_file(self, writer, matrix_elements):
311 """Write the maxconfigs.inc file for MadEvent""" 312 313 maxconfigs = max([me.get_num_configs() for me in matrix_elements]) 314 315 lines = "integer lmaxconfigs\n" 316 lines += "parameter (lmaxconfigs=%d)" % maxconfigs 317 318 # Write the file 319 writer.writelines(lines) 320 321 return True
  322   
  323   
  324      #===============================================================================
  325      # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat)
  326      #===============================================================================
327 - def write_procdef_mg5(self, file_pos, modelname, process_str):
328 """ write an equivalent of the MG4 proc_card in order that all the Madevent 329 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 330 331 proc_card_template = template_files.mg4_proc_card.mg4_template 332 process_template = template_files.mg4_proc_card.process_template 333 process_text = '' 334 coupling = '' 335 new_process_content = [] 336 337 # First find the coupling and suppress the coupling from process_str 338 #But first ensure that coupling are define whithout spaces: 339 process_str = process_str.replace(' =', '=') 340 process_str = process_str.replace('= ', '=') 341 process_str = process_str.replace(',',' , ') 342 #now loop on the element and treat all the coupling 343 for info in process_str.split(): 344 if '=' in info: 345 coupling += info + '\n' 346 else: 347 new_process_content.append(info) 348 # Recombine the process_str (which is the input process_str without coupling 349 #info) 350 process_str = ' '.join(new_process_content) 351 352 #format the SubProcess 353 process_text += process_template.substitute({'process': process_str, \ 354 'coupling': coupling}) 355 356 text = proc_card_template.substitute({'process': process_text, 357 'model': modelname, 358 'multiparticle':''}) 359 ff = open(file_pos, 'w') 360 ff.write(text) 361 ff.close()
  362   
  363   
  364      #===============================================================================
  365      # write an initial-states map, useful for the fast PDF NLO interface
  366      #===============================================================================
  367      def write_init_map(self, file_pos, initial_states):
368 """ Write an initial state process map. Each possible PDF 369 combination gets an unique identifier.""" 370 371 text='' 372 for i,e in enumerate(initial_states): 373 text=text+str(i+1)+' '+str(len(e)) 374 for t in e: 375 text=text+' ' 376 for p in t: 377 text=text+' '+str(p) 378 text=text+'\n' 379 380 ff = open(file_pos, 'w') 381 ff.write(text) 382 ff.close()
383
  384      def get_ME_identifier(self, matrix_element):
385 """ A function returning a string uniquely identifying the matrix 386 element given in argument so that it can be used as a prefix to all 387 MadLoop5 subroutines and common blocks related to it. This allows 388 to compile several processes into one library as requested by the 389 BLHA (Binoth LesHouches Accord) guidelines. The MadFKS design 390 necessitates that there is no process prefix.""" 391 392 return ''
  393   
  394      #===============================================================================
  395      # write_coef_specs
  396      #===============================================================================
  397      def write_coef_specs_file(self, virt_me_list):
398 """writes the coef_specs.inc in the DHELAS folder. Should not be called in the 399 non-optimized mode""" 400 raise fks_common.FKSProcessError(), \ 401 "write_coef_specs should be called only in the loop-optimized mode"
  402   
  403   
  404      #===============================================================================
  405      # generate_directories_fks
  406      #===============================================================================
407 - def generate_directories_fks(self, matrix_element, fortran_model, me_number, 408 me_ntot, path=os.getcwd(),OLP='MadLoop'):
409 """Generate the Pxxxxx_i directories for a subprocess in MadFKS, 410 including the necessary matrix.f and various helper files""" 411 proc = matrix_element.born_matrix_element['processes'][0] 412 413 if not self.model: 414 self.model = matrix_element.get('processes')[0].get('model') 415 416 cwd = os.getcwd() 417 try: 418 os.chdir(path) 419 except OSError, error: 420 error_msg = "The directory %s should exist in order to be able " % path + \ 421 "to \"export\" in it. If you see this error message by " + \ 422 "typing the command \"export\" please consider to use " + \ 423 "instead the command \"output\". " 424 raise MadGraph5Error, error_msg 425 426 calls = 0 427 428 self.fksdirs = [] 429 #first make and cd the direcrory corresponding to the born process: 430 borndir = "P%s" % \ 431 (matrix_element.get('processes')[0].shell_string()) 432 os.mkdir(borndir) 433 os.chdir(borndir) 434 logger.info('Writing files in %s (%d / %d)' % (borndir, me_number + 1, me_ntot)) 435 436 ## write the files corresponding to the born process in the P* directory 437 self.generate_born_fks_files(matrix_element, 438 fortran_model, me_number, path) 439 440 # With NJET you want to generate the order file per subprocess and most 441 # likely also generate it for each subproc. 442 if OLP=='NJET': 443 filename = 'OLE_order.lh' 444 self.write_lh_order(filename, matrix_element, OLP) 445 446 if matrix_element.virt_matrix_element: 447 calls += self.generate_virt_directory( \ 448 matrix_element.virt_matrix_element, \ 449 fortran_model, \ 450 os.path.join(path, borndir)) 451 452 #write the infortions for the different real emission processes 453 454 self.write_real_matrix_elements(matrix_element, fortran_model) 455 456 self.write_pdf_calls(matrix_element, fortran_model) 457 458 filename = 'nFKSconfigs.inc' 459 self.write_nfksconfigs_file(writers.FortranWriter(filename), 460 matrix_element, 461 fortran_model) 462 463 filename = 'iproc.dat' 464 self.write_iproc_file(writers.FortranWriter(filename), 465 me_number) 466 467 filename = 'fks_info.inc' 468 self.write_fks_info_file(writers.FortranWriter(filename), 469 matrix_element, 470 fortran_model) 471 472 filename = 'leshouche_info.inc' 473 self.write_leshouche_info_file(writers.FortranWriter(filename), 474 matrix_element, 475 fortran_model) 476 477 filename = 'configs_and_props_info.inc' 478 nconfigs=self.write_configs_and_props_info_file( 479 writers.FortranWriter(filename), 480 matrix_element, 481 fortran_model) 482 483 filename = 'real_from_born_configs.inc' 484 self.write_real_from_born_configs( 485 writers.FortranWriter(filename), 486 matrix_element, 487 fortran_model) 488 489 filename = 'ngraphs.inc' 490 self.write_ngraphs_file(writers.FortranWriter(filename), 491 nconfigs) 492 493 #write the wrappers 494 filename = 'real_me_chooser.f' 495 self.write_real_me_wrapper(writers.FortranWriter(filename), 496 matrix_element, 497 fortran_model) 498 499 filename = 'parton_lum_chooser.f' 500 self.write_pdf_wrapper(writers.FortranWriter(filename), 501 matrix_element, 502 fortran_model) 503 504 filename = 'get_color.f' 505 self.write_colors_file(writers.FortranWriter(filename), 506 matrix_element) 507 508 filename = 'nexternal.inc' 509 (nexternal, ninitial) = \ 510 matrix_element.real_processes[0].get_nexternal_ninitial() 511 self.write_nexternal_file(writers.FortranWriter(filename), 512 nexternal, ninitial) 513 514 filename = 'pmass.inc' 515 self.write_pmass_file(writers.FortranWriter(filename), 516 matrix_element.real_processes[0].matrix_element) 517 518 #draw the diagrams 519 
self.draw_feynman_diagrams(matrix_element) 520 521 linkfiles = ['BinothLHADummy.f', 522 'check_poles.f', 523 'MCmasses_HERWIG6.inc', 524 'MCmasses_HERWIGPP.inc', 525 'MCmasses_PYTHIA6Q.inc', 526 'MCmasses_PYTHIA6PT.inc', 527 'MCmasses_PYTHIA8.inc', 528 'add_write_info.f', 529 'coupl.inc', 530 'cuts.f', 531 'FKS_params.dat', 532 'initial_states_map.dat', 533 'OLE_order.olc', 534 'FKSParams.inc', 535 'FKSParamReader.f', 536 'cuts.inc', 537 'unlops.inc', 538 'pythia_unlops.f', 539 'driver_mintMC.f', 540 'driver_mintFO.f', 541 'driver_vegas.f', 542 'appl_interface.cc', 543 'appl_interface_dummy.f', 544 'appl_common.inc', 545 'reweight_appl.inc', 546 'driver_reweight.f', 547 'fastjetfortran_madfks_core.cc', 548 'fastjetfortran_madfks_full.cc', 549 'fjcore.cc', 550 'fastjet_wrapper.f', 551 'fjcore.hh', 552 'fks_Sij.f', 553 'fks_powers.inc', 554 'fks_singular.f', 555 'fks_inc_chooser.f', 556 'leshouche_inc_chooser.f', 557 'configs_and_props_inc_chooser.f', 558 'genps.inc', 559 'genps_fks.f', 560 'boostwdir2.f', 561 'madfks_mcatnlo.inc', 562 'open_output_files.f', 563 'open_output_files_dummy.f', 564 'madfks_plot.f', 565 'analysis_dummy.f', 566 'mint-integrator2.f', 567 'MC_integer.f', 568 'mint.inc', 569 'montecarlocounter.f', 570 'q_es.inc', 571 'recluster.cc', 572 'Boosts.h', 573 'reweight.inc', 574 'reweight0.inc', 575 'reweight1.inc', 576 'reweightNLO.inc', 577 'reweight_all.inc', 578 'reweight_events.f', 579 'reweight_xsec.f', 580 'reweight_xsec_events.f', 581 'reweight_xsec_events_pdf_dummy.f', 582 'iproc_map.f', 583 'run.inc', 584 'setcuts.f', 585 'setscales.f', 586 'symmetry_fks_test_MC.f', 587 'symmetry_fks_test_ME.f', 588 'symmetry_fks_test_Sij.f', 589 'symmetry_fks_v3.f', 590 'trapfpe.c', 591 'vegas2.for', 592 'write_ajob.f', 593 'handling_lhe_events.f', 594 'write_event.f', 595 'fill_MC_mshell.f', 596 'maxparticles.inc', 597 'message.inc', 598 'initcluster.f', 599 'cluster.inc', 600 'cluster.f', 601 'reweight.f', 602 'sudakov.inc', 603 'maxconfigs.inc', 604 'timing_variables.inc'] 605 606 for file in linkfiles: 607 ln('../' + file , '.') 608 os.system("ln -s ../../Cards/param_card.dat .") 609 610 #copy the makefile 611 os.system("ln -s ../makefile_fks_dir ./makefile") 612 if matrix_element.virt_matrix_element: 613 os.system("ln -s ../BinothLHA.f ./BinothLHA.f") 614 elif OLP!='MadLoop': 615 os.system("ln -s ../BinothLHA_OLP.f ./BinothLHA.f") 616 else: 617 os.system("ln -s ../BinothLHA_user.f ./BinothLHA.f") 618 619 620 #import nexternal/leshouches in Source 621 ln('nexternal.inc', '../../Source', log=False) 622 ln('leshouche_info.inc', '../../Source', log=False) 623 624 625 # Return to SubProcesses dir 626 os.chdir(os.path.pardir) 627 # Add subprocess to subproc.mg 628 filename = 'subproc.mg' 629 files.append_to_file(filename, 630 self.write_subproc, 631 borndir) 632 633 634 os.chdir(cwd) 635 # Generate info page 636 gen_infohtml.make_info_html_nlo(self.dir_path) 637 638 639 return calls
640 641
642 - def finalize_fks_directory(self, matrix_elements, history, makejpg = False, 643 online = False, 644 compiler_dict={'fortran': 'gfortran', 'cpp': 'g++'}, 645 output_dependencies = 'external', MG5DIR = None):
646 """Finalize FKS directory by creating jpeg diagrams, html 647 pages,proc_card_mg5.dat and madevent.tar.gz.""" 648 649 # modelname = self.model.get('name') 650 # if modelname == 'mssm' or modelname.startswith('mssm-'): 651 # param_card = os.path.join(self.dir_path, 'Cards','param_card.dat') 652 # mg5_param = os.path.join(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 653 # check_param_card.convert_to_mg5card(param_card, mg5_param) 654 # check_param_card.check_valid_param_card(mg5_param) 655 656 657 # # Write maxconfigs.inc based on max of ME's/subprocess groups 658 filename = os.path.join(self.dir_path,'Source','maxconfigs.inc') 659 self.write_maxconfigs_file(writers.FortranWriter(filename), 660 matrix_elements['real_matrix_elements']) 661 662 # # Write maxparticles.inc based on max of ME's/subprocess groups 663 filename = os.path.join(self.dir_path,'Source','maxparticles.inc') 664 self.write_maxparticles_file(writers.FortranWriter(filename), 665 matrix_elements['real_matrix_elements']) 666 667 # Touch "done" file 668 os.system('touch %s/done' % os.path.join(self.dir_path,'SubProcesses')) 669 670 # Check for compiler 671 fcompiler_chosen = self.set_fortran_compiler(compiler_dict['fortran']) 672 ccompiler_chosen = self.set_cpp_compiler(compiler_dict['cpp']) 673 674 old_pos = os.getcwd() 675 os.chdir(os.path.join(self.dir_path, 'SubProcesses')) 676 P_dir_list = [proc for proc in os.listdir('.') if os.path.isdir(proc) and \ 677 proc[0] == 'P'] 678 679 devnull = os.open(os.devnull, os.O_RDWR) 680 # Convert the poscript in jpg files (if authorize) 681 if makejpg: 682 logger.info("Generate jpeg diagrams") 683 for Pdir in P_dir_list: 684 os.chdir(Pdir) 685 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 686 stdout = devnull) 687 os.chdir(os.path.pardir) 688 # 689 logger.info("Generate web pages") 690 # Create the WebPage using perl script 691 692 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \ 693 stdout = devnull) 694 695 os.chdir(os.path.pardir) 696 # 697 # obj = gen_infohtml.make_info_html(self.dir_path) 698 # [mv(name, './HTML/') for name in os.listdir('.') if \ 699 # (name.endswith('.html') or name.endswith('.jpg')) and \ 700 # name != 'index.html'] 701 # if online: 702 # nb_channel = obj.rep_rule['nb_gen_diag'] 703 # open(os.path.join('./Online'),'w').write(str(nb_channel)) 704 705 # Write command history as proc_card_mg5 706 if os.path.isdir('Cards'): 707 output_file = os.path.join('Cards', 'proc_card_mg5.dat') 708 history.write(output_file) 709 710 # Duplicate run_card and FO_analyse_card 711 for card in ['run_card', 'FO_analyse_card', 'shower_card']: 712 try: 713 shutil.copy(pjoin(self.dir_path, 'Cards', 714 card + '.dat'), 715 pjoin(self.dir_path, 'Cards', 716 card + '_default.dat')) 717 except IOError: 718 logger.warning("Failed to copy " + card + ".dat to default") 719 720 721 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 722 stdout = devnull) 723 724 # Run "make" to generate madevent.tar.gz file 725 if os.path.exists(pjoin('SubProcesses', 'subproc.mg')): 726 if os.path.exists('amcatnlo.tar.gz'): 727 os.remove('amcatnlo.tar.gz') 728 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'make_amcatnlo_tar')], 729 stdout = devnull) 730 # 731 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 732 stdout = devnull) 733 734 #return to the initial dir 735 os.chdir(old_pos) 736 737 # 
Setup stdHep 738 # Find the correct fortran compiler 739 base_compiler= ['FC=g77','FC=gfortran'] 740 741 StdHep_path = pjoin(MG5DIR, 'vendor', 'StdHEP') 742 743 if output_dependencies == 'external': 744 # check if stdhep has to be compiled (only the first time) 745 if not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libstdhep.a')) or \ 746 not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libFmcfio.a')): 747 if 'FC' not in os.environ or not os.environ['FC']: 748 path = os.path.join(StdHep_path, 'src', 'make_opts') 749 text = open(path).read() 750 for base in base_compiler: 751 text = text.replace(base,'FC=%s' % fcompiler_chosen) 752 open(path, 'w').writelines(text) 753 754 logger.info('Compiling StdHEP. This has to be done only once.') 755 misc.compile(cwd = pjoin(MG5DIR, 'vendor', 'StdHEP')) 756 logger.info('Done.') 757 #then link the libraries in the exported dir 758 files.ln(pjoin(StdHep_path, 'lib', 'libstdhep.a'), \ 759 pjoin(self.dir_path, 'MCatNLO', 'lib')) 760 files.ln(pjoin(StdHep_path, 'lib', 'libFmcfio.a'), \ 761 pjoin(self.dir_path, 'MCatNLO', 'lib')) 762 763 elif output_dependencies == 'internal': 764 StdHEP_internal_path = pjoin(self.dir_path,'Source','StdHEP') 765 shutil.copytree(StdHep_path,StdHEP_internal_path, symlinks=True) 766 # Create the links to the lib folder 767 linkfiles = ['libstdhep.a', 'libFmcfio.a'] 768 for file in linkfiles: 769 ln(pjoin(os.path.pardir,os.path.pardir,'Source','StdHEP','lib',file), 770 os.path.join(self.dir_path, 'MCatNLO', 'lib')) 771 if 'FC' not in os.environ or not os.environ['FC']: 772 path = pjoin(StdHEP_internal_path, 'src', 'make_opts') 773 text = open(path).read() 774 for base in base_compiler: 775 text = text.replace(base,'FC=%s' % fcompiler_chosen) 776 open(path, 'w').writelines(text) 777 # To avoid compiler version conflicts, we force a clean here 778 misc.compile(['clean'],cwd = StdHEP_internal_path) 779 780 elif output_dependencies == 'environment_paths': 781 # Here the user chose to define the dependencies path in one of 782 # his environmental paths 783 libStdHep = misc.which_lib('libstdhep.a') 784 libFmcfio = misc.which_lib('libFmcfio.a') 785 if not libStdHep is None and not libFmcfio is None: 786 logger.info('MG5_aMC is using StdHep installation found at %s.'%\ 787 os.path.dirname(libStdHep)) 788 ln(pjoin(libStdHep),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True) 789 ln(pjoin(libFmcfio),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True) 790 else: 791 raise InvalidCmd("Could not find the location of the files"+\ 792 " libstdhep.a and libFmcfio.a in you environment paths.") 793 794 else: 795 raise MadGraph5Error, 'output_dependencies option %s not recognized'\ 796 %output_dependencies
797 798
799 - def write_real_from_born_configs(self, writer, matrix_element, fortran_model):
800 """Writes the real_from_born_configs.inc file that contains 801 the mapping to go for a given born configuration (that is used 802 e.g. in the multi-channel phase-space integration to the 803 corresponding real-emission diagram, i.e. the real emission 804 diagram in which the combined ij is split in i_fks and 805 j_fks.""" 806 lines=[] 807 lines2=[] 808 max_links=0 809 born_me=matrix_element.born_matrix_element 810 for iFKS, conf in enumerate(matrix_element.get_fks_info_list()): 811 iFKS=iFKS+1 812 links=conf['fks_info']['rb_links'] 813 max_links=max(max_links,len(links)) 814 for i,diags in enumerate(links): 815 if not i == diags['born_conf']: 816 print links 817 raise MadGraph5Error, "born_conf should be canonically ordered" 818 real_configs=', '.join(['%d' % int(diags['real_conf']+1) for diags in links]) 819 lines.append("data (real_from_born_conf(irfbc,%d),irfbc=1,%d) /%s/" \ 820 % (iFKS,len(links),real_configs)) 821 822 lines2.append("integer irfbc") 823 lines2.append("integer real_from_born_conf(%d,%d)" \ 824 % (max_links,len(matrix_element.get_fks_info_list()))) 825 # Write the file 826 writer.writelines(lines2+lines)
827 828 829
830 - def write_configs_and_props_info_file(self, writer, matrix_element, fortran_model):
831 """writes the configs_and_props_info.inc file that cointains 832 all the (real-emission) configurations (IFOREST) as well as 833 the masses and widths of intermediate particles""" 834 lines = [] 835 lines2 = [] 836 nconfs = len(matrix_element.get_fks_info_list()) 837 (nexternal, ninitial) = matrix_element.real_processes[0].get_nexternal_ninitial() 838 839 lines.append("integer ifr,lmaxconfigs_used,max_branch_used") 840 lines.append("integer mapconfig_d(%3d,0:lmaxconfigs_used)" % nconfs) 841 lines.append("integer iforest_d(%3d,2,-max_branch_used:-1,lmaxconfigs_used)" % nconfs) 842 lines.append("integer sprop_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nconfs) 843 lines.append("integer tprid_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nconfs) 844 lines.append("double precision pmass_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nconfs) 845 lines.append("double precision pwidth_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nconfs) 846 lines.append("integer pow_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nconfs) 847 848 max_iconfig=0 849 max_leg_number=0 850 851 for iFKS, conf in enumerate(matrix_element.get_fks_info_list()): 852 iFKS=iFKS+1 853 iconfig = 0 854 s_and_t_channels = [] 855 mapconfigs = [] 856 fks_matrix_element=matrix_element.real_processes[conf['n_me'] - 1].matrix_element 857 base_diagrams = fks_matrix_element.get('base_amplitude').get('diagrams') 858 model = fks_matrix_element.get('base_amplitude').get('process').get('model') 859 minvert = min([max([len(vert.get('legs')) for vert in \ 860 diag.get('vertices')]) for diag in base_diagrams]) 861 862 lines.append("# ") 863 lines.append("# nFKSprocess %d" % iFKS) 864 for idiag, diag in enumerate(base_diagrams): 865 if any([len(vert.get('legs')) > minvert for vert in 866 diag.get('vertices')]): 867 # Only 3-vertices allowed in configs.inc 868 continue 869 iconfig = iconfig + 1 870 helas_diag = fks_matrix_element.get('diagrams')[idiag] 871 mapconfigs.append(helas_diag.get('number')) 872 lines.append("# Diagram %d for nFKSprocess %d" % \ 873 (helas_diag.get('number'),iFKS)) 874 # Correspondance between the config and the amplitudes 875 lines.append("data mapconfig_d(%3d,%4d)/%4d/" % (iFKS,iconfig, 876 helas_diag.get('number'))) 877 878 # Need to reorganize the topology so that we start with all 879 # final state external particles and work our way inwards 880 schannels, tchannels = helas_diag.get('amplitudes')[0].\ 881 get_s_and_t_channels(ninitial, model, 990) 882 883 s_and_t_channels.append([schannels, tchannels]) 884 885 # Write out propagators for s-channel and t-channel vertices 886 allchannels = schannels 887 if len(tchannels) > 1: 888 # Write out tchannels only if there are any non-trivial ones 889 allchannels = schannels + tchannels 890 891 for vert in allchannels: 892 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 893 last_leg = vert.get('legs')[-1] 894 lines.append("data (iforest_d(%3d, ifr,%3d,%4d),ifr=1,%d)/%s/" % \ 895 (iFKS,last_leg.get('number'), iconfig, len(daughters), 896 ",".join(["%3d" % d for d in daughters]))) 897 if vert in schannels: 898 lines.append("data sprop_d(%3d,%4d,%4d)/%8d/" % \ 899 (iFKS,last_leg.get('number'), iconfig, 900 last_leg.get('id'))) 901 elif vert in tchannels[:-1]: 902 lines.append("data tprid_d(%3d,%4d,%4d)/%8d/" % \ 903 (iFKS,last_leg.get('number'), iconfig, 904 abs(last_leg.get('id')))) 905 906 # update what the array sizes (mapconfig,iforest,etc) will be 907 max_leg_number = min(max_leg_number,last_leg.get('number')) 908 max_iconfig = 
max(max_iconfig,iconfig) 909 910 # Write out number of configs 911 lines.append("# Number of configs for nFKSprocess %d" % iFKS) 912 lines.append("data mapconfig_d(%3d,0)/%4d/" % (iFKS,iconfig)) 913 914 # write the props.inc information 915 lines2.append("# ") 916 particle_dict = fks_matrix_element.get('processes')[0].get('model').\ 917 get('particle_dict') 918 919 for iconf, configs in enumerate(s_and_t_channels): 920 for vertex in configs[0] + configs[1][:-1]: 921 leg = vertex.get('legs')[-1] 922 if leg.get('id') == 21 and 21 not in particle_dict: 923 # Fake propagator used in multiparticle vertices 924 mass = 'zero' 925 width = 'zero' 926 pow_part = 0 927 else: 928 particle = particle_dict[leg.get('id')] 929 # Get mass 930 if particle.get('mass').lower() == 'zero': 931 mass = particle.get('mass') 932 else: 933 mass = "abs(%s)" % particle.get('mass') 934 # Get width 935 if particle.get('width').lower() == 'zero': 936 width = particle.get('width') 937 else: 938 width = "abs(%s)" % particle.get('width') 939 940 pow_part = 1 + int(particle.is_boson()) 941 942 lines2.append("pmass_d (%3d,%3d,%4d) = %s " % \ 943 (iFKS,leg.get('number'), iconf + 1, mass)) 944 lines2.append("pwidth_d(%3d,%3d,%4d) = %s " % \ 945 (iFKS,leg.get('number'), iconf + 1, width)) 946 lines2.append("pow_d (%3d,%3d,%4d) = %d " % \ 947 (iFKS,leg.get('number'), iconf + 1, pow_part)) 948 949 950 951 952 lines.append("# ") 953 # insert the declaration of the sizes arrays at the beginning of the file 954 lines.insert(1,"parameter (lmaxconfigs_used=%4d)" % max_iconfig) 955 lines.insert(2,"parameter (max_branch_used =%4d)" % -max_leg_number) 956 957 # Write the file 958 writer.writelines(lines+lines2) 959 960 return max_iconfig
961 962 963
964 - def write_leshouche_info_file(self, writer, matrix_element, fortran_model):
965 """writes the leshouche_info.inc file which contains the LHA informations 966 for all the real emission processes""" 967 lines = [] 968 nconfs = len(matrix_element.get_fks_info_list()) 969 (nexternal, ninitial) = matrix_element.real_processes[0].get_nexternal_ninitial() 970 971 lines.append('integer idup_d(%d,%d,maxproc_used)' % (nconfs, nexternal)) 972 lines.append('integer mothup_d(%d,%d,%d,maxproc_used)' % (nconfs, 2, nexternal)) 973 lines.append('integer icolup_d(%d,%d,%d,maxflow_used)' % (nconfs, 2, nexternal)) 974 lines.append('integer ilh') 975 lines.append('') 976 977 maxproc = 0 978 maxflow = 0 979 for i, conf in enumerate(matrix_element.get_fks_info_list()): 980 # for i, real in enumerate(matrix_element.real_processes): 981 (newlines, nprocs, nflows) = self.get_leshouche_lines( 982 matrix_element.real_processes[conf['n_me'] - 1].matrix_element, i + 1) 983 lines.extend(newlines) 984 maxproc = max(maxproc, nprocs) 985 maxflow = max(maxflow, nflows) 986 987 firstlines = ['integer maxproc_used, maxflow_used', 988 'parameter (maxproc_used = %d)' % maxproc, 989 'parameter (maxflow_used = %d)' % maxflow ] 990 writer.writelines(firstlines + lines)
991 992
993 - def write_pdf_wrapper(self, writer, matrix_element, fortran_model):
994 """writes the wrapper which allows to chose among the different real matrix elements""" 995 996 file = \ 997 """double precision function dlum() 998 implicit none 999 include 'timing_variables.inc' 1000 integer nfksprocess 1001 common/c_nfksprocess/nfksprocess 1002 call cpu_time(tbefore) 1003 """ 1004 for n, info in enumerate(matrix_element.get_fks_info_list()): 1005 file += \ 1006 """if (nfksprocess.eq.%(n)d) then 1007 call dlum_%(n_me)d(dlum) 1008 else""" % {'n': n + 1, 'n_me' : info['n_me']} 1009 file += \ 1010 """ 1011 write(*,*) 'ERROR: invalid n in dlum :', nfksprocess 1012 stop 1013 endif 1014 call cpu_time(tAfter) 1015 tPDF = tPDF + (tAfter-tBefore) 1016 return 1017 end 1018 """ 1019 # Write the file 1020 writer.writelines(file) 1021 return 0
1022 1023
1024 - def write_real_me_wrapper(self, writer, matrix_element, fortran_model):
1025 """writes the wrapper which allows to chose among the different real matrix elements""" 1026 1027 file = \ 1028 """subroutine smatrix_real(p, wgt) 1029 implicit none 1030 include 'nexternal.inc' 1031 double precision p(0:3, nexternal) 1032 double precision wgt 1033 integer nfksprocess 1034 common/c_nfksprocess/nfksprocess 1035 """ 1036 for n, info in enumerate(matrix_element.get_fks_info_list()): 1037 file += \ 1038 """if (nfksprocess.eq.%(n)d) then 1039 call smatrix_%(n_me)d(p, wgt) 1040 else""" % {'n': n + 1, 'n_me' : info['n_me']} 1041 file += \ 1042 """ 1043 write(*,*) 'ERROR: invalid n in real_matrix :', nfksprocess 1044 stop 1045 endif 1046 return 1047 end 1048 """ 1049 # Write the file 1050 writer.writelines(file) 1051 return 0
1052 1053
1054 - def draw_feynman_diagrams(self, matrix_element):
1055 """Create the ps files containing the feynman diagrams for the born process, 1056 as well as for all the real emission processes""" 1057 1058 filename = 'born.ps' 1059 plot = draw.MultiEpsDiagramDrawer(matrix_element.born_matrix_element.\ 1060 get('base_amplitude').get('diagrams'), 1061 filename, 1062 model=matrix_element.born_matrix_element.\ 1063 get('processes')[0].get('model'), 1064 amplitude=True, diagram_type='born') 1065 plot.draw() 1066 1067 for n, fksreal in enumerate(matrix_element.real_processes): 1068 filename = 'matrix_%d.ps' % (n + 1) 1069 plot = draw.MultiEpsDiagramDrawer(fksreal.matrix_element.\ 1070 get('base_amplitude').get('diagrams'), 1071 filename, 1072 model=fksreal.matrix_element.\ 1073 get('processes')[0].get('model'), 1074 amplitude=True, diagram_type='real') 1075 plot.draw()
1076 1077
1078 - def write_real_matrix_elements(self, matrix_element, fortran_model):
1079 """writes the matrix_i.f files which contain the real matrix elements""" 1080 1081 for n, fksreal in enumerate(matrix_element.real_processes): 1082 filename = 'matrix_%d.f' % (n + 1) 1083 self.write_matrix_element_fks(writers.FortranWriter(filename), 1084 fksreal.matrix_element, n + 1, 1085 fortran_model)
1086
1087 - def write_pdf_calls(self, matrix_element, fortran_model):
1088 """writes the parton_lum_i.f files which contain the real matrix elements""" 1089 for n, fksreal in enumerate(matrix_element.real_processes): 1090 filename = 'parton_lum_%d.f' % (n + 1) 1091 self.write_pdf_file(writers.FortranWriter(filename), 1092 fksreal.matrix_element, n + 1, 1093 fortran_model)
1094 1095
1096 - def generate_born_fks_files(self, matrix_element, fortran_model, me_number, path):
1097 """generates the files needed for the born amplitude in the P* directory, which will 1098 be needed by the P* directories""" 1099 pathdir = os.getcwd() 1100 1101 filename = 'born.f' 1102 calls_born, ncolor_born = \ 1103 self.write_born_fks(writers.FortranWriter(filename),\ 1104 matrix_element, 1105 fortran_model) 1106 1107 filename = 'born_hel.f' 1108 self.write_born_hel(writers.FortranWriter(filename),\ 1109 matrix_element, 1110 fortran_model) 1111 1112 1113 filename = 'born_conf.inc' 1114 nconfigs, mapconfigs, s_and_t_channels = \ 1115 self.write_configs_file( 1116 writers.FortranWriter(filename), 1117 matrix_element.born_matrix_element, 1118 fortran_model) 1119 1120 filename = 'born_props.inc' 1121 self.write_props_file(writers.FortranWriter(filename), 1122 matrix_element.born_matrix_element, 1123 fortran_model, 1124 s_and_t_channels) 1125 1126 filename = 'born_decayBW.inc' 1127 self.write_decayBW_file(writers.FortranWriter(filename), 1128 s_and_t_channels) 1129 1130 filename = 'born_leshouche.inc' 1131 nflows = self.write_leshouche_file(writers.FortranWriter(filename), 1132 matrix_element.born_matrix_element, 1133 fortran_model) 1134 1135 filename = 'born_nhel.inc' 1136 self.write_born_nhel_file(writers.FortranWriter(filename), 1137 matrix_element.born_matrix_element, nflows, 1138 fortran_model, 1139 ncolor_born) 1140 1141 filename = 'born_ngraphs.inc' 1142 self.write_ngraphs_file(writers.FortranWriter(filename), 1143 matrix_element.born_matrix_element.get_number_of_amplitudes()) 1144 1145 filename = 'ncombs.inc' 1146 self.write_ncombs_file(writers.FortranWriter(filename), 1147 matrix_element.born_matrix_element, 1148 fortran_model) 1149 1150 filename = 'born_maxamps.inc' 1151 maxamps = len(matrix_element.get('diagrams')) 1152 maxflows = ncolor_born 1153 self.write_maxamps_file(writers.FortranWriter(filename), 1154 maxamps, 1155 maxflows, 1156 max([len(matrix_element.get('processes')) for me in \ 1157 matrix_element.born_matrix_element]),1) 1158 1159 filename = 'config_subproc_map.inc' 1160 self.write_config_subproc_map_file(writers.FortranWriter(filename), 1161 s_and_t_channels) 1162 1163 filename = 'coloramps.inc' 1164 self.write_coloramps_file(writers.FortranWriter(filename), 1165 mapconfigs, 1166 matrix_element.born_matrix_element, 1167 fortran_model) 1168 1169 #write the sborn_sf.f and the b_sf_files 1170 filename = ['sborn_sf.f', 'sborn_sf_dum.f'] 1171 for i, links in enumerate([matrix_element.color_links, []]): 1172 self.write_sborn_sf(writers.FortranWriter(filename[i]), 1173 links, 1174 fortran_model) 1175 self.color_link_files = [] 1176 for i in range(len(matrix_element.color_links)): 1177 filename = 'b_sf_%3.3d.f' % (i + 1) 1178 self.color_link_files.append(filename) 1179 self.write_b_sf_fks(writers.FortranWriter(filename), 1180 matrix_element, i, 1181 fortran_model)
1182
1183 - def generate_virtuals_from_OLP(self,FKSHMultiproc,export_path, OLP):
1184 """Generates the library for computing the loop matrix elements 1185 necessary for this process using the OLP specified.""" 1186 1187 # Start by writing the BLHA order file 1188 virtual_path = pjoin(export_path,'OLP_virtuals') 1189 if not os.path.exists(virtual_path): 1190 os.makedirs(virtual_path) 1191 filename = os.path.join(virtual_path,'OLE_order.lh') 1192 self.write_lh_order(filename, FKSHMultiproc.get('matrix_elements'),OLP) 1193 1194 fail_msg='Generation of the virtuals with %s failed.\n'%OLP+\ 1195 'Please check the virt_generation.log file in %s.'\ 1196 %str(pjoin(virtual_path,'virt_generation.log')) 1197 1198 # Perform some tasks specific to certain OLP's 1199 if OLP=='GoSam': 1200 cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics', 1201 'GoSam','makevirt'),pjoin(virtual_path,'makevirt')) 1202 cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics', 1203 'GoSam','gosam.rc'),pjoin(virtual_path,'gosam.rc')) 1204 ln(pjoin(export_path,'Cards','param_card.dat'),virtual_path) 1205 # Now generate the process 1206 logger.info('Generating the loop matrix elements with %s...'%OLP) 1207 virt_generation_log = \ 1208 open(pjoin(virtual_path,'virt_generation.log'), 'w') 1209 retcode = subprocess.call(['./makevirt'],cwd=virtual_path, 1210 stdout=virt_generation_log, stderr=virt_generation_log) 1211 virt_generation_log.close() 1212 # Check what extension is used for the share libraries on this system 1213 possible_other_extensions = ['so','dylib'] 1214 shared_lib_ext='so' 1215 for ext in possible_other_extensions: 1216 if os.path.isfile(pjoin(virtual_path,'Virtuals','lib', 1217 'libgolem_olp.'+ext)): 1218 shared_lib_ext = ext 1219 1220 # Now check that everything got correctly generated 1221 files_to_check = ['olp_module.mod',str(pjoin('lib', 1222 'libgolem_olp.'+shared_lib_ext))] 1223 if retcode != 0 or any([not os.path.exists(pjoin(virtual_path, 1224 'Virtuals',f)) for f in files_to_check]): 1225 raise fks_common.FKSProcessError(fail_msg) 1226 # link the library to the lib folder 1227 ln(pjoin(virtual_path,'Virtuals','lib','libgolem_olp.'+shared_lib_ext), 1228 pjoin(export_path,'lib')) 1229 1230 # Specify in make_opts the right library necessitated by the OLP 1231 make_opts_content=open(pjoin(export_path,'Source','make_opts')).read() 1232 make_opts=open(pjoin(export_path,'Source','make_opts'),'w') 1233 if OLP=='GoSam': 1234 # apparently -rpath=../$(LIBDIR) is not necessary. 1235 #make_opts_content=make_opts_content.replace('libOLP=', 1236 # 'libOLP=-Wl,-rpath=../$(LIBDIR),-lgolem_olp') 1237 make_opts_content=make_opts_content.replace('libOLP=', 1238 'libOLP=-Wl,-lgolem_olp') 1239 make_opts.write(make_opts_content) 1240 make_opts.close() 1241 1242 # A priori this is generic to all OLP's 1243 1244 # Parse the contract file returned and propagate the process label to 1245 # the include of the BinothLHA.f file 1246 proc_to_label = self.parse_contract_file( 1247 pjoin(virtual_path,'OLE_order.olc')) 1248 1249 self.write_BinothLHA_inc(FKSHMultiproc,proc_to_label,\ 1250 pjoin(export_path,'SubProcesses')) 1251 1252 # Link the contract file to within the SubProcess directory 1253 ln(pjoin(virtual_path,'OLE_order.olc'),pjoin(export_path,'SubProcesses'))
1254
1255 - def write_BinothLHA_inc(self, FKSHMultiproc, proc_to_label, SubProcPath):
1256 """ Write the file Binoth_proc.inc in each SubProcess directory so as 1257 to provide the right process_label to use in the OLP call to get the 1258 loop matrix element evaluation. The proc_to_label is the dictionary of 1259 the format of the one returned by the function parse_contract_file.""" 1260 1261 for matrix_element in FKSHMultiproc.get('matrix_elements'): 1262 proc = matrix_element.get('processes')[0] 1263 name = "P%s"%proc.shell_string() 1264 proc_pdgs=(tuple([leg.get('id') for leg in proc.get('legs') if \ 1265 not leg.get('state')]), 1266 tuple([leg.get('id') for leg in proc.get('legs') if \ 1267 leg.get('state')])) 1268 incFile = open(pjoin(SubProcPath, name,'Binoth_proc.inc'),'w') 1269 try: 1270 incFile.write( 1271 """ INTEGER PROC_LABEL 1272 PARAMETER (PROC_LABEL=%d)"""%(proc_to_label[proc_pdgs])) 1273 except KeyError: 1274 raise fks_common.FKSProcessError('Could not found the target'+\ 1275 ' process %s > %s in '%(str(proc_pdgs[0]),str(proc_pdgs[1]))+\ 1276 ' the proc_to_label argument in write_BinothLHA_inc.') 1277 incFile.close()
1278
1279 - def parse_contract_file(self, contract_file_path):
1280 """ Parses the BLHA contract file, make sure all parameters could be 1281 understood by the OLP and return a mapping of the processes (characterized 1282 by the pdg's of the initial and final state particles) to their process 1283 label. The format of the mapping is {((in_pdgs),(out_pdgs)):proc_label}. 1284 """ 1285 1286 proc_def_to_label = {} 1287 1288 if not os.path.exists(contract_file_path): 1289 raise fks_common.FKSProcessError('Could not find the contract file'+\ 1290 ' OLE_order.olc in %s.'%str(contract_file_path)) 1291 1292 comment_re=re.compile(r"^\s*#") 1293 proc_def_re=re.compile( 1294 r"^(?P<in_pdgs>(\s*-?\d+\s*)+)->(?P<out_pdgs>(\s*-?\d+\s*)+)\|"+ 1295 r"\s*(?P<proc_class>\d+)\s*(?P<proc_label>\d+)\s*$") 1296 line_OK_re=re.compile(r"^.*\|\s*OK") 1297 for line in file(contract_file_path): 1298 # Ignore comments 1299 if not comment_re.match(line) is None: 1300 continue 1301 # Check if it is a proc definition line 1302 proc_def = proc_def_re.match(line) 1303 if not proc_def is None: 1304 if int(proc_def.group('proc_class'))!=1: 1305 raise fks_common.FKSProcessError( 1306 'aMCatNLO can only handle loop processes generated by the OLP which have only '+\ 1307 ' process class attribute. Found %s instead in: \n%s'\ 1308 %(proc_def.group('proc_class'),line)) 1309 in_pdgs=tuple([int(in_pdg) for in_pdg in \ 1310 proc_def.group('in_pdgs').split()]) 1311 out_pdgs=tuple([int(out_pdg) for out_pdg in \ 1312 proc_def.group('out_pdgs').split()]) 1313 proc_def_to_label[(in_pdgs,out_pdgs)]=\ 1314 int(proc_def.group('proc_label')) 1315 continue 1316 # For the other types of line, just make sure they end with | OK 1317 if line_OK_re.match(line) is None: 1318 raise fks_common.FKSProcessError( 1319 'The OLP could not process the following line: \n%s'%line) 1320 1321 return proc_def_to_label
1322 1323
1324 - def generate_virt_directory(self, loop_matrix_element, fortran_model, dir_name):
1325 """writes the V**** directory inside the P**** directories specified in 1326 dir_name""" 1327 1328 cwd = os.getcwd() 1329 1330 matrix_element = loop_matrix_element 1331 1332 # Create the MadLoop5_resources directory if not already existing 1333 dirpath = os.path.join(dir_name, 'MadLoop5_resources') 1334 try: 1335 os.mkdir(dirpath) 1336 except os.error as error: 1337 logger.warning(error.strerror + " " + dirpath) 1338 1339 # Create the directory PN_xx_xxxxx in the specified path 1340 name = "V%s" % matrix_element.get('processes')[0].shell_string() 1341 dirpath = os.path.join(dir_name, name) 1342 1343 try: 1344 os.mkdir(dirpath) 1345 except os.error as error: 1346 logger.warning(error.strerror + " " + dirpath) 1347 1348 try: 1349 os.chdir(dirpath) 1350 except os.error: 1351 logger.error('Could not cd to directory %s' % dirpath) 1352 return 0 1353 1354 logger.info('Creating files in directory %s' % name) 1355 1356 # Extract number of external particles 1357 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1358 1359 calls=self.write_matrix_element_v4(None,matrix_element,fortran_model) 1360 # The born matrix element, if needed 1361 filename = 'born_matrix.f' 1362 calls = self.write_bornmatrix( 1363 writers.FortranWriter(filename), 1364 matrix_element, 1365 fortran_model) 1366 1367 filename = 'nexternal.inc' 1368 self.write_nexternal_file(writers.FortranWriter(filename), 1369 (nexternal-2), ninitial) 1370 1371 filename = 'pmass.inc' 1372 self.write_pmass_file(writers.FortranWriter(filename), 1373 matrix_element) 1374 1375 filename = 'ngraphs.inc' 1376 self.write_ngraphs_file(writers.FortranWriter(filename), 1377 len(matrix_element.get_all_amplitudes())) 1378 1379 filename = "loop_matrix.ps" 1380 plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList( 1381 matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]), 1382 filename, 1383 model=matrix_element.get('processes')[0].get('model'), 1384 amplitude='') 1385 logger.info("Drawing loop Feynman diagrams for " + \ 1386 matrix_element.get('processes')[0].nice_string(print_weighted=False)) 1387 plot.draw() 1388 1389 filename = "born_matrix.ps" 1390 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 1391 get('born_diagrams'),filename,model=matrix_element.get('processes')[0].\ 1392 get('model'),amplitude='') 1393 logger.info("Generating born Feynman diagrams for " + \ 1394 matrix_element.get('processes')[0].nice_string(print_weighted=False)) 1395 plot.draw() 1396 1397 linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc', 1398 'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f', 1399 'MadLoopCommons.f','MadLoopParams.inc'] 1400 1401 # We should move to MadLoop5_resources directory from the SubProcesses 1402 1403 ln(pjoin('../../..','Cards','MadLoopParams.dat'), 1404 pjoin('..','MadLoop5_resources')) 1405 1406 for file in linkfiles: 1407 ln('../../%s' % file) 1408 1409 os.system("ln -s ../../makefile_loop makefile") 1410 1411 linkfiles = ['mpmodule.mod'] 1412 1413 for file in linkfiles: 1414 ln('../../../lib/%s' % file) 1415 1416 # Return to original PWD 1417 os.chdir(cwd) 1418 1419 if not calls: 1420 calls = 0 1421 return calls
1422
 1423      def get_qed_qcd_orders_from_weighted(self, nexternal, weighted):
1424 """computes the QED/QCD orders from the knowledge of the n of ext particles 1425 and of the weighted orders""" 1426 # n vertices = nexternal - 2 =QED + QCD 1427 # weighted = 2*QED + QCD 1428 QED = weighted - nexternal + 2 1429 QCD = weighted - 2 * QED 1430 return QED, QCD
 1431   
 1432   
 1433   
 1434      #===============================================================================
 1435      # write_lh_order
 1436      #===============================================================================
 1437      # test written
1438 - def write_lh_order(self, filename, matrix_elements, OLP='MadLoop'):
1439 """Creates the OLE_order.lh file. This function should be edited according 1440 to the OLP which is used. For now it is generic.""" 1441 1442 if isinstance(matrix_elements,fks_helas_objects.FKSHelasProcess): 1443 fksborns=fks_helas_objects.FKSHelasProcessList([matrix_elements]) 1444 elif isinstance(matrix_elements,fks_helas_objects.FKSHelasProcessList): 1445 fksborns= matrix_elements 1446 else: 1447 raise fks_common.FKSProcessError('Wrong type of argument for '+\ 1448 'matrix_elements in function write_lh_order.') 1449 1450 if len(fksborns)==0: 1451 raise fks_common.FKSProcessError('No matrix elements provided to '+\ 1452 'the function write_lh_order.') 1453 return 1454 1455 # We assume the orders to be common to all Subprocesses 1456 1457 orders = fksborns[0].orders 1458 if 'QED' in orders.keys() and 'QCD' in orders.keys(): 1459 QED=orders['QED'] 1460 QCD=orders['QCD'] 1461 elif 'QED' in orders.keys(): 1462 QED=orders['QED'] 1463 QCD=0 1464 elif 'QCD' in orders.keys(): 1465 QED=0 1466 QCD=orders['QCD'] 1467 else: 1468 QED, QCD = self.get_qed_qcd_orders_from_weighted(\ 1469 fksborns[0].born_matrix_element.get_nexternal_ninitial()[0], 1470 orders['WEIGHTED']) 1471 1472 replace_dict = {} 1473 replace_dict['mesq'] = 'CHaveraged' 1474 replace_dict['corr'] = ' '.join(matrix_elements.get('processes')[0].\ 1475 get('perturbation_couplings')) 1476 replace_dict['irreg'] = 'CDR' 1477 replace_dict['aspow'] = QCD 1478 replace_dict['aepow'] = QED 1479 replace_dict['modelfile'] = './param_card.dat' 1480 replace_dict['params'] = 'alpha_s' 1481 proc_lines=[] 1482 for fksborn in fksborns: 1483 proc_lines.append(fksborn.get_lh_pdg_string()) 1484 replace_dict['pdgs'] = '\n'.join(proc_lines) 1485 replace_dict['symfin'] = 'Yes' 1486 content = \ 1487 "#OLE_order written by MadGraph5_aMC@NLO\n\ 1488 \n\ 1489 MatrixElementSquareType %(mesq)s\n\ 1490 CorrectionType %(corr)s\n\ 1491 IRregularisation %(irreg)s\n\ 1492 AlphasPower %(aspow)d\n\ 1493 AlphaPower %(aepow)d\n\ 1494 NJetSymmetrizeFinal %(symfin)s\n\ 1495 ModelFile %(modelfile)s\n\ 1496 Parameters %(params)s\n\ 1497 \n\ 1498 # process\n\ 1499 %(pdgs)s\n\ 1500 " % replace_dict 1501 1502 file = open(filename, 'w') 1503 file.write(content) 1504 file.close 1505 return
1506 1507 1508 #=============================================================================== 1509 # write_born_fks 1510 #=============================================================================== 1511 # test written
1512 - def write_born_fks(self, writer, fksborn, fortran_model):
1513 """Export a matrix element to a born.f file in MadFKS format""" 1514 1515 matrix_element = fksborn.born_matrix_element 1516 1517 if not matrix_element.get('processes') or \ 1518 not matrix_element.get('diagrams'): 1519 return 0 1520 1521 if not isinstance(writer, writers.FortranWriter): 1522 raise writers.FortranWriter.FortranWriterError(\ 1523 "writer not FortranWriter") 1524 # Set lowercase/uppercase Fortran code 1525 writers.FortranWriter.downcase = False 1526 1527 replace_dict = {} 1528 1529 # Extract version number and date from VERSION file 1530 info_lines = self.get_mg5_info_lines() 1531 replace_dict['info_lines'] = info_lines 1532 1533 # Extract process info lines 1534 process_lines = self.get_process_info_lines(matrix_element) 1535 replace_dict['process_lines'] = process_lines 1536 1537 1538 # Extract ncomb 1539 ncomb = matrix_element.get_helicity_combinations() 1540 replace_dict['ncomb'] = ncomb 1541 1542 # Extract helicity lines 1543 helicity_lines = self.get_helicity_lines(matrix_element) 1544 replace_dict['helicity_lines'] = helicity_lines 1545 1546 # Extract IC line 1547 ic_line = self.get_ic_line(matrix_element) 1548 replace_dict['ic_line'] = ic_line 1549 1550 # Extract overall denominator 1551 # Averaging initial state color, spin, and identical FS particles 1552 #den_factor_line = get_den_factor_line(matrix_element) 1553 1554 # Extract ngraphs 1555 ngraphs = matrix_element.get_number_of_amplitudes() 1556 replace_dict['ngraphs'] = ngraphs 1557 1558 # Extract nwavefuncs 1559 nwavefuncs = matrix_element.get_number_of_wavefunctions() 1560 replace_dict['nwavefuncs'] = nwavefuncs 1561 1562 # Extract ncolor 1563 ncolor = max(1, len(matrix_element.get('color_basis'))) 1564 replace_dict['ncolor'] = ncolor 1565 1566 # Extract color data lines 1567 color_data_lines = self.get_color_data_lines(matrix_element) 1568 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 1569 1570 # Extract helas calls 1571 helas_calls = fortran_model.get_matrix_element_calls(\ 1572 matrix_element) 1573 replace_dict['helas_calls'] = "\n".join(helas_calls) 1574 1575 # Extract amp2 lines 1576 amp2_lines = self.get_amp2_lines(matrix_element) 1577 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 1578 1579 # Extract JAMP lines 1580 jamp_lines = self.get_JAMP_lines(matrix_element) 1581 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 1582 1583 # Set the size of Wavefunction 1584 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 1585 replace_dict['wavefunctionsize'] = 20 1586 else: 1587 replace_dict['wavefunctionsize'] = 8 1588 1589 # Extract glu_ij_lines 1590 ij_lines = self.get_ij_lines(fksborn) 1591 replace_dict['ij_lines'] = '\n'.join(ij_lines) 1592 1593 # Extract den_factor_lines 1594 den_factor_lines = self.get_den_factor_lines(fksborn) 1595 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines) 1596 1597 # Extract the number of FKS process 1598 replace_dict['nconfs'] = len(fksborn.get_fks_info_list()) 1599 1600 file = open(os.path.join(_file_path, \ 1601 'iolibs/template_files/born_fks.inc')).read() 1602 file = file % replace_dict 1603 1604 # Write the file 1605 writer.writelines(file) 1606 1607 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
1608 1609
1610 - def write_born_hel(self, writer, fksborn, fortran_model):
1611 """Export a matrix element to a born_hel.f file in MadFKS format""" 1612 1613 matrix_element = fksborn.born_matrix_element 1614 1615 if not matrix_element.get('processes') or \ 1616 not matrix_element.get('diagrams'): 1617 return 0 1618 1619 if not isinstance(writer, writers.FortranWriter): 1620 raise writers.FortranWriter.FortranWriterError(\ 1621 "writer not FortranWriter") 1622 # Set lowercase/uppercase Fortran code 1623 writers.FortranWriter.downcase = False 1624 1625 replace_dict = {} 1626 1627 # Extract version number and date from VERSION file 1628 info_lines = self.get_mg5_info_lines() 1629 replace_dict['info_lines'] = info_lines 1630 1631 # Extract process info lines 1632 process_lines = self.get_process_info_lines(matrix_element) 1633 replace_dict['process_lines'] = process_lines 1634 1635 1636 # Extract ncomb 1637 ncomb = matrix_element.get_helicity_combinations() 1638 replace_dict['ncomb'] = ncomb 1639 1640 # Extract helicity lines 1641 helicity_lines = self.get_helicity_lines(matrix_element) 1642 replace_dict['helicity_lines'] = helicity_lines 1643 1644 # Extract IC line 1645 ic_line = self.get_ic_line(matrix_element) 1646 replace_dict['ic_line'] = ic_line 1647 1648 # Extract overall denominator 1649 # Averaging initial state color, spin, and identical FS particles 1650 #den_factor_line = get_den_factor_line(matrix_element) 1651 1652 # Extract ngraphs 1653 ngraphs = matrix_element.get_number_of_amplitudes() 1654 replace_dict['ngraphs'] = ngraphs 1655 1656 # Extract nwavefuncs 1657 nwavefuncs = matrix_element.get_number_of_wavefunctions() 1658 replace_dict['nwavefuncs'] = nwavefuncs 1659 1660 # Extract ncolor 1661 ncolor = max(1, len(matrix_element.get('color_basis'))) 1662 replace_dict['ncolor'] = ncolor 1663 1664 # Extract color data lines 1665 color_data_lines = self.get_color_data_lines(matrix_element) 1666 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 1667 1668 # Extract amp2 lines 1669 amp2_lines = self.get_amp2_lines(matrix_element) 1670 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 1671 1672 # Extract JAMP lines 1673 jamp_lines = self.get_JAMP_lines(matrix_element) 1674 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 1675 1676 # Extract den_factor_lines 1677 den_factor_lines = self.get_den_factor_lines(fksborn) 1678 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines) 1679 1680 # Extract the number of FKS process 1681 replace_dict['nconfs'] = len(fksborn.get_fks_info_list()) 1682 1683 file = open(os.path.join(_file_path, \ 1684 'iolibs/template_files/born_fks_hel.inc')).read() 1685 file = file % replace_dict 1686 1687 # Write the file 1688 writer.writelines(file) 1689 1690 return
1691 1692 1693 #=============================================================================== 1694 # write_sborn_sf 1695 #=============================================================================== 1696 #test written
1697 - def write_sborn_sf(self, writer, color_links, fortran_model):
1698 """Creates the sborn_sf.f file, containing the calls to the different 1699 color linked borns""" 1700 1701 replace_dict = {} 1702 nborns = len(color_links) 1703 ifkss = [] 1704 iborns = [] 1705 mms = [] 1706 nns = [] 1707 iflines = "\n" 1708 1709 #header for the sborn_sf.f file 1710 file = """subroutine sborn_sf(p_born,m,n,wgt) 1711 implicit none 1712 include "nexternal.inc" 1713 double precision p_born(0:3,nexternal-1),wgt 1714 double complex wgt1(2) 1715 integer m,n \n""" 1716 1717 if nborns > 0: 1718 1719 for i, c_link in enumerate(color_links): 1720 iborn = i+1 1721 1722 iff = {True : 'if', False : 'elseif'}[i==0] 1723 1724 m, n = c_link['link'] 1725 1726 if m != n: 1727 iflines += \ 1728 "c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\ 1729 %(iff)s ((m.eq.%(m)d .and. n.eq.%(n)d).or.(m.eq.%(n)d .and. n.eq.%(m)d)) then \n\ 1730 call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \ 1731 %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn} 1732 else: 1733 iflines += \ 1734 "c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\ 1735 %(iff)s (m.eq.%(m)d .and. n.eq.%(n)d) then \n\ 1736 call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \ 1737 %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn} 1738 1739 1740 file += iflines + \ 1741 """else 1742 wgt = 0d0 1743 endif 1744 1745 return 1746 end""" 1747 elif nborns == 0: 1748 #write a dummy file 1749 file+=""" 1750 c This is a dummy function because 1751 c this subdir has no soft singularities 1752 wgt = 0d0 1753 1754 return 1755 end""" 1756 # Write the end of the file 1757 1758 writer.writelines(file)
1759 1760 1761 #=============================================================================== 1762 # write_b_sf_fks 1763 #=============================================================================== 1764 #test written
1765 - def write_b_sf_fks(self, writer, fksborn, i, fortran_model):
1766 """Create the b_sf_xxx.f file for the soft linked born in MadFKS format""" 1767 1768 matrix_element = copy.copy(fksborn.born_matrix_element) 1769 1770 if not matrix_element.get('processes') or \ 1771 not matrix_element.get('diagrams'): 1772 return 0 1773 1774 if not isinstance(writer, writers.FortranWriter): 1775 raise writers.FortranWriter.FortranWriterError(\ 1776 "writer not FortranWriter") 1777 # Set lowercase/uppercase Fortran code 1778 writers.FortranWriter.downcase = False 1779 1780 iborn = i + 1 1781 link = fksborn.color_links[i] 1782 1783 replace_dict = {} 1784 1785 replace_dict['iborn'] = iborn 1786 1787 # Extract version number and date from VERSION file 1788 info_lines = self.get_mg5_info_lines() 1789 replace_dict['info_lines'] = info_lines 1790 1791 # Extract process info lines 1792 process_lines = self.get_process_info_lines(matrix_element) 1793 replace_dict['process_lines'] = process_lines + \ 1794 "\nc spectators: %d %d \n" % tuple(link['link']) 1795 1796 # Extract ncomb 1797 ncomb = matrix_element.get_helicity_combinations() 1798 replace_dict['ncomb'] = ncomb 1799 1800 # Extract helicity lines 1801 helicity_lines = self.get_helicity_lines(matrix_element) 1802 replace_dict['helicity_lines'] = helicity_lines 1803 1804 # Extract IC line 1805 ic_line = self.get_ic_line(matrix_element) 1806 replace_dict['ic_line'] = ic_line 1807 1808 # Extract den_factor_lines 1809 den_factor_lines = self.get_den_factor_lines(fksborn) 1810 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines) 1811 1812 # Extract ngraphs 1813 ngraphs = matrix_element.get_number_of_amplitudes() 1814 replace_dict['ngraphs'] = ngraphs 1815 1816 # Extract nwavefuncs 1817 nwavefuncs = matrix_element.get_number_of_wavefunctions() 1818 replace_dict['nwavefuncs'] = nwavefuncs 1819 1820 # Extract ncolor 1821 ncolor1 = max(1, len(link['orig_basis'])) 1822 replace_dict['ncolor1'] = ncolor1 1823 ncolor2 = max(1, len(link['link_basis'])) 1824 replace_dict['ncolor2'] = ncolor2 1825 1826 # Extract color data lines 1827 color_data_lines = self.get_color_data_lines_from_color_matrix(\ 1828 link['link_matrix']) 1829 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 1830 1831 # Extract amp2 lines 1832 amp2_lines = self.get_amp2_lines(matrix_element) 1833 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 1834 1835 # Extract JAMP lines 1836 jamp_lines = self.get_JAMP_lines(matrix_element) 1837 new_jamp_lines = [] 1838 for line in jamp_lines: 1839 line = string.replace(line, 'JAMP', 'JAMP1') 1840 new_jamp_lines.append(line) 1841 replace_dict['jamp1_lines'] = '\n'.join(new_jamp_lines) 1842 1843 matrix_element.set('color_basis', link['link_basis'] ) 1844 jamp_lines = self.get_JAMP_lines(matrix_element) 1845 new_jamp_lines = [] 1846 for line in jamp_lines: 1847 line = string.replace(line, 'JAMP', 'JAMP2') 1848 new_jamp_lines.append(line) 1849 replace_dict['jamp2_lines'] = '\n'.join(new_jamp_lines) 1850 1851 1852 # Extract the number of FKS process 1853 replace_dict['nconfs'] = len(fksborn.get_fks_info_list()) 1854 1855 file = open(os.path.join(_file_path, \ 1856 'iolibs/template_files/b_sf_xxx_fks.inc')).read() 1857 file = file % replace_dict 1858 1859 # Write the file 1860 writer.writelines(file) 1861 1862 return 0 , ncolor1
1863 1864 1865 #=============================================================================== 1866 # write_born_nhel_file 1867 #=============================================================================== 1868 #test written
1869 - def write_born_nhel_file(self, writer, matrix_element, nflows, fortran_model, ncolor):
1870 """Write the born_nhel.inc file for MG4.""" 1871 1872 ncomb = matrix_element.get_helicity_combinations() 1873 file = " integer max_bhel, max_bcol \n" 1874 file = file + "parameter (max_bhel=%d)\nparameter(max_bcol=%d)" % \ 1875 (ncomb, nflows) 1876 1877 # Write the file 1878 writer.writelines(file) 1879 1880 return True
1881 1882 #=============================================================================== 1883 # write_nfksconfigs_file 1884 #===============================================================================
1885 - def write_nfksconfigs_file(self, writer, fksborn, fortran_model):
1886 """Writes the content of nFKSconfigs.inc, which just gives the 1887 total FKS dirs as a parameter""" 1888 replace_dict = {} 1889 replace_dict['nconfs'] = len(fksborn.get_fks_info_list()) 1890 content = \ 1891 """ INTEGER FKS_CONFIGS 1892 PARAMETER (FKS_CONFIGS=%(nconfs)d) 1893 1894 """ % replace_dict 1895 1896 writer.writelines(content)
1897 1898 1899 #=============================================================================== 1900 # write_fks_info_file 1901 #===============================================================================
1902 - def write_fks_info_file(self, writer, fksborn, fortran_model): #test_written
1903 """Writes the content of fks_info.inc, which lists the informations on the 1904 possible splittings of the born ME""" 1905 1906 replace_dict = {} 1907 fks_info_list = fksborn.get_fks_info_list() 1908 replace_dict['nconfs'] = len(fks_info_list) 1909 replace_dict['fks_i_values'] = ', '.join(['%d' % info['fks_info']['i'] \ 1910 for info in fks_info_list]) 1911 replace_dict['fks_j_values'] = ', '.join(['%d' % info['fks_info']['j'] \ 1912 for info in fks_info_list]) 1913 1914 col_lines = [] 1915 pdg_lines = [] 1916 charge_lines = [] 1917 fks_j_from_i_lines = [] 1918 for i, info in enumerate(fks_info_list): 1919 col_lines.append( \ 1920 'DATA (PARTICLE_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 1921 % (i + 1, ', '.join('%d' % col for col in fksborn.real_processes[info['n_me']-1].colors) )) 1922 pdg_lines.append( \ 1923 'DATA (PDG_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \ 1924 % (i + 1, ', '.join('%d' % pdg for pdg in info['pdgs']))) 1925 charge_lines.append(\ 1926 'DATA (PARTICLE_CHARGE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /'\ 1927 % (i + 1, ', '.join('%19.15fd0' % charg\ 1928 for charg in fksborn.real_processes[info['n_me']-1].charges) )) 1929 fks_j_from_i_lines.extend(self.get_fks_j_from_i_lines(fksborn.real_processes[info['n_me']-1],\ 1930 i + 1)) 1931 1932 replace_dict['col_lines'] = '\n'.join(col_lines) 1933 replace_dict['pdg_lines'] = '\n'.join(pdg_lines) 1934 replace_dict['charge_lines'] = '\n'.join(charge_lines) 1935 replace_dict['fks_j_from_i_lines'] = '\n'.join(fks_j_from_i_lines) 1936 1937 content = \ 1938 """ INTEGER IPOS, JPOS 1939 INTEGER FKS_I_D(%(nconfs)d), FKS_J_D(%(nconfs)d) 1940 INTEGER FKS_J_FROM_I_D(%(nconfs)d, NEXTERNAL, 0:NEXTERNAL) 1941 INTEGER PARTICLE_TYPE_D(%(nconfs)d, NEXTERNAL), PDG_TYPE_D(%(nconfs)d, NEXTERNAL) 1942 REAL*8 PARTICLE_CHARGE_D(%(nconfs)d, NEXTERNAL) 1943 1944 data fks_i_D / %(fks_i_values)s / 1945 data fks_j_D / %(fks_j_values)s / 1946 1947 %(fks_j_from_i_lines)s 1948 1949 C 1950 C Particle type: 1951 C octet = 8, triplet = 3, singlet = 1 1952 %(col_lines)s 1953 1954 C 1955 C Particle type according to PDG: 1956 C 1957 %(pdg_lines)s 1958 1959 C 1960 C Particle charge: 1961 C charge is set 0. with QCD corrections, which is irrelevant 1962 %(charge_lines)s 1963 """ % replace_dict 1964 if not isinstance(writer, writers.FortranWriter): 1965 raise writers.FortranWriter.FortranWriterError(\ 1966 "writer not FortranWriter") 1967 # Set lowercase/uppercase Fortran code 1968 writers.FortranWriter.downcase = False 1969 1970 writer.writelines(content) 1971 1972 return True
1973 1974 1975 #=============================================================================== 1976 # write_matrix_element_fks 1977 #=============================================================================== 1978 #test written
1979 - def write_matrix_element_fks(self, writer, matrix_element, n, fortran_model):
1980 """Export a matrix element to a matrix.f file in MG4 madevent format""" 1981 1982 if not matrix_element.get('processes') or \ 1983 not matrix_element.get('diagrams'): 1984 return 0,0 1985 1986 if not isinstance(writer, writers.FortranWriter): 1987 raise writers.FortranWriter.FortranWriterError(\ 1988 "writer not FortranWriter") 1989 # Set lowercase/uppercase Fortran code 1990 writers.FortranWriter.downcase = False 1991 1992 replace_dict = {} 1993 replace_dict['N_me'] = n 1994 1995 # Extract version number and date from VERSION file 1996 info_lines = self.get_mg5_info_lines() 1997 replace_dict['info_lines'] = info_lines 1998 1999 # Extract process info lines 2000 process_lines = self.get_process_info_lines(matrix_element) 2001 replace_dict['process_lines'] = process_lines 2002 2003 # Extract ncomb 2004 ncomb = matrix_element.get_helicity_combinations() 2005 replace_dict['ncomb'] = ncomb 2006 2007 # Extract helicity lines 2008 helicity_lines = self.get_helicity_lines(matrix_element) 2009 replace_dict['helicity_lines'] = helicity_lines 2010 2011 # Extract IC line 2012 ic_line = self.get_ic_line(matrix_element) 2013 replace_dict['ic_line'] = ic_line 2014 2015 # Extract overall denominator 2016 # Averaging initial state color, spin, and identical FS particles 2017 den_factor_line = self.get_den_factor_line(matrix_element) 2018 replace_dict['den_factor_line'] = den_factor_line 2019 2020 # Extract ngraphs 2021 ngraphs = matrix_element.get_number_of_amplitudes() 2022 replace_dict['ngraphs'] = ngraphs 2023 2024 # Extract ncolor 2025 ncolor = max(1, len(matrix_element.get('color_basis'))) 2026 replace_dict['ncolor'] = ncolor 2027 2028 # Extract color data lines 2029 color_data_lines = self.get_color_data_lines(matrix_element) 2030 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2031 2032 # Extract helas calls 2033 helas_calls = fortran_model.get_matrix_element_calls(\ 2034 matrix_element) 2035 replace_dict['helas_calls'] = "\n".join(helas_calls) 2036 2037 # Extract nwavefuncs (important to place after get_matrix_element_calls 2038 # so that 'me_id' is set) 2039 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2040 replace_dict['nwavefuncs'] = nwavefuncs 2041 2042 # Extract amp2 lines 2043 amp2_lines = self.get_amp2_lines(matrix_element) 2044 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2045 2046 # Set the size of Wavefunction 2047 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 2048 replace_dict['wavefunctionsize'] = 20 2049 else: 2050 replace_dict['wavefunctionsize'] = 8 2051 2052 # Extract JAMP lines 2053 jamp_lines = self.get_JAMP_lines(matrix_element) 2054 2055 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2056 2057 realfile = open(os.path.join(_file_path, \ 2058 'iolibs/template_files/realmatrix_fks.inc')).read() 2059 2060 realfile = realfile % replace_dict 2061 2062 # Write the file 2063 writer.writelines(realfile) 2064 2065 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
2066 2067 2068 #=============================================================================== 2069 # write_pdf_file 2070 #===============================================================================
2071 - def write_pdf_file(self, writer, matrix_element, n, fortran_model):
2072 #test written 2073 """Write the auto_dsig.f file for MadFKS, which contains 2074 pdf call information""" 2075 2076 if not matrix_element.get('processes') or \ 2077 not matrix_element.get('diagrams'): 2078 return 0 2079 2080 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 2081 2082 if ninitial < 1 or ninitial > 2: 2083 raise writers.FortranWriter.FortranWriterError, \ 2084 """Need ninitial = 1 or 2 to write auto_dsig file""" 2085 2086 replace_dict = {} 2087 2088 replace_dict['N_me'] = n 2089 2090 # Extract version number and date from VERSION file 2091 info_lines = self.get_mg5_info_lines() 2092 replace_dict['info_lines'] = info_lines 2093 2094 # Extract process info lines 2095 process_lines = self.get_process_info_lines(matrix_element) 2096 replace_dict['process_lines'] = process_lines 2097 2098 pdf_vars, pdf_data, pdf_lines = \ 2099 self.get_pdf_lines_mir(matrix_element, ninitial, False, False) 2100 replace_dict['pdf_vars'] = pdf_vars 2101 replace_dict['pdf_data'] = pdf_data 2102 replace_dict['pdf_lines'] = pdf_lines 2103 2104 pdf_vars_mirr, pdf_data_mirr, pdf_lines_mirr = \ 2105 self.get_pdf_lines_mir(matrix_element, ninitial, False, True) 2106 replace_dict['pdf_lines_mirr'] = pdf_lines_mirr 2107 2108 file = open(os.path.join(_file_path, \ 2109 'iolibs/template_files/parton_lum_n_fks.inc')).read() 2110 file = file % replace_dict 2111 2112 # Write the file 2113 writer.writelines(file)
2114 2115 2116 2117 #=============================================================================== 2118 # write_coloramps_file 2119 #=============================================================================== 2120 #test written
2121 - def write_coloramps_file(self, writer, mapconfigs, matrix_element, fortran_model):
2122 """Write the coloramps.inc file for MadEvent""" 2123 2124 lines = [] 2125 lines.append( "logical icolamp(%d,%d,1)" % \ 2126 (max(len(matrix_element.get('color_basis').keys()), 1), 2127 len(mapconfigs))) 2128 2129 lines += self.get_icolamp_lines(mapconfigs, matrix_element, 1) 2130 2131 # Write the file 2132 writer.writelines(lines) 2133 2134 return True
2135 2136 2137 #=============================================================================== 2138 # write_leshouche_file 2139 #=============================================================================== 2140 #test written
2141 - def write_leshouche_file(self, writer, matrix_element, fortran_model):
2142 """Write the leshouche.inc file for MG4""" 2143 2144 # Extract number of external particles 2145 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2146 2147 lines = [] 2148 for iproc, proc in enumerate(matrix_element.get('processes')): 2149 legs = proc.get_legs_with_decays() 2150 lines.append("DATA (IDUP(i,%d),i=1,%d)/%s/" % \ 2151 (iproc + 1, nexternal, 2152 ",".join([str(l.get('id')) for l in legs]))) 2153 for i in [1, 2]: 2154 lines.append("DATA (MOTHUP(%d,i,%3r),i=1,%2r)/%s/" % \ 2155 (i, iproc + 1, nexternal, 2156 ",".join([ "%3r" % 0 ] * ninitial + \ 2157 [ "%3r" % i ] * (nexternal - ninitial)))) 2158 2159 # Here goes the color connections corresponding to the JAMPs 2160 # Only one output, for the first subproc! 2161 if iproc == 0: 2162 # If no color basis, just output trivial color flow 2163 if not matrix_element.get('color_basis'): 2164 for i in [1, 2]: 2165 lines.append("DATA (ICOLUP(%d,i, 1),i=1,%2r)/%s/" % \ 2166 (i, nexternal, 2167 ",".join([ "%3r" % 0 ] * nexternal))) 2168 color_flow_list = [] 2169 2170 else: 2171 # First build a color representation dictionnary 2172 repr_dict = {} 2173 for l in legs: 2174 repr_dict[l.get('number')] = \ 2175 proc.get('model').get_particle(l.get('id')).get_color()\ 2176 * (-1)**(1+l.get('state')) 2177 # Get the list of color flows 2178 color_flow_list = \ 2179 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 2180 ninitial) 2181 # And output them properly 2182 for cf_i, color_flow_dict in enumerate(color_flow_list): 2183 for i in [0, 1]: 2184 lines.append("DATA (ICOLUP(%d,i,%3r),i=1,%2r)/%s/" % \ 2185 (i + 1, cf_i + 1, nexternal, 2186 ",".join(["%3r" % color_flow_dict[l.get('number')][i] \ 2187 for l in legs]))) 2188 2189 # Write the file 2190 writer.writelines(lines) 2191 2192 return len(color_flow_list)
2193 2194 2195 #=============================================================================== 2196 # write_configs_file 2197 #=============================================================================== 2198 #test_written
2199 - def write_configs_file(self, writer, matrix_element, fortran_model):
2200 """Write the configs.inc file for MadEvent""" 2201 2202 # Extract number of external particles 2203 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2204 lines = [] 2205 2206 iconfig = 0 2207 2208 s_and_t_channels = [] 2209 mapconfigs = [] 2210 2211 model = matrix_element.get('processes')[0].get('model') 2212 # new_pdg = model.get_first_non_pdg() 2213 2214 base_diagrams = matrix_element.get('base_amplitude').get('diagrams') 2215 model = matrix_element.get('base_amplitude').get('process').get('model') 2216 minvert = min([max([len(vert.get('legs')) for vert in \ 2217 diag.get('vertices')]) for diag in base_diagrams]) 2218 2219 for idiag, diag in enumerate(base_diagrams): 2220 if any([len(vert.get('legs')) > minvert for vert in 2221 diag.get('vertices')]): 2222 # Only 3-vertices allowed in configs.inc 2223 continue 2224 iconfig = iconfig + 1 2225 helas_diag = matrix_element.get('diagrams')[idiag] 2226 mapconfigs.append(helas_diag.get('number')) 2227 lines.append("# Diagram %d, Amplitude %d" % \ 2228 (helas_diag.get('number'),helas_diag.get('amplitudes')[0]['number'])) 2229 # Correspondance between the config and the amplitudes 2230 lines.append("data mapconfig(%4d)/%4d/" % (iconfig, 2231 helas_diag.get('amplitudes')[0]['number'])) 2232 2233 # Need to reorganize the topology so that we start with all 2234 # final state external particles and work our way inwards 2235 schannels, tchannels = helas_diag.get('amplitudes')[0].\ 2236 get_s_and_t_channels(ninitial, model, 990) 2237 2238 s_and_t_channels.append([schannels, tchannels]) 2239 2240 # Write out propagators for s-channel and t-channel vertices 2241 allchannels = schannels 2242 if len(tchannels) > 1: 2243 # Write out tchannels only if there are any non-trivial ones 2244 allchannels = schannels + tchannels 2245 2246 for vert in allchannels: 2247 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 2248 last_leg = vert.get('legs')[-1] 2249 lines.append("data (iforest(i,%3d,%4d),i=1,%d)/%s/" % \ 2250 (last_leg.get('number'), iconfig, len(daughters), 2251 ",".join(["%3d" % d for d in daughters]))) 2252 if vert in schannels: 2253 lines.append("data sprop(%4d,%4d)/%8d/" % \ 2254 (last_leg.get('number'), iconfig, 2255 last_leg.get('id'))) 2256 elif vert in tchannels[:-1]: 2257 lines.append("data tprid(%4d,%4d)/%8d/" % \ 2258 (last_leg.get('number'), iconfig, 2259 abs(last_leg.get('id')))) 2260 2261 # Write out number of configs 2262 lines.append("# Number of configs") 2263 lines.append("data mapconfig(0)/%4d/" % iconfig) 2264 2265 # Write the file 2266 writer.writelines(lines) 2267 2268 return iconfig, mapconfigs, s_and_t_channels
2269 2270 2271 #=============================================================================== 2272 # write_decayBW_file 2273 #=============================================================================== 2274 #test written
2275 - def write_decayBW_file(self, writer, s_and_t_channels):
2276 """Write the decayBW.inc file for MadEvent""" 2277 2278 lines = [] 2279 2280 booldict = {False: ".false.", True: ".false."} 2281 ####Changed by MZ 2011-11-23!!!! 2282 2283 for iconf, config in enumerate(s_and_t_channels): 2284 schannels = config[0] 2285 for vertex in schannels: 2286 # For the resulting leg, pick out whether it comes from 2287 # decay or not, as given by the from_group flag 2288 leg = vertex.get('legs')[-1] 2289 lines.append("data gForceBW(%d,%d)/%s/" % \ 2290 (leg.get('number'), iconf + 1, 2291 booldict[leg.get('from_group')])) 2292 2293 # Write the file 2294 writer.writelines(lines) 2295 2296 return True
2297 2298 2299 #=============================================================================== 2300 # write_dname_file 2301 #===============================================================================
2302 - def write_dname_file(self, writer, matrix_element, fortran_model):
2303 """Write the dname.mg file for MG4""" 2304 2305 line = "DIRNAME=P%s" % \ 2306 matrix_element.get('processes')[0].shell_string() 2307 2308 # Write the file 2309 writer.write(line + "\n") 2310 2311 return True
2312 2313 2314 #=============================================================================== 2315 # write_iproc_file 2316 #===============================================================================
2317 - def write_iproc_file(self, writer, me_number):
2318 """Write the iproc.dat file for MG4""" 2319 2320 line = "%d" % (me_number + 1) 2321 2322 # Write the file 2323 for line_to_write in writer.write_line(line): 2324 writer.write(line_to_write) 2325 return True
2326 2327 2328 #=============================================================================== 2329 # Helper functions 2330 #=============================================================================== 2331 2332 2333 #=============================================================================== 2334 # get_fks_j_from_i_lines 2335 #=============================================================================== 2336
2337 - def get_fks_j_from_i_lines(self, me, i = 0): #test written
2338 """generate the lines for fks.inc describing initializating the 2339 fks_j_from_i array""" 2340 lines = [] 2341 if not me.isfinite: 2342 for ii, js in me.fks_j_from_i.items(): 2343 if js: 2344 lines.append('DATA (FKS_J_FROM_I_D(%d, %d, JPOS), JPOS = 0, %d) / %d, %s /' \ 2345 % (i, ii, len(js), len(js), ', '.join(["%d" % j for j in js]))) 2346 else: 2347 lines.append('DATA (FKS_J_FROM_I_D(%d, JPOS), JPOS = 0, %d) / %d, %s /' \ 2348 % (2, 1, 1, '1')) 2349 lines.append('') 2350 2351 return lines 2352 2353 2354 #=============================================================================== 2355 # get_leshouche_lines 2356 #===============================================================================
2357 - def get_leshouche_lines(self, matrix_element, ime):
2358 #test written 2359 """Write the leshouche.inc file for MG4""" 2360 2361 # Extract number of external particles 2362 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2363 2364 lines = [] 2365 for iproc, proc in enumerate(matrix_element.get('processes')): 2366 legs = proc.get_legs_with_decays() 2367 lines.append("DATA (IDUP_D(%d,ilh,%d),ilh=1,%d)/%s/" % \ 2368 (ime, iproc + 1, nexternal, 2369 ",".join([str(l.get('id')) for l in legs]))) 2370 for i in [1, 2]: 2371 lines.append("DATA (MOTHUP_D(%d,%d,ilh,%3r),ilh=1,%2r)/%s/" % \ 2372 (ime, i, iproc + 1, nexternal, 2373 ",".join([ "%3r" % 0 ] * ninitial + \ 2374 [ "%3r" % i ] * (nexternal - ninitial)))) 2375 2376 # Here goes the color connections corresponding to the JAMPs 2377 # Only one output, for the first subproc! 2378 if iproc == 0: 2379 # If no color basis, just output trivial color flow 2380 if not matrix_element.get('color_basis'): 2381 for i in [1, 2]: 2382 lines.append("DATA (ICOLUP_D(%d,%d,ilh, 1),ilh=1,%2r)/%s/" % \ 2383 (ime, i, nexternal, 2384 ",".join([ "%3r" % 0 ] * nexternal))) 2385 color_flow_list = [] 2386 nflow = 1 2387 2388 else: 2389 # First build a color representation dictionnary 2390 repr_dict = {} 2391 for l in legs: 2392 repr_dict[l.get('number')] = \ 2393 proc.get('model').get_particle(l.get('id')).get_color()\ 2394 * (-1)**(1+l.get('state')) 2395 # Get the list of color flows 2396 color_flow_list = \ 2397 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 2398 ninitial) 2399 # And output them properly 2400 for cf_i, color_flow_dict in enumerate(color_flow_list): 2401 for i in [0, 1]: 2402 lines.append("DATA (ICOLUP_D(%d,%d,ilh,%3r),ilh=1,%2r)/%s/" % \ 2403 (ime, i + 1, cf_i + 1, nexternal, 2404 ",".join(["%3r" % color_flow_dict[l.get('number')][i] \ 2405 for l in legs]))) 2406 2407 nflow = len(color_flow_list) 2408 2409 nproc = len(matrix_element.get('processes')) 2410 lines.append('') 2411 2412 return lines, nproc, nflow
2413 2414 2415 #=============================================================================== 2416 # get_den_factor_lines 2417 #===============================================================================
2418 - def get_den_factor_lines(self, fks_born):
2419 """returns the lines with the information on the denominator keeping care 2420 of the identical particle factors in the various real emissions""" 2421 2422 lines = [] 2423 info_list = fks_born.get_fks_info_list() 2424 lines.append('INTEGER IDEN_VALUES(%d)' % len(info_list)) 2425 lines.append('DATA IDEN_VALUES /' + \ 2426 ', '.join(['%d' % ( 2427 fks_born.born_matrix_element.get_denominator_factor() / \ 2428 fks_born.born_matrix_element['identical_particle_factor'] * \ 2429 fks_born.real_processes[info['n_me'] - 1].matrix_element['identical_particle_factor'] ) \ 2430 for info in info_list]) + '/') 2431 2432 return lines
2433 2434 2435 #=============================================================================== 2436 # get_ij_lines 2437 #===============================================================================
2438 - def get_ij_lines(self, fks_born):
2439 """returns the lines with the information on the particle number of the born 2440 that splits""" 2441 info_list = fks_born.get_fks_info_list() 2442 lines = [] 2443 lines.append('INTEGER IJ_VALUES(%d)' % len(info_list)) 2444 lines.append('DATA IJ_VALUES /' + \ 2445 ', '.join(['%d' % info['fks_info']['ij'] for info in info_list]) + '/') 2446 2447 return lines
2448 2449
2450 - def get_pdf_lines_mir(self, matrix_element, ninitial, subproc_group = False,\ 2451 mirror = False): #test written
2452 """Generate the PDF lines for the auto_dsig.f file""" 2453 2454 processes = matrix_element.get('processes') 2455 model = processes[0].get('model') 2456 2457 pdf_definition_lines = "" 2458 pdf_data_lines = "" 2459 pdf_lines = "" 2460 2461 if ninitial == 1: 2462 pdf_lines = "PD(0) = 0d0\nIPROC = 0\n" 2463 for i, proc in enumerate(processes): 2464 process_line = proc.base_string() 2465 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 2466 pdf_lines = pdf_lines + "\nPD(IPROC) = 1d0\n" 2467 pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n" 2468 else: 2469 # Pick out all initial state particles for the two beams 2470 initial_states = [sorted(list(set([p.get_initial_pdg(1) for \ 2471 p in processes]))), 2472 sorted(list(set([p.get_initial_pdg(2) for \ 2473 p in processes])))] 2474 2475 # Prepare all variable names 2476 pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \ 2477 sum(initial_states,[])]) 2478 for key,val in pdf_codes.items(): 2479 pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m') 2480 2481 # Set conversion from PDG code to number used in PDF calls 2482 pdgtopdf = {21: 0, 22: 7} 2483 # Fill in missing entries of pdgtopdf 2484 for pdg in sum(initial_states,[]): 2485 if not pdg in pdgtopdf and not pdg in pdgtopdf.values(): 2486 pdgtopdf[pdg] = pdg 2487 elif pdg not in pdgtopdf and pdg in pdgtopdf.values(): 2488 # If any particle has pdg code 7, we need to use something else 2489 pdgtopdf[pdg] = 6000000 + pdg 2490 2491 # Get PDF variable declarations for all initial states 2492 for i in [0,1]: 2493 pdf_definition_lines += "DOUBLE PRECISION " + \ 2494 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 2495 for pdg in \ 2496 initial_states[i]]) + \ 2497 "\n" 2498 2499 # Get PDF data lines for all initial states 2500 for i in [0,1]: 2501 pdf_data_lines += "DATA " + \ 2502 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 2503 for pdg in initial_states[i]]) + \ 2504 "/%d*1D0/" % len(initial_states[i]) + \ 2505 "\n" 2506 2507 # Get PDF values for the different initial states 2508 for i, init_states in enumerate(initial_states): 2509 if not mirror: 2510 ibeam = i + 1 2511 else: 2512 ibeam = 2 - i 2513 if subproc_group: 2514 pdf_lines = pdf_lines + \ 2515 "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \ 2516 % (ibeam, ibeam) 2517 else: 2518 pdf_lines = pdf_lines + \ 2519 "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \ 2520 % (ibeam, ibeam) 2521 2522 for initial_state in init_states: 2523 if initial_state in pdf_codes.keys(): 2524 if subproc_group: 2525 pdf_lines = pdf_lines + \ 2526 ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP," + \ 2527 "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \ 2528 (pdf_codes[initial_state], 2529 i + 1, ibeam, pdgtopdf[initial_state], 2530 ibeam, ibeam) 2531 else: 2532 pdf_lines = pdf_lines + \ 2533 ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP," + \ 2534 "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \ 2535 (pdf_codes[initial_state], 2536 i + 1, ibeam, pdgtopdf[initial_state], 2537 ibeam, ibeam) 2538 pdf_lines = pdf_lines + "ENDIF\n" 2539 2540 # Add up PDFs for the different initial state particles 2541 pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n" 2542 for proc in processes: 2543 process_line = proc.base_string() 2544 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! 
" + process_line 2545 pdf_lines = pdf_lines + "\nPD(IPROC) = " 2546 for ibeam in [1, 2]: 2547 initial_state = proc.get_initial_pdg(ibeam) 2548 if initial_state in pdf_codes.keys(): 2549 pdf_lines = pdf_lines + "%s%d*" % \ 2550 (pdf_codes[initial_state], ibeam) 2551 else: 2552 pdf_lines = pdf_lines + "1d0*" 2553 # Remove last "*" from pdf_lines 2554 pdf_lines = pdf_lines[:-1] + "\n" 2555 2556 # Remove last line break from pdf_lines 2557 return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1] 2558 2559 2560 #test written
2561 - def get_color_data_lines_from_color_matrix(self, color_matrix, n=6):
2562 """Return the color matrix definition lines for the given color_matrix. Split 2563 rows in chunks of size n.""" 2564 2565 if not color_matrix: 2566 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"] 2567 else: 2568 ret_list = [] 2569 my_cs = color.ColorString() 2570 for index, denominator in \ 2571 enumerate(color_matrix.get_line_denominators()): 2572 # First write the common denominator for this color matrix line 2573 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator)) 2574 # Then write the numerators for the matrix elements 2575 num_list = color_matrix.get_line_numerators(index, denominator) 2576 for k in xrange(0, len(num_list), n): 2577 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \ 2578 (index + 1, k + 1, min(k + n, len(num_list)), 2579 ','.join(["%5r" % i for i in num_list[k:k + n]]))) 2580 2581 return ret_list
2582 2583 #=========================================================================== 2584 # write_maxamps_file 2585 #===========================================================================
2586 - def write_maxamps_file(self, writer, maxamps, maxflows, 2587 maxproc,maxsproc):
2588 """Write the maxamps.inc file for MG4.""" 2589 2590 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 2591 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 2592 (maxamps, maxflows) 2593 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 2594 (maxproc, maxsproc) 2595 2596 # Write the file 2597 writer.writelines(file) 2598 2599 return True
2600 2601 #=============================================================================== 2602 # write_ncombs_file 2603 #===============================================================================
2604 - def write_ncombs_file(self, writer, matrix_element, fortran_model):
2605 # #test written 2606 """Write the ncombs.inc file for MadEvent.""" 2607 2608 # Extract number of external particles 2609 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2610 2611 # ncomb (used for clustering) is 2^(nexternal+1) 2612 file = " integer n_max_cl\n" 2613 file = file + "parameter (n_max_cl=%d)" % (2 ** (nexternal+1)) 2614 2615 # Write the file 2616 writer.writelines(file) 2617 2618 return True
2619 2620 #=========================================================================== 2621 # write_config_subproc_map_file 2622 #===========================================================================
2623 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
2624 """Write a dummy config_subproc.inc file for MadEvent""" 2625 2626 lines = [] 2627 2628 for iconfig in range(len(s_and_t_channels)): 2629 lines.append("DATA CONFSUB(1,%d)/1/" % \ 2630 (iconfig + 1)) 2631 2632 # Write the file 2633 writer.writelines(lines) 2634 2635 return True
2636 2637 #=========================================================================== 2638 # write_colors_file 2639 #===========================================================================
2640 - def write_colors_file(self, writer, matrix_element):
2641 """Write the get_color.f file for MadEvent, which returns color 2642 for all particles used in the matrix element.""" 2643 2644 matrix_elements=matrix_element.real_processes[0].matrix_element 2645 2646 if isinstance(matrix_elements, helas_objects.HelasMatrixElement): 2647 matrix_elements = [matrix_elements] 2648 2649 model = matrix_elements[0].get('processes')[0].get('model') 2650 2651 # We need the both particle and antiparticle wf_ids, since the identity 2652 # depends on the direction of the wf. 2653 wf_ids = set(sum([sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 2654 for wf in d.get('wavefunctions')],[]) \ 2655 for d in me.get('diagrams')],[]) \ 2656 for me in [real_proc.matrix_element]],[])\ 2657 for real_proc in matrix_element.real_processes],[])) 2658 leg_ids = set(sum([sum([sum([[l.get('id') for l in \ 2659 p.get_legs_with_decays()] for p in \ 2660 me.get('processes')], []) for me in \ 2661 [real_proc.matrix_element]], []) for real_proc in \ 2662 matrix_element.real_processes],[])) 2663 particle_ids = sorted(list(wf_ids.union(leg_ids))) 2664 2665 lines = """function get_color(ipdg) 2666 implicit none 2667 integer get_color, ipdg 2668 2669 if(ipdg.eq.%d)then 2670 get_color=%d 2671 return 2672 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color()) 2673 2674 for part_id in particle_ids[1:]: 2675 lines += """else if(ipdg.eq.%d)then 2676 get_color=%d 2677 return 2678 """ % (part_id, model.get_particle(part_id).get_color()) 2679 # Dummy particle for multiparticle vertices with pdg given by 2680 # first code not in the model 2681 lines += """else if(ipdg.eq.%d)then 2682 c This is dummy particle used in multiparticle vertices 2683 get_color=2 2684 return 2685 """ % model.get_first_non_pdg() 2686 lines += """else 2687 write(*,*)'Error: No color given for pdg ',ipdg 2688 get_color=0 2689 return 2690 endif 2691 end 2692 """ 2693 2694 # Write the file 2695 writer.writelines(lines) 2696 2697 return True
2698 2699 #=============================================================================== 2700 # write_props_file 2701 #=============================================================================== 2702 #test_written
2703 - def write_props_file(self, writer, matrix_element, fortran_model, s_and_t_channels):
2704 """Write the props.inc file for MadEvent. Needs input from 2705 write_configs_file. With respect to the parent routine, it has some 2706 more specific formats that allow the props.inc file to be read by the 2707 link program""" 2708 2709 lines = [] 2710 2711 particle_dict = matrix_element.get('processes')[0].get('model').\ 2712 get('particle_dict') 2713 2714 for iconf, configs in enumerate(s_and_t_channels): 2715 for vertex in configs[0] + configs[1][:-1]: 2716 leg = vertex.get('legs')[-1] 2717 if leg.get('id') == 21 and 21 not in particle_dict: 2718 # Fake propagator used in multiparticle vertices 2719 mass = 'zero' 2720 width = 'zero' 2721 pow_part = 0 2722 else: 2723 particle = particle_dict[leg.get('id')] 2724 # Get mass 2725 if particle.get('mass').lower() == 'zero': 2726 mass = particle.get('mass') 2727 else: 2728 mass = "abs(%s)" % particle.get('mass') 2729 # Get width 2730 if particle.get('width').lower() == 'zero': 2731 width = particle.get('width') 2732 else: 2733 width = "abs(%s)" % particle.get('width') 2734 2735 pow_part = 1 + int(particle.is_boson()) 2736 2737 lines.append("pmass(%3d,%4d) = %s" % \ 2738 (leg.get('number'), iconf + 1, mass)) 2739 lines.append("pwidth(%3d,%4d) = %s" % \ 2740 (leg.get('number'), iconf + 1, width)) 2741 lines.append("pow(%3d,%4d) = %d" % \ 2742 (leg.get('number'), iconf + 1, pow_part)) 2743 2744 # Write the file 2745 writer.writelines(lines) 2746 2747 return True
2748 2749 2750 #=========================================================================== 2751 # write_subproc 2752 #===========================================================================
2753 - def write_subproc(self, writer, subprocdir):
2754 """Append this subprocess to the subproc.mg file for MG4""" 2755 2756 # Write line to file 2757 writer.write(subprocdir + "\n") 2758 2759 return True
2760 2761 2762 2763 2764 2765 #================================================================================= 2766 # Class for using the optimized Loop process 2767 #=================================================================================
2768 -class ProcessOptimizedExporterFortranFKS(loop_exporters.LoopProcessOptimizedExporterFortranSA,\ 2769 ProcessExporterFortranFKS):
2770 """Class to take care of exporting a set of matrix elements to 2771 Fortran (v4) format.""" 2772 2773 #=============================================================================== 2774 # copy the Template in a new directory. 2775 #===============================================================================
2776 - def copy_fkstemplate(self):
2777 """create the directory run_name as a copy of the MadEvent 2778 Template, and clean the directory 2779 For now it is just the same as copy_v4template, but it will be modified 2780 """ 2781 mgme_dir = self.mgme_dir 2782 dir_path = self.dir_path 2783 clean =self.opt['clean'] 2784 2785 #First copy the full template tree if dir_path doesn't exit 2786 if not os.path.isdir(dir_path): 2787 if not mgme_dir: 2788 raise MadGraph5Error, \ 2789 "No valid MG_ME path given for MG4 run directory creation." 2790 logger.info('initialize a new directory: %s' % \ 2791 os.path.basename(dir_path)) 2792 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True) 2793 # distutils.dir_util.copy_tree since dir_path already exists 2794 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template', 'Common'), 2795 dir_path) 2796 elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')): 2797 if not mgme_dir: 2798 raise MadGraph5Error, \ 2799 "No valid MG_ME path given for MG4 run directory creation." 2800 try: 2801 shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path) 2802 except IOError: 2803 MG5_version = misc.get_pkg_info() 2804 open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \ 2805 "5." + MG5_version['version']) 2806 2807 #Ensure that the Template is clean 2808 if clean: 2809 logger.info('remove old information in %s' % os.path.basename(dir_path)) 2810 if os.environ.has_key('MADGRAPH_BASE'): 2811 subprocess.call([os.path.join('bin', 'internal', 'clean_template'), 2812 '--web'], cwd=dir_path) 2813 else: 2814 try: 2815 subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \ 2816 cwd=dir_path) 2817 except Exception, why: 2818 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 2819 % (os.path.basename(dir_path),why)) 2820 #Write version info 2821 MG_version = misc.get_pkg_info() 2822 open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 2823 MG_version['version']) 2824 2825 # We must link the CutTools to the Library folder of the active Template 2826 self.link_CutTools(dir_path) 2827 # We must link the TIR to the Library folder of the active Template 2828 link_tir_libs=[] 2829 tir_libs=[] 2830 tir_include=[] 2831 # special for PJFry++/Golem95 2832 link_pjfry_lib="" 2833 pjfry_lib="" 2834 for tir in self.all_tir: 2835 tir_dir="%s_dir"%tir 2836 libpath=getattr(self,tir_dir) 2837 libname="lib%s.a"%tir 2838 tir_name=tir 2839 libpath = self.link_TIR(os.path.join(self.dir_path, 'lib'), 2840 libpath,libname,tir_name=tir_name) 2841 setattr(self,tir_dir,libpath) 2842 if libpath != "": 2843 if tir in ['pjfry','golem']: 2844 # Apparently it is necessary to link against the original 2845 # location of the pjfry/golem library, so it needs a special treatment. 
2846 link_tir_libs.append('-L%s/ -l%s'%(libpath,tir)) 2847 tir_libs.append('%s/lib%s.$(libext)'%(libpath,tir)) 2848 if tir=='golem': 2849 trg_path = pjoin(os.path.dirname(libpath),'include') 2850 golem_include = misc.find_includes_path(trg_path,'.mod') 2851 if golem_include is None: 2852 logger.error( 2853 'Could not find the include directory for golem, looking in %s.\n' % str(trg_path)+ 2854 'Generation carries on but you will need to edit the include path by hand in the makefiles.') 2855 golem_include = '<Not_found_define_it_yourself>' 2856 tir_include.append('-I %s'%golem_include) 2857 else: 2858 link_tir_libs.append('-l%s'%tir) 2859 tir_libs.append('$(LIBDIR)lib%s.$(libext)'%tir) 2860 2861 os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc')) 2862 cwd = os.getcwd() 2863 dirpath = os.path.join(self.dir_path, 'SubProcesses') 2864 try: 2865 os.chdir(dirpath) 2866 except os.error: 2867 logger.error('Could not cd to directory %s' % dirpath) 2868 return 0 2869 filename = 'makefile_loop' 2870 calls = self.write_makefile_TIR(writers.MakefileWriter(filename), 2871 link_tir_libs,tir_libs,tir_include=tir_include) 2872 os.remove(os.path.join(self.dir_path,'Source','make_opts.inc')) 2873 dirpath = os.path.join(self.dir_path, 'Source') 2874 try: 2875 os.chdir(dirpath) 2876 except os.error: 2877 logger.error('Could not cd to directory %s' % dirpath) 2878 return 0 2879 filename = 'make_opts' 2880 calls = self.write_make_opts(writers.MakefileWriter(filename), 2881 link_tir_libs,tir_libs) 2882 # Return to original PWD 2883 os.chdir(cwd) 2884 2885 cwd = os.getcwd() 2886 dirpath = os.path.join(self.dir_path, 'SubProcesses') 2887 try: 2888 os.chdir(dirpath) 2889 except os.error: 2890 logger.error('Could not cd to directory %s' % dirpath) 2891 return 0 2892 2893 # We add here the user-friendly MadLoop option setter. 2894 cpfiles= ["SubProcesses/MadLoopParamReader.f", 2895 "Cards/MadLoopParams.dat", 2896 "SubProcesses/MadLoopParams.inc"] 2897 2898 for file in cpfiles: 2899 shutil.copy(os.path.join(self.loop_dir,'StandAlone/', file), 2900 os.path.join(self.dir_path, file)) 2901 2902 # We need minimal editing of MadLoopCommons.f 2903 MadLoopCommon = open(os.path.join(self.loop_dir,'StandAlone', 2904 "SubProcesses","MadLoopCommons.inc")).read() 2905 writer = writers.FortranWriter(os.path.join(self.dir_path, 2906 "SubProcesses","MadLoopCommons.f")) 2907 writer.writelines(MadLoopCommon%{ 2908 'print_banner_commands':self.MadLoop_banner}) 2909 writer.close() 2910 2911 # link the files from the MODEL 2912 model_path = self.dir_path + '/Source/MODEL/' 2913 # Note that for the [real=] mode, these files are not present 2914 if os.path.isfile(os.path.join(model_path,'mp_coupl.inc')): 2915 ln(model_path + '/mp_coupl.inc', self.dir_path + '/SubProcesses') 2916 if os.path.isfile(os.path.join(model_path,'mp_coupl_same_name.inc')): 2917 ln(model_path + '/mp_coupl_same_name.inc', \ 2918 self.dir_path + '/SubProcesses') 2919 2920 # Write the cts_mpc.h and cts_mprec.h files imported from CutTools 2921 self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\ 2922 writers.FortranWriter('cts_mpc.h'),) 2923 2924 self.copy_python_files() 2925 2926 # Return to original PWD 2927 os.chdir(cwd)
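The two lists filled in copy_fkstemplate above end up in the generated makefiles. As a sketch of the two branches, with a made-up installation path and an arbitrary library name for the generic case:

    link_tir_libs, tir_libs = [], []
    # pjfry/golem branch: link against the original installation path (made-up here).
    libpath, tir = '/opt/golem95/lib', 'golem'
    link_tir_libs.append('-L%s/ -l%s' % (libpath, tir))
    tir_libs.append('%s/lib%s.$(libext)' % (libpath, tir))
    # generic branch: the library sits in the process' own lib/ directory.
    tir = 'iregi'   # example name only
    link_tir_libs.append('-l%s' % tir)
    tir_libs.append('$(LIBDIR)lib%s.$(libext)' % tir)
    print(link_tir_libs)
    print(tir_libs)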
2928
2929 - def generate_virt_directory(self, loop_matrix_element, fortran_model, dir_name):
2930 """writes the V**** directory inside the P**** directories specified in 2931 dir_name""" 2932 2933 cwd = os.getcwd() 2934 2935 matrix_element = loop_matrix_element 2936 2937 # Create the MadLoop5_resources directory if not already existing 2938 dirpath = os.path.join(dir_name, 'MadLoop5_resources') 2939 try: 2940 os.mkdir(dirpath) 2941 except os.error as error: 2942 logger.warning(error.strerror + " " + dirpath) 2943 2944 # Create the directory PN_xx_xxxxx in the specified path 2945 name = "V%s" % matrix_element.get('processes')[0].shell_string() 2946 dirpath = os.path.join(dir_name, name) 2947 2948 try: 2949 os.mkdir(dirpath) 2950 except os.error as error: 2951 logger.warning(error.strerror + " " + dirpath) 2952 2953 try: 2954 os.chdir(dirpath) 2955 except os.error: 2956 logger.error('Could not cd to directory %s' % dirpath) 2957 return 0 2958 2959 logger.info('Creating files in directory %s' % name) 2960 2961 # Extract number of external particles 2962 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2963 2964 calls=self.write_matrix_element_v4(None,matrix_element,fortran_model) 2965 2966 # The born matrix element, if needed 2967 filename = 'born_matrix.f' 2968 calls = self.write_bornmatrix( 2969 writers.FortranWriter(filename), 2970 matrix_element, 2971 fortran_model) 2972 2973 filename = 'nexternal.inc' 2974 self.write_nexternal_file(writers.FortranWriter(filename), 2975 (nexternal-2), ninitial) 2976 2977 filename = 'pmass.inc' 2978 self.write_pmass_file(writers.FortranWriter(filename), 2979 matrix_element) 2980 2981 filename = 'ngraphs.inc' 2982 self.write_ngraphs_file(writers.FortranWriter(filename), 2983 len(matrix_element.get_all_amplitudes())) 2984 2985 filename = "loop_matrix.ps" 2986 writers.FortranWriter(filename).writelines("""C Post-helas generation loop-drawing is not ready yet.""") 2987 plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList( 2988 matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]), 2989 filename, 2990 model=matrix_element.get('processes')[0].get('model'), 2991 amplitude='') 2992 logger.info("Drawing loop Feynman diagrams for " + \ 2993 matrix_element.get('processes')[0].nice_string(\ 2994 print_weighted=False)) 2995 plot.draw() 2996 2997 filename = "born_matrix.ps" 2998 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 2999 get('born_diagrams'), 3000 filename, 3001 model=matrix_element.get('processes')[0].\ 3002 get('model'), 3003 amplitude='') 3004 logger.info("Generating born Feynman diagrams for " + \ 3005 matrix_element.get('processes')[0].nice_string(\ 3006 print_weighted=False)) 3007 plot.draw() 3008 3009 linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc', 3010 'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f', 3011 'MadLoopParams.inc','MadLoopCommons.f'] 3012 3013 for file in linkfiles: 3014 ln('../../%s' % file) 3015 3016 3017 os.system("ln -s ../../makefile_loop makefile") 3018 3019 # We should move to MadLoop5_resources directory from the SubProcesses 3020 ln(pjoin('../../..','Cards','MadLoopParams.dat'), 3021 pjoin('..','MadLoop5_resources')) 3022 3023 linkfiles = ['mpmodule.mod'] 3024 3025 for file in linkfiles: 3026 ln('../../../lib/%s' % file) 3027 3028 # Return to original PWD 3029 os.chdir(cwd) 3030 3031 if not calls: 3032 calls = 0 3033 return calls
3034 3035 3036 #=============================================================================== 3037 # write_coef_specs 3038 #===============================================================================
3039 - def write_coef_specs_file(self, virt_me_list):
3040 """ writes the coef_specs.inc in the DHELAS folder. Should not be called in the 3041 non-optimized mode""" 3042 filename = os.path.join(self.dir_path, 'Source', 'DHELAS', 'coef_specs.inc') 3043 3044 general_replace_dict = {} 3045 general_replace_dict['max_lwf_size'] = 4 3046 3047 max_loop_vertex_ranks = [me.get_max_loop_vertex_rank() for me in virt_me_list] 3048 general_replace_dict['vertex_max_coefs'] = max(\ 3049 [q_polynomial.get_number_of_coefs_for_rank(n) 3050 for n in max_loop_vertex_ranks]) 3051 3052 IncWriter=writers.FortranWriter(filename,'w') 3053 IncWriter.writelines("""INTEGER MAXLWFSIZE 3054 PARAMETER (MAXLWFSIZE=%(max_lwf_size)d) 3055 INTEGER VERTEXMAXCOEFS 3056 PARAMETER (VERTEXMAXCOEFS=%(vertex_max_coefs)d)"""\ 3057 % general_replace_dict) 3058 IncWriter.close()
3059