
Source Code for Module models.usermod

################################################################################
#
# Copyright (c) 2011 The MadGraph Development team and Contributors
#
# This file is a part of the MadGraph 5 project, an application which
# automatically generates Feynman diagrams and matrix elements for arbitrary
# high-energy processes in the Standard Model and beyond.
#
# It is subject to the MadGraph license which should accompany this
# distribution.
#
# For more information, please visit: http://madgraph.phys.ucl.ac.be
#
################################################################################
 15  """ Set of Tool in order to modify a given UFO model. 
 16      (mainly by adding-suppressing interactions and allow to modify by text the  
 17      different part of the model. Check of consistency of the model are performed. 
 18      This produce a new valid UFO model in output. 
 19  """ 

import glob
import logging
import os
import re
import sys

import madgraph.core.base_objects as base_objects
import madgraph.iolibs.files as files
import madgraph.various.misc as misc
import models as ufomodels
import models.import_ufo as import_ufo
import models.check_param_card as check_param_card

pjoin = os.path.join
logger = logging.getLogger('madgraph.model')

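# Illustrative usage sketch (not part of the original module; the paths below
# are hypothetical). The UFOModel class defined below is typically driven as
# follows: load a base UFO model, merge an add-on model into it, write the result.
#
#   usrmod = UFOModel('path/to/base_UFO')
#   usrmod.add_model(path='path/to/addon_UFO')
#   usrmod.write('path/to/merged_UFO')
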
class USRMODERROR(Exception): pass


class UFOModel(object):
    """ The class storing the current status of the model """
    def __init__(self, modelpath, addon='__1'):
        """load the model from a valid UFO directory (otherwise keep everything
        empty)."""
        self.modelpath = modelpath
        model = ufomodels.load_model(modelpath)

        # Check the validity of the model. Too old UFO (before UFO 1.0)
        if not hasattr(model, 'all_orders'):
            raise USRMODERROR, 'Base Model doesn\'t follow the UFO convention (no couplings_order information)\n' +\
                               'MG5 is able to load such a model but NOT to use it with the add model feature.'
        if isinstance(model.all_particles[0].mass, basestring):
            raise USRMODERROR, 'Base Model doesn\'t follow the UFO convention (Mass/Width of particles are string names, not objects)\n' +\
                               'MG5 is able to load such a model but NOT to use it with the add model feature.'

        self.particles = model.all_particles
        if any(hasattr(p, 'loop_particles') for p in self.particles):
            raise USRMODERROR, 'Base Model doesn\'t follow the UFO convention'
        self.vertices = model.all_vertices
        self.couplings = model.all_couplings
        self.lorentz = model.all_lorentz
        self.parameters = model.all_parameters
        self.Parameter = self.parameters[0].__class__
        self.orders = model.all_orders

        self.functions = model.all_functions
        self.new_external = []
        # UFO optional file
        if hasattr(model, 'all_propagators'):
            self.propagators = model.all_propagators
        else:
            self.propagators = []

        # UFO NLO extension
        if hasattr(model, 'all_CTvertices'):
            self.CTvertices = model.all_CTvertices
        else:
            self.CTvertices = []

        # translation of attribute names used when writing the python files
        if 'self.expr = expression' in open(pjoin(self.modelpath, 'object_library.py')).read():
            self.translate = {'expr': 'expression'}
        else:
            self.translate = {}

        # translation of the expressions of the UFO model
        self.old_new = {}
        self.addon = addon

        # particle id -> object
        self.particle_dict = {}
        for particle in self.particles:
            self.particle_dict[particle.pdg_code] = particle

        # paths to all the models that should be used for the Fortran files.
        self.all_path = [self.modelpath]

    def write(self, outputdir):
        """ write the full new UFO model in the directory outputdir """
        if not os.path.exists(outputdir):
            os.mkdir(outputdir)
        files.cp(os.path.join(self.modelpath, '__init__.py'), outputdir)
        files.cp(os.path.join(self.modelpath, 'object_library.py'), outputdir)
        files.cp(os.path.join(self.modelpath, 'write_param_card.py'), outputdir)

        self.write_particles(outputdir)
        self.write_vertices(outputdir)
        self.write_couplings(outputdir)
        self.write_lorentz(outputdir)
        self.write_parameters(outputdir)
        self.write_orders(outputdir)
        self.write_functions(outputdir)
        self.write_propagators(outputdir)
        self.write_ctvertices(outputdir)

        self.write_external_files(outputdir)
        self.write_restrict_card(outputdir)

    def write_restrict_card(self, outputdir):
        """ propagate the model restrictions of the original model. """

        restrict_list = [l for l in os.listdir(self.modelpath) if l.startswith('restrict_')]
        if not self.new_external:
            # no new entry in the card => just copy the restrict_card.dat
            for p in restrict_list:
                files.cp(pjoin(self.modelpath, p), outputdir)
        else:
            # need to add the parameters and ensure that they will not be restricted!
            for p in restrict_list:
                param_card = check_param_card.ParamCard(pjoin(self.modelpath, p))
                for parameter in self.new_external:
                    block = parameter.lhablock
                    lhaid = parameter.lhacode
                    value = parameter.value
                    if value == 0:
                        value = 1e-99
                    elif value == 1:
                        value = 9.999999e-1
                    try:
                        param_card.add_param(block.lower(), lhaid, value, 'from addon')
                    except check_param_card.InvalidParamCard:
                        logger.warning("%s will not be acting on %s %s" % (p, block, lhaid))
                        param_card[block.lower()].get(lhaid).value = value
                # all added -> write it
                param_card.write(pjoin(outputdir, p))

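    # Example of the behaviour above (illustrative; block name and id are
    # hypothetical): a new external parameter in block 'NEWCOUP' with lhacode [1]
    # and value 0 is written into every restrict_*.dat as
    #     1 1.000000e-99 # from addon
    # so that the restriction machinery does not remove the new parameter.
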
    def format_param(self, param):
        """convert param to a string so that it is written correctly in the
        UFO file"""

        if isinstance(param, basestring):
            return "'%s'" % param.replace('\'', '\\\'').replace('\"', '\\\"')
        elif isinstance(param, int) or isinstance(param, float) or \
             isinstance(param, complex):
            return "%s" % param
        elif isinstance(param, long):
            return ("%s" % param).replace('L', '')
        elif isinstance(param, list):
            return '[%s]' % ', '.join(self.format_param(p) for p in param)
        elif isinstance(param, tuple):
            if len(param) == 1:
                return '(%s,)' % self.format_param(param[0])
            else:
                return '(%s)' % ','.join([self.format_param(p) for p in param])
        elif isinstance(param, dict):
            return '{%s}' % ','.join(['%s: %s' % (self.format_param(key), self.format_param(value)) for key, value in param.items()])
        elif param.__class__.__name__ == 'Parameter':
            return 'Param.%s' % param.__repr__()
        elif param.__class__.__name__ == 'Coupling':
            return 'C.%s' % param.__repr__()
        elif param.__class__.__name__ == 'Lorentz':
            return 'L.%s' % param.__repr__()
        elif param.__class__.__name__ == 'Particle':
            return 'P.%s' % param.__repr__()
        elif param is None:
            return 'None'
        else:
            raise Exception, '%s unknown type for writing UFO' % param.__class__.__name__

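    # A few representative conversions performed by format_param (illustrative):
    #   format_param('ckm')          -> "'ckm'"
    #   format_param(1.5)            -> '1.5'
    #   format_param([1, 2])         -> '[1, 2]'
    #   format_param((3,))           -> '(3,)'
    #   format_param({'QCD': 2})     -> "{'QCD': 2}"
    #   format_param(some_parameter) -> 'Param.<repr of the Parameter object>'
    #   format_param(None)           -> 'None'
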
    def create_data_text(self, obj):
        """ create the data associated to the object"""
        # Most of the objects come from UFOBASECLASS,
        # BUT NOT ALL, so we need to deal with both cases.

        nb_space = 0
        if hasattr(obj, 'require_args_all'):
            args = obj.require_args_all
        elif hasattr(obj, 'require_args'):
            args = obj.require_args
        else:
            args = []
        if args:
            text = """%s = %s(""" % (obj.__repr__(), obj.__class__.__name__)
        else:
            text = """%s = %s(""" % (obj.name, obj.__class__.__name__)

        for data in args:
            if data in self.translate:
                data = self.translate[data]
            if not nb_space:
                add_space = len(text)
            else:
                add_space = 0

            try:
                expr = getattr(obj, data)
            except:
                if data in ['counterterm', 'propagator', 'loop_particles']:
                    expr = None
                    setattr(obj, data, None)
                else:
                    raise
            name = str(data)
            if name in self.translate:
                name = self.translate[name]
            #if data == 'lhablock':
            #    print data, type(self.format_param(getattr(obj, data)))
            text += '%s%s = %s,\n' % (' ' * nb_space, name, self.format_param(getattr(obj, data)))
            nb_space += add_space

        if hasattr(obj, 'get_all'):
            other_attr = [name for name in obj.get_all().keys()
                          if name not in args]
        else:
            other_attr = obj.__dict__.keys()

        for data in other_attr:
            name = str(data)
            if name in ['partial_widths', 'loop_particles']:
                continue
            if name in self.translate:
                name = self.translate[name]
            if not nb_space:
                add_space = len(text)
            else:
                add_space = 0
            text += '%s%s = %s,\n' % (' ' * nb_space, name, self.format_param(getattr(obj, data)))
            nb_space += add_space

        text = text[:-2] + ')\n\n'

        return text
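    # Illustrative output of create_data_text for a CouplingOrder-like object
    # (the exact attribute list depends on the UFO class being written):
    #
    #   QCD = CouplingOrder(name = 'QCD',
    #                       expansion_order = 99,
    #                       hierarchy = 1)
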
    def create_file_content(self, datalist):
        """ return the UFO text for all the objects in datalist """
        return '\n'.join([self.create_data_text(obj) for obj in datalist])


    def write_particles(self, outputdir):
        """ write the particles.py file """
        text = """
# This file was automatically created by The UFO_usermod

from __future__ import division
from object_library import all_particles, Particle
import parameters as Param

"""
        text += self.create_file_content(self.particles)
        ff = open(os.path.join(outputdir, 'particles.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_vertices(self, outputdir):
        """ write the vertices.py file """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_vertices, Vertex
import particles as P
import couplings as C
import lorentz as L

"""
        text += self.create_file_content(self.vertices)
        ff = open(os.path.join(outputdir, 'vertices.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_ctvertices(self, outputdir):
        """ write the CT_vertices.py file (NLO extension, if present) """

        if not self.CTvertices:
            return

        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_vertices, all_CTvertices, Vertex, CTVertex
import particles as P
import couplings as C
import lorentz as L

"""
        text += self.create_file_content(self.CTvertices)
        ff = open(os.path.join(outputdir, 'CT_vertices.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_couplings(self, outputdir):
        """ write the couplings.py file """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_couplings, Coupling
"""
        text += self.create_file_content(self.couplings)
        ff = open(os.path.join(outputdir, 'couplings.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_lorentz(self, outputdir):
        """ write the lorentz.py file """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_lorentz, Lorentz
"""

        text += self.create_file_content(self.lorentz)
        ff = open(os.path.join(outputdir, 'lorentz.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_parameters(self, outputdir):
        """ write the parameters.py file """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_parameters, Parameter
"""

        text += self.create_file_content(self.parameters)
        ff = open(os.path.join(outputdir, 'parameters.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_orders(self, outputdir):
        """ write the coupling_orders.py file """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_orders, CouplingOrder
"""

        text += self.create_file_content(self.orders)
        ff = open(os.path.join(outputdir, 'coupling_orders.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_functions(self, outputdir):
        """ write the function_library.py file """
        text = """
# This file was automatically created by The UFO_usermod

import cmath
from object_library import all_functions, Function

"""

        text += self.create_file_content(self.functions)
        ff = open(os.path.join(outputdir, 'function_library.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_propagators(self, outputdir):
        """ write the propagators.py file """

        text = """
# This file was automatically created by The UFO_usermod
from object_library import all_propagators, Propagator
"""

        text += self.create_file_content(self.propagators)
        ff = open(os.path.join(outputdir, 'propagators.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_external_files(self, outputdir):
        """Copy/merge the routines written in Fortran/C++/Python"""

        #1. Special case for the form factors written in Fortran
        re_fct = re.compile('''^\s{7,70}[\w\s]*function (\w*)\(''', re.M + re.I)
        present_fct = set()
        for dirpath in self.all_path:
            if os.path.exists(pjoin(dirpath, 'Fortran', 'functions.f')):
                text = open(pjoin(dirpath, 'Fortran', 'functions.f')).read()
                new_fct = re_fct.findall(text)
                nb_old = len(present_fct)
                nb_added = len(new_fct)
                new_fct = set([f.lower() for f in new_fct])
                present_fct.update(new_fct)
                if len(present_fct) < nb_old + nb_added:
                    logger.critical('''Some functions in functions.f are defined in more than one model.
This requires AT LEAST manual modification of the resulting file. More likely, the
model needs to be considered as un-physical! Use it very carefully.''')

                if not os.path.exists(pjoin(outputdir, 'Fortran')):
                    os.mkdir(pjoin(outputdir, 'Fortran'))
                fsock = open(pjoin(outputdir, 'Fortran', 'functions.f'), 'a')
                fsock.write(text)
                fsock.close()

        #2. Other files present in the Fortran/CPP/Python directories.
        #   Ask the user to handle them, if any!
        for dirpath in self.all_path:
            for subdir in ['Fortran', 'CPP', 'Python']:
                if os.path.exists(pjoin(dirpath, subdir)):
                    for filepath in os.listdir(pjoin(dirpath, subdir)):
                        if filepath == 'functions.f':
                            continue
                        if '.' not in filepath:
                            continue
                        logger.warning('Manual HELAS routines are associated to the model. Those are not modified automatically!! You need to check them manually.')
                        nb = 0
                        name, extension = filepath.rsplit('.', 1)

                        while 1:
                            filename = '%s%s.%s' % (name, '.moved' * nb, extension)
                            if os.path.exists(pjoin(outputdir, subdir, filename)):
                                nb += 1
                            else:
                                break
                        if not os.path.exists(pjoin(outputdir, subdir)):
                            os.mkdir(pjoin(outputdir, subdir))
                        files.cp(pjoin(dirpath, subdir, filepath), pjoin(outputdir, subdir, filename))

    def get_particle(self, name):
        """ return the particle with the given name """
        for part in self.particles:
            if part.name == name:
                return part

        raise USRMODERROR, 'no particle %s in the model' % name

    def add_parameter(self, parameter, identify_pid={}):
        """wrapper to call the correct function"""

        if parameter.nature == 'internal':
            self.add_internal_parameter(parameter)
        else:
            self.add_external_parameter(parameter, identify_pid)

    def add_particle(self, particle, identify=None):
        """Add a particle in a consistent way"""

        name = particle.name
        if identify:
            name = identify
        old_part = next((p for p in self.particles if p.name == name), None)
        if not old_part:
            # secure against lower/upper case problems
            first = True
            for p in self.particles:
                if p.name.lower() == name.lower():
                    if not first:
                        raise Exception
                    else:
                        first = False
                        old_part = p

        if old_part:
            # Check if the two particles have the same pdg_code
            if old_part.pdg_code == particle.pdg_code:
                particle.replace = old_part
                return self.check_mass_width_of_particle(old_part, particle)
            elif identify:
                if particle.spin != old_part.spin:
                    raise USRMODERROR, "identified particles should have the same spin"
                elif particle.color != old_part.color:
                    raise USRMODERROR, "identified particles should have the same color"
                particle.replace = old_part
                return self.check_mass_width_of_particle(old_part, particle)
            else:
                logger.warning('The particle name \'%s\' is present in both models with a different pdg code' % name)
                logger.warning('The particle coming from the plug-in model will be renamed to \'%s%s\'' % (name, self.addon))
                particle.name = '%s%s' % (name, self.addon)
                self.particles.append(particle)
                return
        elif identify:
            raise USRMODERROR, "Particle %s is not in the model" % identify

        pdg = particle.pdg_code
        if pdg in self.particle_dict:
            particle.replace = self.particle_dict[pdg]
            return self.check_mass_width_of_particle(self.particle_dict[pdg], particle)
        else:
            if hasattr(particle, 'replace'):
                del particle.replace
            self.particles.append(particle)

    def check_mass_width_of_particle(self, p_base, p_plugin):
        """ check that the mass/width of two identified particles are compatible """
        # Check the mass
        if p_base.mass.name != p_plugin.mass.name:
            # different name but actually the same
            if p_plugin.mass.name in self.old_new:
                if self.old_new[p_plugin.mass.name] != p_base.mass.name:
                    raise USRMODERROR, 'Some inconsistency in the mass assignment in the model: equivalent of %s is %s != %s ' % \
                        (p_plugin.mass.name, self.old_new[p_plugin.mass.name], p_base.mass.name)
            elif p_base.mass.name.lower() == 'zero':
                p_base.mass = p_plugin.mass
            elif p_plugin.mass.name.lower() == 'zero':
                pass
            else:
                raise USRMODERROR, 'Some inconsistency in the mass assignment in the model\n' + \
                    '    Mass: %s and %s\n' % (p_base.mass.name, p_plugin.mass.name) + \
                    '    conflicting names %s\n' % self.old_new + \
                    '    pdg_code: %s %s' % (p_base.pdg_code, p_plugin.pdg_code)
        # Check the width
        if p_base.width.name != p_plugin.width.name:
            # different name but actually the same
            if p_plugin.width.name in self.old_new:
                if self.old_new[p_plugin.width.name] != p_base.width.name:
                    raise USRMODERROR, 'Some inconsistency in the width assignment in the model'
            elif p_base.width.name.lower() == 'zero':
                p_base.width = p_plugin.width
            elif p_plugin.width.name.lower() == 'zero':
                pass
            else:
                raise USRMODERROR, 'Some inconsistency in the width assignment in the model'

        return

    def add_external_parameter(self, parameter, identify_pid):
        """add a param_card parameter to the current model.
        If the parameter block/lhacode already exists, do nothing
        (but if the names differ, keep the information for future translation).
        If only the name already exists in the model, rename the new parameter.
        """

        name = parameter.name
        # check if a parameter already has this name
        old_param = next((p for p in self.parameters if p.name == name), None)
        if old_param:
            if old_param.lhablock == parameter.lhablock and \
                   old_param.lhacode == parameter.lhacode:
                return  # Nothing to do!
            else:
                logger.info('The two models define the parameter \'%s\'\n' % parameter.name +
                            '    the original model for %s : %s\n' % (old_param.lhablock, old_param.lhacode) +
                            '    the plugin model for %s : %s\n' % (parameter.lhablock, parameter.lhacode) +
                            '    We will rename the one from the plugin to %s%s' % (parameter.name, self.addon))
                if old_param.nature == 'internal':
                    logger.warning('''The parameter %s is actually an internal parameter of the base model.
Its value is given by %s.
If those two parameters are expected to be identical, you need to provide the value in the param_card according to this formula.
''' % (name, old_param.value))
                # add the parameter with a new name.
                self.old_new[parameter.name] = '%s%s' % (parameter.name, self.addon)
                parameter.name = '%s%s' % (parameter.name, self.addon)
                #
                #self.parameters.append(parameter)
                #return

        # check if a parameter already has this lhablock/code information
        lhacode = parameter.lhacode
        if parameter.lhablock.lower() in ['mass', 'decay']:
            if int(parameter.lhacode[0]) in identify_pid:
                lhacode = [identify_pid[int(parameter.lhacode[0])]]

        old_param = next((p for p in self.parameters if p.lhacode == lhacode \
                          and p.lhablock == parameter.lhablock), None)
        if old_param:
            logger.info('The two models define the block \'%s\' with id \'%s\' with different parameter names \'%s\', \'%s\'\n' \
                        % (old_param.lhablock, old_param.lhacode, parameter.name, old_param.name) + \
                        '    We will merge those two parameters into a single one')
            if parameter.name in self.old_new.values():
                key = [k for k in self.old_new if self.old_new[k] == parameter.name][0]
                self.old_new[key] = old_param.name
                self.old_new[parameter.name] = old_param.name
            else:
                self.old_new[parameter.name] = old_param.name
#            self.add_internal_parameter(iden_param)
        else:
            # Just add the new parameter to the current list
            self.parameters.append(parameter)
            self.new_external.append(parameter)

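    # Illustrative examples of the merging rules above (parameter names hypothetical):
    #  * base and add-on both define 'cabi' but with different lhablock/lhacode:
    #    the add-on parameter is renamed 'cabi__1' and old_new records the mapping,
    #    so the expressions of the add-on model are rewritten accordingly.
    #  * base defines 'MZ' in BLOCK MASS id 23 and the add-on defines 'MZboson'
    #    with the same block/id: both are merged into the single base parameter
    #    and old_new maps 'MZboson' -> 'MZ'.
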
    def add_internal_parameter(self, parameter):
        """ add a parameter of type internal """

        name = parameter.name
        # check if a parameter already has this name
        old_param = next((p for p in self.parameters if p.name == name), None)
        if old_param:
            if old_param.value == parameter.value:
                return  # Nothing to do!
            else:
                if self.old_new:
                    pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
                    def replace(matchobj):
                        return self.old_new[matchobj.group(0)]
                    parameter.value = pattern.sub(replace, parameter.value)
                self.old_new[parameter.name] = '%s%s' % (parameter.name, self.addon)
                parameter.name = '%s%s' % (parameter.name, self.addon)
                self.parameters.append(parameter)
                return

        # No name conflict:
        if self.old_new:
            pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
            def replace(matchobj):
                return self.old_new[matchobj.group(0)]
            parameter.value = pattern.sub(replace, parameter.value)

        self.parameters.append(parameter)

    def add_coupling(self, coupling):
        """add one coupling"""

        # avoid name duplication
        name = coupling.name
        same_name = next((p for p in self.couplings if p.name == name), None)
        if same_name:
            coupling.name = '%s%s' % (coupling.name, self.addon)

        if self.old_new:
            pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
            def replace(matchobj):
                return self.old_new[matchobj.group(0)]
            coupling.value = pattern.sub(replace, coupling.value)

        old_coupling = next((p for p in self.couplings if p.value == coupling.value), None)

        if old_coupling:
            coupling.replace = old_coupling  # tag for replacement
        else:
            self.couplings.append(coupling)

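    # Illustrative consequence of add_coupling: an add-on coupling whose
    # (rewritten) value matches an existing coupling is only tagged with
    # '.replace' and reused, while a coupling with a clashing name but a new
    # value is kept under the renamed form '<name>__1' (default addon suffix).
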
    def add_coupling_order(self, coupling_order):
        """add a new coupling order to the model"""

        name = coupling_order.name
        same_name = next((p for p in self.orders if p.name == name), None)
        if same_name:
            if coupling_order.hierarchy != same_name.hierarchy:
                logger.warning('%s has a different hierarchy: using the minimal value (%s, %s) => %s' \
                               % (name, same_name.hierarchy, coupling_order.hierarchy,
                                  min(same_name.hierarchy, coupling_order.hierarchy)))
                same_name.hierarchy = min(same_name.hierarchy, coupling_order.hierarchy)
            if coupling_order.expansion_order != same_name.expansion_order:
                logger.warning('%s has a different expansion_order: using the minimal value (%s, %s) => %s' \
                               % (name, coupling_order.expansion_order, same_name.expansion_order,
                                  min(same_name.expansion_order, coupling_order.expansion_order)))
                same_name.expansion_order = min(same_name.expansion_order, coupling_order.expansion_order)
            if hasattr(same_name, 'perturbative_expansion') and same_name.perturbative_expansion:
                logger.info('%s will be forbidden to run at NLO' % same_name.name)
                same_name.perturbative_expansion = 0
        else:
            self.orders.append(coupling_order)

    def add_lorentz(self, lorentz):
        """add one lorentz structure"""

        # avoid name duplication
        name = lorentz.name
        same_name = next((p for p in self.lorentz if p.name == name), None)
        if same_name:
            lorentz.name = '%s%s' % (lorentz.name, self.addon)

        if self.old_new:
            pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
            def replace(matchobj):
                return self.old_new[matchobj.group(0)]
            lorentz.structure = pattern.sub(replace, lorentz.structure)

        old_lor = next((p for p in self.lorentz
                        if p.structure == lorentz.structure and p.spins == lorentz.spins),
                       None)

        if old_lor:
            lorentz.replace = old_lor  # tag for replacement
        else:
            self.lorentz.append(lorentz)

    def add_interaction(self, interaction, model):
        """Add one interaction to the model. This is UNCONDITIONAL!
        If the same interaction is already in the model, it would appear twice.
        This is now weakened when both interactions are exactly identical
        (EXACT same color/lorentz/coupling expression).
        """

        interaction = interaction.__class__(**interaction.__dict__)
        model.all_vertices.pop(-1)

        #0. check name:
        name = interaction.name
        same_name = next((p for p in self.vertices if p.name == name), None)
        if same_name:
            interaction.name = '%s%s' % (interaction.name, self.addon)

        #1. check particles translation
        particles = [p.replace if hasattr(p, 'replace') else p for p in interaction.particles]
        interaction.particles = particles
        #2. check the lorentz structure
        lorentz = [l.replace if hasattr(l, 'replace') else l for l in interaction.lorentz]
        interaction.lorentz = lorentz

        #3. check the couplings
        couplings = [(key, c.replace) if hasattr(c, 'replace') else (key, c)
                     for key, c in interaction.couplings.items()]
        interaction.couplings = dict(couplings)

        #4. Try to avoid duplication of interactions:
        #   The addition is skipped if the same particles already carry the same
        #   lorentz/color structure at the same coupling order:
        get_pdg = lambda vertex: sorted([p.pdg_code for p in vertex.particles])
        id_part = get_pdg(interaction)
        iden_vertex = [v for v in self.vertices if get_pdg(v) == id_part]
        iden = False
        nb_coupling = len(interaction.couplings)
        keys = interaction.couplings.keys()  # to have a fixed order!

        get_lor_and_color = lambda i: (interaction.lorentz[keys[i][1]].structure,
                                       interaction.color[keys[i][0]])
        for v in iden_vertex:
            if len(v.couplings) != nb_coupling:
                continue
            found = []
            for ((i, j), coup) in v.couplings.items():
                new_lorentz = v.lorentz[j].structure
                new_color = v.color[i]
                k = 0
                same = [k for k in range(nb_coupling) if k not in found and
                        get_lor_and_color(k) == (new_lorentz, new_color)]
                if not same:
                    break
                else:
                    for k in same:
                        if interaction.couplings[keys[k]] == coup:
                            found.append(k)
                            break
                    else:
                        # check only the coupling order
                        for k in same:
                            if interaction.couplings[keys[k]].order == coup.order:
                                found.append(k)
                                warning = """Did NOT add interaction %s since the same particles/lorentz/color/coupling order is present,
BUT did not manage to ensure that the coupling is the same. Coupling expressions:
    base model: %s
    addon model: %s
""" % (id_part, coup.value, interaction.couplings[keys[k]].value)
                                logger.warning(warning)
                                found.append(k)
                                break
                        else:
                            pass
            else:
                # every coupling found an identical counterpart...
                return

        logger.info('Adding interaction for the following particles: %s' % id_part)

        self.vertices.append(interaction)

    def add_CTinteraction(self, interaction):
        """Add one counterterm interaction to the model. This is UNCONDITIONAL!
        If the same interaction is already in the model, it will appear twice."""

        #0. check name:
        name = interaction.name
        same_name = next((p for p in self.vertices if p.name == name), None)
        if same_name:
            interaction.name = '%s%s' % (interaction.name, self.addon)

        #1. check particles translation
        particles = [p.replace if hasattr(p, 'replace') else p for p in interaction.particles]
        interaction.particles = particles

        #2. check the lorentz structure
        lorentz = [l.replace if hasattr(l, 'replace') else l for l in interaction.lorentz]
        interaction.lorentz = lorentz

        #3. check the couplings
        couplings = [(key, c.replace) if hasattr(c, 'replace') else (key, c)
                     for key, c in interaction.couplings.items()]
        interaction.couplings = dict(couplings)

        #4. check the loop_particles
        loop_particles = [[p.replace if hasattr(p, 'replace') else p for p in plist]
                          for plist in interaction.loop_particles]
        interaction.loop_particles = loop_particles
        self.CTvertices.append(interaction)

    def add_model(self, model=None, path=None, identify_particles=None):
        """add another model to the current one"""

        self.new_external = []
        if path:
            model = ufomodels.load_model(path)

        if not model:
            raise USRMODERROR, 'Need a valid Model'
        else:
            path = model.__path__[0]
        # Check the validity of the model. Too old UFO (before UFO 1.0)
        if not hasattr(model, 'all_orders'):
            raise USRMODERROR, 'Add-on Model doesn\'t follow the UFO convention (no couplings_order information)\n' +\
                               'MG5 is able to load such a model but NOT to use it with the add model feature.'
        if isinstance(model.all_particles[0].mass, basestring):
            raise USRMODERROR, 'Add-on Model doesn\'t follow the UFO convention (Mass/Width of particles are string names, not objects)\n' +\
                               'MG5 is able to load such a model but NOT to use it with the add model feature.'

        for order in model.all_orders:
            if hasattr(order, 'perturbative_expansion') and order.perturbative_expansion:
                raise USRMODERROR, 'Add-on model can not be a loop model.'

        for order in model.all_orders:
            self.add_coupling_order(order)

        # Automatically add the identification of the anti-particles if needed
        # + define identify_pid which keeps track of the identified pdg_codes
        identify_pid = {}
        if identify_particles:
            for new, old in identify_particles.items():
                new_part = next((p for p in model.all_particles if p.name == new), None)
                old_part = next((p for p in self.particles if p.name == old), None)
                # secure against lower/upper case problems
                if not new_part:
                    first = True
                    for p in model.all_particles:
                        if p.name.lower() == new.lower():
                            if not first:
                                raise Exception
                            else:
                                first = False
                                new_part = p
                if not old_part:
                    first = True
                    for p in self.particles:
                        if p.name.lower() == old.lower():
                            if not first:
                                raise Exception
                            else:
                                first = False
                                old_part = p
                # end of the case-security checks
                if new_part is None:
                    raise USRMODERROR, "particle %s not in the added model" % new
                if old_part is None:
                    raise USRMODERROR, "particle %s not in the original model" % old
                identify_pid[new_part.pdg_code] = old_part.pdg_code
                if new_part.antiname not in identify_particles:
                    new_anti = new_part.antiname
                    old_anti = old_part.antiname
                    if old_anti == old:
                        raise USRMODERROR, "failed identification (one particle is self-conjugate and not the other)"
                    logger.info("adding identification for the anti-particle: %s=%s" % (new_anti, old_anti))
                    identify_particles[new_anti] = old_anti

        for parameter in model.all_parameters:
            self.add_parameter(parameter, identify_pid)
        for coupling in model.all_couplings:
            self.add_coupling(coupling)
        for lorentz in model.all_lorentz:
            self.add_lorentz(lorentz)
        for particle in model.all_particles:
            if particle.name in identify_particles:
                self.add_particle(particle, identify=identify_particles[particle.name])
            else:
                self.add_particle(particle)
        for vertex in model.all_vertices:
            self.add_interaction(vertex, model)

        self.all_path.append(path)

        return

#    def add_particle_from_model(self, model, name):
#        """add the particles NAME from the model (either path or object)
#        name can be either the name of one particle or a list of particle names
#        """
#
#        if isinstance(model, basestring):
#            model = UFOModel(self.modelpath)
#
#        if isinstance(name, list):
#            [self.add_particles(self.modelpath, name) for name in names]
#            return
#
#        # Check Validity
#        part = self.get_particle(name)
#        if self.particles_dict.has_key(part.pdg_code):
#            raise USRMODERROR, 'The model already contains a particle with pdg_code %s.' % part.pdg_code
#
#        # Add the particle to the model
#        self.particles.append(part)
#        self.particles_dict[part.pdg_code] = part
#
#        # Loop over the interactions of the other model and add (if possible)
#        # the interactions associated to the new particles
#        possibility = [v for v in vertex if part in v.particles]
#
#        for vertex in possibility:
#            # Check that all particles are defined in the model
#            for particles in vertex.particles:
#                if particles.pdg_code not in self.particles_dict:
#                    continue
#            # Add the interactions/lorentz structure/coupling
#            self.vertices.append(vertex)
#            # NEED WORK!!!!!