""" Set of tools to modify a given UFO model, mainly by adding or suppressing
interactions and by allowing the different parts of the model to be edited as
text. Consistency checks of the model are performed, and the output is a new
valid UFO model.
"""
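# Typical usage (illustrative sketch only; it relies on the UFOModel class
# defined below and on valid UFO directories for the base and add-on models):
#
#     usrmod = UFOModel(base_model_path)
#     usrmod.add_model(path=addon_model_path)
#     usrmod.write(output_dir)
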
import copy
import glob
import logging
import os
import re
import sys

import madgraph.core.base_objects as base_objects
import madgraph.iolibs.files as files
import madgraph.various.misc as misc
import models as ufomodels
import models.import_ufo as import_ufo
import models.check_param_card as check_param_card
from madgraph import MG5DIR

pjoin = os.path.join
logger = logging.getLogger('madgraph.model')

class USRMODERROR(Exception): pass


def repr(obj):
    text = obj.__repr__()
    if text.startswith('_'):
        text = '%s%s' % (str(obj.__class__.__name__)[0].upper(), text)
    return text


class UFOModel(object):
    """ The class storing the current status of the model """

    def __init__(self, modelpath, addon='__1'):
        """Load the model from a valid UFO directory (otherwise keep everything
        as empty)."""
        self.modelpath = modelpath
        model = ufomodels.load_model(modelpath)

        if not hasattr(model, 'all_orders'):
            raise USRMODERROR, 'Base model doesn\'t follow the UFO convention (no couplings_order information)\n' +\
                  'MG5 is able to load such a model but NOT to use it with the add model feature.'
        if isinstance(model.all_particles[0].mass, basestring):
            raise USRMODERROR, 'Base model doesn\'t follow the UFO convention (Mass/Width of particles are string names, not objects)\n' +\
                  'MG5 is able to load such a model but NOT to use it with the add model feature.'

        old_particles = [id(p) for p in model.all_particles]
        self.particles = [copy.copy(p) for p in model.all_particles]
        if any(hasattr(p, 'loop_particles') for p in self.particles):
            raise USRMODERROR, 'Base model doesn\'t follow the UFO convention'
        self.vertices = list(model.all_vertices)

        for v in self.vertices:
            new_p = []
            for p in v.particles:
                try:
                    new_p.append(self.particles[old_particles.index(id(p))])
                except ValueError:
                    p3 = [p2 for p2 in self.particles if p2.name == p.name and p2.pdg_code == p.pdg_code]
                    new_p.append(p3[0])
            v.particles = new_p

        self.couplings = list(model.all_couplings)
        self.lorentz = list(model.all_lorentz)
        self.parameters = list(model.all_parameters)
        self.Parameter = self.parameters[0].__class__
        self.orders = list(model.all_orders)

        self.functions = list(model.all_functions)
        self.new_external = []

        if hasattr(model, 'all_propagators'):
            self.propagators = list(model.all_propagators)
        else:
            self.propagators = []

        if hasattr(model, 'all_CTvertices'):
            self.CTvertices = list(model.all_CTvertices)
        else:
            self.CTvertices = []

        if 'self.expr = expression' in open(pjoin(self.modelpath, 'object_library.py')).read():
            self.translate = {'expr': 'expression'}
        else:
            self.translate = {}

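        # self.old_new maps names used by the add-on model onto the names kept in
        # the merged model (it is filled whenever a parameter/coupling/lorentz gets
        # renamed) and is applied to analytic expressions via a regex substitution.
        # self.addon is the suffix appended when such a renaming is needed.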
        self.old_new = {}
        self.addon = addon

        self.particle_dict = {}
        for particle in self.particles:
            self.particle_dict[particle.pdg_code] = particle

        self.all_path = [self.modelpath]

    def write(self, outputdir):

    def mod_file(self, inputpath, outputpath):

        fsock = open(outputpath, 'w')

        to_change = {}
        to_change.update(self.translate)
        to_change.update(self.old_new)
        for particle in self.particles:
            if hasattr(particle, 'replace') and particle.replace:
                misc.sprint(particle.get('name'), particle.replace.get('name'))

        pattern = re.compile(r'\b(%s)\b' % ('|'.join(to_change)))
        for line in open(inputpath):
            line = pattern.sub(lambda mo: to_change[mo.group()], line)
            fsock.write(line)

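    # Illustration of the substitution above (hypothetical values): with
    # to_change = {'MH': 'MH__1'}, a line "MSbar = MH**2" becomes
    # "MSbar = MH__1**2", while an identifier like "MH2" is left untouched
    # thanks to the \b word boundaries.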

    def write_restrict_card(self, outputdir):
        """ Propagate the model restrictions of the original model. """

        restrict_list = [l for l in os.listdir(self.modelpath) if l.startswith('restrict_')]
        if not self.new_external:
            # no new external parameters: copy the restriction cards unchanged
            for p in restrict_list:
                files.cp(pjoin(self.modelpath, p), outputdir)

        else:
            # add the new external parameters to each restriction card
            for p in restrict_list:
                param_card = check_param_card.ParamCard(pjoin(self.modelpath, p))
                for parameter in self.new_external:
                    block = parameter.lhablock
                    lhaid = parameter.lhacode
                    value = parameter.value
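                    # 0 and 1 are special values for the restriction machinery
                    # (they trigger removals/simplifications), so new parameters
                    # are shifted to nearby, non-special defaults.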
                    if value == 0:
                        value = 1e-99
                    elif value == 1:
                        value = 9.999999e-1
                    try:
                        param_card.add_param(block.lower(), lhaid, value, 'from addon')
                    except check_param_card.InvalidParamCard:
                        logger.warning("%s will not act on %s %s" % (p, block, lhaid))
                        param_card[block.lower()].get(lhaid).value = value

                param_card.write(pjoin(outputdir, p), precision=7)


    def create_data_text(self, obj):
        """ Create the python source text associated with the object. """

        nb_space = 0
        if hasattr(obj, 'require_args_all'):
            args = obj.require_args_all
        elif hasattr(obj, 'require_args'):
            args = obj.require_args
        else:
            args = []
        if args:
            text = """%s = %s(""" % (repr(obj), obj.__class__.__name__)
        else:
            text = """%s = %s(""" % (obj.name, obj.__class__.__name__)

        for data in args:
            if data in self.translate:
                data = self.translate[data]
            if not nb_space:
                add_space = len(text)
            else:
                add_space = 0

            try:
                expr = getattr(obj, data)
            except AttributeError:
                if data in ['counterterm', 'propagator', 'loop_particles']:
                    expr = None
                    setattr(obj, data, None)
                else:
                    raise
            name = str(data)
            if name in self.translate:
                name = self.translate[name]

            text += '%s%s = %s,\n' % (' ' * nb_space, name, self.format_param(getattr(obj, data)))
            nb_space += add_space

        if hasattr(obj, 'get_all'):
            other_attr = [name for name in obj.get_all().keys()
                          if name not in args]
        else:
            other_attr = obj.__dict__.keys()

        other_attr.sort()
        if other_attr == ['GhostNumber', 'LeptonNumber', 'Y', 'partial_widths', 'selfconjugate']:
            other_attr = ['GhostNumber', 'LeptonNumber', 'Y', 'selfconjugate']

        for data in other_attr:
            name = str(data)
            if name in ['partial_widths', 'loop_particles']:
                continue
            if name in self.translate:
                name = self.translate[name]
            if not nb_space:
                add_space = len(text)
            else:
                add_space = 0
            text += '%s%s = %s,\n' % (' ' * nb_space, name, self.format_param(getattr(obj, data)))
            nb_space += add_space

        text = text[:-2] + ')\n\n'

        return text
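    # Example of the text produced above for an external UFO Parameter object
    # (illustrative only; the exact attributes depend on the object):
    #
    #     MH__1 = Parameter(name = 'MH__1',
    #                       nature = 'external',
    #                       type = 'real',
    #                       value = 125.0,
    #                       texname = 'M_H',
    #                       lhablock = 'MASS',
    #                       lhacode = [ 25 ])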

    def create_file_content(self, datalist):
        """ Return the concatenated python source text for all objects in datalist. """
        return '\n'.join([self.create_data_text(obj) for obj in datalist])

    def write_particles(self, outputdir):
        """ Write particles.py """
        text = """
# This file was automatically created by The UFO_usermod

from __future__ import division
from object_library import all_particles, Particle
import parameters as Param

"""
        text += self.create_file_content(self.particles)
        ff = open(os.path.join(outputdir, 'particles.py'), 'w')
        ff.writelines(text)
        ff.close()
        return
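    # The write_* methods below all follow the same pattern: emit the standard
    # header/imports of the corresponding UFO file and then dump every stored
    # object through create_file_content.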

    def write_vertices(self, outputdir):
        """ Write vertices.py """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_vertices, Vertex
import particles as P
import couplings as C
import lorentz as L

"""
        text += self.create_file_content(self.vertices)
        ff = open(os.path.join(outputdir, 'vertices.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_CTvertices(self, outputdir):
        """ Write CT_vertices.py """

        if not self.CTvertices:
            return

        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_vertices, all_CTvertices, Vertex, CTVertex
import particles as P
import couplings as C
import lorentz as L

"""
        text += self.create_file_content(self.CTvertices)
        ff = open(os.path.join(outputdir, 'CT_vertices.py'), 'w')
        ff.writelines(text)
        ff.close()
        return


    def write_couplings(self, outputdir):
        """ Write couplings.py """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_couplings, Coupling
"""
        text += self.create_file_content(self.couplings)
        ff = open(os.path.join(outputdir, 'couplings.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_lorentz(self, outputdir):
        """ Write lorentz.py """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_lorentz, Lorentz
"""

        text += self.create_file_content(self.lorentz)
        ff = open(os.path.join(outputdir, 'lorentz.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_parameters(self, outputdir):
        """ Write parameters.py """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_parameters, Parameter
"""

        text += self.create_file_content(self.parameters)
        ff = open(os.path.join(outputdir, 'parameters.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_orders(self, outputdir):
        """ Write coupling_orders.py """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_orders, CouplingOrder
"""

        text += self.create_file_content(self.orders)
        ff = open(os.path.join(outputdir, 'coupling_orders.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_functions(self, outputdir):
        """ Write function_library.py """
        text = """
# This file was automatically created by The UFO_usermod

import cmath
from object_library import all_functions, Function

"""

        text += self.create_file_content(self.functions)
        ff = open(os.path.join(outputdir, 'function_library.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_propagators(self, outputdir):
        """ Write propagators.py """

        text = """
# This file was automatically created by The UFO_usermod
from object_library import all_propagators, Propagator
"""

        text += self.create_file_content(self.propagators)
        ff = open(os.path.join(outputdir, 'propagators.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

    def write_external_files(self, outputdir):
        """ Copy/merge the hand-written routines (Fortran/C++/Python). """

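        # The regexp below is meant to catch Fortran function declarations such as
        # "      double precision function myfct(x)": 7+ leading spaces, optional
        # type keywords, then "function NAME(", matched case-insensitively.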
        re_fct = re.compile(r'''^\s{7,70}[\w\s]*function (\w*)\(''', re.M | re.I)
        present_fct = set()
        for dirpath in self.all_path:
            if os.path.exists(pjoin(dirpath, 'Fortran', 'functions.f')):
                text = open(pjoin(dirpath, 'Fortran', 'functions.f')).read()
                new_fct = re_fct.findall(text)
                nb_old = len(present_fct)
                nb_added = len(new_fct)
                new_fct = set([f.lower() for f in new_fct])
                present_fct.update(new_fct)
                if len(present_fct) < nb_old + nb_added:
                    logger.critical('''Some functions in functions.f are defined in more than one model.
This requires AT LEAST a manual modification of the resulting file, and the
resulting model is likely to be unphysical. Use it very carefully.''')

                if not os.path.exists(pjoin(outputdir, 'Fortran')):
                    os.mkdir(pjoin(outputdir, 'Fortran'))
                fsock = open(pjoin(outputdir, 'Fortran', 'functions.f'), 'a')
                fsock.write(text)
                fsock.close()


        for dirpath in self.all_path:
            for subdir in ['Fortran', 'CPP', 'Python']:
                if os.path.exists(pjoin(dirpath, subdir)):
                    for filepath in os.listdir(pjoin(dirpath, subdir)):
                        if filepath == 'functions.f':
                            continue
                        if '.' not in filepath:
                            continue
                        logger.warning('Hand-written HELAS routines are associated with the model. Those are NOT modified automatically, so you need to check them manually.')
                        nb = 0
                        name, extension = filepath.rsplit('.', 1)

                        while 1:
                            filename = '%s%s.%s' % (name, '.moved' * nb, extension)
                            if os.path.exists(pjoin(outputdir, subdir, filename)):
                                nb += 1
                            else:
                                break
                        if not os.path.exists(pjoin(outputdir, subdir)):
                            os.mkdir(pjoin(outputdir, subdir))
                        files.cp(pjoin(dirpath, subdir, filepath), pjoin(outputdir, subdir, filename))

    def get_particle(self, name):
        """ Return the particle with the given name. """
        for part in self.particles:
            if part.name == name:
                return part

        raise USRMODERROR, 'no particle %s in the model' % name

    def add_particle(self, particle, identify=None):
        """Add a particle in a consistent way."""

        name = particle.name
        if identify:
            name = identify
        old_part = next((p for p in self.particles if p.name == name), None)
        if not old_part:
            # fall back on a (unique) case-insensitive match
            first = True
            for p in self.particles:
                if p.name.lower() == name.lower():
                    if not first:
                        raise Exception
                    else:
                        first = False
                        old_part = p

        if old_part:
            if old_part.pdg_code == particle.pdg_code:
                particle.replace = old_part
                return self.check_mass_width_of_particle(old_part, particle)
            elif identify:
                if particle.spin != old_part.spin:
                    raise USRMODERROR, "identified particles should have the same spin"
                elif particle.color != old_part.color:
                    raise USRMODERROR, "identified particles should have the same color"
                particle.replace = old_part
                return self.check_mass_width_of_particle(old_part, particle)
            else:
                logger.warning('The particle name \'%s\' is present in both models with different pdg codes' % name)
                logger.warning('The particle coming from the plug-in model will be renamed to \'%s%s\'' % (name, self.addon))
                particle.name = '%s%s' % (name, self.addon)
                self.particles.append(particle)
                return
        elif identify:
            raise USRMODERROR, "Particle %s is not in the model" % identify

        pdg = particle.pdg_code
        if pdg in self.particle_dict:
            particle.replace = self.particle_dict[pdg]
            return self.check_mass_width_of_particle(self.particle_dict[pdg], particle)
        else:
            if hasattr(particle, 'replace'):
                del particle.replace
            self.particles.append(particle)

    def check_mass_width_of_particle(self, p_base, p_plugin):
        """Check that the mass/width attributes of two identified particles are
        compatible and resolve them (a 'zero' mass/width is superseded by the
        non-zero one)."""

        if p_base.mass.name != p_plugin.mass.name:
            # the plug-in mass parameter might already have been renamed/merged
            if p_plugin.mass.name in self.old_new:
                if self.old_new[p_plugin.mass.name] != p_base.mass.name:
                    raise USRMODERROR, 'Some inconsistency in the mass assignment in the model: equivalent of %s is %s != %s ' % (p_plugin.mass.name, self.old_new[p_plugin.mass.name], p_base.mass.name)
            elif p_base.mass.name.lower() == 'zero':
                p_base.mass = p_plugin.mass
            elif p_plugin.mass.name.lower() == 'zero':
                pass
            else:
                raise USRMODERROR, 'Some inconsistency in the mass assignment in the model\n' + \
                    ' Mass: %s and %s\n' % (p_base.mass.name, p_plugin.mass.name) + \
                    ' conflict name %s\n' % self.old_new + \
                    ' pdg_code: %s %s' % (p_base.pdg_code, p_plugin.pdg_code)

        if p_base.width.name != p_plugin.width.name:

            if p_plugin.width.name in self.old_new:
                if self.old_new[p_plugin.width.name] != p_base.width.name:
                    raise USRMODERROR, 'Some inconsistency in the width assignment in the model'
            elif p_base.width.name.lower() == 'zero':
                p_base.width = p_plugin.width
            elif p_plugin.width.name.lower() == 'zero':
                pass
            else:
                raise USRMODERROR, 'Some inconsistency in the width assignment in the model'

        return

    def add_external_parameter(self, parameter, identify_pid):
        """Add a param_card (external) parameter to the current model.
        If the parameter block/lhacode already exists, do nothing (but if the
        names differ, keep the information for future translation).
        If only the name already exists in the model, the plug-in parameter is
        renamed by appending the add-on suffix.
        """

        name = parameter.name

        old_param = next((p for p in self.parameters if p.name == name), None)
        if old_param:
            if old_param.lhablock == parameter.lhablock and \
                   old_param.lhacode == parameter.lhacode:
                return
            else:
                logger.info('The two models define the parameter \'%s\':\n' % parameter.name +
                    '    in the original model: %s %s\n' % (old_param.lhablock, old_param.lhacode) +
                    '    in the plug-in model: %s %s\n' % (parameter.lhablock, parameter.lhacode) +
                    '    The one coming from the plug-in will be renamed to %s%s' % (parameter.name, self.addon))
                if old_param.nature == 'internal':
                    logger.warning('''The parameter %s is actually an internal parameter of the base model.
Its value is given by %s.
If those two parameters are expected to be identical, you need to provide the value in the param_card according to this formula.
''' % (name, old_param.value))

                self.old_new[parameter.name] = '%s%s' % (parameter.name, self.addon)
                parameter.name = '%s%s' % (parameter.name, self.addon)

        # translate the pdg code for identified particles in the MASS/DECAY blocks
        lhacode = parameter.lhacode
        if parameter.lhablock.lower() in ['mass', 'decay']:
            if int(parameter.lhacode[0]) in identify_pid:
                lhacode = [identify_pid[int(parameter.lhacode[0])]]

        old_param = next((p for p in self.parameters if p.lhacode == lhacode \
                          and p.lhablock == parameter.lhablock), None)
        if old_param:
            logger.info('The two models define the block \'%s\' with id \'%s\' with different parameter names \'%s\', \'%s\'.\n'
                        % (old_param.lhablock, old_param.lhacode, parameter.name, old_param.name) +
                        '    Those two parameters will be merged into a single one.')
            if parameter.name in self.old_new.values():
                key = [k for k in self.old_new if self.old_new[k] == parameter.name][0]
                self.old_new[key] = old_param.name
                self.old_new[parameter.name] = old_param.name
            else:
                self.old_new[parameter.name] = old_param.name

        else:
            self.parameters.append(parameter)
            self.new_external.append(parameter)

    def add_internal_parameter(self, parameter):
        """ Add a parameter of type internal. """

        name = parameter.name

        old_param = next((p for p in self.parameters if p.name == name), None)
        if old_param:
            if old_param.value == parameter.value:
                return
            else:
                # same name but a different analytic expression: rename the
                # plug-in parameter and remember the translation
                if self.old_new:
                    pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
                    def replace(matchobj):
                        return self.old_new[matchobj.group(0)]
                    parameter.value = pattern.sub(replace, parameter.value)
                self.old_new[parameter.name] = '%s%s' % (parameter.name, self.addon)

                parameter.name = '%s%s' % (parameter.name, self.addon)
                self.parameters.append(parameter)
                return

        # apply the renamings accumulated so far to the analytic expression
        if self.old_new:
            pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
            def replace(matchobj):
                return self.old_new[matchobj.group(0)]
            parameter.value = pattern.sub(replace, parameter.value)

        self.parameters.append(parameter)


    def add_coupling(self, coupling):
        """ Add one coupling. """

        name = coupling.name
        same_name = next((p for p in self.couplings if p.name == name), None)
        if same_name:
            coupling.name = '%s%s' % (coupling.name, self.addon)

        if self.old_new:
            pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
            def replace(matchobj):
                return self.old_new[matchobj.group(0)]
            coupling.value = pattern.sub(replace, coupling.value)

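        # A coupling whose (renamed) analytic expression already exists in the base
        # model is not duplicated: it is flagged with a 'replace' attribute pointing
        # to the existing coupling, which add_interaction resolves later.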
        old_coupling = next((p for p in self.couplings if p.value == coupling.value), None)

        if old_coupling:
            coupling.replace = old_coupling
        else:
            self.couplings.append(coupling)

    def add_coupling_order(self, coupling_order):
        """ Add a new coupling order to the model. """

        name = coupling_order.name
        same_name = next((p for p in self.orders if p.name == name), None)
        if same_name:
            if coupling_order.hierarchy != same_name.hierarchy:
                logger.warning('%s has a different hierarchy in the two models; using the minimal value (%s, %s) => %s'
                    % (name, same_name.hierarchy, coupling_order.hierarchy,
                       min(same_name.hierarchy, coupling_order.hierarchy)))
                same_name.hierarchy = min(same_name.hierarchy, coupling_order.hierarchy)
            if coupling_order.expansion_order != same_name.expansion_order:
                logger.warning('%s has a different expansion_order in the two models; using the minimal value (%s, %s) => %s'
                    % (name, coupling_order.expansion_order, same_name.expansion_order,
                       min(same_name.expansion_order, coupling_order.expansion_order)))
                same_name.expansion_order = min(same_name.expansion_order, coupling_order.expansion_order)
            if hasattr(same_name, 'perturbative_expansion') and same_name.perturbative_expansion:
                logger.info('%s will be forbidden to run at NLO' % same_name.name)
                same_name.perturbative_expansion = 0

        else:
            self.orders.append(coupling_order)

    def add_lorentz(self, lorentz):
        """ Add one lorentz structure. """

        name = lorentz.name
        same_name = next((p for p in self.lorentz if p.name == name), None)
        if same_name:
            lorentz.name = '%s%s' % (lorentz.name, self.addon)

        if self.old_new:
            pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
            def replace(matchobj):
                return self.old_new[matchobj.group(0)]
            lorentz.structure = pattern.sub(replace, lorentz.structure)

        old_lor = next((p for p in self.lorentz
                        if p.structure == lorentz.structure and p.spins == lorentz.spins),
                       None)

        if old_lor:
            lorentz.replace = old_lor
        else:
            self.lorentz.append(lorentz)

    def add_interaction(self, interaction, model):
        """Add one interaction to the model. This is UNCONDITIONAL: if the same
        interaction is already present in the model, it will appear twice. This is
        only avoided when both interactions are exactly identical (EXACT same
        color/lorentz/coupling expressions).
        """

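        # Instantiating a UFO object auto-registers it in the corresponding all_*
        # list of its model, so the temporary copy created below is popped back out
        # of model.all_vertices to leave the add-on model untouched.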
        interaction = interaction.__class__(**interaction.__dict__)
        model.all_vertices.pop(-1)

        name = interaction.name
        same_name = next((p for p in self.vertices if p.name == name), None)
        if same_name:
            interaction.name = '%s%s' % (interaction.name, self.addon)

        # point to the merged particle/lorentz/coupling objects
        particles = [p.replace if hasattr(p, 'replace') else p for p in interaction.particles]
        interaction.particles = particles

        lorentz = [l.replace if hasattr(l, 'replace') else l for l in interaction.lorentz]
        interaction.lorentz = lorentz

        couplings = [(key, c.replace) if hasattr(c, 'replace') else (key, c)
                     for key, c in interaction.couplings.items()]
        interaction.couplings = dict(couplings)

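        # Duplicate detection: the candidate vertex is compared with every existing
        # vertex built on the same set of pdg codes; it is dropped only if each of
        # its couplings matches an existing (lorentz structure, color) pair with the
        # same coupling expression (or, with a warning, the same coupling order).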
        get_pdg = lambda vertex: sorted([p.pdg_code for p in vertex.particles])
        id_part = get_pdg(interaction)
        iden_vertex = [v for v in self.vertices if get_pdg(v) == id_part]
        iden = False
        nb_coupling = len(interaction.couplings)
        keys = interaction.couplings.keys()

        get_lor_and_color = lambda i: (interaction.lorentz[keys[i][1]].structure,
                                       interaction.color[keys[i][0]])
        for v in iden_vertex:
            if len(v.couplings) != nb_coupling:
                continue
            found = []
            for ((i, j), coup) in v.couplings.items():
                new_lorentz = v.lorentz[j].structure
                new_color = v.color[i]
                same = [k for k in range(nb_coupling) if k not in found and
                        get_lor_and_color(k) == (new_lorentz, new_color)]
                if not same:
                    break
                else:
                    for k in same:
                        if interaction.couplings[keys[k]] == coup:
                            found.append(k)
                            break
                    else:
                        # no identical coupling object: accept a coupling with the
                        # same coupling order, but warn that the value may differ
                        for k in same:
                            if interaction.couplings[keys[k]].order == coup.order:
                                found.append(k)
                                warning = """Did NOT add the interaction %s since it has the same particles/lorentz/color/coupling order,
BUT it was not possible to ensure that the coupling value is the same. Coupling expressions:
base model: %s
addon model: %s
""" % (id_part, coup.value, interaction.couplings[keys[k]].value)
                                logger.warning(warning)
                                found.append(k)
                                break
                        else:
                            pass

            else:
                # every coupling of the candidate vertex was matched: do not add it
                return

        logger.info('Adding interaction for the following particles: %s' % id_part)

        self.vertices.append(interaction)

    def add_CTinteraction(self, interaction):
        """Add one counterterm interaction to the model. This is UNCONDITIONAL:
        if the same interaction is already present in the model, it will appear
        twice."""

        name = interaction.name
        same_name = next((p for p in self.vertices if p.name == name), None)
        if same_name:
            interaction.name = '%s%s' % (interaction.name, self.addon)

        # point to the merged particle/lorentz/coupling objects
        particles = [p.replace if hasattr(p, 'replace') else p for p in interaction.particles]
        interaction.particles = particles

        lorentz = [l.replace if hasattr(l, 'replace') else l for l in interaction.lorentz]
        interaction.lorentz = lorentz

        couplings = [(key, c.replace) if hasattr(c, 'replace') else (key, c)
                     for key, c in interaction.couplings.items()]
        interaction.couplings = dict(couplings)

        loop_particles = [[p.replace if hasattr(p, 'replace') else p for p in plist]
                          for plist in interaction.loop_particles]
        interaction.loop_particles = loop_particles
        self.CTvertices.append(interaction)


    def add_model(self, model=None, path=None, identify_particles=None):
        """Add another model to the current one."""

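        # model/path: the add-on UFO model, either already loaded or given as a
        #   directory path (at least one of the two must be provided).
        # identify_particles: optional mapping from add-on particle names to base
        #   model particle names, e.g. {'h2': 'h'} (hypothetical) to identify the
        #   add-on 'h2' with the base-model 'h'.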
        self.new_external = []
        if path:
            model = ufomodels.load_model(path)

        if not model:
            raise USRMODERROR, 'Need a valid Model'
        else:
            path = model.__path__[0]

        if not hasattr(model, 'all_orders'):
            raise USRMODERROR, 'Add-on model doesn\'t follow the UFO convention (no couplings_order information)\n' +\
                  'MG5 is able to load such a model but NOT to use it with the add model feature.'
        if isinstance(model.all_particles[0].mass, basestring):
            raise USRMODERROR, 'Add-on model doesn\'t follow the UFO convention (Mass/Width of particles are string names, not objects)\n' +\
                  'MG5 is able to load such a model but NOT to use it with the add model feature.'

        for order in model.all_orders:
            if hasattr(order, 'perturbative_expansion') and order.perturbative_expansion:
                raise USRMODERROR, 'The add-on model can not be a loop model.'

        for order in model.all_orders:
            self.add_coupling_order(order)

        identify_pid = {}
        if identify_particles:
            for new, old in identify_particles.items():
                new_part = next((p for p in model.all_particles if p.name == new), None)
                old_part = next((p for p in self.particles if p.name == old), None)

                if not new_part:
                    # fall back on a (unique) case-insensitive match
                    first = True
                    for p in model.all_particles:
                        if p.name.lower() == new.lower():
                            if not first:
                                raise Exception
                            else:
                                first = False
                                new_part = p
                if not old_part:
                    first = True
                    for p in self.particles:
                        if p.name.lower() == old.lower():
                            if not first:
                                raise Exception
                            else:
                                first = False
                                old_part = p
                if not old_part:
                    # try the default particle names of the base model
                    defaultname = base_objects.Model.load_default_name()
                    for pdg, value in defaultname.items():
                        if value == old:
                            old_part = self.particle_dict[pdg]
                            identify_particles[new] = old_part.name
                            break

                if new_part is None:
                    raise USRMODERROR, "particle %s not in added model" % new
                if old_part is None:
                    raise USRMODERROR, "particle %s not in original model" % old
                identify_pid[new_part.pdg_code] = old_part.pdg_code

                if new_part.antiname not in identify_particles:
                    new_anti = new_part.antiname
                    old_anti = old_part.antiname
                    if old_anti == old:
                        raise USRMODERROR, "failed identification (one particle is self-conjugate and not the other)"
                    logger.info("adding identification for anti-particle: %s=%s" % (new_anti, old_anti))
                    identify_particles[new_anti] = old_anti

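        # identify_pid maps the pdg code of each identified add-on particle to the
        # pdg code of the matching base-model particle; it is used below when
        # merging the external parameters of the MASS/DECAY blocks.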
        for parameter in model.all_parameters:
            self.add_parameter(parameter, identify_pid)
        for coupling in model.all_couplings:
            self.add_coupling(coupling)
        for lorentz in model.all_lorentz:
            self.add_lorentz(lorentz)
        for particle in model.all_particles:
            if particle.name in identify_particles:
                self.add_particle(particle, identify=identify_particles[particle.name])
            else:
                self.add_particle(particle)
        for vertex in model.all_vertices:
            self.add_interaction(vertex, model)

        self.all_path.append(path)

        return