15 """ Set of Tool in order to modify a given UFO model.
16 (mainly by adding-suppressing interactions and allow to modify by text the
17 different part of the model. Check of consistency of the model are performed.
18 This produce a new valid UFO model in output.
19 """
from __future__ import absolute_import
import copy
import glob
import logging
import os
import re
import sys

import madgraph.core.base_objects as base_objects
import madgraph.iolibs.files as files
import madgraph.various.misc as misc
import models as ufomodels
import models.import_ufo as import_ufo
import models.check_param_card as check_param_card
from madgraph import MG5DIR
import six
from six.moves import range

pjoin = os.path.join
logger = logging.getLogger('madgraph.model')
40
42
43
45
    text = obj.__repr__()
    if text.startswith('_'):
        text = '%s%s' % (str(obj.__class__.__name__)[0].upper(), text)
    return text


class UFOModel(object):
    """ The class storing the current status of the model """

    def __init__(self, modelpath, addon='__1'):
        """load the model from a valid UFO directory (otherwise keep everything
        as empty)."""
        self.modelpath = modelpath
        model = ufomodels.load_model(modelpath)

        if not hasattr(model, 'all_orders'):
            raise USRMODERROR('Base Model doesn\'t follow the UFO convention (no couplings_order information)\n' +
                              'MG5 is able to load such a model but NOT to use it with the add-model feature.')
        if isinstance(model.all_particles[0].mass, six.string_types):
            raise USRMODERROR('Base Model doesn\'t follow the UFO convention (Mass/Width of particles are string names, not objects)\n' +
                              'MG5 is able to load such a model but NOT to use it with the add-model feature.')

        old_particles = [id(p) for p in model.all_particles]
        self.particles = [copy.copy(p) for p in model.all_particles]
        if any(hasattr(p, 'loop_particles') for p in self.particles):
            raise USRMODERROR('Base Model doesn\'t follow the UFO convention')
        self.vertices = list(model.all_vertices)

        # make the vertices point to the copied particles
        for v in self.vertices:
            new_p = []
            for p in v.particles:
                try:
                    new_p.append(self.particles[old_particles.index(id(p))])
                except ValueError:
                    p3 = [p2 for p2 in self.particles if p2.name == p.name and p2.pdg_code == p.pdg_code]
                    new_p.append(p3[0])
            v.particles = new_p

        self.couplings = list(model.all_couplings)
        self.lorentz = list(model.all_lorentz)
        self.parameters = list(model.all_parameters)
        self.Parameter = self.parameters[0].__class__
        self.orders = list(model.all_orders)

        self.functions = list(model.all_functions)
        self.new_external = []

        if hasattr(model, 'all_propagators'):
            self.propagators = list(model.all_propagators)
        else:
            self.propagators = []

        if hasattr(model, 'all_CTvertices'):
            self.CTvertices = list(model.all_CTvertices)
        else:
            self.CTvertices = []

        if hasattr(model, 'all_CTparameters'):
            self.CTparameters = list(model.all_CTparameters)
        else:
            self.CTparameters = []

        # some versions of object_library.py store the coupling expression under
        # the attribute 'expr'; keep track of the corresponding renaming
        if 'self.expr = expression' in open(pjoin(self.modelpath, 'object_library.py')).read():
            self.translate = {'expr': 'expression'}
        else:
            self.translate = {}

        self.old_new = {}
        self.addon = addon

        self.particle_dict = {}
        for particle in self.particles:
            self.particle_dict[particle.pdg_code] = particle

        self.all_path = [self.modelpath]

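    # Minimal usage sketch (the directory names below are illustrative, not
    # defined by this module):
    #
    #     model = UFOModel('path/to/base_UFO')
    #     model.add_model(path='path/to/addon_UFO')
    #     model.write('path/to/combined_UFO')
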
    def write(self, outputdir):


    def mod_file(self, inputpath, outputpath):
        """rewrite the file at inputpath into outputpath, applying the stored
        renamings (old name -> new name) and fixing the case of particle names."""

        fsock = open(outputpath, 'w')

        to_change = {}
        to_change.update(self.translate)
        to_change.update(self.old_new)

        pattern = re.compile(r'\b(%s)\b' % ('|'.join(to_change)))

        all_particles_name = [self.format_param(P)[2:] for P in self.particles]
        all_lower = [p.lower() for p in all_particles_name]
        pat2 = re.compile(r'\bP\.(\w+)\b')

        for line in open(inputpath):
            line = pattern.sub(lambda mo: to_change[mo.group()], line)
            part_in_line = set(pat2.findall(line))

            # particle references 'P.<name>' whose case does not match the current
            # particle list are rewritten with the correct case
            to_replace = {}
            for p in part_in_line:
                if p in all_particles_name:
                    continue
                else:
                    ind = all_lower.index(p.lower())
                    to_replace[p] = all_particles_name[ind]
            if to_replace:
                pat3 = re.compile(r'\bP\.(%s)\b' % '|'.join(p for p in to_replace))
                line = pat3.sub(lambda mo: 'P.%s' % to_replace[mo.groups(0)[0]], line)
            fsock.write(line)


        """ propagate model restriction of the original model. """

        restrict_list = [l for l in os.listdir(self.modelpath) if l.startswith('restrict_')]
        if not self.new_external:
            # no new external parameter: the restriction cards stay valid as they are
            for p in restrict_list:
                files.cp(pjoin(self.modelpath, p), outputdir)

        else:
            # add the new external parameters to each restriction card, with values
            # shifted away from exactly 0/1 (values the restriction procedure treats specially)
            for p in restrict_list:
                param_card = check_param_card.ParamCard(pjoin(self.modelpath, p))
                for parameter in self.new_external:
                    block = parameter.lhablock
                    lhaid = parameter.lhacode
                    value = parameter.value
                    if value == 0:
                        value = 1e-99
                    elif value == 1:
                        value = 9.999999e-1
                    try:
                        param_card.add_param(block.lower(), lhaid, value, 'from addon')
                    except check_param_card.InvalidParamCard:
                        logger.warning("%s will not act on %s %s" % (p, block, lhaid))
                        param_card[block.lower()].get(lhaid).value = value

                param_card.write(pjoin(outputdir, p), precision=7)



    def create_data_text(self, obj):
        """ create the data associated to the object"""

        nb_space = 0
        if hasattr(obj, 'require_args_all'):
            args = obj.require_args_all
        elif hasattr(obj, 'require_args'):
            args = obj.require_args
        else:
            args = []
        if args:
            text = """%s = %s(""" % (repr(obj), obj.__class__.__name__)
        else:
            text = """%s = %s(""" % (obj.name, obj.__class__.__name__)

        for data in args:
            if data in self.translate:
                data = self.translate[data]
            if not nb_space:
                add_space = len(text)
            else:
                add_space = 0

            if ',' in data:
                continue

            try:
                expr = getattr(obj, data)
            except AttributeError:
                if data in ['counterterm', 'propagator', 'loop_particles']:
                    expr = None
                    setattr(obj, data, None)
                else:
                    raise
            name = str(data)
            if name in self.translate:
                name = self.translate[name]

            text += '%s%s = %s,\n' % (' ' * nb_space, name, self.format_param(getattr(obj, data)))
            nb_space += add_space

        if hasattr(obj, 'get_all'):
            other_attr = [name for name in obj.get_all().keys()
                          if name not in args]
        else:
            other_attr = list(obj.__dict__.keys())

        if str(obj.__class__.__name__) == 'CTParameter' and 'nature' in other_attr:
            logger.critical('UFO model is outdated (including some bugs). Please update object_library.py to the latest version')
            other_attr.remove('nature')

        other_attr.sort()
        if other_attr == ['GhostNumber', 'LeptonNumber', 'Y', 'partial_widths', 'selfconjugate']:
            other_attr = ['GhostNumber', 'LeptonNumber', 'Y', 'selfconjugate']

        for data in other_attr:
            name = str(data)
            if name in ['partial_widths', 'loop_particles']:
                continue
            if name in self.translate:
                name = self.translate[name]
            if not nb_space:
                add_space = len(text)
            else:
                add_space = 0
            text += '%s%s = %s,\n' % (' ' * nb_space, name, self.format_param(getattr(obj, data)))
            nb_space += add_space

        text = text[:-2] + ')\n\n'

        return text

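    # For reference, create_data_text() writes standard UFO-style definitions,
    # e.g. (illustrative values):
    #
    #     GC_1 = Coupling(name = 'GC_1',
    #                     value = '-(ee*complex(0,1))/3.',
    #                     order = {'QED':1})
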
    def create_file_content(self, datalist):
        """ """
        return '\n'.join([self.create_data_text(obj) for obj in datalist])


    def write_particles(self, outputdir):
        """ """
        text = """
# This file was automatically created by The UFO_usermod

from __future__ import division
from object_library import all_particles, Particle
import parameters as Param

"""
        text += self.create_file_content(self.particles)
        ff = open(os.path.join(outputdir, 'particles.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

        """ """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_vertices, Vertex
import particles as P
import couplings as C
import lorentz as L

"""
        text += self.create_file_content(self.vertices)
        ff = open(os.path.join(outputdir, 'vertices.py'), 'w')
        ff.writelines(text)
        ff.close()
        return


        """ """

        if not self.CTvertices:
            return

        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_vertices, all_CTvertices, Vertex, CTVertex
import particles as P
import couplings as C
import lorentz as L

"""
        text += self.create_file_content(self.CTvertices)
        ff = open(os.path.join(outputdir, 'CT_vertices.py'), 'w')
        ff.writelines(text)
        ff.close()
        return


        """ """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_couplings, Coupling
"""
        text += self.create_file_content(self.couplings)
        ff = open(os.path.join(outputdir, 'couplings.py'), 'w')
        ff.writelines(text)
        ff.close()
        return


        """ """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_lorentz, Lorentz
"""

        text += self.create_file_content(self.lorentz)
        ff = open(os.path.join(outputdir, 'lorentz.py'), 'w')
        ff.writelines(text)
        ff.close()
        return


        """ """
        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_parameters, Parameter
"""

        text += self.create_file_content(self.parameters)
        ff = open(os.path.join(outputdir, 'parameters.py'), 'w')
        ff.writelines(text)
        ff.close()
        return


        """ """
        if not self.CTparameters:
            return

        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_CTparameters, CTParameter

from function_library import complexconjugate, re, im, csc, sec, acsc, asec, cot
"""

        text += self.create_file_content(self.CTparameters)
        ff = open(os.path.join(outputdir, 'CT_parameters.py'), 'w')
        ff.writelines(text)
        ff.close()
        return


        """ """
        text = """
# This file was automatically created by The UFO_usermod
from object_library import all_orders, CouplingOrder
"""

        text += self.create_file_content(self.orders)
        ff = open(os.path.join(outputdir, 'coupling_orders.py'), 'w')
        ff.writelines(text)
        ff.close()
        return


        """ """
        text = """
# This file was automatically created by The UFO_usermod

import cmath
from object_library import all_functions, Function

"""

        text += self.create_file_content(self.functions)
        ff = open(os.path.join(outputdir, 'function_library.py'), 'w')
        ff.writelines(text)
        ff.close()
        return


        """ """

        text = """
# This file was automatically created by The UFO_usermod

from object_library import all_propagators, Propagator
"""

        text += self.create_file_content(self.propagators)
        ff = open(os.path.join(outputdir, 'propagators.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

        """Copy/merge the routines written in Fortran/C++/Python"""

        # merge the Fortran functions.f files of all models, warning when a
        # function name is defined in more than one of them
        re_fct = re.compile(r'''^\s{7,70}[\w\s]*function (\w*)\(''', re.M | re.I)
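        # the pattern matches Fortran declarations such as
        #     '      double precision function myfct(x)'
        # and captures the function name ('myfct' here is purely illustrative)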
        present_fct = set()
        for dirpath in self.all_path:
            if os.path.exists(pjoin(dirpath, 'Fortran', 'functions.f')):
                text = open(pjoin(dirpath, 'Fortran', 'functions.f')).read()
                new_fct = re_fct.findall(text)
                nb_old = len(present_fct)
                nb_added = len(new_fct)
                new_fct = set([f.lower() for f in new_fct])
                present_fct.update(new_fct)
                if len(present_fct) < nb_old + nb_added:
                    logger.critical('''Some functions in functions.f are defined in more than one model.
 This requires AT LEAST a manual modification of the resulting file, and more likely the
 model needs to be considered as un-physical! Use it very carefully.''')

                if not os.path.exists(pjoin(outputdir, 'Fortran')):
                    os.mkdir(pjoin(outputdir, 'Fortran'))
                fsock = open(pjoin(outputdir, 'Fortran', 'functions.f'), 'a')
                fsock.write(text)
                fsock.close()

        # other hand-written routines are simply copied over; an existing file in
        # the output directory is never overwritten (a '.moved' suffix is added)
        for dirpath in self.all_path:
            for subdir in ['Fortran', 'CPP', 'Python']:
                if os.path.exists(pjoin(dirpath, subdir)):
                    for filepath in os.listdir(pjoin(dirpath, subdir)):
                        if filepath == 'functions.f':
                            continue
                        if '.' not in filepath:
                            continue
                        logger.warning('Manual HELAS routines are associated to the model. Those are NOT modified automatically! You need to check them manually.')
                        nb = 0
                        name, extension = filepath.rsplit('.', 1)

                        while 1:
                            filename = '%s%s.%s' % (name, '.moved' * nb, extension)
                            if os.path.exists(pjoin(outputdir, subdir, filename)):
                                nb += 1
                            else:
                                break
                        if not os.path.exists(pjoin(outputdir, subdir)):
                            os.mkdir(pjoin(outputdir, subdir))
                        files.cp(pjoin(dirpath, subdir, filepath), pjoin(outputdir, subdir, filename))

    def get_particle(self, name):
        """ """
        for part in self.particles:
            if part.name == name:
                return part

        raise USRMODERROR('no particle %s in the model' % name)


    def add_particle(self, particle, identify=None):
        """Add a particle in a consistent way"""

        name = particle.name
        if identify:
            name = identify
        old_part = next((p for p in self.particles if p.name == name), None)
        if not old_part:
            # fall back on a case-insensitive match (but refuse an ambiguous one)
            first = True
            for p in self.particles:
                if p.name.lower() == name.lower():
                    if not first:
                        raise Exception
                    else:
                        first = False
                        old_part = p

        if old_part:
            if old_part.pdg_code == particle.pdg_code:
                particle.replace = old_part
                return self.check_mass_width_of_particle(old_part, particle)
            elif identify:
                if particle.spin != old_part.spin:
                    raise USRMODERROR("identified particles should have the same spin")
                elif particle.color != old_part.color:
                    raise USRMODERROR("identified particles should have the same color")
                particle.replace = old_part
                return self.check_mass_width_of_particle(old_part, particle)
            else:
                logger.warning('The particle name \'%s\' is present in both models with different pdg codes' % name)
                logger.warning('The particle coming from the plug-in model will be renamed to \'%s%s\'' % (name, self.addon))
            particle.name = '%s%s' % (name, self.addon)
            self.particles.append(particle)
            return
        elif identify:
            raise USRMODERROR("Particle %s is not in the model" % identify)

        pdg = particle.pdg_code
        if pdg in self.particle_dict:
            particle.replace = self.particle_dict[pdg]
            return self.check_mass_width_of_particle(self.particle_dict[pdg], particle)
        else:
            if hasattr(particle, 'replace'):
                del particle.replace
            self.particles.append(particle)

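    # Convention: objects coming from the add-on model that map onto an existing
    # object of the base model are tagged with a '.replace' attribute pointing to
    # that object; add_interaction()/add_CTinteraction() use it to remap the
    # vertex content.
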
    def check_mass_width_of_particle(self, p_base, p_plugin):
        """check that the mass and width of two identified particles are
        compatible: the parameter names must agree (up to the renamings already
        registered in self.old_new), unless one of the two is ZERO, in which
        case the non-zero parameter is kept."""

        if p_base.mass.name != p_plugin.mass.name:
            # the plug-in mass might already be scheduled for renaming
            if p_plugin.mass.name in self.old_new:
                if self.old_new[p_plugin.mass.name] != p_base.mass.name:
                    raise USRMODERROR('Some inconsistency in the mass assignment in the model: equivalent of %s is %s != %s ' % (p_plugin.mass.name, self.old_new[p_plugin.mass.name], p_base.mass.name))
            elif p_base.mass.name.lower() == 'zero':
                p_base.mass = p_plugin.mass
            elif p_plugin.mass.name.lower() == 'zero':
                pass
            else:
                misc.sprint(p_base.mass.value, p_plugin.mass.value, dir(p_base.mass))
                misc.sprint(p_base.mass.nature, p_plugin.mass.nature)
                misc.sprint(self.old_new)
                raise USRMODERROR('Some inconsistency in the mass assignment in the model\n' +
                                  ' Mass: %s and %s\n' % (p_base.mass.name, p_plugin.mass.name) +
                                  ' conflict name %s\n' % self.old_new +
                                  ' pdg_code: %s %s' % (p_base.pdg_code, p_plugin.pdg_code))

        if p_base.width.name != p_plugin.width.name:
            if p_plugin.width.name in self.old_new:
                if self.old_new[p_plugin.width.name] != p_base.width.name:
                    raise USRMODERROR('Some inconsistency in the width assignment in the model')
            elif p_base.width.name.lower() == 'zero':
                p_base.width = p_plugin.width
            elif p_plugin.width.name.lower() == 'zero':
                pass
            else:
                raise USRMODERROR('Some inconsistency in the width assignment in the model')

        return

        """adding a param_card parameter inside the current model.
        if the parameter block/lhacode already exists then just do nothing
        (but if the names are different then keep the info for future translation)
        If the name already exists in the model, raise an exception.
        """

        name = parameter.name

        old_param = next((p for p in self.parameters if p.name == name), None)
        if old_param:
            if old_param.lhablock == parameter.lhablock and \
                   old_param.lhacode == parameter.lhacode:
                return
            else:
                logger.info('The two models define the parameter \'%s\'\n' % parameter.name +
                            '    the original model for %s :%s\n' % (old_param.lhablock, old_param.lhacode) +
                            '    the plugin for %s :%s\n' % (parameter.lhablock, parameter.lhacode) +
                            '    We will rename the one from the plugin to %s%s' % (parameter.name, self.addon))
                if old_param.nature == 'internal':
                    logger.warning('''The parameter %s is actually an internal parameter of the base model.
Its value is given by %s.
If those two parameters are expected to be identical, you need to provide the value in the param_card according to this formula.
''' % (old_param.name, old_param.value))

            self.old_new[parameter.name] = '%s%s' % (parameter.name, self.addon)
            parameter.name = '%s%s' % (parameter.name, self.addon)

        # for mass/decay blocks, translate the pdg code of identified particles
        lhacode = parameter.lhacode
        if parameter.lhablock.lower() in ['mass', 'decay']:
            if int(parameter.lhacode[0]) in identify_pid:
                lhacode = [identify_pid[int(parameter.lhacode[0])]]

        old_param = next((p for p in self.parameters if p.lhacode == lhacode
                          and p.lhablock == parameter.lhablock), None)
        if old_param:
            logger.info('The two models define the block \'%s\' with id \'%s\' with different parameter names \'%s\', \'%s\'\n'
                        % (old_param.lhablock, old_param.lhacode, parameter.name, old_param.name) +
                        '    We will merge those two parameters into a single one')
            if parameter.name in list(self.old_new.values()):
                key = [k for k in self.old_new if self.old_new[k] == parameter.name][0]
                self.old_new[key] = old_param.name
                self.old_new[parameter.name] = old_param.name
            else:
                self.old_new[parameter.name] = old_param.name

        elif parameter.lhablock.lower() in ['mass', 'decay'] and int(parameter.lhacode[0]) in identify_pid:
            # the parameter is the mass/width of a particle identified with a base-model one
            orig_particle = self.particle_dict[lhacode[0]]
            if parameter.lhablock.lower() == 'mass':
                old_param = orig_particle.mass
            else:
                old_param = orig_particle.width
            if old_param.name.lower() == 'zero':
                # the base model has no such parameter: simply add the new one
                self.parameters.append(parameter)
                self.new_external.append(parameter)
            else:
                logger.info('The two models define the parameter for block \'%s\' with id \'%s\' with different parameter names \'%s\', \'%s\'\n'
                            % (parameter.lhablock.lower(), lhacode[0], parameter.name, old_param.name) +
                            '    We will merge those two parameters into a single one')
                if parameter.name in list(self.old_new.values()):
                    key = [k for k in self.old_new if self.old_new[k] == parameter.name][0]
                    self.old_new[key] = old_param.name
                    self.old_new[parameter.name] = old_param.name
                else:
                    self.old_new[parameter.name] = old_param.name

        else:
            # genuinely new external parameter
            self.parameters.append(parameter)
            self.new_external.append(parameter)

        """ add a parameter of type internal """

        name = parameter.name

        old_param = next((p for p in self.parameters if p.name == name), None)
        if old_param:
            if old_param.value == parameter.value:
                return
            else:
                # the name clashes but the expression differs: rename the add-on
                # parameter and remember the renaming for later substitutions
                if self.old_new:
                    pattern = re.compile(r'\b(%s)\b' % '|'.join(list(self.old_new.keys())))
                    def replace(matchobj):
                        return self.old_new[matchobj.group(0)]
                    parameter.value = pattern.sub(replace, parameter.value)
                self.old_new[parameter.name] = '%s%s' % (parameter.name, self.addon)
                parameter.name = '%s%s' % (parameter.name, self.addon)
                self.parameters.append(parameter)
                return

        # apply the renamings collected so far to the expression of the new parameter
        if self.old_new:
            pattern = re.compile(r'\b(%s)\b' % '|'.join(list(self.old_new.keys())))
            def replace(matchobj):
                return self.old_new[matchobj.group(0)]
            parameter.value = pattern.sub(replace, parameter.value)

        self.parameters.append(parameter)


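    # The self.old_new dictionary drives the renaming of clashing names: with the
    # default addon suffix '__1', a conflicting parameter 'cabi' (an illustrative
    # name) becomes 'cabi__1', and every expression added afterwards is rewritten
    # accordingly by the regex substitutions above.
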
        """add one coupling"""

        name = coupling.name
        same_name = next((p for p in self.couplings if p.name == name), None)
        if same_name:
            coupling.name = '%s%s' % (coupling.name, self.addon)

        if self.old_new:
            pattern = re.compile(r'\b(%s)\b' % '|'.join(list(self.old_new.keys())))
            def replace(matchobj):
                return self.old_new[matchobj.group(0)]
            coupling.value = pattern.sub(replace, coupling.value)

        # reuse an existing coupling when the (rewritten) expression is identical
        old_coupling = next((p for p in self.couplings if p.value == coupling.value), None)

        if old_coupling:
            coupling.replace = old_coupling
        else:
            self.couplings.append(coupling)

        """adding a new coupling order inside the model"""

        name = coupling_order.name
        same_name = next((p for p in self.orders if p.name == name), None)
        if same_name:
            if coupling_order.hierarchy != same_name.hierarchy:
                logger.warning('%s has a different hierarchy in the two models; using the minimal value (%s, %s) => %s'
                               % (name, same_name.hierarchy, coupling_order.hierarchy,
                                  min(same_name.hierarchy, coupling_order.hierarchy)))
                same_name.hierarchy = min(same_name.hierarchy, coupling_order.hierarchy)
            if coupling_order.expansion_order != same_name.expansion_order:
                logger.warning('%s has a different expansion_order in the two models; using the minimal value (%s, %s) => %s'
                               % (name, coupling_order.expansion_order, same_name.expansion_order,
                                  min(same_name.expansion_order, coupling_order.expansion_order)))
                same_name.expansion_order = min(same_name.expansion_order, coupling_order.expansion_order)
            if hasattr(same_name, 'perturbative_expansion') and same_name.perturbative_expansion:
                logger.info('%s will be forbidden to run at NLO' % same_name.name)
                same_name.perturbative_expansion = 0

        else:
            self.orders.append(coupling_order)

        """add one lorentz structure"""

        name = lorentz.name
        same_name = next((p for p in self.lorentz if p.name == name), None)
        if same_name:
            lorentz.name = '%s%s' % (lorentz.name, self.addon)

        if self.old_new:
            pattern = re.compile(r'\b(%s)\b' % '|'.join(list(self.old_new.keys())))
            def replace(matchobj):
                return self.old_new[matchobj.group(0)]
            lorentz.structure = pattern.sub(replace, lorentz.structure)

        # reuse an existing lorentz object with the same spins and structure
        old_lor = next((p for p in self.lorentz
                        if p.structure == lorentz.structure and p.spins == lorentz.spins),
                       None)

        if old_lor:
            lorentz.replace = old_lor
        else:
            self.lorentz.append(lorentz)

        """Add one interaction to the model. This is UNCONDITIONAL!
        If the same interaction is already present, it will appear twice in the
        model. This is now weakened when both interactions are exactly identical
        (EXACT same color/lorentz/coupling expression).
        """

        # work on a copy; instantiating it appends a new entry to the add-on
        # model's all_vertices list, so remove that spurious entry right away
        interaction = interaction.__class__(**interaction.__dict__)
        model.all_vertices.pop(-1)

        # avoid name clashes with the vertices already in the model
        name = interaction.name
        same_name = next((p for p in self.vertices if p.name == name), None)
        if same_name:
            interaction.name = '%s%s' % (interaction.name, self.addon)

        # remap particles/lorentz/couplings to the objects kept in the merged model
        particles = [p.replace if hasattr(p, 'replace') else p for p in interaction.particles]
        interaction.particles = particles

        lorentz = [l.replace if hasattr(l, 'replace') else l for l in interaction.lorentz]
        interaction.lorentz = lorentz

        couplings = [(key, c.replace) if hasattr(c, 'replace') else (key, c)
                     for key, c in interaction.couplings.items()]
        interaction.couplings = dict(couplings)

        # check whether an identical vertex (same particles/color/lorentz/coupling)
        # is already present; if so, do not add it a second time
        get_pdg = lambda vertex: sorted([p.pdg_code for p in vertex.particles])
        id_part = get_pdg(interaction)
        iden_vertex = [v for v in self.vertices if get_pdg(v) == id_part]
        iden = False
        nb_coupling = len(interaction.couplings)
        keys = list(interaction.couplings.keys())

        # the couplings dictionary is indexed by (color index, lorentz index)
        get_lor_and_color = lambda i: (interaction.lorentz[keys[i][1]].structure,
                                       interaction.color[keys[i][0]])
        for v in iden_vertex:
            if len(v.couplings) != nb_coupling:
                continue
            found = []
            for ((i, j), coup) in v.couplings.items():
                new_lorentz = v.lorentz[j].structure
                new_color = v.color[i]
                k = 0
                same = [k for k in range(nb_coupling) if k not in found and
                        get_lor_and_color(k) == (new_lorentz, new_color)]
                if not same:
                    break
                else:
                    for k in same:
                        if interaction.couplings[keys[k]] == coup:
                            found.append(k)
                            break
                    else:
                        # no exact coupling match: fall back on comparing the coupling orders
                        for k in same:
                            if interaction.couplings[keys[k]].order == coup.order:
                                found.append(k)
                                warning = """Did NOT add interaction %s since it has the same particles/lorentz/color/coupling order,
BUT we did not manage to ensure that the coupling is the same. Coupling expressions:
base model: %s
addon model: %s
""" % (id_part, coup.value, interaction.couplings[keys[k]].value)
                                logger.warning(warning)
                                found.append(k)
                                break
                        else:
                            pass
            else:
                # every coupling of the candidate vertex was matched: the interaction
                # is already in the model, do not add it a second time
                return

        logger.info('Adding interaction for the following particles: %s' % id_part)

        self.vertices.append(interaction)

        """Add one CT interaction to the model. This is UNCONDITIONAL!
        If the same interaction is already present, it will appear twice."""

        # avoid name clashes with the vertices already in the model
        name = interaction.name
        same_name = next((p for p in self.vertices if p.name == name), None)
        if same_name:
            interaction.name = '%s%s' % (interaction.name, self.addon)

        # remap particles/lorentz/couplings/loop_particles to the merged-model objects
        particles = [p.replace if hasattr(p, 'replace') else p for p in interaction.particles]
        interaction.particles = particles

        lorentz = [l.replace if hasattr(l, 'replace') else l for l in interaction.lorentz]
        interaction.lorentz = lorentz

        couplings = [(key, c.replace) if hasattr(c, 'replace') else (key, c)
                     for key, c in interaction.couplings.items()]
        interaction.couplings = dict(couplings)

        loop_particles = [[p.replace if hasattr(p, 'replace') else p for p in plist]
                          for plist in interaction.loop_particles]
        interaction.loop_particles = loop_particles
        self.CTvertices.append(interaction)


    def add_model(self, model=None, path=None, identify_particles=None):
        """add another model in the current one"""

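        # identify_particles maps add-on particle names onto base-model names,
        # e.g. {'h2': 'h'} (an illustrative mapping); the corresponding pdg-code
        # map identify_pid is built below.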
        self.new_external = []
        if path:
            model = ufomodels.load_model(path)

        if not model:
            raise USRMODERROR('Need a valid Model')
        else:
            path = model.__path__[0]

        if not hasattr(model, 'all_orders'):
            raise USRMODERROR('Add-on Model doesn\'t follow the UFO convention (no couplings_order information)\n' +
                              'MG5 is able to load such a model but NOT to use it with the add-model feature.')
        if isinstance(model.all_particles[0].mass, six.string_types):
            raise USRMODERROR('Add-on Model doesn\'t follow the UFO convention (Mass/Width of particles are string names, not objects)\n' +
                              'MG5 is able to load such a model but NOT to use it with the add-model feature.')

        for order in model.all_orders:
            if hasattr(order, 'perturbative_expansion') and order.perturbative_expansion:
                raise USRMODERROR('Add-on model can not be a loop model.')

        for order in model.all_orders:
            self.add_coupling_order(order)

        # build the pdg-code correspondence for the identified particles
        identify_pid = {}
        if identify_particles:
            # iterate over a copy since identify_particles is completed below
            # (anti-particles, default names)
            for new, old in list(identify_particles.items()):
                new_part = next((p for p in model.all_particles if p.name == new), None)
                old_part = next((p for p in self.particles if p.name == old), None)

                # fall back on a case-insensitive match (refusing ambiguous ones)
                if not new_part:
                    first = True
                    for p in model.all_particles:
                        if p.name.lower() == new.lower():
                            if not first:
                                raise Exception
                            else:
                                first = False
                                new_part = p
                if not old_part:
                    first = True
                    for p in self.particles:
                        if p.name.lower() == old.lower():
                            if not first:
                                raise Exception
                            else:
                                first = False
                                old_part = p
                if not old_part:
                    # last chance: the particle might be given by its default MG5 name
                    defaultname = base_objects.Model.load_default_name()
                    for pdg, value in defaultname.items():
                        if value == old:
                            old_part = self.particle_dict[pdg]
                            identify_particles[new] = old_part.name
                            break

                if new_part is None:
                    raise USRMODERROR("particle %s not in added model" % new)
                if old_part is None:
                    raise USRMODERROR("particle %s not in original model" % old)
                identify_pid[new_part.pdg_code] = old_part.pdg_code

                if new_part.antiname not in identify_particles:
                    new_anti = new_part.antiname
                    old_anti = old_part.antiname
                    if old_anti == old:
                        raise USRMODERROR("failed identification (one particle is self-conjugate and not the other)")
                    logger.info("adding identification for anti-particle: %s=%s" % (new_anti, old_anti))
                    identify_particles[new_anti] = old_anti

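        # merge the remaining content of the add-on model: parameters first, then
        # couplings, lorentz structures, particles and finally the vertices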
        for parameter in model.all_parameters:
            self.add_parameter(parameter, identify_pid)
        for coupling in model.all_couplings:
            self.add_coupling(coupling)
        for lorentz in model.all_lorentz:
            self.add_lorentz(lorentz)
        for particle in model.all_particles:
            if particle.name in identify_particles:
                self.add_particle(particle, identify=identify_particles[particle.name])
            else:
                self.add_particle(particle)
        for vertex in model.all_vertices:
            self.add_interaction(vertex, model)

        self.all_path.append(path)

        return