1
2
3
4
5
6
7
8
9
10
11
12
13
14
15 """Classes for diagram generation. Amplitude performs the diagram
16 generation, DecayChainAmplitude keeps track of processes with decay
17 chains, and MultiProcess allows generation of processes with
18 multiparticle definitions. DiagramTag allows to identify diagrams
19 based on relevant properties.
20 """
21
22 from __future__ import absolute_import
23 from six.moves import filter
24
25
26
27 import array
28 import copy
29 import itertools
30 import logging
31
32 import madgraph.core.base_objects as base_objects
33 import madgraph.various.misc as misc
34 from madgraph import InvalidCmd, MadGraph5Error
35 from six.moves import range
36 from six.moves import zip
37
38 logger = logging.getLogger('madgraph.diagram_generation')
42
48 """Class to tag diagrams based on objects with some __lt__ measure, e.g.
49 PDG code/interaction id (for comparing diagrams from the same amplitude),
50 or Lorentz/coupling/mass/width (for comparing AMPs from different MEs).
51 Algorithm: Create chains starting from external particles:
52 1 \ / 6
53 2 /\______/\ 7
54 3_ / | \_ 8
55 4 / 5 \_ 9
56 \ 10
57 gives ((((9,10,id910),8,id9108),(6,7,id67),id910867)
58 (((1,2,id12),(3,4,id34)),id1234),
59 5,id91086712345)
60 where idN is the id of the corresponding interaction. The ordering within
61 chains is based on chain length (depth; here, 1234 has depth 3, 910867 has
62 depth 4, 5 has depth 0), and if equal on the ordering of the chain elements.
63 The determination of central vertex is based on minimizing the chain length
64 for the longest subchain.
65 This gives a unique tag which can be used to identify diagrams
66 (instead of symmetry), as well as identify identical matrix elements from
67 different processes."""
68
70 """Exception for any problems in DiagramTags"""
71 pass
72
def __init__(self, diagram, model=None, ninitial=2):
    """Initialize with a diagram. Create DiagramTagChainLinks according to
    the diagram, and figure out if we need to shift the central vertex.

    diagram:  a base_objects.Diagram whose vertices are traversed in order.
    model:    forwarded to link_from_leg/vertex_id_from_vertex (defaults to
              None, so daughter classes that need model info must get it).
    ninitial: number of initial-state legs, forwarded to
              vertex_id_from_vertex.
    """

    # Map from external leg number to the chain link that currently
    # ends in that leg; built up vertex by vertex.
    leg_dict = {}

    for vertex in diagram.get('vertices'):
        # All legs but the last are inputs to this vertex; the last leg
        # is its output -- except for the final vertex, where every leg
        # is an input.
        legs = vertex.get('legs')[:-1]
        lastvx = vertex == diagram.get('vertices')[-1]
        if lastvx:
            # Final vertex: include all legs
            legs = vertex.get('legs')

        # Combine the links of all incoming legs into one new link.
        # A leg number not yet in leg_dict is a true external leg, for
        # which an end link is created from the leg info; otherwise the
        # previously built chain is reused.
        link = DiagramTagChainLink([leg_dict.setdefault(leg.get('number'),
                                    DiagramTagChainLink(self.link_from_leg(leg, model))) \
                                    for leg in legs],
                                   self.vertex_id_from_vertex(vertex,
                                                              lastvx,
                                                              model,
                                                              ninitial))

        # Store the new link under the output leg's number so that
        # later vertices can pick it up.
        if not lastvx:
            leg_dict[vertex.get('legs')[-1].get('number')] = link

    # The link built from the final vertex represents the full diagram.
    self.tag = link

    # Shift the central vertex to minimize the length of the longest
    # sub-chain (links are sorted, so links[0] is the deepest chain).
    # Nothing to do if all chains are end links (depth 0).
    done = max([l.depth for l in self.tag.links]) == 0
    while not done:
        # Deepest sub-chain of the current central vertex
        longest_chain = self.tag.links[0]

        # Collect all the other chains into one link; the vertex id may
        # need flipping since the vertex "direction" changes.
        new_link = DiagramTagChainLink(self.tag.links[1:],
                                       self.flip_vertex(\
                                           self.tag.vertex_id,
                                           longest_chain.vertex_id,
                                           self.tag.links[1:]))

        # Candidate central vertex: head of the longest chain, with the
        # remaining chains attached as the single link built above.
        other_links = list(longest_chain.links) + [new_link]
        other_link = DiagramTagChainLink(other_links,
                                         self.flip_vertex(\
                                             longest_chain.vertex_id,
                                             self.tag.vertex_id,
                                             other_links))

        if other_link.links[0] < self.tag.links[0]:
            # Candidate has a shorter longest chain -- adopt it and try
            # to improve further.
            self.tag = other_link
        else:
            # No further improvement possible; the tag is final.
            done = True
128
133
135 """Output a diagram from a DiagramTag. Note that each daughter
136 class must implement the static functions id_from_vertex_id
137 (if the vertex id is something else than an integer) and
138 leg_from_link (to pass the correct info from an end link to a
139 leg)."""
140
141
142 diagram = base_objects.Diagram({'vertices': \
143 self.vertices_from_link(self.tag,
144 model,
145 True)})
146 diagram.calculate_orders(model)
147 return diagram
148
149 @classmethod
151 """Recursively return the leg corresponding to this link and
152 the list of all vertices from all previous links"""
153
154 if link.end_link:
155
156 return cls.leg_from_link(link), []
157
158
159 leg_vertices = [cls.vertices_from_link(l, model) for l in link.links]
160
161 legs = base_objects.LegList(sorted([l for l,v in leg_vertices],
162 key= lambda l: l.get('number'), reverse=True))
163
164
165 vertices = base_objects.VertexList(sum([v for l, v in leg_vertices],
166 []))
167
168 if not first_vertex:
169
170
171 last_leg = cls.leg_from_legs(legs,link.vertex_id,model)
172 legs.append(last_leg)
173
174
175 vertices.append(cls.vertex_from_link(legs,
176 link.vertex_id,
177 model))
178 if first_vertex:
179
180 return vertices
181 else:
182
183 return last_leg, vertices
184
185 @classmethod
187 """Returns the list of external PDGs of the interaction corresponding
188 to this vertex_id."""
189
190
191
192
193 if (len(vertex_id)>=3 and 'PDGs' in vertex_id[2]):
194 return vertex_id[2]['PDGs']
195 else:
196 return [part.get_pdg_code() for part in model.get_interaction(
197 cls.id_from_vertex_id(vertex_id)).get('particles')]
198
199 @classmethod
201 """Return a leg from a leg list and the model info"""
202
203 pdgs = list(cls.legPDGs_from_vertex_id(vertex_id, model))
204
205
206 for pdg in [leg.get('id') for leg in legs]:
207 pdgs.remove(pdg)
208
209 assert len(pdgs) == 1
210
211 pdg = model.get_particle(pdgs[0]).get_anti_pdg_code()
212 number = min([l.get('number') for l in legs])
213
214 state = (len([l for l in legs if l.get('state') == False]) != 1)
215
216 onshell= False
217
218 return base_objects.Leg({'id': pdg,
219 'number': number,
220 'state': state,
221 'onshell': onshell})
222
223 @classmethod
236
237 @staticmethod
239 """Return a leg from a link"""
240
241 if link.end_link:
242
243 return base_objects.Leg({'number':link.links[0][1],
244 'id':link.links[0][0][0],
245 'state':(link.links[0][0][1] == 0),
246 'onshell':False})
247
248
249 assert False
250
251 @staticmethod
253 """Return the numerical vertex id from a link.vertex_id"""
254
255 return vertex_id[0][0]
256
257 @staticmethod
259 """Return the loop_info stored in this vertex id. Notice that the
260 IdentifyME tag does not store the loop_info, but should normally never
261 need access to it."""
262
263 return vertex_id[2]
264
265 @staticmethod
267 """Reorder a permutation with respect to start_perm. Note that
268 both need to start from 1."""
269 if perm == start_perm:
270 return list(range(len(perm)))
271 order = [i for (p,i) in \
272 sorted([(p,i) for (i,p) in enumerate(perm)])]
273 return [start_perm[i]-1 for i in order]
274
275 @staticmethod
277 """Returns the default end link for a leg: ((id, state), number).
278 Note that the number is not taken into account in tag comparison,
279 but is used only to extract leg permutations."""
280 if leg.get('state'):
281
282 return [((leg.get('id'), 0), leg.get('number'))]
283 else:
284
285 return [((leg.get('id'), leg.get('number')), leg.get('number'))]
286
287 @staticmethod
289 """Returns the default vertex id: just the interaction id
290 Note that in the vertex id, like the leg, only the first entry is
291 taken into account in the tag comparison, while the second is for
292 storing information that is not to be used in comparisons and the
293 third for additional info regarding the shrunk loop vertex."""
294
295 if isinstance(vertex,base_objects.ContractedVertex):
296
297 return ((vertex.get('id'),vertex.get('loop_tag')),(),
298 {'PDGs':vertex.get('PDGs')})
299 else:
300 return ((vertex.get('id'),()),(),{})
301
302 @staticmethod
304 """Returns the default vertex flip: just the new_vertex"""
305 return new_vertex
306
308 """Equal if same tag"""
309 if type(self) != type(other):
310 return False
311 return self.tag == other.tag
312
314 return not self.__eq__(other)
315
318
320 return self.tag < other.tag
321
323 return self.tag > other.tag
324
325 __repr__ = __str__
326
328 """Chain link for a DiagramTag. A link is a tuple + vertex id + depth,
329 with a comparison operator defined"""
330
331 - def __init__(self, objects, vertex_id = None):
332 """Initialize, either with a tuple of DiagramTagChainLinks and
333 a vertex_id (defined by DiagramTag.vertex_id_from_vertex), or
334 with an external leg object (end link) defined by
335 DiagramTag.link_from_leg"""
336
337 if vertex_id == None:
338
339 self.links = tuple(objects)
340 self.vertex_id = (0,)
341 self.depth = 0
342 self.end_link = True
343 return
344
345 self.links = tuple(sorted(list(tuple(objects)), reverse=True))
346 self.vertex_id = vertex_id
347
348
349 self.depth = sum([l.depth for l in self.links],
350 max(1, len(self.links)-1))
351 self.end_link = False
352
354 """Get the permutation of external numbers (assumed to be the
355 second entry in the end link tuples)"""
356
357 if self.end_link:
358 return [self.links[0][1]]
359
360 return sum([l.get_external_numbers() for l in self.links], [])
361
363 """Compare self with other in the order:
364 1. depth 2. len(links) 3. vertex id 4. measure of links"""
365
366 if self == other:
367 return False
368
369 if self.depth != other.depth:
370 return self.depth < other.depth
371
372 if len(self.links) != len(other.links):
373 return len(self.links) < len(other.links)
374
375 if self.vertex_id[0] != other.vertex_id[0]:
376 if isinstance(self.vertex_id[0], int) and isinstance(other.vertex_id[0], tuple):
377 return True
378 elif isinstance(self.vertex_id[0], tuple) and isinstance(other.vertex_id[0], int):
379 return False
380 else:
381 return self.vertex_id[0] < other.vertex_id[0]
382
383 for i, link in enumerate(self.links):
384 if i > len(other.links) - 1:
385 return False
386 if link != other.links[i]:
387 return link < other.links[i]
388
390 return self != other and not self.__lt__(other)
391
393 """For end link,
394 consider equal if self.links[0][0] == other.links[0][0],
395 i.e., ignore the leg number (in links[0][1])."""
396
397 if self.end_link and other.end_link and self.depth == other.depth \
398 and self.vertex_id == other.vertex_id:
399 return self.links[0][0] == other.links[0][0]
400
401 return self.end_link == other.end_link and self.depth == other.depth \
402 and self.vertex_id[0] == other.vertex_id[0] \
403 and self.links == other.links
404
406 return not self.__eq__(other)
407
408
410 if self.end_link:
411 return str(self.links)
412 return "%s, %s; %d" % (str(self.links),
413 str(self.vertex_id),
414 self.depth)
415
416 __repr__ = __str__
417
418
419
420
421 -class Amplitude(base_objects.PhysicsObject):
422 """Amplitude: process + list of diagrams (ordered)
423 Initialize with a process, then call generate_diagrams() to
424 generate the diagrams for the amplitude
425 """
426
428 """Default values for all properties"""
429
430 self['process'] = base_objects.Process()
431 self['diagrams'] = None
432
433
434 self['has_mirror_process'] = False
435
448
449 - def filter(self, name, value):
462
463 - def get(self, name):
472
473
474
476 """Return diagram property names as a nicely sorted list."""
477
478 return ['process', 'diagrams', 'has_mirror_process']
479
481 """Returns number of diagrams for this amplitude"""
482 return len(self.get('diagrams'))
483
485 """Return an AmplitudeList with just this amplitude.
486 Needed for DecayChainAmplitude."""
487
488 return AmplitudeList([self])
489
491 """Returns a nicely formatted string of the amplitude content."""
492 return self.get('process').nice_string(indent) + "\n" + \
493 self.get('diagrams').nice_string(indent)
494
496 """Returns a nicely formatted string of the amplitude process."""
497 return self.get('process').nice_string(indent)
498
500 """Returns the number of initial state particles in the process."""
501 return self.get('process').get_ninitial()
502
504 """ Returns wether this amplitude has a loop process."""
505
506 return self.get('process').get('perturbation_couplings')
507
509 """Generate diagrams. Algorithm:
510
511 1. Define interaction dictionaries:
512 * 2->0 (identity), 3->0, 4->0, ... , maxlegs->0
513 * 2 -> 1, 3 -> 1, ..., maxlegs-1 -> 1
514
515 2. Set flag from_group=true for all external particles.
516 Flip particle/anti particle for incoming particles.
517
518 3. If there is a dictionary n->0 with n=number of external
519 particles, create if possible the combination [(1,2,3,4,...)]
520 with *at least two* from_group==true. This will give a
521 finished (set of) diagram(s) (done by reduce_leglist)
522
523 4. Create all allowed groupings of particles with at least one
524 from_group==true (according to dictionaries n->1):
525 [(1,2),3,4...],[1,(2,3),4,...],...,
526 [(1,2),(3,4),...],...,[(1,2,3),4,...],...
527 (done by combine_legs)
528
529 5. Replace each group with a (list of) new particle(s) with number
530 n = min(group numbers). Set from_group true for these
531 particles and false for all other particles. Store vertex info.
532 (done by merge_comb_legs)
533
534 6. Stop algorithm when at most 2 particles remain.
535 Return all diagrams (lists of vertices).
536
537 7. Repeat from 3 (recursion done by reduce_leglist)
538
539 8. Replace final p=p vertex
540
541 Be aware that the resulting vertices have all particles outgoing,
542 so need to flip for incoming particles when used.
543
544 SPECIAL CASE: For A>BC... processes which are legs in decay
545 chains, we need to ensure that BC... combine first, giving A=A
546 as a final vertex. This case is defined by the Process
547 property is_decay_chain = True.
548 This function can also be called by the generate_diagram function
549 of LoopAmplitudes, in which case the generated diagrams here must not
550 be directly assigned to the 'diagrams' attributed but returned as a
551 DiagramList by the function. This is controlled by the argument
552 returndiag.
553 """
554
555 process = self.get('process')
556 model = process.get('model')
557 legs = process.get('legs')
558
559 for key in process.get('overall_orders').keys():
560 try:
561 process.get('orders')[key] = \
562 min(process.get('orders')[key],
563 process.get('overall_orders')[key])
564 except KeyError:
565 process.get('orders')[key] = process.get('overall_orders')[key]
566
567 assert model.get('particles'), \
568 "particles are missing in model: %s" % model.get('particles')
569
570 assert model.get('interactions'), \
571 "interactions are missing in model"
572
573
574 res = base_objects.DiagramList()
575
576 if len([leg for leg in legs if model.get('particle_dict')[\
577 leg.get('id')].is_fermion()]) % 2 == 1:
578 if not returndiag:
579 self['diagrams'] = res
580 raise InvalidCmd('The number of fermion is odd')
581 else:
582 return False, res
583
584
585
586 if not model.get('got_majoranas') and \
587 len([leg for leg in legs if leg.is_incoming_fermion(model)]) != \
588 len([leg for leg in legs if leg.is_outgoing_fermion(model)]):
589 if not returndiag:
590 self['diagrams'] = res
591 raise InvalidCmd('The number of of incoming/outcoming fermions are different')
592 else:
593 return False, res
594
595
596
597 for charge in model.get('conserved_charge'):
598 total = 0
599 for leg in legs:
600 part = model.get('particle_dict')[leg.get('id')]
601 try:
602 value = part.get(charge)
603 except (AttributeError, base_objects.PhysicsObject.PhysicsObjectError):
604 try:
605 value = getattr(part, charge)
606 except AttributeError:
607 value = 0
608
609 if (leg.get('id') != part['pdg_code']) != leg['state']:
610 total -= value
611 else:
612 total += value
613
614 if abs(total) > 1e-10:
615 if not returndiag:
616 self['diagrams'] = res
617 raise InvalidCmd('No %s conservation for this process ' % charge)
618 return res
619 else:
620 raise InvalidCmd('No %s conservation for this process ' % charge)
621 return res, res
622
623 if not returndiag:
624 logger.info("Trying %s " % process.nice_string().replace('Process', 'process'))
625
626
627 for i in range(0, len(process.get('legs'))):
628
629 leg = copy.copy(process.get('legs')[i])
630 process.get('legs')[i] = leg
631 if leg.get('number') == 0:
632 leg.set('number', i + 1)
633
634
635
636 leglist = self.copy_leglist(process.get('legs'))
637
638 for leg in leglist:
639
640
641 leg.set('from_group', True)
642
643
644
645 if leg.get('state') == False:
646 part = model.get('particle_dict')[leg.get('id')]
647 leg.set('id', part.get_anti_pdg_code())
648
649
650
651 max_multi_to1 = max([len(key) for key in \
652 model.get('ref_dict_to1').keys()])
653
654
655
656
657
658
659
660
661 is_decay_proc = process.get_ninitial() == 1
662 if is_decay_proc:
663 part = model.get('particle_dict')[leglist[0].get('id')]
664
665
666
667 ref_dict_to0 = {(part.get_pdg_code(),part.get_anti_pdg_code()):[0],
668 (part.get_anti_pdg_code(),part.get_pdg_code()):[0]}
669
670
671 leglist[0].set('from_group', None)
672 reduced_leglist = self.reduce_leglist(leglist,
673 max_multi_to1,
674 ref_dict_to0,
675 is_decay_proc,
676 process.get('orders'))
677 else:
678 reduced_leglist = self.reduce_leglist(leglist,
679 max_multi_to1,
680 model.get('ref_dict_to0'),
681 is_decay_proc,
682 process.get('orders'))
683
684
685
686
687 self.convert_dgleg_to_leg(reduced_leglist)
688
689 if reduced_leglist:
690 for vertex_list in reduced_leglist:
691 res.append(self.create_diagram(base_objects.VertexList(vertex_list)))
692
693
694
695 failed_crossing = not res
696
697
698
699
700
701
702 if process.get('required_s_channels') and \
703 process.get('required_s_channels')[0]:
704
705
706 lastvx = -1
707
708
709
710 if is_decay_proc: lastvx = -2
711 ninitial = len([leg for leg in process.get('legs') if leg.get('state') == False])
712
713 old_res = res
714 res = base_objects.DiagramList()
715 for id_list in process.get('required_s_channels'):
716 res_diags = [diagram for diagram in old_res if all([req_s_channel in \
717 [vertex.get_s_channel_id(\
718 process.get('model'), ninitial) \
719 for vertex in diagram.get('vertices')[:lastvx]] \
720 for req_s_channel in \
721 id_list])]
722
723 res.extend([diag for diag in res_diags if diag not in res])
724
725
726
727
728
729 if process.get('forbidden_s_channels'):
730 ninitial = len([leg for leg in process.get('legs') if leg.get('state') == False])
731 if ninitial == 2:
732 res = base_objects.DiagramList(\
733 [diagram for diagram in res if not any([vertex.get_s_channel_id(\
734 process.get('model'), ninitial) \
735 in process.get('forbidden_s_channels')
736 for vertex in diagram.get('vertices')[:-1]])])
737 else:
738
739
740 newres= []
741 for diagram in res:
742 leg1 = 1
743
744
745
746 vertex = diagram.get('vertices')[-1]
747 if any([l['number'] ==1 for l in vertex.get('legs')]):
748 leg1 = [l['number'] for l in vertex.get('legs') if l['number'] !=1][0]
749 to_loop = list(range(len(diagram.get('vertices'))-1))
750 if leg1 >1:
751 to_loop.reverse()
752 for i in to_loop:
753 vertex = diagram.get('vertices')[i]
754 if leg1:
755 if any([l['number'] ==leg1 for l in vertex.get('legs')]):
756 leg1 = 0
757 continue
758 if vertex.get_s_channel_id(process.get('model'), ninitial)\
759 in process.get('forbidden_s_channels'):
760 break
761 else:
762 newres.append(diagram)
763 res = base_objects.DiagramList(newres)
764
765
766
767
768 if process.get('forbidden_onsh_s_channels'):
769 ninitial = len([leg for leg in process.get('legs') if leg.get('state') == False])
770
771 verts = base_objects.VertexList(sum([[vertex for vertex \
772 in diagram.get('vertices')[:-1]
773 if vertex.get_s_channel_id(\
774 process.get('model'), ninitial) \
775 in process.get('forbidden_onsh_s_channels')] \
776 for diagram in res], []))
777 for vert in verts:
778
779 newleg = copy.copy(vert.get('legs').pop(-1))
780 newleg.set('onshell', False)
781 vert.get('legs').append(newleg)
782
783
784 for diagram in res:
785 diagram.calculate_orders(model)
786
787
788
789
790
791
792
793
794 if not returndiag and len(res)>0:
795 res = self.apply_squared_order_constraints(res)
796
797 if diagram_filter:
798 res = self.apply_user_filter(res)
799
800
801 if not process.get('is_decay_chain'):
802 for diagram in res:
803 vertices = diagram.get('vertices')
804 if len(vertices) > 1 and vertices[-1].get('id') == 0:
805
806
807
808
809 vertices = copy.copy(vertices)
810 lastvx = vertices.pop()
811 nexttolastvertex = copy.copy(vertices.pop())
812 legs = copy.copy(nexttolastvertex.get('legs'))
813 ntlnumber = legs[-1].get('number')
814 lastleg = [leg for leg in lastvx.get('legs') if leg.get('number') != ntlnumber][0]
815
816 if lastleg.get('onshell') == False:
817 lastleg.set('onshell', None)
818
819 legs[-1] = lastleg
820 nexttolastvertex.set('legs', legs)
821 vertices.append(nexttolastvertex)
822 diagram.set('vertices', vertices)
823
824 if res and not returndiag:
825 logger.info("Process has %d diagrams" % len(res))
826
827
828 self.trim_diagrams(diaglist=res)
829
830
831 pertur = 'QCD'
832 if self.get('process')['perturbation_couplings']:
833 pertur = sorted(self.get('process')['perturbation_couplings'])[0]
834 self.get('process').get('legs').sort(pert=pertur)
835
836
837 if not returndiag:
838 self['diagrams'] = res
839 return not failed_crossing
840 else:
841 return not failed_crossing, res
842
844 """Applies the user specified squared order constraints on the diagram
845 list in argument."""
846
847 res = copy.copy(diag_list)
848
849
850
851 for name, (value, operator) in self['process'].get('constrained_orders').items():
852 res.filter_constrained_orders(name, value, operator)
853
854
855
856
857 while True:
858 new_res = res.apply_positive_sq_orders(res,
859 self['process'].get('squared_orders'),
860 self['process']['sqorders_types'])
861
862 if len(res)==len(new_res):
863 break
864 elif (len(new_res)>len(res)):
865 raise MadGraph5Error(
866 'Inconsistency in function apply_squared_order_constraints().')
867
868 res = new_res
869
870
871
872
873 neg_orders = [(order, value) for order, value in \
874 self['process'].get('squared_orders').items() if value<0]
875 if len(neg_orders)==1:
876 neg_order, neg_value = neg_orders[0]
877
878 res, target_order = res.apply_negative_sq_order(res, neg_order,\
879 neg_value, self['process']['sqorders_types'][neg_order])
880
881
882
883
884 self['process']['squared_orders'][neg_order]=target_order
885 elif len(neg_orders)>1:
886 raise InvalidCmd('At most one negative squared order constraint'+\
887 ' can be specified, not %s.'%str(neg_orders))
888
889 return res
890
892 """Applies the user specified squared order constraints on the diagram
893 list in argument."""
894
895 if True:
896 remove_diag = misc.plugin_import('user_filter',
897 'user filter required to be defined in PLUGIN/user_filter.py with the function remove_diag(ONEDIAG) which returns True if the diagram has to be removed',
898 fcts=['remove_diag'])
899 else:
900
901 def remove_diag(diag, model=None):
902 for vertex in diag['vertices']:
903 if vertex['id'] == 0:
904 continue
905 if vertex['legs'][-1]['number'] < 3:
906 if abs(vertex['legs'][-1]['id']) <6:
907 return True
908 return False
909
910 res = diag_list.__class__()
911 nb_removed = 0
912 model = self['process']['model']
913 for diag in diag_list:
914 if remove_diag(diag, model):
915 nb_removed +=1
916 else:
917 res.append(diag)
918
919 if nb_removed:
920 logger.warning('Diagram filter is ON and removed %s diagrams for this subprocess.' % nb_removed)
921
922 return res
923
924
925
927 """ Return a Diagram created from the vertex list. This function can be
928 overloaded by daughter classes."""
929 return base_objects.Diagram({'vertices':vertexlist})
930
932 """ In LoopAmplitude, it converts back all DGLoopLegs into Legs.
933 In Amplitude, there is nothing to do. """
934
935 return True
936
938 """ Simply returns a copy of the leg list. This function is
939 overloaded in LoopAmplitude so that a DGLoopLeg list is returned.
940 The DGLoopLeg has some additional parameters only useful during
941 loop diagram generation"""
942
943 return base_objects.LegList(\
944 [ copy.copy(leg) for leg in legs ])
945
def reduce_leglist(self, curr_leglist, max_multi_to1, ref_dict_to0,
                   is_decay_proc = False, coupling_orders = None):
    """Recursive function to reduce N LegList to N-1
    For algorithm, see doc for generate_diagrams.
    """

    # Result: a list of vertex-list chains, each describing one way to
    # complete the diagram from the current leg list.
    res = []

    # A failed leg combination upstream is signalled by None.
    if curr_leglist is None:
        return None

    model = self.get('process').get('model')
    ref_dict_to1 = self.get('process').get('model').get('ref_dict_to1')

    # If the current leg list can combine to a single vertex (n -> 0),
    # this closes a complete diagram.
    if curr_leglist.can_combine_to_0(ref_dict_to0, is_decay_proc):
        # Several interactions may connect the same particle ids;
        # get_combined_vertices lets daughter classes filter the ids.
        vertex_ids = self.get_combined_vertices(curr_leglist,
                      copy.copy(ref_dict_to0[tuple(sorted([leg.get('id') for \
                                                   leg in curr_leglist]))]))

        final_vertices = [base_objects.Vertex({'legs':curr_leglist,
                                               'id':vertex_id}) for \
                          vertex_id in vertex_ids]

        # Keep only final vertices that respect the coupling orders
        # (reduce_orders returns False on violation).
        for final_vertex in final_vertices:
            if self.reduce_orders(coupling_orders, model,
                                  [final_vertex.get('id')]) != False:
                res.append([final_vertex])

    # Stop condition: with 2 legs left they either combined above, or
    # this branch is dead.
    if len(curr_leglist) == 2:
        if res:
            return res
        else:
            return None

    # Find all allowed groupings of legs (n -> 1 combinations).
    comb_lists = self.combine_legs(curr_leglist,
                                   ref_dict_to1, max_multi_to1)

    # Replace each grouping by the possible intermediate particles,
    # paired with the vertices performing the merge.
    leg_vertex_list = self.merge_comb_legs(comb_lists, ref_dict_to1)

    # Consider all the pairs (reduced leg list, vertex list).
    for leg_vertex_tuple in leg_vertex_list:

        # Skip combinations that produce a forbidden intermediate
        # particle (a vertex's last leg is its output).
        if self.get('process').get('forbidden_particles') and \
            any([abs(vertex.get('legs')[-1].get('id')) in \
                 self.get('process').get('forbidden_particles') \
                 for vertex in leg_vertex_tuple[1]]):
            continue

        # Subtract the coupling orders used by the new vertices;
        # False means some order constraint was exceeded.
        new_coupling_orders = self.reduce_orders(coupling_orders,
                                                 model,
                                                 [vertex.get('id') for vertex in \
                                                  leg_vertex_tuple[1]])
        if new_coupling_orders == False:
            continue

        # Recurse on the reduced leg list with the remaining orders.
        reduced_diagram = self.reduce_leglist(leg_vertex_tuple[0],
                                              max_multi_to1,
                                              ref_dict_to0,
                                              is_decay_proc,
                                              new_coupling_orders)

        # Combine the vertices added at this level with every
        # completion found by the recursive step.
        if reduced_diagram:
            vertex_list_list = [list(leg_vertex_tuple[1])]
            vertex_list_list.append(reduced_diagram)
            expanded_list = expand_list_list(vertex_list_list)
            res.extend(expanded_list)

    return res
1034
def reduce_orders(self, coupling_orders, model, vertex_id_list):
    """Return False if the coupling orders for any coupling is <
    0, otherwise return the new coupling orders with the vertex
    orders subtracted. If coupling_orders is not given, return
    None (which counts as success).
    WEIGHTED is a special order, which corresponds to the sum of
    order hierarchies for the couplings.
    We ignore negative constraints as these cannot be taken into
    account on the fly but only after generation."""

    # No constraints given: nothing to check or subtract.
    if not coupling_orders:
        return None

    present_couplings = copy.copy(coupling_orders)
    # Loop variable renamed from 'id' to avoid shadowing the builtin.
    for vertex_id in vertex_id_list:
        # Id 0 is the identity vertex: it consumes no orders.
        if not vertex_id:
            continue
        inter = model.get("interaction_dict")[vertex_id]
        for coupling in inter.get('orders').keys():
            # Subtract this interaction's order for each constrained
            # coupling; negative constraints (value < 0) are ignored
            # here and handled after generation.
            if coupling in present_couplings and \
               present_couplings[coupling] >= 0:
                present_couplings[coupling] -= \
                                          inter.get('orders')[coupling]
                if present_couplings[coupling] < 0:
                    # Coupling order budget exhausted.
                    return False

        # WEIGHTED is the hierarchy-weighted sum over all orders.
        if 'WEIGHTED' in present_couplings and \
           present_couplings['WEIGHTED'] >= 0:
            weight = sum([model.get('order_hierarchy')[c]*n for \
                          (c,n) in inter.get('orders').items()])
            present_couplings['WEIGHTED'] -= weight
            if present_couplings['WEIGHTED'] < 0:
                # Maximum weighted order exceeded.
                return False

    return present_couplings
1076
def combine_legs(self, list_legs, ref_dict_to1, max_multi_to1):
    """Recursive function. Take a list of legs as an input, with
    the reference dictionary n-1->1, and output a list of list of
    tuples of Legs (allowed combinations) and Legs (rest). Algorithm:

    1. Get all n-combinations from list [123456]: [12],..,[23],..,[123],..

    2. For each combination, say [34]. Check if combination is valid.
       If so:

       a. Append [12[34]56] to result array

       b. Split [123456] at index(first element in combination+1),
          i.e. [12],[456] and subtract combination from second half,
          i.e.: [456]-[34]=[56]. Repeat from 1. with this array

    3. Take result array from call to 1. (here, [[56]]) and append
       (first half in step b - combination) + combination + (result
       from 1.) = [12[34][56]] to result array

    4. After appending results from all n-combinations, return
       resulting array. Example, if [13] and [45] are valid
       combinations:
       [[[13]2456],[[13]2[45]6],[123[45]6]]
    """

    res = []

    # Loop over all combination lengths, from 2 up to the largest n
    # with an n -> 1 interaction in the model.
    for comb_length in range(2, max_multi_to1 + 1):

        # Not enough legs left for this (or any longer) combination:
        # everything found so far is the final answer.
        if comb_length > len(list_legs):
            return res

        # All unordered subsets of comb_length legs, in list order.
        for comb in itertools.combinations(list_legs, comb_length):

            # Check whether these legs can merge into one (n -> 1).
            if base_objects.LegList(comb).can_combine_to_1(ref_dict_to1):

                # Step 2a: replace the combined legs by a single tuple,
                # placed at the position of the first combined leg.
                res_list = copy.copy(list_legs)
                for leg in comb:
                    res_list.remove(leg)
                res_list.insert(list_legs.index(comb[0]), comb)
                res.append(res_list)

                # Step 2b: split at the first combined leg and remove
                # the rest of the combination from the tail.
                res_list1 = list_legs[0:list_legs.index(comb[0])]
                res_list2 = list_legs[list_legs.index(comb[0]) + 1:]
                for leg in comb[1:]:
                    res_list2.remove(leg)

                # Common head for every recursive result:
                # (first half) + combination.
                res_list = res_list1
                res_list.append(comb)

                # Step 3: recurse on the remaining legs and append each
                # result to a fresh copy of the head built above.
                for item in self.combine_legs(res_list2,
                                              ref_dict_to1,
                                              max_multi_to1):
                    final_res_list = copy.copy(res_list)
                    final_res_list.extend(item)
                    res.append(final_res_list)

    return res
1150
1151
1153 """Takes a list of allowed leg combinations as an input and returns
1154 a set of lists where combinations have been properly replaced
1155 (one list per element in the ref_dict, so that all possible intermediate
1156 particles are included). For each list, give the list of vertices
1157 corresponding to the executed merging, group the two as a tuple.
1158 """
1159
1160 res = []
1161
1162 for comb_list in comb_lists:
1163
1164 reduced_list = []
1165 vertex_list = []
1166
1167 for entry in comb_list:
1168
1169
1170 if isinstance(entry, tuple):
1171
1172
1173
1174 leg_vert_ids = copy.copy(ref_dict_to1[\
1175 tuple(sorted([leg.get('id') for leg in entry]))])
1176
1177
1178 number = min([leg.get('number') for leg in entry])
1179
1180
1181 if len([leg for leg in entry if leg.get('state') == False]) == 1:
1182 state = False
1183 else:
1184 state = True
1185
1186
1187
1188
1189
1190 new_leg_vert_ids = []
1191 if leg_vert_ids:
1192 new_leg_vert_ids = self.get_combined_legs(entry,
1193 leg_vert_ids,
1194 number,
1195 state)
1196
1197 reduced_list.append([l[0] for l in new_leg_vert_ids])
1198
1199
1200
1201
1202
1203 vlist = base_objects.VertexList()
1204 for (myleg, vert_id) in new_leg_vert_ids:
1205
1206 myleglist = base_objects.LegList(list(entry))
1207
1208 myleglist.append(myleg)
1209
1210 vlist.append(base_objects.Vertex(
1211 {'legs':myleglist,
1212 'id':vert_id}))
1213
1214 vertex_list.append(vlist)
1215
1216
1217
1218 else:
1219 cp_entry = copy.copy(entry)
1220
1221
1222
1223 if cp_entry.get('from_group') != None:
1224 cp_entry.set('from_group', False)
1225 reduced_list.append(cp_entry)
1226
1227
1228 flat_red_lists = expand_list(reduced_list)
1229 flat_vx_lists = expand_list(vertex_list)
1230
1231
1232 for i in range(0, len(flat_vx_lists)):
1233 res.append((base_objects.LegList(flat_red_lists[i]), \
1234 base_objects.VertexList(flat_vx_lists[i])))
1235
1236 return res
1237
1239 """Create a set of new legs from the info given. This can be
1240 overloaded by daughter classes."""
1241
1242 mylegs = [(base_objects.Leg({'id':leg_id,
1243 'number':number,
1244 'state':state,
1245 'from_group':True}),
1246 vert_id)\
1247 for leg_id, vert_id in leg_vert_ids]
1248
1249 return mylegs
1250
1252 """Allow for selection of vertex ids. This can be
1253 overloaded by daughter classes."""
1254
1255 return vert_ids
1256
        """Reduce the number of legs and vertices used in memory.
        When called by a diagram generation initiated by LoopAmplitude,
        this function should not trim the diagrams in the attribute 'diagrams'
        but rather a given list in the 'diaglist' argument."""

        # Pools of unique Leg/Vertex objects; identical objects are shared
        # between diagrams instead of kept as independent copies.
        legs = []
        vertices = []

        if diaglist is None:
            diaglist=self.get('diagrams')

        # Flag final-state legs of the core process whose id has a decay
        # defined (id in decay_ids) as onshell.
        process = self.get('process')
        for leg in process.get('legs'):
            if leg.get('state') and leg.get('id') in decay_ids:
                leg.set('onshell', True)

        for diagram in diaglist:
            # Leg numbers already treated in this diagram, so each external
            # leg gets the onshell flag at most once.
            leg_external = set()
            for ivx, vertex in enumerate(diagram.get('vertices')):
                for ileg, leg in enumerate(vertex.get('legs')):
                    # Only the first occurrence of a decaying final-state
                    # leg needs to be replaced by an onshell-flagged copy.
                    if leg.get('state') and leg.get('id') in decay_ids and \
                       leg.get('number') not in leg_external:
                        # Copy before mutating: the Leg object may be shared
                        # with other vertices/diagrams.
                        leg = copy.copy(leg)
                        leg.set('onshell', True)
                    try:
                        index = legs.index(leg)
                    except ValueError:
                        # First time this leg is seen: keep it in the pool.
                        vertex.get('legs')[ileg] = leg
                        legs.append(leg)
                    else:
                        # Reuse the pooled, identical leg.
                        vertex.get('legs')[ileg] = legs[index]
                    leg_external.add(leg.get('number'))
                try:
                    index = vertices.index(vertex)
                    diagram.get('vertices')[ivx] = vertices[index]
                except ValueError:
                    vertices.append(vertex)
1303
class AmplitudeList(base_objects.PhysicsObjectList):
    """List of Amplitude objects."""

    def has_any_loop_process(self):
        """Check the content of all processes of the amplitudes in this list
        to see if there is any which defines perturbation couplings.

        Returns True as soon as one amplitude reports a loop process, and
        False otherwise (the previous version fell through and implicitly
        returned None; callers only rely on truthiness, so returning an
        explicit bool is backward compatible).
        """

        for amp in self:
            if amp.has_loop_process():
                return True
        return False

    def is_valid_element(self, obj):
        """Test if object obj is a valid Amplitude for the list."""

        return isinstance(obj, Amplitude)
1320
1325 """A list of amplitudes + a list of decay chain amplitude lists;
1326 corresponding to a ProcessDefinition with a list of decay chains
1327 """
1328
1334
    def __init__(self, argument = None, collect_mirror_procs = False,
                 ignore_six_quark_processes = False, loop_filter=None, diagram_filter=False):
        """Allow initialization with Process and with ProcessDefinition"""

        if isinstance(argument, base_objects.Process):
            super(DecayChainAmplitude, self).__init__()
            # Imported here (not at module level) to avoid a circular import
            # with the loop diagram generation module.
            from madgraph.loop.loop_diagram_generation import LoopMultiProcess
            if argument['perturbation_couplings']:
                MultiProcessClass=LoopMultiProcess
            else:
                MultiProcessClass=MultiProcess
            if isinstance(argument, base_objects.ProcessDefinition):
                # A multiprocess definition: generate all its amplitudes.
                self['amplitudes'].extend(\
                    MultiProcessClass.generate_multi_amplitudes(argument,
                                                collect_mirror_procs,
                                                ignore_six_quark_processes,
                                                loop_filter=loop_filter,
                                                diagram_filter=diagram_filter))
            else:
                # A single process: build one amplitude for it.
                self['amplitudes'].append(\
                    MultiProcessClass.get_amplitude_from_proc(argument,
                                                loop_filter=loop_filter,
                                                diagram_filter=diagram_filter))
                # Clean decay chains from process, since we haven't
                # combined processes with decay chains yet
                process = copy.copy(self.get('amplitudes')[0].get('process'))
                process.set('decay_chains', base_objects.ProcessList())
                self['amplitudes'][0].set('process', process)

            # Recursively build a DecayChainAmplitude for every decay chain.
            for process in argument.get('decay_chains'):
                if process.get('perturbation_couplings'):
                    raise MadGraph5Error("Decay processes can not be perturbed")
                process.set('overall_orders', argument.get('overall_orders'))
                if not process.get('is_decay_chain'):
                    process.set('is_decay_chain',True)
                if not process.get_ninitial() == 1:
                    raise InvalidCmd("Decay chain process must have exactly one" + \
                                     " incoming particle")
                self['decay_chains'].append(\
                    DecayChainAmplitude(process, collect_mirror_procs,
                                        ignore_six_quark_processes,
                                        diagram_filter=diagram_filter))

            # Collect the ids of all decaying particles and flag the
            # corresponding legs in the core diagrams (trim_diagrams sets
            # onshell = True and shares identical legs/vertices).
            decay_ids = sum([[a.get('process').get('legs')[0].get('id') \
                              for a in dec.get('amplitudes')] for dec in \
                             self['decay_chains']], [])
            decay_ids = set(decay_ids)
            for amp in self['amplitudes']:
                amp.trim_diagrams(decay_ids)

            # Check that all decay ids are present in at least one core
            # process; ids left over after this loop have no consumer.
            for amp in self['amplitudes']:
                for l in amp.get('process').get('legs'):
                    if l.get('id') in decay_ids:
                        decay_ids.remove(l.get('id'))

            if decay_ids:
                model = amp.get('process').get('model')
                names = [model.get_particle(id).get('name') for id in decay_ids]

                logger.warning(
                    "$RED Decay without corresponding particle in core process found.\n" + \
                    "Decay information for particle(s) %s is discarded.\n" % ','.join(names) + \
                    "Please check your process definition carefully. \n" + \
                    "This warning usually means that you forgot parentheses in presence of subdecay.\n" + \
                    "Example of correct syntax: p p > t t~, ( t > w+ b, w+ > l+ vl)")

                # Remove the unused decays; iterate in reverse so removal
                # does not disturb the iteration.
                for dc in reversed(self['decay_chains']):
                    for a in reversed(dc.get('amplitudes')):
                        # Drop amplitudes whose decaying particle never
                        # appears in the core process.
                        if a.get('process').get('legs')[0].get('id') in decay_ids:
                            dc.get('amplitudes').remove(a)
                    if not dc.get('amplitudes'):
                        # No amplitudes left: drop the whole decay chain.
                        self['decay_chains'].remove(dc)

            # Warn about decays where the initial particle reappears in its
            # own final state (particle "decaying" to itself).
            bad_procs = []
            for dc in self['decay_chains']:
                for amp in dc.get('amplitudes'):
                    legs = amp.get('process').get('legs')
                    fs_parts = [abs(l.get('id')) for l in legs if
                                l.get('state')]
                    is_part = [l.get('id') for l in legs if not
                               l.get('state')][0]
                    if abs(is_part) in fs_parts:
                        bad_procs.append(amp.get('process'))

            if bad_procs:
                logger.warning(
                    "$RED Decay(s) with particle decaying to itself:\n" + \
                    '\n'.join([p.nice_string() for p in bad_procs]) + \
                    "\nPlease check your process definition carefully. \n")

        elif argument != None:
            # Initialize with the given argument (e.g. a property dictionary).
            super(DecayChainAmplitude, self).__init__(argument)
        else:
            # Initialize with default values only.
            super(DecayChainAmplitude, self).__init__()
1439
1440 - def filter(self, name, value):
1441 """Filter for valid amplitude property values."""
1442
1443 if name == 'amplitudes':
1444 if not isinstance(value, AmplitudeList):
1445 raise self.PhysicsObjectError("%s is not a valid AmplitudeList" % str(value))
1446 if name == 'decay_chains':
1447 if not isinstance(value, DecayChainAmplitudeList):
1448 raise self.PhysicsObjectError("%s is not a valid DecayChainAmplitudeList object" % \
1449 str(value))
1450 return True
1451
1453 """Return diagram property names as a nicely sorted list."""
1454
1455 return ['amplitudes', 'decay_chains']
1456
1457
1458
        """Returns number of diagrams for this amplitude"""
        # Diagrams of the core amplitudes plus, recursively, those of every
        # decay chain.
        return sum(len(a.get('diagrams')) for a in self.get('amplitudes')) \
               + sum(d.get_number_of_diagrams() for d in \
                     self.get('decay_chains'))
1464
        """Returns a nicely formatted string of the amplitude content."""
        mystr = ""
        for amplitude in self.get('amplitudes'):
            mystr = mystr + amplitude.nice_string(indent) + "\n"

        if self.get('decay_chains'):
            mystr = mystr + " " * indent + "Decays:\n"
        # Decay chains are printed two extra spaces deeper than their header.
        for dec in self.get('decay_chains'):
            mystr = mystr + dec.nice_string(indent + 2) + "\n"

        # Strip the trailing newline added by the last entry.
        return mystr[:-1]
1477
        """Returns a nicely formatted string of the amplitude processes."""
        mystr = ""
        for amplitude in self.get('amplitudes'):
            mystr = mystr + amplitude.nice_string_processes(indent) + "\n"

        if self.get('decay_chains'):
            mystr = mystr + " " * indent + "Decays:\n"
        # Decay chains are printed two extra spaces deeper than their header.
        for dec in self.get('decay_chains'):
            mystr = mystr + dec.nice_string_processes(indent + 2) + "\n"

        # Strip the trailing newline added by the last entry.
        return mystr[:-1]
1490
        """Returns the number of initial state particles in the process."""
        # All amplitudes share the same initial state; the first one is
        # representative.
        return self.get('amplitudes')[0].get('process').get_ninitial()
1494
        """Returns a set of all particle ids for which a decay is defined"""

        decay_ids = []

        # Collect the amplitudes of every decay chain in one flat list.
        for amp in sum([dc.get('amplitudes') for dc \
                        in self['decay_chains']], []):
            # The decaying particle is the (single) initial-state leg.
            decay_ids.append(amp.get('process').get_initial_ids()[0])

        # Unique ids only.
        return list(set(decay_ids))
1508
        """Return whether this amplitude has a loop process."""
        # Delegate to the AmplitudeList of core amplitudes.
        return self['amplitudes'].has_any_loop_process()
1512
        """Recursive function to extract all amplitudes for this process"""

        amplitudes = AmplitudeList()

        amplitudes.extend(self.get('amplitudes'))
        # Recurse into every decay chain and collect its amplitudes too.
        for decay in self.get('decay_chains'):
            amplitudes.extend(decay.get_amplitudes())

        return amplitudes
1523
    """List of DecayChainAmplitude objects
    """

        """Test if object obj is a valid DecayChainAmplitude for the list."""

        # Only DecayChainAmplitude instances may be stored in this list.
        return isinstance(obj, DecayChainAmplitude)
1536
1537
1538
1539
1540
1541 -class MultiProcess(base_objects.PhysicsObject):
1542 """MultiProcess: list of process definitions
1543 list of processes (after cleaning)
1544 list of amplitudes (after generation)
1545 """
1546
        """Default values for all properties"""

        # Process definitions this multiprocess was built from.
        self['process_definitions'] = base_objects.ProcessDefinitionList()

        # Amplitudes are generated lazily in get() from the process
        # definitions above.
        self['amplitudes'] = AmplitudeList()

        # Whether initial-state mirror processes should be folded together.
        self['collect_mirror_procs'] = False

        # Quark ids for which processes with six or more such (anti)quarks
        # are skipped.
        self['ignore_six_quark_processes'] = []

        # When True, 1->N processes are pre-filtered by a numerical mass
        # check (see generate_multi_amplitudes).
        self['use_numerical'] = False
1563
1564 - def __init__(self, argument=None, collect_mirror_procs = False,
1565 ignore_six_quark_processes = [], optimize=False,
1566 loop_filter=None, diagram_filter=None):
1594
1595
1596 - def filter(self, name, value):
1597 """Filter for valid process property values."""
1598
1599 if name == 'process_definitions':
1600 if not isinstance(value, base_objects.ProcessDefinitionList):
1601 raise self.PhysicsObjectError("%s is not a valid ProcessDefinitionList object" % str(value))
1602
1603 if name == 'amplitudes':
1604 if not isinstance(value, AmplitudeList):
1605 raise self.PhysicsObjectError("%s is not a valid AmplitudeList object" % str(value))
1606
1607 if name in ['collect_mirror_procs']:
1608 if not isinstance(value, bool):
1609 raise self.PhysicsObjectError("%s is not a valid boolean" % str(value))
1610
1611 if name == 'ignore_six_quark_processes':
1612 if not isinstance(value, list):
1613 raise self.PhysicsObjectError("%s is not a valid list" % str(value))
1614
1615 return True
1616
    def get(self, name):
        """Get the value of the property name."""

        # Amplitudes are generated lazily: only on the first access of
        # 'amplitudes', and only if none have been generated yet.
        if (name == 'amplitudes') and not self[name]:
            for process_def in self.get('process_definitions'):
                if process_def.get('decay_chains'):
                    # The definition carries decay chains: delegate the full
                    # (core + decays) generation to DecayChainAmplitude.
                    self['amplitudes'].append(\
                        DecayChainAmplitude(process_def,
                                            self.get('collect_mirror_procs'),
                                            self.get('ignore_six_quark_processes'),
                                            diagram_filter=self['diagram_filter']))
                else:
                    # Plain multiprocess: expand it into individual
                    # amplitudes.
                    self['amplitudes'].extend(\
                        self.generate_multi_amplitudes(process_def,
                                         self.get('collect_mirror_procs'),
                                         self.get('ignore_six_quark_processes'),
                                         self['use_numerical'],
                                         loop_filter=self['loop_filter'],
                                         diagram_filter=self['diagram_filter']))

        # Delegate the actual lookup to the PhysicsObject base class.
        return MultiProcess.__bases__[0].get(self, name)
1640
1642 """Return process property names as a nicely sorted list."""
1643
1644 return ['process_definitions', 'amplitudes']
1645
1647
1648 return self['process_definitions'][0]['model']
1649
    @classmethod
    def generate_multi_amplitudes(cls,process_definition,
                                  collect_mirror_procs = False,
                                  ignore_six_quark_processes = [],
                                  use_numerical=False,
                                  loop_filter=None,
                                  diagram_filter=False):
        """Generate amplitudes in a semi-efficient way.
        Make use of crossing symmetry for processes that fail diagram
        generation, but not for processes that succeed diagram
        generation. Doing so will risk making it impossible to
        identify processes with identical amplitudes.
        """
        assert isinstance(process_definition, base_objects.ProcessDefinition), \
               "%s not valid ProcessDefinition object" % \
               repr(process_definition)

        # Determine the minimal coupling orders that give diagrams, unless
        # orders were specified explicitly.
        process_definition.set('orders', MultiProcess.\
                               find_optimal_process_orders(process_definition,
                                                           diagram_filter))
        # Check for maximum orders from the model
        process_definition.check_expansion_orders()

        processes = base_objects.ProcessList()
        amplitudes = AmplitudeList()

        # failed_procs: sorted outgoing-id signatures of processes whose
        # diagram generation failed (reused via crossing symmetry).
        failed_procs = []
        success_procs = []
        # Complete processes, as id arrays, for identification of mirror
        # processes.
        non_permuted_procs = []
        # permutations keeps the permutations of the crossed processes
        permutations = []

        model = process_definition['model']

        islegs = [leg for leg in process_definition['legs'] \
                  if leg['state'] == False]
        fslegs = [leg for leg in process_definition['legs'] \
                  if leg['state'] == True]

        isids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == False]
        fsids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == True]
        polids = [tuple(leg['polarization']) for leg in process_definition['legs'] \
                 if leg['state'] == True]

        # Generate all combinations for the initial state
        for prod in itertools.product(*isids):
            islegs = [\
                    base_objects.Leg({'id':id, 'state': False,
                                      'polarization': islegs[i]['polarization']})
                    for i,id in enumerate(prod)]

            # Generate all combinations for the final state, and make
            # sure to remove double counting

            red_fsidlist = set()

            for prod in itertools.product(*fsids):
                tag = zip(prod, polids)
                tag = sorted(tag)
                # Remove double counting between final states (same ids and
                # polarizations in a different order).
                if tuple(tag) in red_fsidlist:
                    continue

                red_fsidlist.add(tuple(tag))

                # Generate the leg list for this particular process.
                leg_list = [copy.copy(leg) for leg in islegs]
                leg_list.extend([\
                        base_objects.Leg({'id':id, 'state': True, 'polarization': fslegs[i]['polarization']}) \
                        for i,id in enumerate(prod)])

                legs = base_objects.LegList(leg_list)

                # Crossing signature: sorted outgoing ids together with the
                # permutation that sorts them.
                sorted_legs = sorted([(l,i+1) for (i,l) in \
                                     enumerate(legs.get_outgoing_id_list(model))])
                permutation = [l[1] for l in sorted_legs]

                sorted_legs = array.array('i', [l[0] for l in sorted_legs])

                # Skip processes with six or more ignored quark flavors.
                if ignore_six_quark_processes and \
                       len([i for i in sorted_legs if abs(i) in \
                            ignore_six_quark_processes]) >= 6:
                    continue

                # Check if crossed process has already failed;
                # in that case don't check process.
                if sorted_legs in failed_procs:
                    continue

                # Numerical pre-filter for 1->N decays: the initial mass must
                # exceed the sum of final-state masses.
                if use_numerical:
                    # check that final state has lower mass than initial state
                    initial_mass = abs(model['parameter_dict'][model.get_particle(legs[0].get('id')).get('mass')])
                    if initial_mass == 0:
                        continue
                    for leg in legs[1:]:
                        m = model['parameter_dict'][model.get_particle(leg.get('id')).get('mass')]
                        initial_mass -= abs(m)
                    if initial_mass.real <= 0:
                        continue

                # Setup the process with this concrete leg assignment.
                process = process_definition.get_process_with_legs(legs)

                fast_proc = \
                          array.array('i',[leg.get('id') for leg in legs])
                if collect_mirror_procs and \
                       process_definition.get_ninitial() == 2:
                    # Check if the initial-state-mirrored process was already
                    # generated; if so, just flag it instead of regenerating.
                    mirror_proc = \
                              array.array('i', [fast_proc[1], fast_proc[0]] + \
                                          list(fast_proc[2:]))
                    try:
                        mirror_amp = \
                                   amplitudes[non_permuted_procs.index(mirror_proc)]
                    # NOTE(review): deliberately broad except — index() raises
                    # ValueError when no mirror is found; narrowing to
                    # ValueError would be safer but is kept as-is here.
                    except Exception:
                        # Didn't find any mirror process
                        pass
                    else:
                        # Mirror process found
                        mirror_amp.set('has_mirror_process', True)
                        logger.info("Process %s added to mirror process %s" % \
                                    (process.base_string(),
                                     mirror_amp.get('process').base_string()))
                        continue

                # Reuse diagrams from a successful crossing, unless the
                # process has properties that break crossing symmetry.
                if not process.get('required_s_channels') and \
                   not process.get('forbidden_onsh_s_channels') and \
                   not process.get('forbidden_s_channels') and \
                   not process.get('is_decay_chain') and not diagram_filter:
                    try:
                        crossed_index = success_procs.index(sorted_legs)
                        # Loop amplitudes are not crossed: the leg relabeling
                        # would not be worth the gain.
                        if 'loop_diagrams' in amplitudes[crossed_index]:
                            raise ValueError
                    except ValueError:
                        # No crossing found, just continue
                        pass
                    else:
                        # Found a crossing: reuse the diagrams with legs
                        # renumbered for this process.
                        amplitude = MultiProcess.cross_amplitude(\
                            amplitudes[crossed_index],
                            process,
                            permutations[crossed_index],
                            permutation)
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                        logger.info("Crossed process found for %s, reuse diagrams." % \
                                    process.base_string())
                        continue

                # No shortcut applies: generate the diagrams from scratch.
                amplitude = cls.get_amplitude_from_proc(process,
                                                        loop_filter=loop_filter)

                try:
                    result = amplitude.generate_diagrams(diagram_filter=diagram_filter)
                except InvalidCmd as error:
                    failed_procs.append(sorted_legs)
                else:
                    # Succeeded in generating diagrams
                    if amplitude.get('diagrams'):
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                    elif not result:
                        # Diagram generation failed for all crossings
                        failed_procs.append(sorted_legs)

        # Raise an exception if there are no amplitudes for this process.
        if not amplitudes:
            if len(failed_procs) == 1 and 'error' in locals():
                raise error
            else:
                raise NoDiagramException("No amplitudes generated from process %s. Please enter a valid process" % \
                                            process_definition.nice_string())

        return amplitudes
1846
    @classmethod
        """ Return the correct amplitude type according to the characteristics of
        the process proc. The only option that could be specified here is
        loop_filter and it is of course not relevant for a tree amplitude."""

        # Tree-level default: a plain Amplitude. Loop-capable daughter
        # classes override this hook to return loop amplitudes instead.
        return Amplitude({"process": proc})
1854
1855
    @staticmethod
        """Find the minimal WEIGHTED order for this set of processes.

        The algorithm:

        1) Check the coupling hierarchy of the model. Assign all
        particles to the different coupling hierarchies so that a
        particle is considered to be in the highest hierarchy (i.e.,
        with lowest value) where it has an interaction.

        2) Pick out the legs in the multiprocess according to the
        highest hierarchy represented (so don't mix particles from
        different hierarchy classes in the same multiparticles!)

        3) Find the starting maximum WEIGHTED order as the sum of the
        highest n-2 weighted orders

        4) Pick out required s-channel particle hierarchies, and use
        the highest of the maximum WEIGHTED order from the legs and
        the minimum WEIGHTED order extracted from 2*s-channel
        hierarchys plus the n-2-2*(number of s-channels) lowest
        leg weighted orders.

        5) Run process generation with the WEIGHTED order determined
        in 3)-4) - # final state gluons, with all gluons removed from
        the final state

        6) If no process is found, increase WEIGHTED order by 1 and go
        back to 5), until we find a process which passes. Return that
        order.

        7) Continue 5)-6) until we reach (n-2)*(highest hierarchy)-1.
        If still no process has passed, return
        WEIGHTED = (n-2)*(highest hierarchy)
        """

        assert isinstance(process_definition, base_objects.ProcessDefinition), \
               "%s not valid ProcessDefinition object" % \
               repr(process_definition)

        processes = base_objects.ProcessList()
        amplitudes = AmplitudeList()

        # If coupling orders are already specified, keep them as-is.
        if process_definition.get('orders') or \
           process_definition.get('overall_orders') or \
           process_definition.get('NLO_mode')=='virt':
            return process_definition.get('orders')

        # 1->N decay processes (that are not decay chains) are not scanned.
        if process_definition.get_ninitial() == 1 and not \
           process_definition.get('is_decay_chain'):
            return process_definition.get('orders')

        logger.info("Checking for minimal orders which gives processes.")
        logger.info("Please specify coupling orders to bypass this step.")

        # Minimum starting guess for the WEIGHTED order scan.
        max_order_now, particles, hierarchy = \
                       process_definition.get_minimum_WEIGHTED()
        coupling = 'WEIGHTED'

        model = process_definition.get('model')

        # Extract the initial- and final-state multiparticle id lists.
        isids = [leg['ids'] for leg in \
                 [leg for leg in process_definition['legs'] if leg['state'] == False]]
        fsids = [leg['ids'] for leg in \
                 [leg for leg in process_definition['legs'] if leg['state'] == True]]

        # Absolute upper bound for the scan (see step 7 of the docstring).
        max_WEIGHTED_order = \
                        (len(fsids + isids) - 2)*int(model.get_max_WEIGHTED())

        # Human-readable definition of WEIGHTED for the log message below.
        hierarchydef = process_definition['model'].get('order_hierarchy')
        tmp = []
        hierarchy = list(hierarchydef.items())
        hierarchy.sort()
        for key, value in hierarchydef.items():
            if value>1:
                tmp.append('%s*%s' % (value,key))
            else:
                tmp.append('%s' % key)
        wgtdef = '+'.join(tmp)

        # Run diagram generation with increasing max_order_now until
        # some process gives diagrams.
        while max_order_now < max_WEIGHTED_order:
            # NOTE(review): "WEIGTHED" is a typo in this runtime log string;
            # left untouched here since this edit changes comments only.
            logger.info("Trying coupling order WEIGHTED<=%d: WEIGTHED IS %s" % (max_order_now, wgtdef))

            oldloglevel = logger.level
            logger.setLevel(logging.WARNING)

            # failed_procs are processes that have already failed
            # based on crossing symmetry
            failed_procs = []

            # Generate all combinations for the initial state.
            for prod in itertools.product(*isids):
                islegs = [ base_objects.Leg({'id':id, 'state': False}) \
                        for id in prod]

                # Generate all combinations for the final state, and make
                # sure to remove double counting

                red_fsidlist = []

                for prod in itertools.product(*fsids):

                    # Remove double counting between final states
                    if tuple(sorted(prod)) in red_fsidlist:
                        continue

                    red_fsidlist.append(tuple(sorted(prod)));

                    # Remove gluons from the final state and compensate the
                    # order budget instead (step 5 of the docstring).
                    nglue = 0
                    if 21 in particles[0]:
                        nglue = len([id for id in prod if id == 21])
                        prod = [id for id in prod if id != 21]

                    # Generate leg list for process
                    leg_list = [copy.copy(leg) for leg in islegs]

                    leg_list.extend([\
                            base_objects.Leg({'id':id, 'state': True}) \
                            for id in prod])

                    legs = base_objects.LegList(leg_list)

                    # Lower the tried order by the QCD weight of the removed
                    # gluons.
                    coupling_orders_now = {coupling: max_order_now - \
                                           nglue * model['order_hierarchy']['QCD']}

                    # Setup process
                    # NOTE(review): 'split_orders' appears twice in this dict
                    # literal (the second occurrence wins); kept byte-identical
                    # here since this edit changes comments only.
                    process = base_objects.Process({\
                                    'legs':legs,
                                    'model':model,
                                    'id': process_definition.get('id'),
                                    'orders': coupling_orders_now,
                                    'required_s_channels': \
                                    process_definition.get('required_s_channels'),
                                    'forbidden_onsh_s_channels': \
                                    process_definition.get('forbidden_onsh_s_channels'),
                                    'sqorders_types': \
                                    process_definition.get('sqorders_types'),
                                    'squared_orders': \
                                    process_definition.get('squared_orders'),
                                    'split_orders': \
                                    process_definition.get('split_orders'),
                                    'forbidden_s_channels': \
                                    process_definition.get('forbidden_s_channels'),
                                    'forbidden_particles': \
                                    process_definition.get('forbidden_particles'),
                                    'is_decay_chain': \
                                    process_definition.get('is_decay_chain'),
                                    'overall_orders': \
                                    process_definition.get('overall_orders'),
                                    'split_orders': \
                                    process_definition.get('split_orders')})

                    process.check_expansion_orders()

                    # Crossing signature of this process.
                    sorted_legs = sorted(legs.get_outgoing_id_list(model))

                    # Skip already-failed crossings (unless forbidden
                    # s-channels break crossing symmetry).
                    if tuple(sorted_legs) in failed_procs and not process_definition.get('forbidden_s_channels'):
                        continue

                    amplitude = Amplitude({'process': process})
                    try:
                        amplitude.generate_diagrams(diagram_filter=diagram_filter)
                    except InvalidCmd as error:
                        failed_procs.append(tuple(sorted_legs))
                    else:
                        if amplitude.get('diagrams'):
                            # We found a valid process: this order suffices.
                            logger.setLevel(oldloglevel)
                            return {coupling: max_order_now}
                        else:
                            failed_procs.append(tuple(sorted_legs))

            max_order_now += 1
            logger.setLevel(oldloglevel)

        # Nothing passed within the bound: return the maximal order.
        return {coupling: max_order_now}
2045
    @staticmethod
        """Return the amplitude crossed with the permutation new_perm"""

        # Map original leg numbers onto the new (crossed) numbers.
        perm_map = dict(list(zip(org_perm, new_perm)))

        # Shallow copy: the diagrams are replaced below, everything else is
        # shared with the original amplitude.
        new_amp = copy.copy(amplitude)

        # Number the legs of the new process sequentially.
        for i, leg in enumerate(process.get('legs')):
            leg.set('number', i+1)

        new_amp.set('process', process)

        # Replace the diagrams with relabeled copies and re-share identical
        # legs/vertices to save memory.
        diagrams = base_objects.DiagramList([d.renumber_legs(perm_map,
                                            process.get('legs'),) for \
                                            d in new_amp.get('diagrams')])
        new_amp.set('diagrams', diagrams)
        new_amp.trim_diagrams()

        # Make sure to reset the mirror-process flag on the crossed copy.
        new_amp.set('has_mirror_process', False)

        return new_amp
2069
def expand_list(mylist):
    """Takes a list of lists and elements and returns a list of flat lists.
    Example: [[1,2], 3, [4,5]] -> [[1,3,4], [1,3,5], [2,3,4], [2,3,5]]

    NOTE(review): the def line was lost in this extraction; the name and
    arity are restored from the call site expand_list(reduced_list).
    """

    assert isinstance(mylist, list), "Expand_list argument must be a list"

    # Wrap bare elements in singleton lists so every entry contributes
    # exactly one factor to the cartesian product.
    factors = [item if isinstance(item, list) else [item] for item in mylist]

    # One flat list per combination in the cartesian product.
    return [list(combination) for combination in itertools.product(*factors)]
2095
def expand_list_list(mylist):
    """Recursive function. Takes a list of lists and lists of lists
    and returns a list of flat lists.
    Example: [[1,2],[[4,5],[6,7]]] -> [[1,2,4,5], [1,2,6,7]]

    NOTE(review): the def line was lost in this extraction; the name and
    arity are restored from the recursive call expand_list_list(mylist[1:]).
    """

    # Base case: nothing (or a single empty list) expands to one empty
    # combination.
    if not mylist or len(mylist) == 1 and not mylist[0]:
        return [[]]

    assert isinstance(mylist[0], list), \
           "Expand_list_list needs a list of lists and lists of lists"

    # With a single entry left there is nothing to combine: a list of lists
    # is returned as-is, a plain list stays wrapped.
    if len(mylist) == 1:
        if isinstance(mylist[0][0], list):
            return mylist[0]
        else:
            return mylist

    # The first entry contributes either its sub-lists (list of lists) or
    # itself (plain list) as prefixes for every expansion of the tail.
    if isinstance(mylist[0][0], list):
        prefixes = mylist[0]
    else:
        prefixes = [mylist[0]]

    # Replaces the original append/copy.copy/extend loops with an
    # equivalent comprehension (prefix + rest builds a fresh list).
    return [prefix + rest for prefix in prefixes
            for rest in expand_list_list(mylist[1:])]
2135