1
2
3
4
5
6
7
8
9
10
11
12
13
14
15 """Classes for diagram generation. Amplitude performs the diagram
16 generation, DecayChainAmplitude keeps track of processes with decay
17 chains, and MultiProcess allows generation of processes with
18 multiparticle definitions. DiagramTag allows to identify diagrams
19 based on relevant properties.
20 """
21
22 import array
23 import copy
24 import itertools
25 import logging
26
27 import madgraph.core.base_objects as base_objects
28 import madgraph.various.misc as misc
29 from madgraph import InvalidCmd, MadGraph5Error
30
31 logger = logging.getLogger('madgraph.diagram_generation')
35
41 """Class to tag diagrams based on objects with some __lt__ measure, e.g.
42 PDG code/interaction id (for comparing diagrams from the same amplitude),
43 or Lorentz/coupling/mass/width (for comparing AMPs from different MEs).
44 Algorithm: Create chains starting from external particles:
45 1 \ / 6
46 2 /\______/\ 7
47 3_ / | \_ 8
48 4 / 5 \_ 9
49 \ 10
50 gives ((((9,10,id910),8,id9108),(6,7,id67),id910867)
51 (((1,2,id12),(3,4,id34)),id1234),
52 5,id91086712345)
53 where idN is the id of the corresponding interaction. The ordering within
54 chains is based on chain length (depth; here, 1234 has depth 3, 910867 has
55 depth 4, 5 has depth 0), and if equal on the ordering of the chain elements.
56 The determination of central vertex is based on minimizing the chain length
57 for the longest subchain.
58 This gives a unique tag which can be used to identify diagrams
59 (instead of symmetry), as well as identify identical matrix elements from
60 different processes."""
61
63 """Exception for any problems in DiagramTags"""
64 pass
65
def __init__(self, diagram, model=None, ninitial=2):
    """Initialize with a diagram. Create DiagramTagChainLinks according
    to the diagram, and figure out if we need to shift the central
    vertex so that the tag becomes canonical."""

    # Map leg number -> chain link built so far for that leg.
    leg_dict = {}

    for vertex in diagram.get('vertices'):

        # For all but the last vertex, the final leg is the produced
        # one; for the last vertex every leg is incoming.
        is_last = vertex == diagram.get('vertices')[-1]
        if is_last:
            in_legs = vertex.get('legs')
        else:
            in_legs = vertex.get('legs')[:-1]

        chain = DiagramTagChainLink(
            [leg_dict.setdefault(leg.get('number'),
                 DiagramTagChainLink(self.link_from_leg(leg, model)))
             for leg in in_legs],
            self.vertex_id_from_vertex(vertex, is_last, model, ninitial))

        # Register the produced leg so later vertices can pick it up.
        if not is_last:
            leg_dict[vertex.get('legs')[-1].get('number')] = chain

    # The link built from the last vertex is the (tentative) tag.
    self.tag = chain

    # Shift the central vertex towards the deepest chain until the
    # longest subchain is minimal. Links are kept sorted, so links[0]
    # is always the deepest chain.
    done = max([l.depth for l in self.tag.links]) == 0
    while not done:

        deepest = self.tag.links[0]

        # Merge the remaining links into a single new link...
        merged = DiagramTagChainLink(self.tag.links[1:],
                                     self.flip_vertex(
                                         self.tag.vertex_id,
                                         deepest.vertex_id,
                                         self.tag.links[1:]))

        # ...and recenter the tag one step into the deepest chain.
        candidate_links = list(deepest.links) + [merged]
        candidate = DiagramTagChainLink(candidate_links,
                                        self.flip_vertex(
                                            deepest.vertex_id,
                                            self.tag.vertex_id,
                                            candidate_links))

        if candidate.links[0] < self.tag.links[0]:
            # The recentered tag has a shorter longest chain; keep it.
            self.tag = candidate
        else:
            done = True
126
128 """Output a diagram from a DiagramTag. Note that each daughter
129 class must implement the static functions id_from_vertex_id
130 (if the vertex id is something else than an integer) and
131 leg_from_link (to pass the correct info from an end link to a
132 leg)."""
133
134
135 diagram = base_objects.Diagram({'vertices': \
136 self.vertices_from_link(self.tag,
137 model,
138 True)})
139 diagram.calculate_orders(model)
140 return diagram
141
142 @classmethod
144 """Recursively return the leg corresponding to this link and
145 the list of all vertices from all previous links"""
146
147 if link.end_link:
148
149 return cls.leg_from_link(link), []
150
151
152 leg_vertices = [cls.vertices_from_link(l, model) for l in link.links]
153
154 legs = base_objects.LegList(sorted([l for l,v in leg_vertices],
155 lambda l1,l2: l2.get('number') - \
156 l1.get('number')))
157
158 vertices = base_objects.VertexList(sum([v for l, v in leg_vertices],
159 []))
160
161 if not first_vertex:
162
163
164 last_leg = cls.leg_from_legs(legs,link.vertex_id,model)
165 legs.append(last_leg)
166
167
168 vertices.append(cls.vertex_from_link(legs,
169 link.vertex_id,
170 model))
171 if first_vertex:
172
173 return vertices
174 else:
175
176 return last_leg, vertices
177
178 @classmethod
180 """Returns the list of external PDGs of the interaction corresponding
181 to this vertex_id."""
182
183
184
185
186 if (len(vertex_id)>=3 and 'PDGs' in vertex_id[2]):
187 return vertex_id[2]['PDGs']
188 else:
189 return [part.get_pdg_code() for part in model.get_interaction(
190 cls.id_from_vertex_id(vertex_id)).get('particles')]
191
192 @classmethod
194 """Return a leg from a leg list and the model info"""
195
196 pdgs = list(cls.legPDGs_from_vertex_id(vertex_id, model))
197
198
199 for pdg in [leg.get('id') for leg in legs]:
200 pdgs.remove(pdg)
201
202 assert len(pdgs) == 1
203
204 pdg = model.get_particle(pdgs[0]).get_anti_pdg_code()
205 number = min([l.get('number') for l in legs])
206
207 state = (len([l for l in legs if l.get('state') == False]) != 1)
208
209 onshell= False
210
211 return base_objects.Leg({'id': pdg,
212 'number': number,
213 'state': state,
214 'onshell': onshell})
215
216 @classmethod
229
230 @staticmethod
232 """Return a leg from a link"""
233
234 if link.end_link:
235
236 return base_objects.Leg({'number':link.links[0][1],
237 'id':link.links[0][0][0],
238 'state':(link.links[0][0][1] == 0),
239 'onshell':False})
240
241
242 assert False
243
244 @staticmethod
246 """Return the numerical vertex id from a link.vertex_id"""
247
248 return vertex_id[0][0]
249
250 @staticmethod
252 """Return the loop_info stored in this vertex id. Notice that the
253 IdentifyME tag does not store the loop_info, but should normally never
254 need access to it."""
255
256 return vertex_id[2]
257
258 @staticmethod
260 """Reorder a permutation with respect to start_perm. Note that
261 both need to start from 1."""
262 if perm == start_perm:
263 return range(len(perm))
264 order = [i for (p,i) in \
265 sorted([(p,i) for (i,p) in enumerate(perm)])]
266 return [start_perm[i]-1 for i in order]
267
268 @staticmethod
# NOTE(review): the `def link_from_leg(leg, model)` header line is
# missing from this listing (source line 269 was stripped); the body
# below reads a single `leg` argument.
270 """Returns the default end link for a leg: ((id, state), number).
271 Note that the number is not taken into account in tag comparison,
272 but is used only to extract leg permutations."""
273 if leg.get('state'):
274
# Final-state leg: the state entry is fixed to 0, so equal-id
# final-state legs compare equal regardless of their number.
275 return [((leg.get('id'), 0), leg.get('number'))]
276 else:
277
# Initial-state leg: keep the leg number in the compared part so
# the two incoming legs are never identified with each other.
278 return [((leg.get('id'), leg.get('number')), leg.get('number'))]
279
280 @staticmethod
# NOTE(review): the `def vertex_id_from_vertex(...)` header line is
# missing from this listing (source line 281 was stripped).
282 """Returns the default vertex id: just the interaction id
283 Note that in the vertex id, like the leg, only the first entry is
284 taken into account in the tag comparison, while the second is for
285 storing information that is not to be used in comparisons and the
286 third for additional info regarding the shrunk loop vertex."""
287
288 if isinstance(vertex,base_objects.ContractedVertex):
289
# A contracted (shrunk loop) vertex also exposes its loop tag in
# the compared part, and its external PDGs as extra info.
290 return ((vertex.get('id'),vertex.get('loop_tag')),(),
291 {'PDGs':vertex.get('PDGs')})
292 else:
293 return ((vertex.get('id'),()),(),{})
294
295 @staticmethod
297 """Returns the default vertex flip: just the new_vertex"""
298 return new_vertex
299
301 """Equal if same tag"""
302 if type(self) != type(other):
303 return False
304 return self.tag == other.tag
305
307 return not self.__eq__(other)
308
311
313 return self.tag < other.tag
314
316 return self.tag > other.tag
317
318 __repr__ = __str__
319
321 """Chain link for a DiagramTag. A link is a tuple + vertex id + depth,
322 with a comparison operator defined"""
323
def __init__(self, objects, vertex_id = None):
    """Initialize, either with a tuple of DiagramTagChainLinks and a
    vertex_id (defined by DiagramTag.vertex_id_from_vertex), or with
    an external leg object (end link) defined by
    DiagramTag.link_from_leg."""

    if vertex_id == None:
        # End link: wrap the external-leg data directly.
        self.links = tuple(objects)
        self.vertex_id = (0,)
        self.depth = 0
        self.end_link = True
        return

    # Internal link: keep the sub-links sorted (deepest first) so
    # that the resulting tag is canonical.
    self.links = tuple(sorted(list(tuple(objects)), reverse=True))
    self.vertex_id = vertex_id
    # Depth: total depth of the sub-links plus the number of
    # traversed vertices (at least one).
    self.depth = sum([l.depth for l in self.links],
                     max(1, len(self.links) - 1))
    self.end_link = False
345
347 """Get the permutation of external numbers (assumed to be the
348 second entry in the end link tuples)"""
349
350 if self.end_link:
351 return [self.links[0][1]]
352
353 return sum([l.get_external_numbers() for l in self.links], [])
354
# NOTE(review): the `def __lt__(self, other)` header line is missing
# from this listing (source line 355 was stripped).
356 """Compare self with other in the order:
357 1. depth 2. len(links) 3. vertex id 4. measure of links"""
358
# Equal links never compare as smaller.
359 if self == other:
360 return False
361
# 1. Shallower chains sort first.
362 if self.depth != other.depth:
363 return self.depth < other.depth
364
# 2. Then fewer sub-links.
365 if len(self.links) != len(other.links):
366 return len(self.links) < len(other.links)
367
# 3. Then the comparable (first) entry of the vertex id.
368 if self.vertex_id[0] != other.vertex_id[0]:
369 return self.vertex_id[0] < other.vertex_id[0]
370
# 4. Finally a lexicographic comparison of the sorted sub-links.
371 for i, link in enumerate(self.links):
372 if i > len(other.links) - 1:
373 return False
374 if link != other.links[i]:
375 return link < other.links[i]
376
378 return self != other and not self.__lt__(other)
379
381 """For end link,
382 consider equal if self.links[0][0] == other.links[0][0],
383 i.e., ignore the leg number (in links[0][1])."""
384
385 if self.end_link and other.end_link and self.depth == other.depth \
386 and self.vertex_id == other.vertex_id:
387 return self.links[0][0] == other.links[0][0]
388
389 return self.end_link == other.end_link and self.depth == other.depth \
390 and self.vertex_id[0] == other.vertex_id[0] \
391 and self.links == other.links
392
394 return not self.__eq__(other)
395
396
398 if self.end_link:
399 return str(self.links)
400 return "%s, %s; %d" % (str(self.links),
401 str(self.vertex_id),
402 self.depth)
403
404 __repr__ = __str__
405
406
407
408
409 -class Amplitude(base_objects.PhysicsObject):
410 """Amplitude: process + list of diagrams (ordered)
411 Initialize with a process, then call generate_diagrams() to
412 generate the diagrams for the amplitude
413 """
414
416 """Default values for all properties"""
417
418 self['process'] = base_objects.Process()
419 self['diagrams'] = None
420
421
422 self['has_mirror_process'] = False
423
436
437 - def filter(self, name, value):
453
454 - def get(self, name):
463
464
465
467 """Return diagram property names as a nicely sorted list."""
468
469 return ['process', 'diagrams', 'has_mirror_process']
470
472 """Returns number of diagrams for this amplitude"""
473 return len(self.get('diagrams'))
474
476 """Return an AmplitudeList with just this amplitude.
477 Needed for DecayChainAmplitude."""
478
479 return AmplitudeList([self])
480
482 """Returns a nicely formatted string of the amplitude content."""
483 return self.get('process').nice_string(indent) + "\n" + \
484 self.get('diagrams').nice_string(indent)
485
487 """Returns a nicely formatted string of the amplitude process."""
488 return self.get('process').nice_string(indent)
489
491 """Returns the number of initial state particles in the process."""
492 return self.get('process').get_ninitial()
493
495 """ Returns wether this amplitude has a loop process."""
496
497 return self.get('process').get('perturbation_couplings')
498
500 """Generate diagrams. Algorithm:
501
502 1. Define interaction dictionaries:
503 * 2->0 (identity), 3->0, 4->0, ... , maxlegs->0
504 * 2 -> 1, 3 -> 1, ..., maxlegs-1 -> 1
505
506 2. Set flag from_group=true for all external particles.
507 Flip particle/anti particle for incoming particles.
508
509 3. If there is a dictionary n->0 with n=number of external
510 particles, create if possible the combination [(1,2,3,4,...)]
511 with *at least two* from_group==true. This will give a
512 finished (set of) diagram(s) (done by reduce_leglist)
513
514 4. Create all allowed groupings of particles with at least one
515 from_group==true (according to dictionaries n->1):
516 [(1,2),3,4...],[1,(2,3),4,...],...,
517 [(1,2),(3,4),...],...,[(1,2,3),4,...],...
518 (done by combine_legs)
519
520 5. Replace each group with a (list of) new particle(s) with number
521 n = min(group numbers). Set from_group true for these
522 particles and false for all other particles. Store vertex info.
523 (done by merge_comb_legs)
524
525 6. Stop algorithm when at most 2 particles remain.
526 Return all diagrams (lists of vertices).
527
528 7. Repeat from 3 (recursion done by reduce_leglist)
529
530 8. Replace final p=p vertex
531
532 Be aware that the resulting vertices have all particles outgoing,
533 so need to flip for incoming particles when used.
534
535 SPECIAL CASE: For A>BC... processes which are legs in decay
536 chains, we need to ensure that BC... combine first, giving A=A
537 as a final vertex. This case is defined by the Process
538 property is_decay_chain = True.
539 This function can also be called by the generate_diagram function
540 of LoopAmplitudes, in which case the generated diagrams here must not
541 be directly assigned to the 'diagrams' attributed but returned as a
542 DiagramList by the function. This is controlled by the argument
543 returndiag.
544 """
545
546 process = self.get('process')
547 model = process.get('model')
548 legs = process.get('legs')
549
550 for key in process.get('overall_orders').keys():
551 try:
552 process.get('orders')[key] = \
553 min(process.get('orders')[key],
554 process.get('overall_orders')[key])
555 except KeyError:
556 process.get('orders')[key] = process.get('overall_orders')[key]
557
558 assert model.get('particles'), \
559 "particles are missing in model: %s" % model.get('particles')
560
561 assert model.get('interactions'), \
562 "interactions are missing in model"
563
564
565 res = base_objects.DiagramList()
566
567 if len(filter(lambda leg: model.get('particle_dict')[\
568 leg.get('id')].is_fermion(), legs)) % 2 == 1:
569 if not returndiag:
570 self['diagrams'] = res
571 raise InvalidCmd, 'The number of fermion is odd'
572 else:
573 return False, res
574
575
576
577 if not model.get('got_majoranas') and \
578 len(filter(lambda leg: leg.is_incoming_fermion(model), legs)) != \
579 len(filter(lambda leg: leg.is_outgoing_fermion(model), legs)):
580 if not returndiag:
581 self['diagrams'] = res
582 raise InvalidCmd, 'The number of of incoming/outcoming fermions are different'
583 else:
584 return False, res
585
586
587
588 for charge in model.get('conserved_charge'):
589 total = 0
590 for leg in legs:
591 part = model.get('particle_dict')[leg.get('id')]
592 try:
593 value = part.get(charge)
594 except (AttributeError, base_objects.PhysicsObject.PhysicsObjectError):
595 try:
596 value = getattr(part, charge)
597 except AttributeError:
598 value = 0
599
600 if (leg.get('id') != part['pdg_code']) != leg['state']:
601 total -= value
602 else:
603 total += value
604
605 if abs(total) > 1e-10:
606 if not returndiag:
607 self['diagrams'] = res
608 raise InvalidCmd, 'No %s conservation for this process ' % charge
609 return res
610 else:
611 raise InvalidCmd, 'No %s conservation for this process ' % charge
612 return res, res
613
614 if not returndiag:
615 logger.info("Trying %s " % process.nice_string().replace('Process', 'process'))
616
617
618 for i in range(0, len(process.get('legs'))):
619
620 leg = copy.copy(process.get('legs')[i])
621 process.get('legs')[i] = leg
622 if leg.get('number') == 0:
623 leg.set('number', i + 1)
624
625
626
627 leglist = self.copy_leglist(process.get('legs'))
628
629 for leg in leglist:
630
631
632
633 leg.set('from_group', True)
634
635
636
637 if leg.get('state') == False:
638 part = model.get('particle_dict')[leg.get('id')]
639 leg.set('id', part.get_anti_pdg_code())
640
641
642
643 max_multi_to1 = max([len(key) for key in \
644 model.get('ref_dict_to1').keys()])
645
646
647
648
649
650
651
652
653 is_decay_proc = process.get_ninitial() == 1
654 if is_decay_proc:
655 part = model.get('particle_dict')[leglist[0].get('id')]
656
657
658
659 ref_dict_to0 = {(part.get_pdg_code(),part.get_anti_pdg_code()):[0],
660 (part.get_anti_pdg_code(),part.get_pdg_code()):[0]}
661
662
663 leglist[0].set('from_group', None)
664 reduced_leglist = self.reduce_leglist(leglist,
665 max_multi_to1,
666 ref_dict_to0,
667 is_decay_proc,
668 process.get('orders'))
669 else:
670 reduced_leglist = self.reduce_leglist(leglist,
671 max_multi_to1,
672 model.get('ref_dict_to0'),
673 is_decay_proc,
674 process.get('orders'))
675
676
677
678
679 self.convert_dgleg_to_leg(reduced_leglist)
680
681 if reduced_leglist:
682 for vertex_list in reduced_leglist:
683 res.append(self.create_diagram(base_objects.VertexList(vertex_list)))
684
685
686
687 failed_crossing = not res
688
689
690
691
692
693
694 if process.get('required_s_channels') and \
695 process.get('required_s_channels')[0]:
696
697
698 lastvx = -1
699
700
701
702 if is_decay_proc: lastvx = -2
703 ninitial = len(filter(lambda leg: leg.get('state') == False,
704 process.get('legs')))
705
706 old_res = res
707 res = base_objects.DiagramList()
708 for id_list in process.get('required_s_channels'):
709 res_diags = filter(lambda diagram: \
710 all([req_s_channel in \
711 [vertex.get_s_channel_id(\
712 process.get('model'), ninitial) \
713 for vertex in diagram.get('vertices')[:lastvx]] \
714 for req_s_channel in \
715 id_list]), old_res)
716
717 res.extend([diag for diag in res_diags if diag not in res])
718
719
720
721
722
723 if process.get('forbidden_s_channels'):
724 ninitial = len(filter(lambda leg: leg.get('state') == False,
725 process.get('legs')))
726 if ninitial == 2:
727 res = base_objects.DiagramList(\
728 filter(lambda diagram: \
729 not any([vertex.get_s_channel_id(\
730 process.get('model'), ninitial) \
731 in process.get('forbidden_s_channels')
732 for vertex in diagram.get('vertices')[:-1]]),
733 res))
734 else:
735
736
737 newres= []
738 for diagram in res:
739 leg1 = 1
740
741
742
743 vertex = diagram.get('vertices')[-1]
744 if any([l['number'] ==1 for l in vertex.get('legs')]):
745 leg1 = [l['number'] for l in vertex.get('legs') if l['number'] !=1][0]
746 to_loop = range(len(diagram.get('vertices'))-1)
747 if leg1 >1:
748 to_loop.reverse()
749 for i in to_loop:
750 vertex = diagram.get('vertices')[i]
751 if leg1:
752 if any([l['number'] ==leg1 for l in vertex.get('legs')]):
753 leg1 = 0
754 continue
755 if vertex.get_s_channel_id(process.get('model'), ninitial)\
756 in process.get('forbidden_s_channels'):
757 break
758 else:
759 newres.append(diagram)
760 res = base_objects.DiagramList(newres)
761
762
763
764
765 if process.get('forbidden_onsh_s_channels'):
766 ninitial = len(filter(lambda leg: leg.get('state') == False,
767 process.get('legs')))
768
769 verts = base_objects.VertexList(sum([[vertex for vertex \
770 in diagram.get('vertices')[:-1]
771 if vertex.get_s_channel_id(\
772 process.get('model'), ninitial) \
773 in process.get('forbidden_onsh_s_channels')] \
774 for diagram in res], []))
775 for vert in verts:
776
777 newleg = copy.copy(vert.get('legs').pop(-1))
778 newleg.set('onshell', False)
779 vert.get('legs').append(newleg)
780
781
782 for diagram in res:
783 diagram.calculate_orders(model)
784
785
786
787
788
789
790
791
792 if not returndiag and len(res)>0:
793 res = self.apply_squared_order_constraints(res)
794
795
796 if not process.get('is_decay_chain'):
797 for diagram in res:
798 vertices = diagram.get('vertices')
799 if len(vertices) > 1 and vertices[-1].get('id') == 0:
800
801
802
803
804 vertices = copy.copy(vertices)
805 lastvx = vertices.pop()
806 nexttolastvertex = copy.copy(vertices.pop())
807 legs = copy.copy(nexttolastvertex.get('legs'))
808 ntlnumber = legs[-1].get('number')
809 lastleg = filter(lambda leg: leg.get('number') != ntlnumber,
810 lastvx.get('legs'))[0]
811
812 if lastleg.get('onshell') == False:
813 lastleg.set('onshell', None)
814
815 legs[-1] = lastleg
816 nexttolastvertex.set('legs', legs)
817 vertices.append(nexttolastvertex)
818 diagram.set('vertices', vertices)
819
820 if res and not returndiag:
821 logger.info("Process has %d diagrams" % len(res))
822
823
824 self.trim_diagrams(diaglist=res)
825
826
827 pertur = 'QCD'
828 if self.get('process')['perturbation_couplings']:
829 pertur = sorted(self.get('process')['perturbation_couplings'])[0]
830 self.get('process').get('legs').sort(pert=pertur)
831
832
833 if not returndiag:
834 self['diagrams'] = res
835 return not failed_crossing
836 else:
837 return not failed_crossing, res
838
840 """Applies the user specified squared order constraints on the diagram
841 list in argument."""
842
843 res = copy.copy(diag_list)
844
845
846
847 for name, (value, operator) in self['process'].get('constrained_orders').items():
848 res.filter_constrained_orders(name, value, operator)
849
850
851
852
853 while True:
854 new_res = res.apply_positive_sq_orders(res,
855 self['process'].get('squared_orders'),
856 self['process']['sqorders_types'])
857
858 if len(res)==len(new_res):
859 break
860 elif (len(new_res)>len(res)):
861 raise MadGraph5Error(
862 'Inconsistency in function apply_squared_order_constraints().')
863
864 res = new_res
865
866
867
868
869 neg_orders = [(order, value) for order, value in \
870 self['process'].get('squared_orders').items() if value<0]
871 if len(neg_orders)==1:
872 neg_order, neg_value = neg_orders[0]
873
874 res, target_order = res.apply_negative_sq_order(res, neg_order,\
875 neg_value, self['process']['sqorders_types'][neg_order])
876
877
878
879
880 self['process']['squared_orders'][neg_order]=target_order
881 elif len(neg_orders)>1:
882 raise InvalidCmd('At most one negative squared order constraint'+\
883 ' can be specified, not %s.'%str(neg_orders))
884
885 return res
886
888 """ Return a Diagram created from the vertex list. This function can be
889 overloaded by daughter classes."""
890 return base_objects.Diagram({'vertices':vertexlist})
891
893 """ In LoopAmplitude, it converts back all DGLoopLegs into Legs.
894 In Amplitude, there is nothing to do. """
895
896 return True
897
899 """ Simply returns a copy of the leg list. This function is
900 overloaded in LoopAmplitude so that a DGLoopLeg list is returned.
901 The DGLoopLeg has some additional parameters only useful during
902 loop diagram generation"""
903
904 return base_objects.LegList(\
905 [ copy.copy(leg) for leg in legs ])
906
def reduce_leglist(self, curr_leglist, max_multi_to1, ref_dict_to0,
                   is_decay_proc = False, coupling_orders = None):
    """Recursive function to reduce N LegList to N-1.
    For the algorithm, see the docstring of generate_diagrams.

    Returns a list of vertex-list chains (one entry per diagram found)
    or None if this branch cannot be completed.
    """

    # One entry per complete diagram found from this leg list.
    diagrams = []

    # A failed reduction upstream propagates as None.
    if curr_leglist is None:
        return None

    model = self.get('process').get('model')
    ref_dict_to1 = self.get('process').get('model').get('ref_dict_to1')

    # First check whether the current legs can combine directly to
    # nothing, i.e. close the diagram with a final vertex.
    if curr_leglist.can_combine_to_0(ref_dict_to0, is_decay_proc):

        # All interactions able to provide this final vertex.
        vertex_ids = self.get_combined_vertices(curr_leglist,
            copy.copy(ref_dict_to0[tuple(sorted([leg.get('id') for \
                                           leg in curr_leglist]))]))

        for vertex_id in vertex_ids:
            final_vertex = base_objects.Vertex({'legs': curr_leglist,
                                                'id': vertex_id})
            # Keep it only if the coupling order budget allows it.
            if self.reduce_orders(coupling_orders, model,
                                  [final_vertex.get('id')]) != False:
                diagrams.append([final_vertex])

    # With only two legs left we either closed the diagram above or
    # this branch is dead; no further grouping is possible.
    if len(curr_leglist) == 2:
        return diagrams if diagrams else None

    # All allowed ways to group legs into n -> 1 combinations...
    grouped = self.combine_legs(curr_leglist, ref_dict_to1,
                                max_multi_to1)

    # ...and the corresponding (reduced leg list, vertices) pairs.
    leg_vertex_list = self.merge_comb_legs(grouped, ref_dict_to1)

    for reduced_legs, step_vertices in leg_vertex_list:

        # Drop groupings which produce a forbidden particle.
        if self.get('process').get('forbidden_particles') and \
           any([abs(vertex.get('legs')[-1].get('id')) in \
                self.get('process').get('forbidden_particles') \
                for vertex in step_vertices]):
            continue

        # Subtract the coupling orders used up by this step.
        new_coupling_orders = self.reduce_orders(
            coupling_orders, model,
            [vertex.get('id') for vertex in step_vertices])
        if new_coupling_orders == False:
            # Some coupling order constraint was exceeded.
            continue

        # Recurse on the reduced leg list.
        completions = self.reduce_leglist(reduced_legs,
                                          max_multi_to1,
                                          ref_dict_to0,
                                          is_decay_proc,
                                          new_coupling_orders)

        if completions:
            # Attach this step's vertices to every completion.
            diagrams.extend(expand_list_list([list(step_vertices),
                                              completions]))

    return diagrams
995
def reduce_orders(self, coupling_orders, model, vertex_id_list):
    """Subtract the coupling orders of the given vertices from
    coupling_orders.

    Return False as soon as any non-negative order constraint would
    drop below zero, otherwise return the updated order dictionary.
    If coupling_orders is empty/None, return None (which counts as
    success). The special order WEIGHTED corresponds to the sum of
    order hierarchies for the couplings. Negative constraints are
    ignored here, since they can only be applied after generation.
    """

    if not coupling_orders:
        return None

    remaining = copy.copy(coupling_orders)
    # Renamed from `id`, which shadowed the builtin of the same name.
    for vertex_id in vertex_id_list:

        # Vertex id 0 is the identity vertex; it carries no couplings.
        if not vertex_id:
            continue
        interaction = model.get("interaction_dict")[vertex_id]
        orders = interaction.get('orders')
        for coupling in orders.keys():

            # Negative entries encode constraints that cannot be
            # applied on the fly; skip them here.
            if coupling not in remaining or remaining[coupling] < 0:
                continue
            remaining[coupling] -= orders[coupling]
            if remaining[coupling] < 0:
                # This coupling order budget is exhausted.
                return False

        if 'WEIGHTED' in remaining and remaining['WEIGHTED'] >= 0:
            hierarchy = model.get('order_hierarchy')
            weight = sum([hierarchy[c] * n for (c, n) in orders.items()])
            remaining['WEIGHTED'] -= weight
            if remaining['WEIGHTED'] < 0:
                # The weighted order budget is exhausted.
                return False

    return remaining
1037
def combine_legs(self, list_legs, ref_dict_to1, max_multi_to1):
    """Recursively build all ways of grouping legs into allowed
    n -> 1 combinations.

    Take a list of legs as input, together with the reference
    dictionary n-1 -> 1, and output a list of lists mixing tuples of
    Legs (allowed combinations) and plain Legs (the rest). Algorithm:

    1. Get all n-combinations from list [123456]: [12],..,[23],..,[123],..

    2. For each valid combination, say [34]:

       a. Append [12[34]56] to the result array.

       b. Split [123456] at the index of the first element of the
          combination, i.e. [12] and [456], and subtract the
          combination from the second half, i.e. [456]-[34]=[56].
          Recurse on that remainder.

    3. Glue (first half) + combination + (each recursive result)
       and append to the result array, e.g. [12[34][56]].

    4. Return the accumulated results. Example, if [13] and [45] are
       valid combinations: [[[13]2456],[[13]2[45]6],[123[45]6]]
    """

    combined = []

    for group_size in range(2, max_multi_to1 + 1):

        # No grouping of this size (or larger) is possible any more.
        if group_size > len(list_legs):
            return combined

        for group in itertools.combinations(list_legs, group_size):

            # Only keep groups that correspond to some interaction.
            if not base_objects.LegList(group).can_combine_to_1(ref_dict_to1):
                continue

            # Position of the first combined leg in the original list;
            # computed once instead of three times.
            anchor = list_legs.index(group[0])

            # (a) The list with just this group combined, inserted
            # at the position of its first leg.
            with_group = copy.copy(list_legs)
            for leg in group:
                with_group.remove(leg)
            with_group.insert(anchor, group)
            combined.append(with_group)

            # (b) Split around the first leg of the group and remove
            # the remaining group members from the tail.
            head = list_legs[0:anchor]
            tail = list_legs[anchor + 1:]
            for leg in group[1:]:
                tail.remove(leg)

            head.append(group)

            # Combine the tail recursively and glue each result
            # onto head + group.
            for tail_comb in self.combine_legs(tail,
                                               ref_dict_to1,
                                               max_multi_to1):
                full_list = copy.copy(head)
                full_list.extend(tail_comb)
                combined.append(full_list)

    return combined
1111
1112
1114 """Takes a list of allowed leg combinations as an input and returns
1115 a set of lists where combinations have been properly replaced
1116 (one list per element in the ref_dict, so that all possible intermediate
1117 particles are included). For each list, give the list of vertices
1118 corresponding to the executed merging, group the two as a tuple.
1119 """
1120
1121 res = []
1122
1123 for comb_list in comb_lists:
1124
1125 reduced_list = []
1126 vertex_list = []
1127
1128 for entry in comb_list:
1129
1130
1131 if isinstance(entry, tuple):
1132
1133
1134
1135 leg_vert_ids = copy.copy(ref_dict_to1[\
1136 tuple(sorted([leg.get('id') for leg in entry]))])
1137
1138
1139 number = min([leg.get('number') for leg in entry])
1140
1141
1142 if len(filter(lambda leg: leg.get('state') == False,
1143 entry)) == 1:
1144 state = False
1145 else:
1146 state = True
1147
1148
1149
1150
1151
1152 new_leg_vert_ids = []
1153 if leg_vert_ids:
1154 new_leg_vert_ids = self.get_combined_legs(entry,
1155 leg_vert_ids,
1156 number,
1157 state)
1158
1159 reduced_list.append([l[0] for l in new_leg_vert_ids])
1160
1161
1162
1163
1164
1165 vlist = base_objects.VertexList()
1166 for (myleg, vert_id) in new_leg_vert_ids:
1167
1168 myleglist = base_objects.LegList(list(entry))
1169
1170 myleglist.append(myleg)
1171
1172 vlist.append(base_objects.Vertex(
1173 {'legs':myleglist,
1174 'id':vert_id}))
1175
1176 vertex_list.append(vlist)
1177
1178
1179
1180 else:
1181 cp_entry = copy.copy(entry)
1182
1183
1184
1185 if cp_entry.get('from_group') != None:
1186 cp_entry.set('from_group', False)
1187 reduced_list.append(cp_entry)
1188
1189
1190 flat_red_lists = expand_list(reduced_list)
1191 flat_vx_lists = expand_list(vertex_list)
1192
1193
1194 for i in range(0, len(flat_vx_lists)):
1195 res.append((base_objects.LegList(flat_red_lists[i]), \
1196 base_objects.VertexList(flat_vx_lists[i])))
1197
1198 return res
1199
1201 """Create a set of new legs from the info given. This can be
1202 overloaded by daughter classes."""
1203
1204 mylegs = [(base_objects.Leg({'id':leg_id,
1205 'number':number,
1206 'state':state,
1207 'from_group':True}),
1208 vert_id)\
1209 for leg_id, vert_id in leg_vert_ids]
1210
1211 return mylegs
1212
1214 """Allow for selection of vertex ids. This can be
1215 overloaded by daughter classes."""
1216
1217 return vert_ids
1218
# NOTE(review): the `def trim_diagrams(...)` header line is missing
# from this listing (source line 1219 was stripped); the body below
# reads `decay_ids` and `diaglist` as parameters.
1220 """Reduce the number of legs and vertices used in memory.
1221 When called by a diagram generation initiated by LoopAmplitude,
1222 this function should not trim the diagrams in the attribute 'diagrams'
1223 but rather a given list in the 'diaglist' argument."""
1224
# Pools of already-seen legs/vertices, so identical objects are
# shared between diagrams instead of duplicated.
1225 legs = []
1226 vertices = []
1227
1228 if diaglist is None:
1229 diaglist=self.get('diagrams')
1230
1231
# Flag process legs that will be decayed downstream as onshell.
1232 process = self.get('process')
1233 for leg in process.get('legs'):
1234 if leg.get('state') and leg.get('id') in decay_ids:
1235 leg.set('onshell', True)
1236
1237 for diagram in diaglist:
1238
# External leg numbers already handled in this diagram: only the
# first occurrence of a decaying leg gets the onshell flag.
1239 leg_external = set()
1240 for ivx, vertex in enumerate(diagram.get('vertices')):
1241 for ileg, leg in enumerate(vertex.get('legs')):
1242
1243 if leg.get('state') and leg.get('id') in decay_ids and \
1244 leg.get('number') not in leg_external:
1245
1246
# Copy before mutating: the leg object may be shared.
1247 leg = copy.copy(leg)
1248 leg.set('onshell', True)
1249 try:
1250 index = legs.index(leg)
1251 except ValueError:
# First time this leg is seen: keep it and pool it.
1252 vertex.get('legs')[ileg] = leg
1253 legs.append(leg)
1254 else:
# An identical leg was pooled before: reuse that object.
1255 vertex.get('legs')[ileg] = legs[index]
1256 leg_external.add(leg.get('number'))
1257 try:
1258 index = vertices.index(vertex)
# An identical vertex was pooled before: share it.
1259 diagram.get('vertices')[ivx] = vertices[index]
1260 except ValueError:
1261 vertices.append(vertex)
1262
1263
1264
1265
1266 -class AmplitudeList(base_objects.PhysicsObjectList):
1267 """List of Amplitude objects
1268 """
1269
1271 """ Check the content of all processes of the amplitudes in this list to
1272 see if there is any which defines perturbation couplings. """
1273
1274 for amp in self:
1275 if amp.has_loop_process():
1276 return True
1277
1279 """Test if object obj is a valid Amplitude for the list."""
1280
1281 return isinstance(obj, Amplitude)
1282
1287 """A list of amplitudes + a list of decay chain amplitude lists;
1288 corresponding to a ProcessDefinition with a list of decay chains
1289 """
1290
1296
1297 - def __init__(self, argument = None, collect_mirror_procs = False,
1298 ignore_six_quark_processes = False, loop_filter=None):
1299 """Allow initialization with Process and with ProcessDefinition"""
1300
1301 if isinstance(argument, base_objects.Process):
1302 super(DecayChainAmplitude, self).__init__()
1303 from madgraph.loop.loop_diagram_generation import LoopMultiProcess
1304 if argument['perturbation_couplings']:
1305 MultiProcessClass=LoopMultiProcess
1306 else:
1307 MultiProcessClass=MultiProcess
1308 if isinstance(argument, base_objects.ProcessDefinition):
1309 self['amplitudes'].extend(\
1310 MultiProcessClass.generate_multi_amplitudes(argument,
1311 collect_mirror_procs,
1312 ignore_six_quark_processes,
1313 loop_filter=loop_filter))
1314 else:
1315 self['amplitudes'].append(\
1316 MultiProcessClass.get_amplitude_from_proc(argument,
1317 loop_filter=loop_filter))
1318
1319
1320 process = copy.copy(self.get('amplitudes')[0].get('process'))
1321 process.set('decay_chains', base_objects.ProcessList())
1322 self['amplitudes'][0].set('process', process)
1323
1324 for process in argument.get('decay_chains'):
1325 if process.get('perturbation_couplings'):
1326 raise MadGraph5Error,\
1327 "Decay processes can not be perturbed"
1328 process.set('overall_orders', argument.get('overall_orders'))
1329 if not process.get('is_decay_chain'):
1330 process.set('is_decay_chain',True)
1331 if not process.get_ninitial() == 1:
1332 raise InvalidCmd,\
1333 "Decay chain process must have exactly one" + \
1334 " incoming particle"
1335 self['decay_chains'].append(\
1336 DecayChainAmplitude(process, collect_mirror_procs,
1337 ignore_six_quark_processes))
1338
1339
1340 decay_ids = sum([[a.get('process').get('legs')[0].get('id') \
1341 for a in dec.get('amplitudes')] for dec in \
1342 self['decay_chains']], [])
1343 decay_ids = set(decay_ids)
1344 for amp in self['amplitudes']:
1345 amp.trim_diagrams(decay_ids)
1346
1347
1348 for amp in self['amplitudes']:
1349 for l in amp.get('process').get('legs'):
1350 if l.get('id') in decay_ids:
1351 decay_ids.remove(l.get('id'))
1352
1353 if decay_ids:
1354 model = amp.get('process').get('model')
1355 names = [model.get_particle(id).get('name') for id in decay_ids]
1356
1357 logger.warning(
1358 "$RED Decay without corresponding particle in core process found.\n" + \
1359 "Decay information for particle(s) %s is discarded.\n" % ','.join(names) + \
1360 "Please check your process definition carefully. \n" + \
1361 "This warning usually means that you forgot parentheses in presence of subdecay.\n" + \
1362 "Example of correct syntax: p p > t t~, ( t > w+ b, w+ > l+ vl)")
1363
1364
1365 for dc in reversed(self['decay_chains']):
1366 for a in reversed(dc.get('amplitudes')):
1367
1368 if a.get('process').get('legs')[0].get('id') in decay_ids:
1369 dc.get('amplitudes').remove(a)
1370 if not dc.get('amplitudes'):
1371
1372 self['decay_chains'].remove(dc)
1373
1374
1375
1376 bad_procs = []
1377 for dc in self['decay_chains']:
1378 for amp in dc.get('amplitudes'):
1379 legs = amp.get('process').get('legs')
1380 fs_parts = [abs(l.get('id')) for l in legs if
1381 l.get('state')]
1382 is_part = [l.get('id') for l in legs if not
1383 l.get('state')][0]
1384 if abs(is_part) in fs_parts:
1385 bad_procs.append(amp.get('process'))
1386
1387 if bad_procs:
1388 logger.warning(
1389 "$RED Decay(s) with particle decaying to itself:\n" + \
1390 '\n'.join([p.nice_string() for p in bad_procs]) + \
1391 "\nPlease check your process definition carefully. \n")
1392
1393
1394 elif argument != None:
1395
1396 super(DecayChainAmplitude, self).__init__(argument)
1397 else:
1398
1399 super(DecayChainAmplitude, self).__init__()
1400
1401 - def filter(self, name, value):
1402 """Filter for valid amplitude property values."""
1403
1404 if name == 'amplitudes':
1405 if not isinstance(value, AmplitudeList):
1406 raise self.PhysicsObjectError, \
1407 "%s is not a valid AmplitudeList" % str(value)
1408 if name == 'decay_chains':
1409 if not isinstance(value, DecayChainAmplitudeList):
1410 raise self.PhysicsObjectError, \
1411 "%s is not a valid DecayChainAmplitudeList object" % \
1412 str(value)
1413 return True
1414
1416 """Return diagram property names as a nicely sorted list."""
1417
1418 return ['amplitudes', 'decay_chains']
1419
1420
1421
1423 """Returns number of diagrams for this amplitude"""
1424 return sum(len(a.get('diagrams')) for a in self.get('amplitudes')) \
1425 + sum(d.get_number_of_diagrams() for d in \
1426 self.get('decay_chains'))
1427
1429 """Returns a nicely formatted string of the amplitude content."""
1430 mystr = ""
1431 for amplitude in self.get('amplitudes'):
1432 mystr = mystr + amplitude.nice_string(indent) + "\n"
1433
1434 if self.get('decay_chains'):
1435 mystr = mystr + " " * indent + "Decays:\n"
1436 for dec in self.get('decay_chains'):
1437 mystr = mystr + dec.nice_string(indent + 2) + "\n"
1438
1439 return mystr[:-1]
1440
1442 """Returns a nicely formatted string of the amplitude processes."""
1443 mystr = ""
1444 for amplitude in self.get('amplitudes'):
1445 mystr = mystr + amplitude.nice_string_processes(indent) + "\n"
1446
1447 if self.get('decay_chains'):
1448 mystr = mystr + " " * indent + "Decays:\n"
1449 for dec in self.get('decay_chains'):
1450 mystr = mystr + dec.nice_string_processes(indent + 2) + "\n"
1451
1452 return mystr[:-1]
1453
1455 """Returns the number of initial state particles in the process."""
1456 return self.get('amplitudes')[0].get('process').get_ninitial()
1457
1459 """Returns a set of all particle ids for which a decay is defined"""
1460
1461 decay_ids = []
1462
1463
1464 for amp in sum([dc.get('amplitudes') for dc \
1465 in self['decay_chains']], []):
1466
1467 decay_ids.append(amp.get('process').get_initial_ids()[0])
1468
1469
1470 return list(set(decay_ids))
1471
1473 """ Returns wether this amplitude has a loop process."""
1474 return self['amplitudes'].has_any_loop_process()
1475
1477 """Recursive function to extract all amplitudes for this process"""
1478
1479 amplitudes = AmplitudeList()
1480
1481 amplitudes.extend(self.get('amplitudes'))
1482 for decay in self.get('decay_chains'):
1483 amplitudes.extend(decay.get_amplitudes())
1484
1485 return amplitudes
1486
class DecayChainAmplitudeList(base_objects.PhysicsObjectList):
    """List of DecayChainAmplitude objects."""

    def is_valid_element(self, obj):
        """Test if object obj is a valid DecayChainAmplitude for the list."""
        return isinstance(obj, DecayChainAmplitude)
1500
1501
1502
1503
1504 -class MultiProcess(base_objects.PhysicsObject):
1505 """MultiProcess: list of process definitions
1506 list of processes (after cleaning)
1507 list of amplitudes (after generation)
1508 """
1509
1511 """Default values for all properties"""
1512
1513 self['process_definitions'] = base_objects.ProcessDefinitionList()
1514
1515
1516
1517 self['amplitudes'] = AmplitudeList()
1518
1519 self['collect_mirror_procs'] = False
1520
1521
1522 self['ignore_six_quark_processes'] = []
1523
1524
1525 self['use_numerical'] = False
1526
1527 - def __init__(self, argument=None, collect_mirror_procs = False,
1528 ignore_six_quark_processes = [], optimize=False,
1529 loop_filter=None):
1556
1557
1558 - def filter(self, name, value):
1559 """Filter for valid process property values."""
1560
1561 if name == 'process_definitions':
1562 if not isinstance(value, base_objects.ProcessDefinitionList):
1563 raise self.PhysicsObjectError, \
1564 "%s is not a valid ProcessDefinitionList object" % str(value)
1565
1566 if name == 'amplitudes':
1567 if not isinstance(value, AmplitudeList):
1568 raise self.PhysicsObjectError, \
1569 "%s is not a valid AmplitudeList object" % str(value)
1570
1571 if name in ['collect_mirror_procs']:
1572 if not isinstance(value, bool):
1573 raise self.PhysicsObjectError, \
1574 "%s is not a valid boolean" % str(value)
1575
1576 if name == 'ignore_six_quark_processes':
1577 if not isinstance(value, list):
1578 raise self.PhysicsObjectError, \
1579 "%s is not a valid list" % str(value)
1580
1581 return True
1582
1583 - def get(self, name):
1584 """Get the value of the property name."""
1585
1586 if (name == 'amplitudes') and not self[name]:
1587 for process_def in self.get('process_definitions'):
1588 if process_def.get('decay_chains'):
1589
1590
1591 self['amplitudes'].append(\
1592 DecayChainAmplitude(process_def,
1593 self.get('collect_mirror_procs'),
1594 self.get('ignore_six_quark_processes')))
1595 else:
1596 self['amplitudes'].extend(\
1597 self.generate_multi_amplitudes(process_def,
1598 self.get('collect_mirror_procs'),
1599 self.get('ignore_six_quark_processes'),
1600 self['use_numerical'],
1601 loop_filter=self['loop_filter']))
1602
1603 return MultiProcess.__bases__[0].get(self, name)
1604
1606 """Return process property names as a nicely sorted list."""
1607
1608 return ['process_definitions', 'amplitudes']
1609
1610 @classmethod
1611 - def generate_multi_amplitudes(cls,process_definition,
1612 collect_mirror_procs = False,
1613 ignore_six_quark_processes = [],
1614 use_numerical=False,
1615 loop_filter=None):
1616 """Generate amplitudes in a semi-efficient way.
1617 Make use of crossing symmetry for processes that fail diagram
1618 generation, but not for processes that succeed diagram
1619 generation. Doing so will risk making it impossible to
1620 identify processes with identical amplitudes.
1621 """
1622 assert isinstance(process_definition, base_objects.ProcessDefinition), \
1623 "%s not valid ProcessDefinition object" % \
1624 repr(process_definition)
1625
1626
1627 process_definition.set('orders', MultiProcess.\
1628 find_optimal_process_orders(process_definition))
1629
1630 process_definition.check_expansion_orders()
1631
1632 processes = base_objects.ProcessList()
1633 amplitudes = AmplitudeList()
1634
1635
1636
1637 failed_procs = []
1638 success_procs = []
1639
1640 non_permuted_procs = []
1641
1642 permutations = []
1643
1644
1645
1646 model = process_definition['model']
1647
1648 isids = [leg['ids'] for leg in process_definition['legs'] \
1649 if leg['state'] == False]
1650 fsids = [leg['ids'] for leg in process_definition['legs'] \
1651 if leg['state'] == True]
1652
1653
1654 for prod in itertools.product(*isids):
1655 islegs = [\
1656 base_objects.Leg({'id':id, 'state': False}) \
1657 for id in prod]
1658
1659
1660
1661
1662 red_fsidlist = []
1663
1664 for prod in itertools.product(*fsids):
1665
1666
1667 if tuple(sorted(prod)) in red_fsidlist:
1668 continue
1669
1670 red_fsidlist.append(tuple(sorted(prod)));
1671
1672
1673 leg_list = [copy.copy(leg) for leg in islegs]
1674
1675 leg_list.extend([\
1676 base_objects.Leg({'id':id, 'state': True}) \
1677 for id in prod])
1678
1679 legs = base_objects.LegList(leg_list)
1680
1681
1682 sorted_legs = sorted([(l,i+1) for (i,l) in \
1683 enumerate(legs.get_outgoing_id_list(model))])
1684 permutation = [l[1] for l in sorted_legs]
1685 sorted_legs = array.array('i', [l[0] for l in sorted_legs])
1686
1687
1688 if ignore_six_quark_processes and \
1689 len([i for i in sorted_legs if abs(i) in \
1690 ignore_six_quark_processes]) >= 6:
1691 continue
1692
1693
1694
1695 if sorted_legs in failed_procs:
1696 continue
1697
1698
1699 if use_numerical:
1700
1701 initial_mass = abs(model['parameter_dict'][model.get_particle(legs[0].get('id')).get('mass')])
1702 if initial_mass == 0:
1703 continue
1704 for leg in legs[1:]:
1705 m = model['parameter_dict'][model.get_particle(leg.get('id')).get('mass')]
1706 initial_mass -= abs(m)
1707 if initial_mass.real <= 0:
1708 continue
1709
1710
1711 process = process_definition.get_process_with_legs(legs)
1712
1713 fast_proc = \
1714 array.array('i',[leg.get('id') for leg in legs])
1715 if collect_mirror_procs and \
1716 process_definition.get_ninitial() == 2:
1717
1718 mirror_proc = \
1719 array.array('i', [fast_proc[1], fast_proc[0]] + \
1720 list(fast_proc[2:]))
1721 try:
1722 mirror_amp = \
1723 amplitudes[non_permuted_procs.index(mirror_proc)]
1724 except Exception:
1725
1726 pass
1727 else:
1728
1729 mirror_amp.set('has_mirror_process', True)
1730 logger.info("Process %s added to mirror process %s" % \
1731 (process.base_string(),
1732 mirror_amp.get('process').base_string()))
1733 continue
1734
1735
1736
1737 if not process.get('required_s_channels') and \
1738 not process.get('forbidden_onsh_s_channels') and \
1739 not process.get('forbidden_s_channels') and \
1740 not process.get('is_decay_chain'):
1741 try:
1742 crossed_index = success_procs.index(sorted_legs)
1743
1744
1745
1746
1747 if 'loop_diagrams' in amplitudes[crossed_index]:
1748 raise ValueError
1749 except ValueError:
1750
1751 pass
1752 else:
1753
1754 amplitude = MultiProcess.cross_amplitude(\
1755 amplitudes[crossed_index],
1756 process,
1757 permutations[crossed_index],
1758 permutation)
1759 amplitudes.append(amplitude)
1760 success_procs.append(sorted_legs)
1761 permutations.append(permutation)
1762 non_permuted_procs.append(fast_proc)
1763 logger.info("Crossed process found for %s, reuse diagrams." % \
1764 process.base_string())
1765 continue
1766
1767
1768 amplitude = cls.get_amplitude_from_proc(process,
1769 loop_filter=loop_filter)
1770
1771 try:
1772 result = amplitude.generate_diagrams()
1773 except InvalidCmd as error:
1774 failed_procs.append(sorted_legs)
1775 else:
1776
1777 if amplitude.get('diagrams'):
1778 amplitudes.append(amplitude)
1779 success_procs.append(sorted_legs)
1780 permutations.append(permutation)
1781 non_permuted_procs.append(fast_proc)
1782 elif not result:
1783
1784 failed_procs.append(sorted_legs)
1785
1786
1787 if not amplitudes:
1788 if len(failed_procs) == 1 and 'error' in locals():
1789 raise error
1790 else:
1791 raise NoDiagramException, \
1792 "No amplitudes generated from process %s. Please enter a valid process" % \
1793 process_definition.nice_string()
1794
1795
1796
1797 return amplitudes
1798
1799 @classmethod
1801 """ Return the correct amplitude type according to the characteristics of
1802 the process proc. The only option that could be specified here is
1803 loop_filter and it is of course not relevant for a tree amplitude."""
1804
1805 return Amplitude({"process": proc})
1806
1807
1808 @staticmethod
1810 """Find the minimal WEIGHTED order for this set of processes.
1811
1812 The algorithm:
1813
1814 1) Check the coupling hierarchy of the model. Assign all
1815 particles to the different coupling hierarchies so that a
1816 particle is considered to be in the highest hierarchy (i.e.,
1817 with lowest value) where it has an interaction.
1818
1819 2) Pick out the legs in the multiprocess according to the
1820 highest hierarchy represented (so don't mix particles from
1821 different hierarchy classes in the same multiparticles!)
1822
1823 3) Find the starting maximum WEIGHTED order as the sum of the
1824 highest n-2 weighted orders
1825
1826 4) Pick out required s-channel particle hierarchies, and use
1827 the highest of the maximum WEIGHTED order from the legs and
1828 the minimum WEIGHTED order extracted from 2*s-channel
1829 hierarchys plus the n-2-2*(number of s-channels) lowest
1830 leg weighted orders.
1831
1832 5) Run process generation with the WEIGHTED order determined
1833 in 3)-4) - # final state gluons, with all gluons removed from
1834 the final state
1835
1836 6) If no process is found, increase WEIGHTED order by 1 and go
1837 back to 5), until we find a process which passes. Return that
1838 order.
1839
1840 7) Continue 5)-6) until we reach (n-2)*(highest hierarchy)-1.
1841 If still no process has passed, return
1842 WEIGHTED = (n-2)*(highest hierarchy)
1843 """
1844
1845 assert isinstance(process_definition, base_objects.ProcessDefinition), \
1846 "%s not valid ProcessDefinition object" % \
1847 repr(process_definition)
1848
1849 processes = base_objects.ProcessList()
1850 amplitudes = AmplitudeList()
1851
1852
1853 if process_definition.get('orders') or \
1854 process_definition.get('overall_orders') or \
1855 process_definition.get('NLO_mode')=='virt':
1856 return process_definition.get('orders')
1857
1858
1859 if process_definition.get_ninitial() == 1 and not \
1860 process_definition.get('is_decay_chain'):
1861 return process_definition.get('orders')
1862
1863 logger.info("Checking for minimal orders which gives processes.")
1864 logger.info("Please specify coupling orders to bypass this step.")
1865
1866
1867 max_order_now, particles, hierarchy = \
1868 process_definition.get_minimum_WEIGHTED()
1869 coupling = 'WEIGHTED'
1870
1871 model = process_definition.get('model')
1872
1873
1874 isids = [leg['ids'] for leg in \
1875 filter(lambda leg: leg['state'] == False, process_definition['legs'])]
1876 fsids = [leg['ids'] for leg in \
1877 filter(lambda leg: leg['state'] == True, process_definition['legs'])]
1878
1879 max_WEIGHTED_order = \
1880 (len(fsids + isids) - 2)*int(model.get_max_WEIGHTED())
1881
1882
1883 hierarchydef = process_definition['model'].get('order_hierarchy')
1884 tmp = []
1885 hierarchy = hierarchydef.items()
1886 hierarchy.sort()
1887 for key, value in hierarchydef.items():
1888 if value>1:
1889 tmp.append('%s*%s' % (value,key))
1890 else:
1891 tmp.append('%s' % key)
1892 wgtdef = '+'.join(tmp)
1893
1894
1895 while max_order_now < max_WEIGHTED_order:
1896 logger.info("Trying coupling order WEIGHTED<=%d: WEIGTHED IS %s" % (max_order_now, wgtdef))
1897
1898 oldloglevel = logger.level
1899 logger.setLevel(logging.WARNING)
1900
1901
1902
1903 failed_procs = []
1904
1905
1906 for prod in apply(itertools.product, isids):
1907 islegs = [ base_objects.Leg({'id':id, 'state': False}) \
1908 for id in prod]
1909
1910
1911
1912
1913 red_fsidlist = []
1914
1915 for prod in apply(itertools.product, fsids):
1916
1917
1918 if tuple(sorted(prod)) in red_fsidlist:
1919 continue
1920
1921 red_fsidlist.append(tuple(sorted(prod)));
1922
1923
1924
1925 nglue = 0
1926 if 21 in particles[0]:
1927 nglue = len([id for id in prod if id == 21])
1928 prod = [id for id in prod if id != 21]
1929
1930
1931 leg_list = [copy.copy(leg) for leg in islegs]
1932
1933 leg_list.extend([\
1934 base_objects.Leg({'id':id, 'state': True}) \
1935 for id in prod])
1936
1937 legs = base_objects.LegList(leg_list)
1938
1939
1940
1941 coupling_orders_now = {coupling: max_order_now - \
1942 nglue * model['order_hierarchy']['QCD']}
1943
1944
1945 process = base_objects.Process({\
1946 'legs':legs,
1947 'model':model,
1948 'id': process_definition.get('id'),
1949 'orders': coupling_orders_now,
1950 'required_s_channels': \
1951 process_definition.get('required_s_channels'),
1952 'forbidden_onsh_s_channels': \
1953 process_definition.get('forbidden_onsh_s_channels'),
1954 'sqorders_types': \
1955 process_definition.get('sqorders_types'),
1956 'squared_orders': \
1957 process_definition.get('squared_orders'),
1958 'split_orders': \
1959 process_definition.get('split_orders'),
1960 'forbidden_s_channels': \
1961 process_definition.get('forbidden_s_channels'),
1962 'forbidden_particles': \
1963 process_definition.get('forbidden_particles'),
1964 'is_decay_chain': \
1965 process_definition.get('is_decay_chain'),
1966 'overall_orders': \
1967 process_definition.get('overall_orders'),
1968 'split_orders': \
1969 process_definition.get('split_orders')})
1970
1971
1972 process.check_expansion_orders()
1973
1974
1975 sorted_legs = sorted(legs.get_outgoing_id_list(model))
1976
1977
1978 if tuple(sorted_legs) in failed_procs:
1979 continue
1980
1981 amplitude = Amplitude({'process': process})
1982 try:
1983 amplitude.generate_diagrams()
1984 except InvalidCmd:
1985 failed_procs.append(tuple(sorted_legs))
1986 else:
1987 if amplitude.get('diagrams'):
1988
1989 logger.setLevel(oldloglevel)
1990 return {coupling: max_order_now}
1991 else:
1992 failed_procs.append(tuple(sorted_legs))
1993
1994
1995 max_order_now += 1
1996 logger.setLevel(oldloglevel)
1997
1998
1999 return {coupling: max_order_now}
2000
2001 @staticmethod
2003 """Return the amplitude crossed with the permutation new_perm"""
2004
2005 perm_map = dict(zip(org_perm, new_perm))
2006
2007 new_amp = copy.copy(amplitude)
2008
2009 for i, leg in enumerate(process.get('legs')):
2010 leg.set('number', i+1)
2011
2012 new_amp.set('process', process)
2013
2014 diagrams = base_objects.DiagramList([d.renumber_legs(perm_map,
2015 process.get('legs'),) for \
2016 d in new_amp.get('diagrams')])
2017 new_amp.set('diagrams', diagrams)
2018 new_amp.trim_diagrams()
2019
2020
2021 new_amp.set('has_mirror_process', False)
2022
2023 return new_amp
2024
def expand_list(mylist):
    """Takes a list of lists and elements and returns a list of flat lists.
    Example: [[1,2], 3, [4,5]] -> [[1,3,4], [1,3,5], [2,3,4], [2,3,5]]
    """

    # Check that the argument is a list
    assert isinstance(mylist, list), "Expand_list argument must be a list"

    res = []

    # Wrap bare elements in singleton lists so itertools.product applies
    tmplist = []
    for item in mylist:
        if isinstance(item, list):
            tmplist.append(item)
        else:
            tmplist.append([item])

    # apply() was removed in Python 3; argument unpacking is equivalent
    for item in itertools.product(*tmplist):
        res.append(list(item))

    return res
2050
def expand_list_list(mylist):
    """Recursive function. Takes a list of lists and lists of lists
    and returns a list of flat lists.
    Example: [[1,2],[[4,5],[6,7]]] -> [[1,2,4,5], [1,2,6,7]]
    """

    # Base case: empty input, or a single empty entry
    if not mylist or (len(mylist) == 1 and not mylist[0]):
        return [[]]

    # The first element must be a list (or a list of lists)
    assert isinstance(mylist[0], list), \
           "Expand_list_list needs a list of lists and lists of lists"

    head, tail = mylist[0], mylist[1:]

    # Base case: a single entry left. A list of lists expands to its
    # members; a flat list stays wrapped as-is.
    if not tail:
        return head if isinstance(head[0], list) else mylist

    # Alternatives contributed by the head: each sub-list of a
    # list-of-lists, or the single flat head list itself
    choices = head if isinstance(head[0], list) else [head]

    # Prepend every head alternative to every expansion of the tail
    return [list(choice) + rest
            for choice in choices
            for rest in expand_list_list(tail)]
2090