1
2
3
4
5
6
7
8
9
10
11
12
13
14
15 """Classes for diagram generation. Amplitude performs the diagram
16 generation, DecayChainAmplitude keeps track of processes with decay
17 chains, and MultiProcess allows generation of processes with
18 multiparticle definitions. DiagramTag allows to identify diagrams
19 based on relevant properties.
20 """
21
22 import array
23 import copy
24 import itertools
25 import logging
26
27 import madgraph.core.base_objects as base_objects
28 import madgraph.various.misc as misc
29 from madgraph import InvalidCmd, MadGraph5Error
30
31 logger = logging.getLogger('madgraph.diagram_generation')
35
41 """Class to tag diagrams based on objects with some __lt__ measure, e.g.
42 PDG code/interaction id (for comparing diagrams from the same amplitude),
43 or Lorentz/coupling/mass/width (for comparing AMPs from different MEs).
44 Algorithm: Create chains starting from external particles:
45 1 \ / 6
46 2 /\______/\ 7
47 3_ / | \_ 8
48 4 / 5 \_ 9
49 \ 10
50 gives ((((9,10,id910),8,id9108),(6,7,id67),id910867)
51 (((1,2,id12),(3,4,id34)),id1234),
52 5,id91086712345)
53 where idN is the id of the corresponding interaction. The ordering within
54 chains is based on chain length (depth; here, 1234 has depth 3, 910867 has
55 depth 4, 5 has depht 0), and if equal on the ordering of the chain elements.
56 The determination of central vertex is based on minimizing the chain length
57 for the longest subchain.
58 This gives a unique tag which can be used to identify diagrams
59 (instead of symmetry), as well as identify identical matrix elements from
60 different processes."""
61
63 """Exception for any problems in DiagramTags"""
64 pass
65
    def __init__(self, diagram, model=None, ninitial=2):
        """Initialize with a diagram. Create DiagramTagChainLinks according to
        the diagram, and figure out if we need to shift the central vertex.

        Arguments:
        diagram  -- the base_objects.Diagram to tag
        model    -- model used by link_from_leg/vertex_id_from_vertex hooks
        ninitial -- number of initial-state particles (default 2)
        """

        # Map from leg number -> DiagramTagChainLink built so far, so each
        # vertex can pick up the chains of its incoming legs.
        leg_dict = {}

        for vertex in diagram.get('vertices'):
            # All legs except the last (the propagator) feed into the vertex
            legs = vertex.get('legs')[:-1]
            lastvx = vertex == diagram.get('vertices')[-1]
            if lastvx:
                # For the final vertex, every leg is incoming
                legs = vertex.get('legs')

            # Combine the links of all incoming legs into a new chain link;
            # legs not seen before become end links via link_from_leg.
            link = DiagramTagChainLink([leg_dict.setdefault(leg.get('number'),
                                        DiagramTagChainLink(self.link_from_leg(leg, model))) \
                                        for leg in legs],
                                       self.vertex_id_from_vertex(vertex,
                                                                  lastvx,
                                                                  model,
                                                                  ninitial))

            # Store the new link under the propagator leg number for later vertices
            if not lastvx:
                leg_dict[vertex.get('legs')[-1].get('number')] = link

        # The link from the final vertex is the (provisional) tag
        self.tag = link

        # Shift the central vertex until the longest subchain is minimal,
        # which makes the tag unique for the diagram topology.
        # If all links are depth 0, the tag is already canonical.
        done = max([l.depth for l in self.tag.links]) == 0
        while not done:
            # Links are sorted in falling order, so links[0] is the deepest chain
            longest_chain = self.tag.links[0]
            # Build a link from the remaining chains of the present tag
            new_link = DiagramTagChainLink(self.tag.links[1:],
                                           self.flip_vertex(\
                                               self.tag.vertex_id,
                                               longest_chain.vertex_id,
                                               self.tag.links[1:]))
            # Candidate tag: daughters of the longest chain plus the new link
            other_links = list(longest_chain.links) + [new_link]
            other_link = DiagramTagChainLink(other_links,
                                             self.flip_vertex(\
                                                 longest_chain.vertex_id,
                                                 self.tag.vertex_id,
                                                 other_links))

            if other_link.links[0] < self.tag.links[0]:
                # The candidate is more balanced: shift the central vertex
                self.tag = other_link
            else:
                # No further improvement possible; the tag is final
                done = True
126
128 """Output a diagram from a DiagramTag. Note that each daughter
129 class must implement the static functions id_from_vertex_id
130 (if the vertex id is something else than an integer) and
131 leg_from_link (to pass the correct info from an end link to a
132 leg)."""
133
134
135 diagram = base_objects.Diagram({'vertices': \
136 self.vertices_from_link(self.tag,
137 model,
138 True)})
139 diagram.calculate_orders(model)
140 return diagram
141
142 @classmethod
144 """Recursively return the leg corresponding to this link and
145 the list of all vertices from all previous links"""
146
147 if link.end_link:
148
149 return cls.leg_from_link(link), []
150
151
152 leg_vertices = [cls.vertices_from_link(l, model) for l in link.links]
153
154 legs = base_objects.LegList(sorted([l for l,v in leg_vertices],
155 lambda l1,l2: l2.get('number') - \
156 l1.get('number')))
157
158 vertices = base_objects.VertexList(sum([v for l, v in leg_vertices],
159 []))
160
161 if not first_vertex:
162
163
164 last_leg = cls.leg_from_legs(legs,link.vertex_id,model)
165 legs.append(last_leg)
166
167
168 vertices.append(cls.vertex_from_link(legs,
169 link.vertex_id,
170 model))
171 if first_vertex:
172
173 return vertices
174 else:
175
176 return last_leg, vertices
177
178 @classmethod
180 """Returns the list of external PDGs of the interaction corresponding
181 to this vertex_id."""
182
183
184
185
186 if (len(vertex_id)>=3 and 'PDGs' in vertex_id[2]):
187 return vertex_id[2]['PDGs']
188 else:
189 return [part.get_pdg_code() for part in model.get_interaction(
190 cls.id_from_vertex_id(vertex_id)).get('particles')]
191
192 @classmethod
194 """Return a leg from a leg list and the model info"""
195
196 pdgs = list(cls.legPDGs_from_vertex_id(vertex_id, model))
197
198
199 for pdg in [leg.get('id') for leg in legs]:
200 pdgs.remove(pdg)
201
202 assert len(pdgs) == 1
203
204 pdg = model.get_particle(pdgs[0]).get_anti_pdg_code()
205 number = min([l.get('number') for l in legs])
206
207 state = (len([l for l in legs if l.get('state') == False]) != 1)
208
209 onshell= False
210
211 return base_objects.Leg({'id': pdg,
212 'number': number,
213 'state': state,
214 'onshell': onshell})
215
216 @classmethod
229
230 @staticmethod
232 """Return a leg from a link"""
233
234 if link.end_link:
235
236 return base_objects.Leg({'number':link.links[0][1],
237 'id':link.links[0][0][0],
238 'state':(link.links[0][0][1] == 0),
239 'onshell':False})
240
241
242 assert False
243
244 @staticmethod
246 """Return the numerical vertex id from a link.vertex_id"""
247
248 return vertex_id[0][0]
249
250 @staticmethod
252 """Return the loop_info stored in this vertex id. Notice that the
253 IdentifyME tag does not store the loop_info, but should normally never
254 need access to it."""
255
256 return vertex_id[2]
257
258 @staticmethod
260 """Reorder a permutation with respect to start_perm. Note that
261 both need to start from 1."""
262 if perm == start_perm:
263 return range(len(perm))
264 order = [i for (p,i) in \
265 sorted([(p,i) for (i,p) in enumerate(perm)])]
266 return [start_perm[i]-1 for i in order]
267
268 @staticmethod
270 """Returns the default end link for a leg: ((id, state), number).
271 Note that the number is not taken into account if tag comparison,
272 but is used only to extract leg permutations."""
273 if leg.get('state'):
274
275 return [((leg.get('id'), 0), leg.get('number'))]
276 else:
277
278 return [((leg.get('id'), leg.get('number')), leg.get('number'))]
279
280 @staticmethod
282 """Returns the default vertex id: just the interaction id
283 Note that in the vertex id, like the leg, only the first entry is
284 taken into account in the tag comparison, while the second is for
285 storing information that is not to be used in comparisons and the
286 third for additional info regarding the shrunk loop vertex."""
287
288 if isinstance(vertex,base_objects.ContractedVertex):
289
290 return ((vertex.get('id'),vertex.get('loop_tag')),(),
291 {'PDGs':vertex.get('PDGs')})
292 else:
293 return ((vertex.get('id'),()),(),{})
294
295 @staticmethod
297 """Returns the default vertex flip: just the new_vertex"""
298 return new_vertex
299
301 """Equal if same tag"""
302 if type(self) != type(other):
303 return False
304 return self.tag == other.tag
305
307 return not self.__eq__(other)
308
311
313 return self.tag < other.tag
314
316 return self.tag > other.tag
317
318 __repr__ = __str__
319
321 """Chain link for a DiagramTag. A link is a tuple + vertex id + depth,
322 with a comparison operator defined"""
323
324 - def __init__(self, objects, vertex_id = None):
325 """Initialize, either with a tuple of DiagramTagChainLinks and
326 a vertex_id (defined by DiagramTag.vertex_id_from_vertex), or
327 with an external leg object (end link) defined by
328 DiagramTag.link_from_leg"""
329
330 if vertex_id == None:
331
332 self.links = tuple(objects)
333 self.vertex_id = (0,)
334 self.depth = 0
335 self.end_link = True
336 return
337
338 self.links = tuple(sorted(list(tuple(objects)), reverse=True))
339 self.vertex_id = vertex_id
340
341
342 self.depth = sum([l.depth for l in self.links],
343 max(1, len(self.links)-1))
344 self.end_link = False
345
347 """Get the permutation of external numbers (assumed to be the
348 second entry in the end link tuples)"""
349
350 if self.end_link:
351 return [self.links[0][1]]
352
353 return sum([l.get_external_numbers() for l in self.links], [])
354
356 """Compare self with other in the order:
357 1. depth 2. len(links) 3. vertex id 4. measure of links"""
358
359 if self == other:
360 return False
361
362 if self.depth != other.depth:
363 return self.depth < other.depth
364
365 if len(self.links) != len(other.links):
366 return len(self.links) < len(other.links)
367
368 if self.vertex_id[0] != other.vertex_id[0]:
369 return self.vertex_id[0] < other.vertex_id[0]
370
371 for i, link in enumerate(self.links):
372 if i > len(other.links) - 1:
373 return False
374 if link != other.links[i]:
375 return link < other.links[i]
376
378 return self != other and not self.__lt__(other)
379
381 """For end link,
382 consider equal if self.links[0][0] == other.links[0][0],
383 i.e., ignore the leg number (in links[0][1])."""
384
385 if self.end_link and other.end_link and self.depth == other.depth \
386 and self.vertex_id == other.vertex_id:
387 return self.links[0][0] == other.links[0][0]
388
389 return self.end_link == other.end_link and self.depth == other.depth \
390 and self.vertex_id[0] == other.vertex_id[0] \
391 and self.links == other.links
392
394 return not self.__eq__(other)
395
396
398 if self.end_link:
399 return str(self.links)
400 return "%s, %s; %d" % (str(self.links),
401 str(self.vertex_id),
402 self.depth)
403
404 __repr__ = __str__
405
406
407
408
409 -class Amplitude(base_objects.PhysicsObject):
410 """Amplitude: process + list of diagrams (ordered)
411 Initialize with a process, then call generate_diagrams() to
412 generate the diagrams for the amplitude
413 """
414
416 """Default values for all properties"""
417
418 self['process'] = base_objects.Process()
419 self['diagrams'] = None
420
421
422 self['has_mirror_process'] = False
423
436
437 - def filter(self, name, value):
453
454 - def get(self, name):
463
464
465
467 """Return diagram property names as a nicely sorted list."""
468
469 return ['process', 'diagrams', 'has_mirror_process']
470
472 """Returns number of diagrams for this amplitude"""
473 return len(self.get('diagrams'))
474
476 """Return an AmplitudeList with just this amplitude.
477 Needed for DecayChainAmplitude."""
478
479 return AmplitudeList([self])
480
482 """Returns a nicely formatted string of the amplitude content."""
483 return self.get('process').nice_string(indent) + "\n" + \
484 self.get('diagrams').nice_string(indent)
485
487 """Returns a nicely formatted string of the amplitude process."""
488 return self.get('process').nice_string(indent)
489
491 """Returns the number of initial state particles in the process."""
492 return self.get('process').get_ninitial()
493
495 """ Returns wether this amplitude has a loop process."""
496
497 return self.get('process').get('perturbation_couplings')
498
500 """Generate diagrams. Algorithm:
501
502 1. Define interaction dictionaries:
503 * 2->0 (identity), 3->0, 4->0, ... , maxlegs->0
504 * 2 -> 1, 3 -> 1, ..., maxlegs-1 -> 1
505
506 2. Set flag from_group=true for all external particles.
507 Flip particle/anti particle for incoming particles.
508
509 3. If there is a dictionary n->0 with n=number of external
510 particles, create if possible the combination [(1,2,3,4,...)]
511 with *at least two* from_group==true. This will give a
512 finished (set of) diagram(s) (done by reduce_leglist)
513
514 4. Create all allowed groupings of particles with at least one
515 from_group==true (according to dictionaries n->1):
516 [(1,2),3,4...],[1,(2,3),4,...],...,
517 [(1,2),(3,4),...],...,[(1,2,3),4,...],...
518 (done by combine_legs)
519
520 5. Replace each group with a (list of) new particle(s) with number
521 n = min(group numbers). Set from_group true for these
522 particles and false for all other particles. Store vertex info.
523 (done by merge_comb_legs)
524
525 6. Stop algorithm when at most 2 particles remain.
526 Return all diagrams (lists of vertices).
527
528 7. Repeat from 3 (recursion done by reduce_leglist)
529
530 8. Replace final p=p vertex
531
532 Be aware that the resulting vertices have all particles outgoing,
533 so need to flip for incoming particles when used.
534
535 SPECIAL CASE: For A>BC... processes which are legs in decay
536 chains, we need to ensure that BC... combine first, giving A=A
537 as a final vertex. This case is defined by the Process
538 property is_decay_chain = True.
539 This function can also be called by the generate_diagram function
540 of LoopAmplitudes, in which case the generated diagrams here must not
541 be directly assigned to the 'diagrams' attributed but returned as a
542 DiagramList by the function. This is controlled by the argument
543 returndiag.
544 """
545
546 process = self.get('process')
547 model = process.get('model')
548 legs = process.get('legs')
549
550 for key in process.get('overall_orders').keys():
551 try:
552 process.get('orders')[key] = \
553 min(process.get('orders')[key],
554 process.get('overall_orders')[key])
555 except KeyError:
556 process.get('orders')[key] = process.get('overall_orders')[key]
557
558 assert model.get('particles'), \
559 "particles are missing in model: %s" % model.get('particles')
560
561 assert model.get('interactions'), \
562 "interactions are missing in model"
563
564
565 res = base_objects.DiagramList()
566
567 if len(filter(lambda leg: model.get('particle_dict')[\
568 leg.get('id')].is_fermion(), legs)) % 2 == 1:
569 if not returndiag:
570 self['diagrams'] = res
571 raise InvalidCmd, 'The number of fermion is odd'
572 else:
573 return False, res
574
575
576
577 if not model.get('got_majoranas') and \
578 len(filter(lambda leg: leg.is_incoming_fermion(model), legs)) != \
579 len(filter(lambda leg: leg.is_outgoing_fermion(model), legs)):
580 if not returndiag:
581 self['diagrams'] = res
582 raise InvalidCmd, 'The number of of incoming/outcoming fermions are different'
583 else:
584 return False, res
585
586
587
588 for charge in model.get('conserved_charge'):
589 total = 0
590 for leg in legs:
591 part = model.get('particle_dict')[leg.get('id')]
592 try:
593 value = part.get(charge)
594 except (AttributeError, base_objects.PhysicsObject.PhysicsObjectError):
595 try:
596 value = getattr(part, charge)
597 except AttributeError:
598 value = 0
599
600 if (leg.get('id') != part['pdg_code']) != leg['state']:
601 total -= value
602 else:
603 total += value
604
605 if abs(total) > 1e-10:
606 if not returndiag:
607 self['diagrams'] = res
608 raise InvalidCmd, 'No %s conservation for this process ' % charge
609 return res
610 else:
611 raise InvalidCmd, 'No %s conservation for this process ' % charge
612 return res, res
613
614 if not returndiag:
615 logger.info("Trying %s " % process.nice_string().replace('Process', 'process'))
616
617
618 for i in range(0, len(process.get('legs'))):
619
620 leg = copy.copy(process.get('legs')[i])
621 process.get('legs')[i] = leg
622 if leg.get('number') == 0:
623 leg.set('number', i + 1)
624
625
626
627 leglist = self.copy_leglist(process.get('legs'))
628
629 for leg in leglist:
630
631
632
633 leg.set('from_group', True)
634
635
636
637 if leg.get('state') == False:
638 part = model.get('particle_dict')[leg.get('id')]
639 leg.set('id', part.get_anti_pdg_code())
640
641
642
643 max_multi_to1 = max([len(key) for key in \
644 model.get('ref_dict_to1').keys()])
645
646
647
648
649
650
651
652
653 is_decay_proc = process.get_ninitial() == 1
654 if is_decay_proc:
655 part = model.get('particle_dict')[leglist[0].get('id')]
656
657
658
659 ref_dict_to0 = {(part.get_pdg_code(),part.get_anti_pdg_code()):[0],
660 (part.get_anti_pdg_code(),part.get_pdg_code()):[0]}
661
662
663 leglist[0].set('from_group', None)
664 reduced_leglist = self.reduce_leglist(leglist,
665 max_multi_to1,
666 ref_dict_to0,
667 is_decay_proc,
668 process.get('orders'))
669 else:
670 reduced_leglist = self.reduce_leglist(leglist,
671 max_multi_to1,
672 model.get('ref_dict_to0'),
673 is_decay_proc,
674 process.get('orders'))
675
676
677
678
679 self.convert_dgleg_to_leg(reduced_leglist)
680
681 if reduced_leglist:
682 for vertex_list in reduced_leglist:
683 res.append(self.create_diagram(base_objects.VertexList(vertex_list)))
684
685
686
687 failed_crossing = not res
688
689
690
691
692
693
694 if process.get('required_s_channels') and \
695 process.get('required_s_channels')[0]:
696
697
698 lastvx = -1
699
700
701
702 if is_decay_proc: lastvx = -2
703 ninitial = len(filter(lambda leg: leg.get('state') == False,
704 process.get('legs')))
705
706 old_res = res
707 res = base_objects.DiagramList()
708 for id_list in process.get('required_s_channels'):
709 res_diags = filter(lambda diagram: \
710 all([req_s_channel in \
711 [vertex.get_s_channel_id(\
712 process.get('model'), ninitial) \
713 for vertex in diagram.get('vertices')[:lastvx]] \
714 for req_s_channel in \
715 id_list]), old_res)
716
717 res.extend([diag for diag in res_diags if diag not in res])
718
719
720
721
722
723 if process.get('forbidden_s_channels'):
724 ninitial = len(filter(lambda leg: leg.get('state') == False,
725 process.get('legs')))
726 if ninitial == 2:
727 res = base_objects.DiagramList(\
728 filter(lambda diagram: \
729 not any([vertex.get_s_channel_id(\
730 process.get('model'), ninitial) \
731 in process.get('forbidden_s_channels')
732 for vertex in diagram.get('vertices')[:-1]]),
733 res))
734 else:
735
736
737 newres= []
738 for diagram in res:
739 leg1 = 1
740
741
742
743 vertex = diagram.get('vertices')[-1]
744 if any([l['number'] ==1 for l in vertex.get('legs')]):
745 leg1 = [l['number'] for l in vertex.get('legs') if l['number'] !=1][0]
746 to_loop = range(len(diagram.get('vertices'))-1)
747 if leg1 >1:
748 to_loop.reverse()
749 for i in to_loop:
750 vertex = diagram.get('vertices')[i]
751 if leg1:
752 if any([l['number'] ==leg1 for l in vertex.get('legs')]):
753 leg1 = 0
754 continue
755 if vertex.get_s_channel_id(process.get('model'), ninitial)\
756 in process.get('forbidden_s_channels'):
757 break
758 else:
759 newres.append(diagram)
760 res = base_objects.DiagramList(newres)
761
762
763
764
765 if process.get('forbidden_onsh_s_channels'):
766 ninitial = len(filter(lambda leg: leg.get('state') == False,
767 process.get('legs')))
768
769 verts = base_objects.VertexList(sum([[vertex for vertex \
770 in diagram.get('vertices')[:-1]
771 if vertex.get_s_channel_id(\
772 process.get('model'), ninitial) \
773 in process.get('forbidden_onsh_s_channels')] \
774 for diagram in res], []))
775 for vert in verts:
776
777 newleg = copy.copy(vert.get('legs').pop(-1))
778 newleg.set('onshell', False)
779 vert.get('legs').append(newleg)
780
781
782 for diagram in res:
783 diagram.calculate_orders(model)
784
785
786
787
788
789
790
791
792 if not returndiag and len(res)>0:
793 res = self.apply_squared_order_constraints(res)
794
795 if diagram_filter:
796 res = self.apply_user_filter(res)
797
798
799 if not process.get('is_decay_chain'):
800 for diagram in res:
801 vertices = diagram.get('vertices')
802 if len(vertices) > 1 and vertices[-1].get('id') == 0:
803
804
805
806
807 vertices = copy.copy(vertices)
808 lastvx = vertices.pop()
809 nexttolastvertex = copy.copy(vertices.pop())
810 legs = copy.copy(nexttolastvertex.get('legs'))
811 ntlnumber = legs[-1].get('number')
812 lastleg = filter(lambda leg: leg.get('number') != ntlnumber,
813 lastvx.get('legs'))[0]
814
815 if lastleg.get('onshell') == False:
816 lastleg.set('onshell', None)
817
818 legs[-1] = lastleg
819 nexttolastvertex.set('legs', legs)
820 vertices.append(nexttolastvertex)
821 diagram.set('vertices', vertices)
822
823 if res and not returndiag:
824 logger.info("Process has %d diagrams" % len(res))
825
826
827 self.trim_diagrams(diaglist=res)
828
829
830 pertur = 'QCD'
831 if self.get('process')['perturbation_couplings']:
832 pertur = sorted(self.get('process')['perturbation_couplings'])[0]
833 self.get('process').get('legs').sort(pert=pertur)
834
835
836 if not returndiag:
837 self['diagrams'] = res
838 return not failed_crossing
839 else:
840 return not failed_crossing, res
841
843 """Applies the user specified squared order constraints on the diagram
844 list in argument."""
845
846 res = copy.copy(diag_list)
847
848
849
850 for name, (value, operator) in self['process'].get('constrained_orders').items():
851 res.filter_constrained_orders(name, value, operator)
852
853
854
855
856 while True:
857 new_res = res.apply_positive_sq_orders(res,
858 self['process'].get('squared_orders'),
859 self['process']['sqorders_types'])
860
861 if len(res)==len(new_res):
862 break
863 elif (len(new_res)>len(res)):
864 raise MadGraph5Error(
865 'Inconsistency in function apply_squared_order_constraints().')
866
867 res = new_res
868
869
870
871
872 neg_orders = [(order, value) for order, value in \
873 self['process'].get('squared_orders').items() if value<0]
874 if len(neg_orders)==1:
875 neg_order, neg_value = neg_orders[0]
876
877 res, target_order = res.apply_negative_sq_order(res, neg_order,\
878 neg_value, self['process']['sqorders_types'][neg_order])
879
880
881
882
883 self['process']['squared_orders'][neg_order]=target_order
884 elif len(neg_orders)>1:
885 raise InvalidCmd('At most one negative squared order constraint'+\
886 ' can be specified, not %s.'%str(neg_orders))
887
888 return res
889
891 """Applies the user specified squared order constraints on the diagram
892 list in argument."""
893
894 if True:
895 try:
896 from PLUGIN.user_filter import remove_diag
897 except ImportError:
898 raise MadGraph5Error, 'user filter required to be defined in PLUGIN/user_filter.py with the function remove_diag(ONEDIAG) which returns True if the daigram has to be removed'
899 else:
900
901 def remove_diag(diag):
902 for vertex in diag['vertices']:
903 if vertex['id'] == 0:
904 continue
905 if vertex['legs'][-1]['number'] < 3:
906 if abs(vertex['legs'][-1]['id']) <6:
907 return True
908 return False
909
910 res = diag_list.__class__()
911 nb_removed = 0
912 for diag in diag_list:
913 if remove_diag(diag):
914 nb_removed +=1
915 else:
916 res.append(diag)
917
918 if nb_removed:
919 logger.warning('Diagram filter is ON and removed %s diagrams for this subprocess.' % nb_removed)
920
921 return res
922
923
924
926 """ Return a Diagram created from the vertex list. This function can be
927 overloaded by daughter classes."""
928 return base_objects.Diagram({'vertices':vertexlist})
929
931 """ In LoopAmplitude, it converts back all DGLoopLegs into Legs.
932 In Amplitude, there is nothing to do. """
933
934 return True
935
937 """ Simply returns a copy of the leg list. This function is
938 overloaded in LoopAmplitude so that a DGLoopLeg list is returned.
939 The DGLoopLeg has some additional parameters only useful during
940 loop diagram generation"""
941
942 return base_objects.LegList(\
943 [ copy.copy(leg) for leg in legs ])
944
    def reduce_leglist(self, curr_leglist, max_multi_to1, ref_dict_to0,
                       is_decay_proc = False, coupling_orders = None):
        """Recursive function to reduce N LegList to N-1
        For algorithm, see doc for generate_diagrams.

        Arguments:
        curr_leglist    -- current LegList to reduce (None signals failure)
        max_multi_to1   -- largest n with an n->1 entry in the model
        ref_dict_to0    -- dictionary of n->0 (complete-vertex) combinations
        is_decay_proc   -- True for 1 -> N decay processes
        coupling_orders -- remaining coupling orders, or None for no limit

        Returns a list of vertex-list chains, or None if no reduction
        was possible.
        """

        # Result: list of vertex lists, one per completed diagram
        res = []

        # Stop condition: a failed reduction further down returns None
        if curr_leglist is None:
            return None

        # Extract model and the n->1 reference dictionary once
        model = self.get('process').get('model')
        ref_dict_to1 = self.get('process').get('model').get('ref_dict_to1')

        # If all legs can be combined in one single vertex (an n->0
        # entry), this closes a diagram: add the final vertex to res.
        if curr_leglist.can_combine_to_0(ref_dict_to0, is_decay_proc):
            # Get all interaction ids valid for this final combination
            vertex_ids = self.get_combined_vertices(curr_leglist,
                  copy.copy(ref_dict_to0[tuple(sorted([leg.get('id') for \
                                                  leg in curr_leglist]))]))

            final_vertices = [base_objects.Vertex({'legs':curr_leglist,
                                                   'id':vertex_id}) for \
                              vertex_id in vertex_ids]
            # Only keep final vertices that respect the coupling orders
            for final_vertex in final_vertices:
                if self.reduce_orders(coupling_orders, model,
                                      [final_vertex.get('id')]) != False:
                    res.append([final_vertex])

        # Stop condition 2: two legs left can only close via the n->0
        # combination above, so return what we have (or None).
        if len(curr_leglist) == 2:
            if res:
                return res
            else:
                return None

        # Create a list of all valid n->1 groupings of the current legs
        comb_lists = self.combine_legs(curr_leglist,
                                       ref_dict_to1, max_multi_to1)

        # Replace each grouping with the possible merged legs + vertices
        leg_vertex_list = self.merge_comb_legs(comb_lists, ref_dict_to1)

        # Consider each (reduced LegList, VertexList) pair in turn
        for leg_vertex_tuple in leg_vertex_list:

            # Skip any merging that produces a forbidden particle
            if self.get('process').get('forbidden_particles') and \
                any([abs(vertex.get('legs')[-1].get('id')) in \
                self.get('process').get('forbidden_particles') \
                for vertex in leg_vertex_tuple[1]]):
                continue

            # Subtract the new vertices' couplings from the budget;
            # False means some coupling order went negative.
            new_coupling_orders = self.reduce_orders(coupling_orders,
                                                     model,
                                                     [vertex.get('id') for vertex in \
                                                      leg_vertex_tuple[1]])
            if new_coupling_orders == False:
                # Coupling orders exceeded: prune this branch
                continue

            # Recurse on the reduced leg list
            reduced_diagram = self.reduce_leglist(leg_vertex_tuple[0],
                                                  max_multi_to1,
                                                  ref_dict_to0,
                                                  is_decay_proc,
                                                  new_coupling_orders)

            # Combine our new vertices with every completed sub-diagram
            if reduced_diagram:
                vertex_list_list = [list(leg_vertex_tuple[1])]
                vertex_list_list.append(reduced_diagram)
                expanded_list = expand_list_list(vertex_list_list)
                res.extend(expanded_list)

        return res
1033
1034 - def reduce_orders(self, coupling_orders, model, vertex_id_list):
1035 """Return False if the coupling orders for any coupling is <
1036 0, otherwise return the new coupling orders with the vertex
1037 orders subtracted. If coupling_orders is not given, return
1038 None (which counts as success).
1039 WEIGHTED is a special order, which corresponds to the sum of
1040 order hierarchies for the couplings.
1041 We ignore negative constraints as these cannot be taken into
1042 account on the fly but only after generation."""
1043
1044 if not coupling_orders:
1045 return None
1046
1047 present_couplings = copy.copy(coupling_orders)
1048 for id in vertex_id_list:
1049
1050 if not id:
1051 continue
1052 inter = model.get("interaction_dict")[id]
1053 for coupling in inter.get('orders').keys():
1054
1055
1056 if coupling in present_couplings and \
1057 present_couplings[coupling]>=0:
1058
1059 present_couplings[coupling] -= \
1060 inter.get('orders')[coupling]
1061 if present_couplings[coupling] < 0:
1062
1063 return False
1064
1065 if 'WEIGHTED' in present_couplings and \
1066 present_couplings['WEIGHTED']>=0:
1067 weight = sum([model.get('order_hierarchy')[c]*n for \
1068 (c,n) in inter.get('orders').items()])
1069 present_couplings['WEIGHTED'] -= weight
1070 if present_couplings['WEIGHTED'] < 0:
1071
1072 return False
1073
1074 return present_couplings
1075
    def combine_legs(self, list_legs, ref_dict_to1, max_multi_to1):
        """Recursive function. Take a list of legs as an input, with
        the reference dictionary n-1->1, and output a list of list of
        tuples of Legs (allowed combinations) and Legs (rest). Algorithm:

        1. Get all n-combinations from list [123456]: [12],..,[23],..,[123],..

        2. For each combination, say [34]. Check if combination is valid.
           If so:

           a. Append [12[34]56] to result array

           b. Split [123456] at index(first element in combination+1),
              i.e. [12],[456] and subtract combination from second half,
              i.e.: [456]-[34]=[56]. Repeat from 1. with this array

        3. Take result array from call to 1. (here, [[56]]) and append
           (first half in step b - combination) + combination + (result
           from 1.) = [12[34][56]] to result array

        4. After appending results from all n-combinations, return
           resulting array. Example, if [13] and [45] are valid
           combinations:
           [[[13]2456],[[13]2[45]6],[123[45]6]]
        """

        res = []

        # Loop over all combination lengths from 2 up to the largest n
        # with an n->1 entry in the reference dictionary
        for comb_length in range(2, max_multi_to1 + 1):

            # Cannot combine more legs than there are left; since longer
            # combinations only get worse, we can return immediately.
            if comb_length > len(list_legs):
                return res

            # All unordered n-tuples of the remaining legs
            for comb in itertools.combinations(list_legs, comb_length):

                # Check if the combination has an n->1 entry in the model
                if base_objects.LegList(comb).can_combine_to_1(ref_dict_to1):

                    # Step 2a: replace the combined legs by the combination
                    # tuple, inserted at the position of its first member
                    res_list = copy.copy(list_legs)
                    for leg in comb:
                        res_list.remove(leg)
                    res_list.insert(list_legs.index(comb[0]), comb)
                    res.append(res_list)

                    # Step 2b: split the leg list at the first combined leg;
                    # the first half is untouched, the second half has the
                    # remaining combined legs removed
                    res_list1 = list_legs[0:list_legs.index(comb[0])]
                    res_list2 = list_legs[list_legs.index(comb[0]) + 1:]
                    for leg in comb[1:]:
                        res_list2.remove(leg)

                    # Step 3: prefix = first half + this combination
                    # (res_list deliberately aliases res_list1 here)
                    res_list = res_list1
                    res_list.append(comb)

                    # Recurse on the second half and append prefix + each
                    # result to the output
                    for item in self.combine_legs(res_list2,
                                                  ref_dict_to1,
                                                  max_multi_to1):
                        final_res_list = copy.copy(res_list)
                        final_res_list.extend(item)
                        res.append(final_res_list)

        return res
1149
1150
1152 """Takes a list of allowed leg combinations as an input and returns
1153 a set of lists where combinations have been properly replaced
1154 (one list per element in the ref_dict, so that all possible intermediate
1155 particles are included). For each list, give the list of vertices
1156 corresponding to the executed merging, group the two as a tuple.
1157 """
1158
1159 res = []
1160
1161 for comb_list in comb_lists:
1162
1163 reduced_list = []
1164 vertex_list = []
1165
1166 for entry in comb_list:
1167
1168
1169 if isinstance(entry, tuple):
1170
1171
1172
1173 leg_vert_ids = copy.copy(ref_dict_to1[\
1174 tuple(sorted([leg.get('id') for leg in entry]))])
1175
1176
1177 number = min([leg.get('number') for leg in entry])
1178
1179
1180 if len(filter(lambda leg: leg.get('state') == False,
1181 entry)) == 1:
1182 state = False
1183 else:
1184 state = True
1185
1186
1187
1188
1189
1190 new_leg_vert_ids = []
1191 if leg_vert_ids:
1192 new_leg_vert_ids = self.get_combined_legs(entry,
1193 leg_vert_ids,
1194 number,
1195 state)
1196
1197 reduced_list.append([l[0] for l in new_leg_vert_ids])
1198
1199
1200
1201
1202
1203 vlist = base_objects.VertexList()
1204 for (myleg, vert_id) in new_leg_vert_ids:
1205
1206 myleglist = base_objects.LegList(list(entry))
1207
1208 myleglist.append(myleg)
1209
1210 vlist.append(base_objects.Vertex(
1211 {'legs':myleglist,
1212 'id':vert_id}))
1213
1214 vertex_list.append(vlist)
1215
1216
1217
1218 else:
1219 cp_entry = copy.copy(entry)
1220
1221
1222
1223 if cp_entry.get('from_group') != None:
1224 cp_entry.set('from_group', False)
1225 reduced_list.append(cp_entry)
1226
1227
1228 flat_red_lists = expand_list(reduced_list)
1229 flat_vx_lists = expand_list(vertex_list)
1230
1231
1232 for i in range(0, len(flat_vx_lists)):
1233 res.append((base_objects.LegList(flat_red_lists[i]), \
1234 base_objects.VertexList(flat_vx_lists[i])))
1235
1236 return res
1237
1239 """Create a set of new legs from the info given. This can be
1240 overloaded by daughter classes."""
1241
1242 mylegs = [(base_objects.Leg({'id':leg_id,
1243 'number':number,
1244 'state':state,
1245 'from_group':True}),
1246 vert_id)\
1247 for leg_id, vert_id in leg_vert_ids]
1248
1249 return mylegs
1250
1252 """Allow for selection of vertex ids. This can be
1253 overloaded by daughter classes."""
1254
1255 return vert_ids
1256
        """Reduce the number of legs and vertices used in memory.
        When called by a diagram generation initiated by LoopAmplitude,
        this function should not trim the diagrams in the attribute 'diagrams'
        but rather a given list in the 'diaglist' argument."""

        # Pools of unique Leg/Vertex objects shared between all diagrams,
        # so identical legs/vertices are stored only once in memory.
        legs = []
        vertices = []

        if diaglist is None:
            diaglist=self.get('diagrams')

        # Flag final-state legs of the process that have a defined decay
        # as onshell
        process = self.get('process')
        for leg in process.get('legs'):
            if leg.get('state') and leg.get('id') in decay_ids:
                leg.set('onshell', True)

        for diagram in diaglist:
            # Keep track of leg numbers already seen, so only the first
            # (external) occurrence of a decayed leg gets flagged
            leg_external = set()
            for ivx, vertex in enumerate(diagram.get('vertices')):
                for ileg, leg in enumerate(vertex.get('legs')):
                    if leg.get('state') and leg.get('id') in decay_ids and \
                       leg.get('number') not in leg_external:
                        # Copy before flagging onshell, so diagrams that
                        # share this Leg object are not affected
                        leg = copy.copy(leg)
                        leg.set('onshell', True)
                    try:
                        index = legs.index(leg)
                    except ValueError:
                        # New leg: register it in the pool
                        vertex.get('legs')[ileg] = leg
                        legs.append(leg)
                    else:
                        # Existing leg: reuse the pooled object
                        vertex.get('legs')[ileg] = legs[index]
                    leg_external.add(leg.get('number'))
                try:
                    # Reuse an identical pooled vertex if one exists
                    index = vertices.index(vertex)
                    diagram.get('vertices')[ivx] = vertices[index]
                except ValueError:
                    # New vertex: register it in the pool
                    vertices.append(vertex)
1301
1302
1303
class AmplitudeList(base_objects.PhysicsObjectList):
    """List of Amplitude objects
    """

    def has_any_loop_process(self):
        """ Check the content of all processes of the amplitudes in this list to
        see if there is any which defines perturbation couplings. """

        for amp in self:
            if amp.has_loop_process():
                return True
        # Explicitly return False (instead of the previous implicit None)
        # when no amplitude defines a loop process; both are falsy, so
        # callers relying on truthiness are unaffected.
        return False

    def is_valid_element(self, obj):
        """Test if object obj is a valid Amplitude for the list."""

        return isinstance(obj, Amplitude)
1320
1325 """A list of amplitudes + a list of decay chain amplitude lists;
1326 corresponding to a ProcessDefinition with a list of decay chains
1327 """
1328
1334
    def __init__(self, argument = None, collect_mirror_procs = False,
                 ignore_six_quark_processes = False, loop_filter=None, diagram_filter=False):
        """Allow initialization with Process and with ProcessDefinition.

        Builds the core amplitude(s) from argument, then recursively builds
        a DecayChainAmplitude for every decay chain attached to it."""

        if isinstance(argument, base_objects.Process):
            super(DecayChainAmplitude, self).__init__()
            # Select the multi-process class matching tree/loop generation
            from madgraph.loop.loop_diagram_generation import LoopMultiProcess
            if argument['perturbation_couplings']:
                MultiProcessClass=LoopMultiProcess
            else:
                MultiProcessClass=MultiProcess
            if isinstance(argument, base_objects.ProcessDefinition):
                # Multi-process definition: generate all core amplitudes
                self['amplitudes'].extend(\
                    MultiProcessClass.generate_multi_amplitudes(argument,
                                                collect_mirror_procs,
                                                ignore_six_quark_processes,
                                                loop_filter=loop_filter,
                                                diagram_filter=diagram_filter))
            else:
                # Single process: generate one amplitude
                self['amplitudes'].append(\
                    MultiProcessClass.get_amplitude_from_proc(argument,
                                                loop_filter=loop_filter,
                                                diagram_filter=diagram_filter))
                # Clean decay chains from the process, since we haven't
                # combined processes with decay chains yet
                process = copy.copy(self.get('amplitudes')[0].get('process'))
                process.set('decay_chains', base_objects.ProcessList())
                self['amplitudes'][0].set('process', process)

            # Recursively build amplitudes for each decay chain
            for process in argument.get('decay_chains'):
                if process.get('perturbation_couplings'):
                    raise MadGraph5Error,\
                          "Decay processes can not be perturbed"
                process.set('overall_orders', argument.get('overall_orders'))
                if not process.get('is_decay_chain'):
                    process.set('is_decay_chain',True)
                if not process.get_ninitial() == 1:
                    raise InvalidCmd,\
                          "Decay chain process must have exactly one" + \
                          " incoming particle"
                self['decay_chains'].append(\
                    DecayChainAmplitude(process, collect_mirror_procs,
                                        ignore_six_quark_processes))

            # Collect the ids of all decayed particles and flag the
            # corresponding legs of the core diagrams (trim_diagrams
            # also reduces memory use by pooling legs/vertices)
            decay_ids = sum([[a.get('process').get('legs')[0].get('id') \
                              for a in dec.get('amplitudes')] for dec in \
                             self['decay_chains']], [])
            decay_ids = set(decay_ids)
            for amp in self['amplitudes']:
                amp.trim_diagrams(decay_ids)

            # Remove from decay_ids every id actually present in a core
            # process; whatever is left has no matching core particle
            for amp in self['amplitudes']:
                for l in amp.get('process').get('legs'):
                    if l.get('id') in decay_ids:
                        decay_ids.remove(l.get('id'))

            if decay_ids:
                model = amp.get('process').get('model')
                names = [model.get_particle(id).get('name') for id in decay_ids]

                # Warn the user that part of the decay information is dropped
                logger.warning(
                    "$RED Decay without corresponding particle in core process found.\n" + \
                    "Decay information for particle(s) %s is discarded.\n" % ','.join(names) + \
                    "Please check your process definition carefully. \n" + \
                    "This warning usually means that you forgot parentheses in presence of subdecay.\n" + \
                    "Example of correct syntax: p p > t t~, ( t > w+ b, w+ > l+ vl)")

                # Discard the unused decays (iterate in reverse since we
                # remove elements while looping)
                for dc in reversed(self['decay_chains']):
                    for a in reversed(dc.get('amplitudes')):
                        # Drop decay amplitudes for the orphaned ids
                        if a.get('process').get('legs')[0].get('id') in decay_ids:
                            dc.get('amplitudes').remove(a)
                    if not dc.get('amplitudes'):
                        # No amplitude left: drop the whole decay chain
                        self['decay_chains'].remove(dc)

            # Detect (and warn about) decays where the decaying particle
            # also appears in its own final state
            bad_procs = []
            for dc in self['decay_chains']:
                for amp in dc.get('amplitudes'):
                    legs = amp.get('process').get('legs')
                    fs_parts = [abs(l.get('id')) for l in legs if
                                l.get('state')]
                    is_part = [l.get('id') for l in legs if not
                               l.get('state')][0]
                    if abs(is_part) in fs_parts:
                        bad_procs.append(amp.get('process'))

            if bad_procs:
                logger.warning(
                    "$RED Decay(s) with particle decaying to itself:\n" + \
                    '\n'.join([p.nice_string() for p in bad_procs]) + \
                    "\nPlease check your process definition carefully. \n")

        elif argument != None:
            # Initialize from an argument (e.g. a dictionary)
            super(DecayChainAmplitude, self).__init__(argument)
        else:
            # Default initialization
            super(DecayChainAmplitude, self).__init__()
1440
1441 - def filter(self, name, value):
1442 """Filter for valid amplitude property values."""
1443
1444 if name == 'amplitudes':
1445 if not isinstance(value, AmplitudeList):
1446 raise self.PhysicsObjectError, \
1447 "%s is not a valid AmplitudeList" % str(value)
1448 if name == 'decay_chains':
1449 if not isinstance(value, DecayChainAmplitudeList):
1450 raise self.PhysicsObjectError, \
1451 "%s is not a valid DecayChainAmplitudeList object" % \
1452 str(value)
1453 return True
1454
        """Return diagram property names as a nicely sorted list."""

        # Fixed ordering of the two properties of this object
        return ['amplitudes', 'decay_chains']
1459
1460
1461
        """Returns number of diagrams for this amplitude"""
        # Core diagrams plus, recursively, the diagrams of every decay chain
        return sum(len(a.get('diagrams')) for a in self.get('amplitudes')) \
               + sum(d.get_number_of_diagrams() for d in \
                     self.get('decay_chains'))
1467
        """Returns a nicely formatted string of the amplitude content."""
        mystr = ""
        for amplitude in self.get('amplitudes'):
            mystr = mystr + amplitude.nice_string(indent) + "\n"

        if self.get('decay_chains'):
            mystr = mystr + " " * indent + "Decays:\n"
        # Decay chains are printed with an extra indentation level
        for dec in self.get('decay_chains'):
            mystr = mystr + dec.nice_string(indent + 2) + "\n"

        # Strip the trailing newline
        return mystr[:-1]
1480
        """Returns a nicely formatted string of the amplitude processes."""
        mystr = ""
        for amplitude in self.get('amplitudes'):
            mystr = mystr + amplitude.nice_string_processes(indent) + "\n"

        if self.get('decay_chains'):
            mystr = mystr + " " * indent + "Decays:\n"
        # Decay chains are printed with an extra indentation level
        for dec in self.get('decay_chains'):
            mystr = mystr + dec.nice_string_processes(indent + 2) + "\n"

        # Strip the trailing newline
        return mystr[:-1]
1493
        """Returns the number of initial state particles in the process."""
        # All amplitudes share the same initial state; use the first one
        return self.get('amplitudes')[0].get('process').get_ninitial()
1497
        """Returns a set of all particle ids for which a decay is defined"""

        decay_ids = []

        # Loop over all decay amplitudes of all decay chains
        for amp in sum([dc.get('amplitudes') for dc \
                        in self['decay_chains']], []):
            # The decaying particle is the single initial-state leg
            decay_ids.append(amp.get('process').get_initial_ids()[0])

        # Remove duplicates before returning
        return list(set(decay_ids))
1511
        """ Returns whether this amplitude has a loop process."""
        # Delegate to the AmplitudeList holding the core amplitudes
        return self['amplitudes'].has_any_loop_process()
1515
        """Recursive function to extract all amplitudes for this process"""

        amplitudes = AmplitudeList()

        # Core amplitudes first, then recurse into every decay chain
        amplitudes.extend(self.get('amplitudes'))
        for decay in self.get('decay_chains'):
            amplitudes.extend(decay.get_amplitudes())

        return amplitudes
1526
1532 """List of DecayChainAmplitude objects
1533 """
1534
        """Test if object obj is a valid DecayChainAmplitude for the list."""

        # Only DecayChainAmplitude instances may be stored in this list
        return isinstance(obj, DecayChainAmplitude)
1539
1540
1541
1542
1543
1544 -class MultiProcess(base_objects.PhysicsObject):
1545 """MultiProcess: list of process definitions
1546 list of processes (after cleaning)
1547 list of amplitudes (after generation)
1548 """
1549
        """Default values for all properties"""

        # Input process definitions for this multi-process
        self['process_definitions'] = base_objects.ProcessDefinitionList()

        # Generated amplitudes; filled lazily by get() on first access
        self['amplitudes'] = AmplitudeList()

        # If True, mirror processes (swapped initial state) are collected
        # into the amplitude of the original process
        self['collect_mirror_procs'] = False

        # Particle ids for which processes with 6 or more such (anti)quarks
        # should be skipped
        self['ignore_six_quark_processes'] = []

        # If True, use numerical mass values to discard kinematically
        # closed decay processes during generation
        self['use_numerical'] = False
1566
1567 - def __init__(self, argument=None, collect_mirror_procs = False,
1568 ignore_six_quark_processes = [], optimize=False,
1569 loop_filter=None, diagram_filter=None):
1597
1598
1599 - def filter(self, name, value):
1600 """Filter for valid process property values."""
1601
1602 if name == 'process_definitions':
1603 if not isinstance(value, base_objects.ProcessDefinitionList):
1604 raise self.PhysicsObjectError, \
1605 "%s is not a valid ProcessDefinitionList object" % str(value)
1606
1607 if name == 'amplitudes':
1608 if not isinstance(value, AmplitudeList):
1609 raise self.PhysicsObjectError, \
1610 "%s is not a valid AmplitudeList object" % str(value)
1611
1612 if name in ['collect_mirror_procs']:
1613 if not isinstance(value, bool):
1614 raise self.PhysicsObjectError, \
1615 "%s is not a valid boolean" % str(value)
1616
1617 if name == 'ignore_six_quark_processes':
1618 if not isinstance(value, list):
1619 raise self.PhysicsObjectError, \
1620 "%s is not a valid list" % str(value)
1621
1622 return True
1623
1624 - def get(self, name):
1625 """Get the value of the property name."""
1626
1627 if (name == 'amplitudes') and not self[name]:
1628 for process_def in self.get('process_definitions'):
1629 if process_def.get('decay_chains'):
1630
1631
1632 self['amplitudes'].append(\
1633 DecayChainAmplitude(process_def,
1634 self.get('collect_mirror_procs'),
1635 self.get('ignore_six_quark_processes'),
1636 diagram_filter=self['diagram_filter']))
1637 else:
1638 self['amplitudes'].extend(\
1639 self.generate_multi_amplitudes(process_def,
1640 self.get('collect_mirror_procs'),
1641 self.get('ignore_six_quark_processes'),
1642 self['use_numerical'],
1643 loop_filter=self['loop_filter'],
1644 diagram_filter=self['diagram_filter']))
1645
1646 return MultiProcess.__bases__[0].get(self, name)
1647
        """Return process property names as a nicely sorted list."""

        # Fixed ordering of the externally visible properties
        return ['process_definitions', 'amplitudes']
1652
    @classmethod
    def generate_multi_amplitudes(cls,process_definition,
                                  collect_mirror_procs = False,
                                  ignore_six_quark_processes = [],
                                  use_numerical=False,
                                  loop_filter=None,
                                  diagram_filter=False):
        """Generate amplitudes in a semi-efficient way.
        Make use of crossing symmetry for processes that fail diagram
        generation, but not for processes that succeed diagram
        generation. Doing so will risk making it impossible to
        identify processes with identical amplitudes.
        """
        assert isinstance(process_definition, base_objects.ProcessDefinition), \
                                    "%s not valid ProcessDefinition object" % \
                                    repr(process_definition)

        # Determine automatic coupling orders (minimal WEIGHTED order
        # that produces diagrams), then check them against the model
        process_definition.set('orders', MultiProcess.\
                               find_optimal_process_orders(process_definition,
                               diagram_filter))
        process_definition.check_expansion_orders()

        processes = base_objects.ProcessList()
        amplitudes = AmplitudeList()

        # failed_procs: leg signatures that have already failed diagram
        # generation (reused via crossing symmetry); success_procs: the
        # signatures that succeeded
        failed_procs = []
        success_procs = []
        # Complete (ordered) id lists, for identification of mirror processes
        non_permuted_procs = []
        # Leg permutations of the successfully generated processes
        permutations = []

        model = process_definition['model']

        # Multiparticle id lists for initial and final state legs
        isids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == False]
        fsids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == True]

        # Generate all id combinations for the initial state
        for prod in itertools.product(*isids):
            islegs = [\
                    base_objects.Leg({'id':id, 'state': False}) \
                    for id in prod]

            # Generate all final-state combinations, removing double
            # counting between permuted final states
            red_fsidlist = []

            for prod in itertools.product(*fsids):

                # Skip final states already seen in another order
                if tuple(sorted(prod)) in red_fsidlist:
                    continue

                red_fsidlist.append(tuple(sorted(prod)));

                # Build the leg list for this concrete process
                leg_list = [copy.copy(leg) for leg in islegs]

                leg_list.extend([\
                        base_objects.Leg({'id':id, 'state': True}) \
                        for id in prod])

                legs = base_objects.LegList(leg_list)

                # Canonical (sorted) outgoing-id signature plus the
                # permutation mapping back to this leg order, used to
                # detect crossed processes
                sorted_legs = sorted([(l,i+1) for (i,l) in \
                                     enumerate(legs.get_outgoing_id_list(model))])
                permutation = [l[1] for l in sorted_legs]

                sorted_legs = array.array('i', [l[0] for l in sorted_legs])

                # Optionally skip processes with 6 or more quarks of the
                # selected flavors
                if ignore_six_quark_processes and \
                       len([i for i in sorted_legs if abs(i) in \
                            ignore_six_quark_processes]) >= 6:
                    continue

                # If a crossing of this process already failed, skip it
                if sorted_legs in failed_procs:
                    continue

                # Optional numerical check (assumes a 1 -> N decay): skip
                # the process when the final-state mass sum is not below
                # the initial mass
                if use_numerical:
                    initial_mass = abs(model['parameter_dict'][model.get_particle(legs[0].get('id')).get('mass')])
                    if initial_mass == 0:
                        continue
                    for leg in legs[1:]:
                        m = model['parameter_dict'][model.get_particle(leg.get('id')).get('mass')]
                        initial_mass -= abs(m)
                    if initial_mass.real <= 0:
                        continue

                # Setup the concrete process for these legs
                process = process_definition.get_process_with_legs(legs)

                fast_proc = \
                          array.array('i',[leg.get('id') for leg in legs])
                if collect_mirror_procs and \
                       process_definition.get_ninitial() == 2:
                    # Signature with the two initial legs swapped
                    mirror_proc = \
                              array.array('i', [fast_proc[1], fast_proc[0]] + \
                                          list(fast_proc[2:]))
                    # Check if the mirrored process was already generated
                    try:
                        mirror_amp = \
                                   amplitudes[non_permuted_procs.index(mirror_proc)]
                    except Exception:
                        # No mirror process found; generate normally
                        pass
                    else:
                        # Mirror process found: just flag it and move on
                        mirror_amp.set('has_mirror_process', True)
                        logger.info("Process %s added to mirror process %s" % \
                                    (process.base_string(),
                                     mirror_amp.get('process').base_string()))
                        continue

                # Try to reuse diagrams from a successful crossing, unless
                # the process has properties that break crossing symmetry
                if not process.get('required_s_channels') and \
                   not process.get('forbidden_onsh_s_channels') and \
                   not process.get('forbidden_s_channels') and \
                   not process.get('is_decay_chain'):
                    try:
                        crossed_index = success_procs.index(sorted_legs)
                        # Relabeling of loop-amplitude legs is not
                        # supported; fall through to fresh generation
                        if 'loop_diagrams' in amplitudes[crossed_index]:
                            raise ValueError
                    except ValueError:
                        # No usable crossing found; generate normally
                        pass
                    else:
                        # Crossing found: reuse its diagrams with the
                        # appropriate leg permutation
                        amplitude = MultiProcess.cross_amplitude(\
                            amplitudes[crossed_index],
                            process,
                            permutations[crossed_index],
                            permutation)
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                        logger.info("Crossed process found for %s, reuse diagrams." % \
                                    process.base_string())
                        continue

                # Fresh amplitude (class depends on tree/loop generation)
                amplitude = cls.get_amplitude_from_proc(process,
                                                        loop_filter=loop_filter)

                try:
                    result = amplitude.generate_diagrams(diagram_filter=diagram_filter)
                except InvalidCmd as error:
                    failed_procs.append(sorted_legs)
                else:
                    if amplitude.get('diagrams'):
                        # Diagram generation succeeded: register everything
                        # needed for later crossing/mirror identification
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                    elif not result:
                        # Generation failed in a crossing-independent way
                        failed_procs.append(sorted_legs)

        # Fail loudly when nothing at all was generated
        if not amplitudes:
            if len(failed_procs) == 1 and 'error' in locals():
                raise error
            else:
                raise NoDiagramException, \
                  "No amplitudes generated from process %s. Please enter a valid process" % \
                      process_definition.nice_string()

        return amplitudes
1844
1845 @classmethod
        """ Return the correct amplitude type according to the characteristics of
        the process proc. The only option that could be specified here is
        loop_filter and it is of course not relevant for a tree amplitude."""

        # Tree-level multi-processes always use the plain Amplitude class;
        # loop-capable daughter classes overload this method.
        return Amplitude({"process": proc})
1852
1853
1854 @staticmethod
        """Find the minimal WEIGHTED order for this set of processes.

        The algorithm:

        1) Check the coupling hierarchy of the model. Assign all
        particles to the different coupling hierarchies so that a
        particle is considered to be in the highest hierarchy (i.e.,
        with lowest value) where it has an interaction.

        2) Pick out the legs in the multiprocess according to the
        highest hierarchy represented (so don't mix particles from
        different hierarchy classes in the same multiparticles!)

        3) Find the starting maximum WEIGHTED order as the sum of the
        highest n-2 weighted orders

        4) Pick out required s-channel particle hierarchies, and use
        the highest of the maximum WEIGHTED order from the legs and
        the minimum WEIGHTED order extracted from 2*s-channel
        hierarchys plus the n-2-2*(number of s-channels) lowest
        leg weighted orders.

        5) Run process generation with the WEIGHTED order determined
        in 3)-4) - # final state gluons, with all gluons removed from
        the final state

        6) If no process is found, increase WEIGHTED order by 1 and go
        back to 5), until we find a process which passes. Return that
        order.

        7) Continue 5)-6) until we reach (n-2)*(highest hierarchy)-1.
        If still no process has passed, return
        WEIGHTED = (n-2)*(highest hierarchy)
        """

        assert isinstance(process_definition, base_objects.ProcessDefinition), \
               "%s not valid ProcessDefinition object" % \
               repr(process_definition)

        processes = base_objects.ProcessList()
        amplitudes = AmplitudeList()

        # If coupling orders are already specified, nothing to do
        if process_definition.get('orders') or \
               process_definition.get('overall_orders') or \
               process_definition.get('NLO_mode')=='virt':
            return process_definition.get('orders')

        # A 1 -> N decay process (not a decay chain) needs no search
        if process_definition.get_ninitial() == 1 and not \
               process_definition.get('is_decay_chain'):
            return process_definition.get('orders')

        logger.info("Checking for minimal orders which gives processes.")
        logger.info("Please specify coupling orders to bypass this step.")

        # Starting guess for the WEIGHTED order, plus per-hierarchy
        # particle sets and the hierarchy itself
        max_order_now, particles, hierarchy = \
                       process_definition.get_minimum_WEIGHTED()
        coupling = 'WEIGHTED'

        model = process_definition.get('model')

        # Multiparticle id lists for initial and final state legs
        isids = [leg['ids'] for leg in \
                 filter(lambda leg: leg['state'] == False, process_definition['legs'])]
        fsids = [leg['ids'] for leg in \
                 filter(lambda leg: leg['state'] == True, process_definition['legs'])]

        # Absolute upper bound for the search
        max_WEIGHTED_order = \
                    (len(fsids + isids) - 2)*int(model.get_max_WEIGHTED())

        # Build a human-readable definition of WEIGHTED for logging
        hierarchydef = process_definition['model'].get('order_hierarchy')
        tmp = []
        hierarchy = hierarchydef.items()
        hierarchy.sort()
        for key, value in hierarchydef.items():
            if value>1:
                tmp.append('%s*%s' % (value,key))
            else:
                tmp.append('%s' % key)
        wgtdef = '+'.join(tmp)

        # Try diagram generation with increasing max_order_now until
        # some process yields diagrams
        while max_order_now < max_WEIGHTED_order:
            logger.info("Trying coupling order WEIGHTED<=%d: WEIGTHED IS %s" % (max_order_now, wgtdef))

            # Silence INFO output during the trial generations
            oldloglevel = logger.level
            logger.setLevel(logging.WARNING)

            # Leg signatures that already failed (crossing symmetry)
            failed_procs = []

            # All id combinations for the initial state
            for prod in apply(itertools.product, isids):
                islegs = [ base_objects.Leg({'id':id, 'state': False}) \
                        for id in prod]

                # All final-state combinations, without double counting
                red_fsidlist = []

                for prod in apply(itertools.product, fsids):

                    # Skip final states already seen in another order
                    if tuple(sorted(prod)) in red_fsidlist:
                        continue

                    red_fsidlist.append(tuple(sorted(prod)));

                    # Remove final-state gluons (the cheapest particles,
                    # presumably when 21 is in the highest hierarchy class
                    # — grounded only in the membership test below) and
                    # remember how many were removed
                    nglue = 0
                    if 21 in particles[0]:
                        nglue = len([id for id in prod if id == 21])
                        prod = [id for id in prod if id != 21]

                    # Build the leg list for this trial process
                    leg_list = [copy.copy(leg) for leg in islegs]

                    leg_list.extend([\
                            base_objects.Leg({'id':id, 'state': True}) \
                            for id in prod])

                    legs = base_objects.LegList(leg_list)

                    # Compensate the order for the removed gluons
                    coupling_orders_now = {coupling: max_order_now - \
                                           nglue * model['order_hierarchy']['QCD']}

                    # Setup the trial process with restricted orders
                    process = base_objects.Process({\
                                    'legs':legs,
                                    'model':model,
                                    'id': process_definition.get('id'),
                                    'orders': coupling_orders_now,
                                    'required_s_channels': \
                                       process_definition.get('required_s_channels'),
                                    'forbidden_onsh_s_channels': \
                                       process_definition.get('forbidden_onsh_s_channels'),
                                    'sqorders_types': \
                                       process_definition.get('sqorders_types'),
                                    'squared_orders': \
                                       process_definition.get('squared_orders'),
                                    'split_orders': \
                                       process_definition.get('split_orders'),
                                    'forbidden_s_channels': \
                                       process_definition.get('forbidden_s_channels'),
                                    'forbidden_particles': \
                                       process_definition.get('forbidden_particles'),
                                    'is_decay_chain': \
                                       process_definition.get('is_decay_chain'),
                                    'overall_orders': \
                                       process_definition.get('overall_orders'),
                                    'split_orders': \
                                       process_definition.get('split_orders')})

                    process.check_expansion_orders()

                    # Canonical signature for crossing identification
                    sorted_legs = sorted(legs.get_outgoing_id_list(model))

                    # Skip if a crossing of this process already failed
                    if tuple(sorted_legs) in failed_procs:
                        continue

                    amplitude = Amplitude({'process': process})
                    try:
                        amplitude.generate_diagrams(diagram_filter=diagram_filter)
                    except InvalidCmd:
                        failed_procs.append(tuple(sorted_legs))
                    else:
                        if amplitude.get('diagrams'):
                            # Found a valid process: this order is minimal
                            logger.setLevel(oldloglevel)
                            return {coupling: max_order_now}
                        else:
                            failed_procs.append(tuple(sorted_legs))

            # No process found at this order; try the next one
            max_order_now += 1
            logger.setLevel(oldloglevel)

        # Search exhausted: return the upper bound
        return {coupling: max_order_now}
2046
2047 @staticmethod
        """Return the amplitude crossed with the permutation new_perm"""

        # Map each old leg number to the corresponding new one
        perm_map = dict(zip(org_perm, new_perm))
        # Shallow copy; process and diagrams are replaced below
        new_amp = copy.copy(amplitude)
        # Renumber the legs of the target process consecutively
        for i, leg in enumerate(process.get('legs')):
            leg.set('number', i+1)

        new_amp.set('process', process)
        # Replace the leg numbers in all diagrams according to perm_map
        diagrams = base_objects.DiagramList([d.renumber_legs(perm_map,
                                             process.get('legs'),) for \
                                             d in new_amp.get('diagrams')])
        new_amp.set('diagrams', diagrams)
        new_amp.trim_diagrams()

        # The crossed amplitude must not inherit the mirror flag
        new_amp.set('has_mirror_process', False)

        return new_amp
2070
def expand_list(mylist):
    """Takes a list of lists and elements and returns a list of flat lists.
    Example: [[1,2], 3, [4,5]] -> [[1,3,4], [1,3,5], [2,3,4], [2,3,5]]
    """

    # The argument must be a list (its elements may or may not be lists)
    assert isinstance(mylist, list), "Expand_list argument must be a list"

    # Wrap every bare element in a singleton list so the Cartesian
    # product can be taken uniformly over all entries.
    tmplist = [item if isinstance(item, list) else [item] for item in mylist]

    # itertools.product(*tmplist) replaces the removed Python 2 builtin
    # apply(itertools.product, tmplist); each product tuple becomes one
    # flat list.
    return [list(item) for item in itertools.product(*tmplist)]
2096
def expand_list_list(mylist):
    """Recursive function. Takes a list of lists and lists of lists
    and returns a list of flat lists.
    Example: [[1,2],[[4,5],[6,7]]] -> [[1,2,4,5], [1,2,6,7]]
    """

    # Empty input, or a single empty entry, expands to one empty list
    if not mylist or len(mylist) == 1 and not mylist[0]:
        return [[]]

    assert isinstance(mylist[0], list), \
           "Expand_list_list needs a list of lists and lists of lists"

    # Base case: a single entry. A list of lists is returned unwrapped,
    # a plain list is returned wrapped (i.e. as-is).
    if len(mylist) == 1:
        return mylist[0] if isinstance(mylist[0][0], list) else mylist

    # Recursive step: every alternative for the head entry is combined
    # with every expansion of the tail.
    tails = expand_list_list(mylist[1:])
    if isinstance(mylist[0][0], list):
        heads = mylist[0]
    else:
        heads = [mylist[0]]
    return [head + tail for head in heads for tail in tails]
2136