1
2
3
4
5
6
7
8
9
10
11
12
13
14
15 """Classes for diagram generation. Amplitude performs the diagram
16 generation, DecayChainAmplitude keeps track of processes with decay
17 chains, and MultiProcess allows generation of processes with
18 multiparticle definitions. DiagramTag allows to identify diagrams
19 based on relevant properties.
20 """
21
22 import array
23 import copy
24 import itertools
25 import logging
26
27 import madgraph.core.base_objects as base_objects
28 import madgraph.various.misc as misc
29 from madgraph import InvalidCmd
30 logger = logging.getLogger('madgraph.diagram_generation')
37 """Class to tag diagrams based on objects with some __lt__ measure, e.g.
38 PDG code/interaction id (for comparing diagrams from the same amplitude),
39 or Lorentz/coupling/mass/width (for comparing AMPs from different MEs).
40 Algorithm: Create chains starting from external particles:
41 1 \ / 6
42 2 /\______/\ 7
43 3_ / | \_ 8
44 4 / 5 \_ 9
45 \ 10
46 gives ((((9,10,id910),8,id9108),(6,7,id67),id910867)
47 (((1,2,id12),(3,4,id34)),id1234),
48 5,id91086712345)
49 where idN is the id of the corresponding interaction. The ordering within
50 chains is based on chain length (depth; here, 1234 has depth 3, 910867 has
51 depth 4, 5 has depth 0), and if equal on the ordering of the chain elements.
52 The determination of central vertex is based on minimizing the chain length
53 for the longest subchain.
54 This gives a unique tag which can be used to identify diagrams
55 (instead of symmetry), as well as identify identical matrix elements from
56 different processes."""
57
59 """Exception for any problems in DiagramTags"""
60 pass
61
    def __init__(self, diagram, model = None, ninitial = 2):
        """Initialize with a diagram. Create DiagramTagChainLinks according to
        the diagram, and figure out if we need to shift the central vertex.

        diagram: the base_objects.Diagram to tag.
        model: passed through to link_from_leg/vertex_id_from_vertex
            (may be None for daughter classes that do not need it).
        ninitial: number of initial-state particles (default 2)."""

        # Map leg number -> chain link built so far, so that a later vertex
        # can pick up the subchain that produced a given propagator.
        leg_dict = {}

        for vertex in diagram.get('vertices'):

            # All legs except the last one (the propagator this vertex creates)
            legs = vertex.get('legs')[:-1]
            lastvx = vertex == diagram.get('vertices')[-1]
            if lastvx:
                # For the last vertex there is no outgoing propagator:
                # every leg is an incoming link.
                legs = vertex.get('legs')

            # Combine the subchains of this vertex's legs into a new link.
            # External legs not seen before become end links on the fly
            # (via setdefault).
            link = DiagramTagChainLink([leg_dict.setdefault(leg.get('number'),
                                        DiagramTagChainLink(self.link_from_leg(leg, model))) \
                                        for leg in legs],
                                       self.vertex_id_from_vertex(vertex,
                                                                  lastvx,
                                                                  model,
                                                                  ninitial))

            # Register the resulting propagator under its leg number
            if not lastvx:
                leg_dict[vertex.get('legs')[-1].get('number')] = link

        # The link built at the last vertex is the full tag,
        # rooted at that vertex.
        self.tag = link

        # Now shift the root ("central vertex") until the longest subchain
        # is as short as possible, which makes the tag unique.  We are done
        # when all direct subchains are end links (depth 0).
        done = max([l.depth for l in self.tag.links]) == 0
        while not done:
            # The deepest chain comes first thanks to the sorting in
            # DiagramTagChainLink.
            longest_chain = self.tag.links[0]
            # Create a new link for the current root without its longest
            # chain (the vertex id is flipped accordingly).
            new_link = DiagramTagChainLink(self.tag.links[1:],
                                           self.flip_vertex(\
                                               self.tag.vertex_id,
                                               longest_chain.vertex_id,
                                               self.tag.links[1:]))

            # Candidate tag rooted one step into the longest chain
            other_links = list(longest_chain.links) + [new_link]
            other_link = DiagramTagChainLink(other_links,
                                             self.flip_vertex(\
                                                 longest_chain.vertex_id,
                                                 self.tag.vertex_id,
                                                 other_links))

            if other_link.links[0] < self.tag.links[0]:
                # The candidate has a shorter longest subchain: keep it
                # and continue shifting.
                self.tag = other_link
            else:
                # Minimal longest subchain reached; tag is canonical.
                done = True
122
124 """Output a diagram from a DiagramTag. Note that each daughter
125 class must implement the static functions id_from_vertex_id
126 (if the vertex id is something else than an integer) and
127 leg_from_link (to pass the correct info from an end link to a
128 leg)."""
129
130
131 diagram = base_objects.Diagram({'vertices': \
132 self.vertices_from_link(self.tag,
133 model,
134 True)})
135 diagram.calculate_orders(model)
136 return diagram
137
138 @classmethod
140 """Recursively return the leg corresponding to this link and
141 the list of all vertices from all previous links"""
142
143 if link.end_link:
144
145 return cls.leg_from_link(link), []
146
147
148 leg_vertices = [cls.vertices_from_link(l, model) for l in link.links]
149
150
151 legs = base_objects.LegList(sorted([l for l,v in leg_vertices],
152 lambda l1,l2: l2.get('number') - \
153 l1.get('number')))
154
155 vertices = base_objects.VertexList(sum([v for l, v in leg_vertices],
156 []))
157
158 if not first_vertex:
159
160
161 last_leg = cls.leg_from_legs(legs,
162 cls.id_from_vertex_id(link.vertex_id),
163 model)
164 legs.append(last_leg)
165
166
167 vertices.append(cls.vertex_from_link(legs,
168 link.vertex_id,
169 model))
170
171 if first_vertex:
172
173 return vertices
174 else:
175
176 return last_leg, vertices
177
178 @staticmethod
180 """Return a leg from a leg list and the model info"""
181
182 pdgs = [part.get_pdg_code() for part in \
183 model.get_interaction(vertex_id).get('particles')]
184
185 for pdg in [leg.get('id') for leg in legs]:
186 pdgs.remove(pdg)
187
188 assert len(pdgs) == 1
189
190 pdg = model.get_particle(pdgs[0]).get_anti_pdg_code()
191 number = min([l.get('number') for l in legs])
192
193 state = (len([l for l in legs if l.get('state') == False]) != 1)
194
195 onshell= False
196
197 return base_objects.Leg({'id': pdg,
198 'number': number,
199 'state': state,
200 'onshell': onshell})
201
202 @classmethod
208
209 @staticmethod
211 """Return a leg from a link"""
212
213 if link.end_link:
214
215 return base_objects.Leg({'number':link.links[0][1],
216 'id':link.links[0][0][0],
217 'state':(link.links[0][0][1] == 0),
218 'onshell':False})
219
220
221 assert False
222
223 @staticmethod
225 """Return the numerical vertex id from a link.vertex_id"""
226 return vertex_id[0]
227
228 @staticmethod
230 """Reorder a permutation with respect to start_perm. Note that
231 both need to start from 1."""
232 if perm == start_perm:
233 return range(len(perm))
234 order = [i for (p,i) in \
235 sorted([(p,i) for (i,p) in enumerate(perm)])]
236 return [start_perm[i]-1 for i in order]
237
238 @staticmethod
240 """Returns the default end link for a leg: ((id, state), number).
241 Note that the number is not taken into account if tag comparison,
242 but is used only to extract leg permutations."""
243 if leg.get('state'):
244
245 return [((leg.get('id'), 0), leg.get('number'))]
246 else:
247
248 return [((leg.get('id'), leg.get('number')), leg.get('number'))]
249
250 @staticmethod
252 """Returns the default vertex id: just the interaction id
253 Note that in the vertex id, like the leg, only the first entry is
254 taken into account in the tag comparison, while the second is for
255 storing information that is not to be used in comparisons."""
256
257 return (vertex.get('id'),)
258
259 @staticmethod
261 """Returns the default vertex flip: just the new_vertex"""
262 return new_vertex
263
265 """Equal if same tag"""
266 if type(self) != type(other):
267 return False
268 return self.tag == other.tag
269
271 return not self.__eq__(other)
272
275
277 return self.tag < other.tag
278
280 return self.tag > other.tag
281
282 __repr__ = __str__
283
285 """Chain link for a DiagramTag. A link is a tuple + vertex id + depth,
286 with a comparison operator defined"""
287
288 - def __init__(self, objects, vertex_id = None):
289 """Initialize, either with a tuple of DiagramTagChainLinks and
290 a vertex_id (defined by DiagramTag.vertex_id_from_vertex), or
291 with an external leg object (end link) defined by
292 DiagramTag.link_from_leg"""
293
294 if vertex_id == None:
295
296 self.links = tuple(objects)
297 self.vertex_id = (0,)
298 self.depth = 0
299 self.end_link = True
300 return
301
302 self.links = tuple(sorted(list(tuple(objects)), reverse=True))
303 self.vertex_id = vertex_id
304
305
306 self.depth = sum([l.depth for l in self.links],
307 max(1, len(self.links)-1))
308 self.end_link = False
309
311 """Get the permutation of external numbers (assumed to be the
312 second entry in the end link tuples)"""
313
314 if self.end_link:
315 return [self.links[0][1]]
316
317 return sum([l.get_external_numbers() for l in self.links], [])
318
320 """Compare self with other in the order:
321 1. depth 2. len(links) 3. vertex id 4. measure of links"""
322
323 if self == other:
324 return False
325
326 if self.depth != other.depth:
327 return self.depth < other.depth
328
329 if len(self.links) != len(other.links):
330 return len(self.links) < len(other.links)
331
332 if self.vertex_id[0] != other.vertex_id[0]:
333 return self.vertex_id[0] < other.vertex_id[0]
334
335 for i, link in enumerate(self.links):
336 if i > len(other.links) - 1:
337 return False
338 if link != other.links[i]:
339 return link < other.links[i]
340
342 return self != other and not self.__lt__(other)
343
345 """For end link,
346 consider equal if self.links[0][0] == other.links[0][0],
347 i.e., ignore the leg number (in links[0][1])."""
348
349 if self.end_link and other.end_link and self.depth == other.depth \
350 and self.vertex_id == other.vertex_id:
351 return self.links[0][0] == other.links[0][0]
352
353 return self.end_link == other.end_link and self.depth == other.depth \
354 and self.vertex_id[0] == other.vertex_id[0] \
355 and self.links == other.links
356
358 return not self.__eq__(other)
359
360
362 if self.end_link:
363 return str(self.links)
364 return "%s, %s; %d" % (str(self.links),
365 str(self.vertex_id),
366 self.depth)
367
368 __repr__ = __str__
369
370
371
372
373 -class Amplitude(base_objects.PhysicsObject):
374 """Amplitude: process + list of diagrams (ordered)
375 Initialize with a process, then call generate_diagrams() to
376 generate the diagrams for the amplitude
377 """
378
380 """Default values for all properties"""
381
382 self['process'] = base_objects.Process()
383 self['diagrams'] = None
384
385
386 self['has_mirror_process'] = False
387
400
401 - def filter(self, name, value):
417
418 - def get(self, name):
427
428
429
431 """Return diagram property names as a nicely sorted list."""
432
433 return ['process', 'diagrams', 'has_mirror_process']
434
436 """Returns number of diagrams for this amplitude"""
437 return len(self.get('diagrams'))
438
440 """Return an AmplitudeList with just this amplitude.
441 Needed for DecayChainAmplitude."""
442
443 return AmplitudeList([self])
444
446 """Returns a nicely formatted string of the amplitude content."""
447 return self.get('process').nice_string(indent) + "\n" + \
448 self.get('diagrams').nice_string(indent)
449
451 """Returns a nicely formatted string of the amplitude process."""
452 return self.get('process').nice_string(indent)
453
455 """Returns the number of initial state particles in the process."""
456 return self.get('process').get_ninitial()
457
459 """ Returns wether this amplitude has a loop process."""
460
461 return self.get('process').get('perturbation_couplings')
462
464 """Generate diagrams. Algorithm:
465
466 1. Define interaction dictionaries:
467 * 2->0 (identity), 3->0, 4->0, ... , maxlegs->0
468 * 2 -> 1, 3 -> 1, ..., maxlegs-1 -> 1
469
470 2. Set flag from_group=true for all external particles.
471 Flip particle/anti particle for incoming particles.
472
473 3. If there is a dictionary n->0 with n=number of external
474 particles, create if possible the combination [(1,2,3,4,...)]
475 with *at least two* from_group==true. This will give a
476 finished (set of) diagram(s) (done by reduce_leglist)
477
478 4. Create all allowed groupings of particles with at least one
479 from_group==true (according to dictionaries n->1):
480 [(1,2),3,4...],[1,(2,3),4,...],...,
481 [(1,2),(3,4),...],...,[(1,2,3),4,...],...
482 (done by combine_legs)
483
484 5. Replace each group with a (list of) new particle(s) with number
485 n = min(group numbers). Set from_group true for these
486 particles and false for all other particles. Store vertex info.
487 (done by merge_comb_legs)
488
489 6. Stop algorithm when at most 2 particles remain.
490 Return all diagrams (lists of vertices).
491
492 7. Repeat from 3 (recursion done by reduce_leglist)
493
494 8. Replace final p=p vertex
495
496 Be aware that the resulting vertices have all particles outgoing,
497 so need to flip for incoming particles when used.
498
499 SPECIAL CASE: For A>BC... processes which are legs in decay
500 chains, we need to ensure that BC... combine first, giving A=A
501 as a final vertex. This case is defined by the Process
502 property is_decay_chain = True.
503 This function can also be called by the generate_diagram function
504 of LoopAmplitudes, in which case the generated diagrams here must not
505 be directly assigned to the 'diagrams' attributed but returned as a
506 DiagramList by the function. This is controlled by the argument
507 returndiag.
508 """
509 process = self.get('process')
510 model = process.get('model')
511 legs = process.get('legs')
512
513
514 for key in process.get('overall_orders').keys():
515 try:
516 process.get('orders')[key] = \
517 min(process.get('orders')[key],
518 process.get('overall_orders')[key])
519 except KeyError:
520 process.get('orders')[key] = process.get('overall_orders')[key]
521
522 assert model.get('particles'), \
523 "particles are missing in model: %s" % model.get('particles')
524
525 assert model.get('interactions'), \
526 "interactions are missing in model"
527
528
529 res = base_objects.DiagramList()
530
531 if len(filter(lambda leg: model.get('particle_dict')[\
532 leg.get('id')].is_fermion(), legs)) % 2 == 1:
533 if not returndiag:
534 self['diagrams'] = res
535 raise InvalidCmd, 'The number of fermion is odd'
536 else:
537 raise InvalidCmd, 'The number of fermion is odd'
538
539
540
541
542 if not model.get('got_majoranas') and \
543 len(filter(lambda leg: leg.is_incoming_fermion(model), legs)) != \
544 len(filter(lambda leg: leg.is_outgoing_fermion(model), legs)):
545 if not returndiag:
546 self['diagrams'] = res
547 raise InvalidCmd, 'The number of of incoming/outcoming fermions are different'
548 else:
549 raise InvalidCmd, 'The number of of incoming/outcoming fermions are different'
550
551
552
553 for charge in model.get('conserved_charge'):
554 total = 0
555 for leg in legs:
556 part = model.get('particle_dict')[leg.get('id')]
557 try:
558 value = part.get(charge)
559 except (AttributeError, base_objects.PhysicsObject.PhysicsObjectError):
560 try:
561 value = getattr(part, charge)
562 except AttributeError:
563 value = 0
564
565 if (leg.get('id') != part['pdg_code']) != leg['state']:
566 total -= value
567 else:
568 total += value
569
570 if abs(total) > 1e-10:
571 if not returndiag:
572 self['diagrams'] = res
573 raise InvalidCmd, 'No %s conservation for this process ' % charge
574 return res
575 else:
576 raise InvalidCmd, 'No %s conservation for this process ' % charge
577 return res, res
578
579 if not returndiag:
580 logger.info("Trying %s " % process.nice_string().replace('Process', 'process'))
581
582
583 for i in range(0, len(process.get('legs'))):
584
585 leg = copy.copy(process.get('legs')[i])
586 process.get('legs')[i] = leg
587 if leg.get('number') == 0:
588 leg.set('number', i + 1)
589
590
591
592 leglist = self.copy_leglist(process.get('legs'))
593
594 for leg in leglist:
595
596
597
598 leg.set('from_group', True)
599
600
601
602 if leg.get('state') == False:
603 part = model.get('particle_dict')[leg.get('id')]
604 leg.set('id', part.get_anti_pdg_code())
605
606
607
608 max_multi_to1 = max([len(key) for key in \
609 model.get('ref_dict_to1').keys()])
610
611
612
613
614
615
616
617
618 is_decay_proc = process.get_ninitial() == 1
619 if is_decay_proc:
620 part = model.get('particle_dict')[leglist[0].get('id')]
621
622
623
624 ref_dict_to0 = {(part.get_pdg_code(),part.get_anti_pdg_code()):[0],
625 (part.get_anti_pdg_code(),part.get_pdg_code()):[0]}
626
627
628 leglist[0].set('from_group', None)
629 reduced_leglist = self.reduce_leglist(leglist,
630 max_multi_to1,
631 ref_dict_to0,
632 is_decay_proc,
633 process.get('orders'))
634 else:
635 reduced_leglist = self.reduce_leglist(leglist,
636 max_multi_to1,
637 model.get('ref_dict_to0'),
638 is_decay_proc,
639 process.get('orders'))
640
641
642
643
644 self.convert_dgleg_to_leg(reduced_leglist)
645
646 if reduced_leglist:
647 for vertex_list in reduced_leglist:
648 res.append(self.create_diagram(base_objects.VertexList(vertex_list)))
649
650
651
652 failed_crossing = not res
653
654
655
656
657
658
659 if process.get('required_s_channels') and \
660 process.get('required_s_channels')[0]:
661
662
663 lastvx = -1
664
665
666
667 if is_decay_proc: lastvx = -2
668 ninitial = len(filter(lambda leg: leg.get('state') == False,
669 process.get('legs')))
670
671 old_res = res
672 res = base_objects.DiagramList()
673 for id_list in process.get('required_s_channels'):
674 res_diags = filter(lambda diagram: \
675 all([req_s_channel in \
676 [vertex.get_s_channel_id(\
677 process.get('model'), ninitial) \
678 for vertex in diagram.get('vertices')[:lastvx]] \
679 for req_s_channel in \
680 id_list]), old_res)
681
682 res.extend([diag for diag in res_diags if diag not in res])
683
684
685
686
687
688 if process.get('forbidden_s_channels'):
689 ninitial = len(filter(lambda leg: leg.get('state') == False,
690 process.get('legs')))
691 if ninitial == 2:
692 res = base_objects.DiagramList(\
693 filter(lambda diagram: \
694 not any([vertex.get_s_channel_id(\
695 process.get('model'), ninitial) \
696 in process.get('forbidden_s_channels')
697 for vertex in diagram.get('vertices')[:-1]]),
698 res))
699 else:
700
701
702 newres= []
703 for diagram in res:
704 leg1 = 1
705
706
707
708 vertex = diagram.get('vertices')[-1]
709 if any([l['number'] ==1 for l in vertex.get('legs')]):
710 leg1 = [l['number'] for l in vertex.get('legs') if l['number'] !=1][0]
711 to_loop = range(len(diagram.get('vertices'))-1)
712 if leg1 >1:
713 to_loop.reverse()
714 for i in to_loop:
715 vertex = diagram.get('vertices')[i]
716 if leg1:
717 if any([l['number'] ==leg1 for l in vertex.get('legs')]):
718 leg1 = 0
719 continue
720 if vertex.get_s_channel_id(process.get('model'), ninitial)\
721 in process.get('forbidden_s_channels'):
722 break
723 else:
724 newres.append(diagram)
725 newres = base_objects.DiagramList(newres)
726
727
728
729
730 if process.get('forbidden_onsh_s_channels'):
731 ninitial = len(filter(lambda leg: leg.get('state') == False,
732 process.get('legs')))
733
734 verts = base_objects.VertexList(sum([[vertex for vertex \
735 in diagram.get('vertices')[:-1]
736 if vertex.get_s_channel_id(\
737 process.get('model'), ninitial) \
738 in process.get('forbidden_onsh_s_channels')] \
739 for diagram in res], []))
740 for vert in verts:
741
742 newleg = copy.copy(vert.get('legs').pop(-1))
743 newleg.set('onshell', False)
744 vert.get('legs').append(newleg)
745
746
747 for diagram in res:
748 diagram.calculate_orders(model)
749
750
751
752
753
754
755
756
757 if not returndiag and len(res)>0:
758 res = self.apply_squared_order_constraints(res)
759
760
761 if not process.get('is_decay_chain'):
762 for diagram in res:
763 vertices = diagram.get('vertices')
764 if len(vertices) > 1 and vertices[-1].get('id') == 0:
765
766
767
768
769 vertices = copy.copy(vertices)
770 lastvx = vertices.pop()
771 nexttolastvertex = copy.copy(vertices.pop())
772 legs = copy.copy(nexttolastvertex.get('legs'))
773 ntlnumber = legs[-1].get('number')
774 lastleg = filter(lambda leg: leg.get('number') != ntlnumber,
775 lastvx.get('legs'))[0]
776
777 if lastleg.get('onshell') == False:
778 lastleg.set('onshell', None)
779
780 legs[-1] = lastleg
781 nexttolastvertex.set('legs', legs)
782 vertices.append(nexttolastvertex)
783 diagram.set('vertices', vertices)
784
785 if res and not returndiag:
786 logger.info("Process has %d diagrams" % len(res))
787
788
789 self.trim_diagrams(diaglist=res)
790
791
792 pertur = 'QCD'
793 if self.get('process')['perturbation_couplings']:
794 pertur = sorted(self.get('process')['perturbation_couplings'])[0]
795 self.get('process').get('legs').sort(pert=pertur)
796
797
798 if not returndiag:
799 self['diagrams'] = res
800 return not failed_crossing
801 else:
802 return not failed_crossing, res
803
805 """Applies the user specified squared order constraints on the diagram
806 list in argument."""
807
808 res = copy.copy(diag_list)
809
810
811
812
813 while True:
814 new_res = res.apply_positive_sq_orders(res,
815 self['process'].get('squared_orders'),
816 self['process']['sqorders_types'])
817
818 if len(res)==len(new_res):
819 break
820 elif (len(new_res)>len(res)):
821 raise MadGraph5Error(
822 'Inconsistency in function apply_squared_order_constraints().')
823
824 res = new_res
825
826
827 neg_orders = [(order, value) for order, value in \
828 self['process'].get('squared_orders').items() if value<0]
829 if len(neg_orders)==1:
830 neg_order, neg_value = neg_orders[0]
831
832 res, target_order = res.apply_negative_sq_order(res, neg_order,\
833 neg_value, self['process']['sqorders_types'][neg_order])
834
835
836
837
838 self['process']['squared_orders'][neg_order]=target_order
839 elif len(neg_orders)>1:
840 raise InvalidCmd('At most one negative squared order constraint'+\
841 ' can be specified, not %s.'%str(neg_orders))
842
843 return res
844
846 """ Return a Diagram created from the vertex list. This function can be
847 overloaded by daughter classes."""
848 return base_objects.Diagram({'vertices':vertexlist})
849
851 """ In LoopAmplitude, it converts back all DGLoopLegs into Legs.
852 In Amplitude, there is nothing to do. """
853
854 return True
855
857 """ Simply returns a copy of the leg list. This function is
858 overloaded in LoopAmplitude so that a DGLoopLeg list is returned.
859 The DGLoopLeg has some additional parameters only useful during
860 loop diagram generation"""
861
862 return base_objects.LegList(\
863 [ copy.copy(leg) for leg in legs ])
864
    def reduce_leglist(self, curr_leglist, max_multi_to1, ref_dict_to0,
                       is_decay_proc = False, coupling_orders = None):
        """Recursive function to reduce N LegList to N-1
        For algorithm, see doc for generate_diagrams.

        curr_leglist: the LegList still to be combined (None propagates
            failure from a previous step).
        max_multi_to1: largest number of legs any n->1 interaction takes.
        ref_dict_to0: dictionary of leg-id tuples that can combine to
            nothing (i.e. close a diagram).
        is_decay_proc: True for 1 -> N decay processes.
        coupling_orders: remaining coupling orders, or None if untracked.

        Returns a list of vertex lists (one per diagram), or None if no
        diagram can be completed from this leg list."""

        # Result: list of lists of vertices to be added
        res = []

        # Failure in a previous recursion step: propagate it
        if curr_leglist is None:
            return None

        # Extract ref dict information
        model = self.get('process').get('model')
        ref_dict_to1 = self.get('process').get('model').get('ref_dict_to1')

        # If all legs can be combined in one single vertex, add this
        # vertex to res and continue (it closes the diagram).
        # Special treatment for decay chain legs.
        if curr_leglist.can_combine_to_0(ref_dict_to0, is_decay_proc):
            # Extract the interaction id (or possibly ids, since a leg-id
            # combination can correspond to several interactions)
            vertex_ids = self.get_combined_vertices(curr_leglist,
                  copy.copy(ref_dict_to0[tuple(sorted([leg.get('id') for \
                                                       leg in curr_leglist]))]))

            final_vertices = [base_objects.Vertex({'legs':curr_leglist,
                                                   'id':vertex_id}) for \
                              vertex_id in vertex_ids]
            # Only keep closing vertices that respect the coupling orders
            for final_vertex in final_vertices:
                if self.reduce_orders(coupling_orders, model,
                                      [final_vertex.get('id')]) != False:
                    res.append([final_vertex])

        # Stop condition: only two legs remain; either they closed the
        # diagram above, or this branch is a dead end.
        if len(curr_leglist) == 2:
            if res:
                return res
            else:
                return None

        # Create a list of all valid combinations of legs
        comb_lists = self.combine_legs(curr_leglist,
                                       ref_dict_to1, max_multi_to1)

        # Create a list of leglists/vertices by merging the combinations
        leg_vertex_list = self.merge_comb_legs(comb_lists, ref_dict_to1)

        # Consider each possible merging in turn
        for leg_vertex_tuple in leg_vertex_list:

            # Remove forbidden particles (forbidden as intermediate
            # propagators; the new leg is the last one of each vertex)
            if self.get('process').get('forbidden_particles') and \
                any([abs(vertex.get('legs')[-1].get('id')) in \
                self.get('process').get('forbidden_particles') \
                for vertex in leg_vertex_tuple[1]]):
                    continue

            # Subtract the coupling orders used by the new vertices
            new_coupling_orders = self.reduce_orders(coupling_orders,
                                                     model,
                                                     [vertex.get('id') for vertex in \
                                                      leg_vertex_tuple[1]])
            if new_coupling_orders == False:
                # Some coupling order dropped below zero: prune branch
                continue

            # This is where recursion happens: reduce the reduced leg
            # list further, with the updated coupling orders
            reduced_diagram = self.reduce_leglist(leg_vertex_tuple[0],
                                                  max_multi_to1,
                                                  ref_dict_to0,
                                                  is_decay_proc,
                                                  new_coupling_orders)

            if reduced_diagram:
                # Combine the vertices of this step with every completion
                # found by the recursion (Cartesian product)
                vertex_list_list = [list(leg_vertex_tuple[1])]
                vertex_list_list.append(reduced_diagram)
                expanded_list = expand_list_list(vertex_list_list)
                res.extend(expanded_list)

        return res
953
954 - def reduce_orders(self, coupling_orders, model, vertex_id_list):
955 """Return False if the coupling orders for any coupling is <
956 0, otherwise return the new coupling orders with the vertex
957 orders subtracted. If coupling_orders is not given, return
958 None (which counts as success).
959 WEIGHTED is a special order, which corresponds to the sum of
960 order hierarchies for the couplings.
961 We ignore negative constraints as these cannot be taken into
962 account on the fly but only after generation."""
963
964 if not coupling_orders:
965 return None
966
967 present_couplings = copy.copy(coupling_orders)
968 for id in vertex_id_list:
969
970 if not id:
971 continue
972 inter = model.get("interaction_dict")[id]
973 for coupling in inter.get('orders').keys():
974
975
976 if coupling in present_couplings and \
977 present_couplings[coupling]>=0:
978
979 present_couplings[coupling] -= \
980 inter.get('orders')[coupling]
981 if present_couplings[coupling] < 0:
982
983 return False
984
985 if 'WEIGHTED' in present_couplings and \
986 present_couplings['WEIGHTED']>=0:
987 weight = sum([model.get('order_hierarchy')[c]*n for \
988 (c,n) in inter.get('orders').items()])
989 present_couplings['WEIGHTED'] -= weight
990 if present_couplings['WEIGHTED'] < 0:
991
992 return False
993
994 return present_couplings
995
    def combine_legs(self, list_legs, ref_dict_to1, max_multi_to1):
        """Recursive function. Take a list of legs as an input, with
        the reference dictionary n-1->1, and output a list of list of
        tuples of Legs (allowed combinations) and Legs (rest). Algorithm:

        1. Get all n-combinations from list [123456]: [12],..,[23],..,[123],..

        2. For each combination, say [34]. Check if combination is valid.
           If so:

           a. Append [12[34]56] to result array

           b. Split [123456] at index(first element in combination+1),
              i.e. [12],[456] and subtract combination from second half,
              i.e.: [456]-[34]=[56]. Repeat from 1. with this array

        3. Take result array from call to 1. (here, [[56]]) and append
           (first half in step b - combination) + combination + (result
           from 1.) = [12[34][56]] to result array

        4. After appending results from all n-combinations, return
           resulting array. Example, if [13] and [45] are valid
           combinations:
           [[[13]2456],[[13]2[45]6],[123[45]6]]
        """

        res = []

        # Loop over combination lengths, from pairs up to the largest
        # n -> 1 interaction in the model
        for comb_length in range(2, max_multi_to1 + 1):

            # Not enough legs left for this length (nor for any longer
            # one), so we are done
            if comb_length > len(list_legs):
                return res

            # itertools.combinations returns all unordered n-tuples,
            # preserving the original leg order within each tuple
            for comb in itertools.combinations(list_legs, comb_length):

                # Check if the combination is valid, i.e. can combine
                # into a single (new) particle
                if base_objects.LegList(comb).can_combine_to_1(ref_dict_to1):

                    # Step 2a: replace the combined legs by the tuple, at
                    # the position of the first combined leg
                    res_list = copy.copy(list_legs)
                    for leg in comb:
                        res_list.remove(leg)
                    res_list.insert(list_legs.index(comb[0]), comb)
                    res.append(res_list)

                    # Step 2b: split the leg list at the first combined
                    # leg; the second half, minus the other combined legs,
                    # is what remains to recurse on
                    res_list1 = list_legs[0:list_legs.index(comb[0])]
                    res_list2 = list_legs[list_legs.index(comb[0]) + 1:]
                    for leg in comb[1:]:
                        res_list2.remove(leg)

                    # Prefix for the recursive results: first half plus
                    # the combination itself.  (res_list aliases res_list1,
                    # which is a fresh slice, so this is safe.)
                    res_list = res_list1
                    res_list.append(comb)

                    # Step 3: prepend the prefix to every further
                    # combination found in the remainder
                    for item in self.combine_legs(res_list2,
                                                  ref_dict_to1,
                                                  max_multi_to1):
                        final_res_list = copy.copy(res_list)
                        final_res_list.extend(item)
                        res.append(final_res_list)

        return res
1069
1070
1072 """Takes a list of allowed leg combinations as an input and returns
1073 a set of lists where combinations have been properly replaced
1074 (one list per element in the ref_dict, so that all possible intermediate
1075 particles are included). For each list, give the list of vertices
1076 corresponding to the executed merging, group the two as a tuple.
1077 """
1078
1079 res = []
1080
1081 for comb_list in comb_lists:
1082
1083 reduced_list = []
1084 vertex_list = []
1085
1086 for entry in comb_list:
1087
1088
1089 if isinstance(entry, tuple):
1090
1091
1092
1093 leg_vert_ids = copy.copy(ref_dict_to1[\
1094 tuple(sorted([leg.get('id') for leg in entry]))])
1095
1096
1097 number = min([leg.get('number') for leg in entry])
1098
1099
1100 if len(filter(lambda leg: leg.get('state') == False,
1101 entry)) == 1:
1102 state = False
1103 else:
1104 state = True
1105
1106
1107
1108
1109
1110 new_leg_vert_ids = []
1111 if leg_vert_ids:
1112 new_leg_vert_ids = self.get_combined_legs(entry,
1113 leg_vert_ids,
1114 number,
1115 state)
1116
1117 reduced_list.append([l[0] for l in new_leg_vert_ids])
1118
1119
1120
1121
1122
1123 vlist = base_objects.VertexList()
1124 for (myleg, vert_id) in new_leg_vert_ids:
1125
1126 myleglist = base_objects.LegList(list(entry))
1127
1128 myleglist.append(myleg)
1129
1130 vlist.append(base_objects.Vertex(
1131 {'legs':myleglist,
1132 'id':vert_id}))
1133
1134 vertex_list.append(vlist)
1135
1136
1137
1138 else:
1139 cp_entry = copy.copy(entry)
1140
1141
1142
1143 if cp_entry.get('from_group') != None:
1144 cp_entry.set('from_group', False)
1145 reduced_list.append(cp_entry)
1146
1147
1148 flat_red_lists = expand_list(reduced_list)
1149 flat_vx_lists = expand_list(vertex_list)
1150
1151
1152 for i in range(0, len(flat_vx_lists)):
1153 res.append((base_objects.LegList(flat_red_lists[i]), \
1154 base_objects.VertexList(flat_vx_lists[i])))
1155
1156 return res
1157
1159 """Create a set of new legs from the info given. This can be
1160 overloaded by daughter classes."""
1161
1162 mylegs = [(base_objects.Leg({'id':leg_id,
1163 'number':number,
1164 'state':state,
1165 'from_group':True}),
1166 vert_id)\
1167 for leg_id, vert_id in leg_vert_ids]
1168
1169 return mylegs
1170
1172 """Allow for selection of vertex ids. This can be
1173 overloaded by daughter classes."""
1174
1175 return vert_ids
1176
1178 """Reduce the number of legs and vertices used in memory.
1179 When called by a diagram generation initiated by LoopAmplitude,
1180 this function should not trim the diagrams in the attribute 'diagrams'
1181 but rather a given list in the 'diaglist' argument."""
1182
1183 legs = []
1184 vertices = []
1185
1186 if diaglist is None:
1187 diaglist=self.get('diagrams')
1188
1189
1190 process = self.get('process')
1191 for leg in process.get('legs'):
1192 if leg.get('state') and leg.get('id') in decay_ids:
1193 leg.set('onshell', True)
1194
1195 for diagram in diaglist:
1196
1197 leg_external = set()
1198 for ivx, vertex in enumerate(diagram.get('vertices')):
1199 for ileg, leg in enumerate(vertex.get('legs')):
1200
1201 if leg.get('state') and leg.get('id') in decay_ids and \
1202 leg.get('number') not in leg_external:
1203
1204
1205 leg = copy.copy(leg)
1206 leg.set('onshell', True)
1207 try:
1208 index = legs.index(leg)
1209 except ValueError:
1210 vertex.get('legs')[ileg] = leg
1211 legs.append(leg)
1212 else:
1213 vertex.get('legs')[ileg] = legs[index]
1214 leg_external.add(leg.get('number'))
1215 try:
1216 index = vertices.index(vertex)
1217 diagram.get('vertices')[ivx] = vertices[index]
1218 except ValueError:
1219 vertices.append(vertex)
1220
1221
1222
1223
1224 -class AmplitudeList(base_objects.PhysicsObjectList):
1225 """List of Amplitude objects
1226 """
1227
1229 """ Check the content of all processes of the amplitudes in this list to
1230 see if there is any which defines perturbation couplings. """
1231
1232 for amp in self:
1233 if amp.has_loop_process():
1234 return True
1235
1237 """Test if object obj is a valid Amplitude for the list."""
1238
1239 return isinstance(obj, Amplitude)
1240
1245 """A list of amplitudes + a list of decay chain amplitude lists;
1246 corresponding to a ProcessDefinition with a list of decay chains
1247 """
1248
1254
    def __init__(self, argument = None, collect_mirror_procs = False,
                 ignore_six_quark_processes = False):
        """Allow initialization with Process and with ProcessDefinition.

        argument: a Process/ProcessDefinition whose 'decay_chains' are
            recursively turned into DecayChainAmplitude objects, or a dict
            forwarded to the PhysicsObject constructor, or None.
        collect_mirror_procs / ignore_six_quark_processes: forwarded to
            the MultiProcess amplitude generation.
        """

        if isinstance(argument, base_objects.Process):
            super(DecayChainAmplitude, self).__init__()
            # Pick the generation machinery: loop-capable when the process
            # carries perturbation couplings, tree-level otherwise.
            from madgraph.loop.loop_diagram_generation import LoopMultiProcess
            if argument['perturbation_couplings']:
                MultiProcessClass=LoopMultiProcess
            else:
                MultiProcessClass=MultiProcess
            if isinstance(argument, base_objects.ProcessDefinition):
                self['amplitudes'].extend(\
                    MultiProcessClass.generate_multi_amplitudes(argument,
                                                collect_mirror_procs,
                                                ignore_six_quark_processes))
            else:
                self['amplitudes'].append(\
                    MultiProcessClass.get_amplitude_from_proc(argument))
                # Clean the decay chains from the core process, since we
                # haven't combined processes with decay chains yet.
                process = copy.copy(self.get('amplitudes')[0].get('process'))
                process.set('decay_chains', base_objects.ProcessList())
                self['amplitudes'][0].set('process', process)

            # Recursively build one DecayChainAmplitude per decay chain.
            for process in argument.get('decay_chains'):
                if process.get('perturbation_couplings'):
                    # NOTE(review): MadGraph5Error is presumably imported
                    # at the top of the file (not visible here) -- confirm.
                    raise MadGraph5Error,\
                          "Decay processes can not be perturbed"
                process.set('overall_orders', argument.get('overall_orders'))
                if not process.get('is_decay_chain'):
                    process.set('is_decay_chain',True)
                if not process.get_ninitial() == 1:
                    raise InvalidCmd,\
                          "Decay chain process must have exactly one" + \
                          " incoming particle"
                self['decay_chains'].append(\
                    DecayChainAmplitude(process, collect_mirror_procs,
                                        ignore_six_quark_processes))

            # Collect the ids of all particles that have a defined decay:
            # the single initial-state leg of every decay amplitude.
            decay_ids = sum([[a.get('process').get('legs')[0].get('id') \
                              for a in dec.get('amplitudes')] for dec in \
                             self['decay_chains']], [])
            decay_ids = set(decay_ids)
            for amp in self['amplitudes']:
                amp.trim_diagrams(decay_ids)

            # Remove from decay_ids every id that appears in some core
            # process leg; whatever remains has no particle to decay.
            for amp in self['amplitudes']:
                for l in amp.get('process').get('legs'):
                    if l.get('id') in decay_ids:
                        decay_ids.remove(l.get('id'))

            if decay_ids:
                model = amp.get('process').get('model')
                names = [model.get_particle(id).get('name') for id in decay_ids]

                logger.warning(
     "$RED Decay without corresponding particle in core process found.\n" + \
     "Decay information for particle(s) %s is discarded.\n" % ','.join(names) + \
     "Please check your process definition carefully. \n" + \
     "This warning usually means that you forgot parentheses in presence of subdecay.\n" + \
     "Example of correct syntax: p p > t t~, ( t > w+ b, w+ > l+ vl)")

                # Drop the unused decay amplitudes; reversed iteration so
                # that in-place .remove() does not skip elements.
                for dc in reversed(self['decay_chains']):
                    for a in reversed(dc.get('amplitudes')):
                        if a.get('process').get('legs')[0].get('id') in decay_ids:
                            dc.get('amplitudes').remove(a)
                    if not dc.get('amplitudes'):
                        # No amplitudes left: drop the whole decay chain.
                        self['decay_chains'].remove(dc)

            # Warn about decays where the decaying particle reappears in
            # its own final state (endless recursion when combined).
            bad_procs = []
            for dc in self['decay_chains']:
                for amp in dc.get('amplitudes'):
                    legs = amp.get('process').get('legs')
                    fs_parts = [abs(l.get('id')) for l in legs if
                                l.get('state')]
                    is_part = [l.get('id') for l in legs if not
                               l.get('state')][0]
                    if abs(is_part) in fs_parts:
                        bad_procs.append(amp.get('process'))

            if bad_procs:
                logger.warning(
     "$RED Decay(s) with particle decaying to itself:\n" + \
     '\n'.join([p.nice_string() for p in bad_procs]) + \
     "\nPlease check your process definition carefully. \n")

        elif argument != None:
            # Initialization from a dict: delegate to the mother routine.
            super(DecayChainAmplitude, self).__init__(argument)
        else:
            # Empty initialization: delegate to the mother routine.
            super(DecayChainAmplitude, self).__init__()
1356
1357 - def filter(self, name, value):
1358 """Filter for valid amplitude property values."""
1359
1360 if name == 'amplitudes':
1361 if not isinstance(value, AmplitudeList):
1362 raise self.PhysicsObjectError, \
1363 "%s is not a valid AmplitudeList" % str(value)
1364 if name == 'decay_chains':
1365 if not isinstance(value, DecayChainAmplitudeList):
1366 raise self.PhysicsObjectError, \
1367 "%s is not a valid DecayChainAmplitudeList object" % \
1368 str(value)
1369 return True
1370
1372 """Return diagram property names as a nicely sorted list."""
1373
1374 return ['amplitudes', 'decay_chains']
1375
1376
1377
1379 """Returns number of diagrams for this amplitude"""
1380 return sum(len(a.get('diagrams')) for a in self.get('amplitudes')) \
1381 + sum(d.get_number_of_diagrams() for d in \
1382 self.get('decay_chains'))
1383
1385 """Returns a nicely formatted string of the amplitude content."""
1386 mystr = ""
1387 for amplitude in self.get('amplitudes'):
1388 mystr = mystr + amplitude.nice_string(indent) + "\n"
1389
1390 if self.get('decay_chains'):
1391 mystr = mystr + " " * indent + "Decays:\n"
1392 for dec in self.get('decay_chains'):
1393 mystr = mystr + dec.nice_string(indent + 2) + "\n"
1394
1395 return mystr[:-1]
1396
1398 """Returns a nicely formatted string of the amplitude processes."""
1399 mystr = ""
1400 for amplitude in self.get('amplitudes'):
1401 mystr = mystr + amplitude.nice_string_processes(indent) + "\n"
1402
1403 if self.get('decay_chains'):
1404 mystr = mystr + " " * indent + "Decays:\n"
1405 for dec in self.get('decay_chains'):
1406 mystr = mystr + dec.nice_string_processes(indent + 2) + "\n"
1407
1408 return mystr[:-1]
1409
1411 """Returns the number of initial state particles in the process."""
1412 return self.get('amplitudes')[0].get('process').get_ninitial()
1413
1415 """Returns a set of all particle ids for which a decay is defined"""
1416
1417 decay_ids = []
1418
1419
1420 for amp in sum([dc.get('amplitudes') for dc \
1421 in self['decay_chains']], []):
1422
1423 decay_ids.append(amp.get('process').get_initial_ids()[0])
1424
1425
1426 return list(set(decay_ids))
1427
1429 """ Returns wether this amplitude has a loop process."""
1430 return self['amplitudes'].has_any_loop_process()
1431
1433 """Recursive function to extract all amplitudes for this process"""
1434
1435 amplitudes = AmplitudeList()
1436
1437 amplitudes.extend(self.get('amplitudes'))
1438 for decay in self.get('decay_chains'):
1439 amplitudes.extend(decay.get_amplitudes())
1440
1441 return amplitudes
1442
1448 """List of DecayChainAmplitude objects
1449 """
1450
1452 """Test if object obj is a valid DecayChainAmplitude for the list."""
1453
1454 return isinstance(obj, DecayChainAmplitude)
1455
1456
1457
1458
1459
1460 -class MultiProcess(base_objects.PhysicsObject):
1461 """MultiProcess: list of process definitions
1462 list of processes (after cleaning)
1463 list of amplitudes (after generation)
1464 """
1465
    def default_setup(self):
        """Default values for all properties"""

        # Input process definitions, one per generate/add process command.
        self['process_definitions'] = base_objects.ProcessDefinitionList()

        # Amplitudes are filled lazily: get('amplitudes') runs diagram
        # generation on first access (see get() below).
        self['amplitudes'] = AmplitudeList()

        # When True, processes differing only by swapped initial-state legs
        # are collected into one amplitude flagged has_mirror_process.
        self['collect_mirror_procs'] = False

        # Quark PDG ids for which processes with >= 6 such (anti)quarks
        # are skipped during generation.
        self['ignore_six_quark_processes'] = []

        # When True, numerically skip kinematically closed decays
        # (final-state mass sum >= initial mass).
        self['use_numerical'] = False
1482
1483 - def __init__(self, argument=None, collect_mirror_procs = False,
1484 ignore_six_quark_processes = [], optimize=False):
1510
1511
1512 - def filter(self, name, value):
1513 """Filter for valid process property values."""
1514
1515 if name == 'process_definitions':
1516 if not isinstance(value, base_objects.ProcessDefinitionList):
1517 raise self.PhysicsObjectError, \
1518 "%s is not a valid ProcessDefinitionList object" % str(value)
1519
1520 if name == 'amplitudes':
1521 if not isinstance(value, diagram_generation.AmplitudeList):
1522 raise self.PhysicsObjectError, \
1523 "%s is not a valid AmplitudeList object" % str(value)
1524
1525 if name in ['collect_mirror_procs']:
1526 if not isinstance(value, bool):
1527 raise self.PhysicsObjectError, \
1528 "%s is not a valid boolean" % str(value)
1529
1530 if name == 'ignore_six_quark_processes':
1531 if not isinstance(value, list):
1532 raise self.PhysicsObjectError, \
1533 "%s is not a valid list" % str(value)
1534
1535 return True
1536
    def get(self, name):
        """Get the value of the property name.

        'amplitudes' is generated lazily: the first time it is accessed
        while empty, diagram generation is run for every stored process
        definition."""

        if (name == 'amplitudes') and not self[name]:
            for process_def in self.get('process_definitions'):
                if process_def.get('decay_chains'):
                    # Decay chain process: one DecayChainAmplitude per
                    # definition, which handles the decays recursively.
                    self['amplitudes'].append(\
                        DecayChainAmplitude(process_def,
                                        self.get('collect_mirror_procs'),
                                        self.get('ignore_six_quark_processes')))
                else:
                    # Plain process: expand the multiprocess definition
                    # into individual amplitudes.
                    self['amplitudes'].extend(\
                        self.generate_multi_amplitudes(process_def,
                                       self.get('collect_mirror_procs'),
                                       self.get('ignore_six_quark_processes'),
                                       self['use_numerical']))

        # Delegate the actual lookup to the parent PhysicsObject.
        return MultiProcess.__bases__[0].get(self, name)
1557
1559 """Return process property names as a nicely sorted list."""
1560
1561 return ['process_definitions', 'amplitudes']
1562
    @classmethod
    def generate_multi_amplitudes(cls,process_definition,
                                  collect_mirror_procs = False,
                                  ignore_six_quark_processes = [],
                                  use_numerical=False):
        """Generate amplitudes in a semi-efficient way.
        Make use of crossing symmetry for processes that fail diagram
        generation, but not for processes that succeed diagram
        generation. Doing so will risk making it impossible to
        identify processes with identical amplitudes.
        """
        assert isinstance(process_definition, base_objects.ProcessDefinition), \
                                    "%s not valid ProcessDefinition object" % \
                                    repr(process_definition)

        # Determine and set automatic coupling orders for this definition.
        process_definition.set('orders', MultiProcess.\
                               find_optimal_process_orders(process_definition))
        # Check the maximum expansion orders from the model.
        process_definition.check_expansion_orders()

        processes = base_objects.ProcessList()
        amplitudes = AmplitudeList()

        # Sorted-leg signatures of processes with no diagrams, so that
        # any crossing of an already-failed process is skipped at once.
        failed_procs = []
        # Signatures of successful processes, used to reuse diagrams
        # for crossed processes.
        success_procs = []
        # Complete (unsorted) leg-id arrays, for mirror identification.
        non_permuted_procs = []
        # The leg permutations matching the entries of success_procs.
        permutations = []

        model = process_definition['model']

        # Multiparticle id lists for the initial and final state legs.
        isids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == False]
        fsids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == True]

        # Generate all combinations for the initial state.
        for prod in itertools.product(*isids):
            islegs = [\
                    base_objects.Leg({'id':id, 'state': False}) \
                    for id in prod]

            # Generate all combinations for the final state, and make
            # sure to remove double counting between identical ones.
            red_fsidlist = []

            for prod in itertools.product(*fsids):

                # Remove double counting between final states.
                if tuple(sorted(prod)) in red_fsidlist:
                    continue

                red_fsidlist.append(tuple(sorted(prod)));

                # Generate the leg list for this process.
                leg_list = [copy.copy(leg) for leg in islegs]

                leg_list.extend([\
                        base_objects.Leg({'id':id, 'state': True}) \
                        for id in prod])

                legs = base_objects.LegList(leg_list)

                # Sorted outgoing ids plus the permutation that sorts
                # them: a crossing-invariant signature for this process.
                sorted_legs = sorted([(l,i+1) for (i,l) in \
                                     enumerate(legs.get_outgoing_id_list(model))])
                permutation = [l[1] for l in sorted_legs]
                sorted_legs = array.array('i', [l[0] for l in sorted_legs])

                # Skip processes with 6 or more ignored-flavor quarks.
                if ignore_six_quark_processes and \
                       len([i for i in sorted_legs if abs(i) in \
                            ignore_six_quark_processes]) >= 6:
                    continue

                # If a crossing of this process already failed,
                # this process fails too.
                if sorted_legs in failed_procs:
                    continue

                # Optionally skip kinematically closed decays.
                if use_numerical:
                    # Skip massless initial states; otherwise subtract
                    # every final-state mass from the initial mass.
                    initial_mass = abs(model['parameter_dict'][model.get_particle(legs[0].get('id')).get('mass')])
                    if initial_mass == 0:
                        continue
                    for leg in legs[1:]:
                        m = model['parameter_dict'][model.get_particle(leg.get('id')).get('mass')]
                        initial_mass -= abs(m)
                    if initial_mass.real <= 0:
                        continue

                # Set up the process, copying over all requirements from
                # the process definition.
                process = base_objects.Process({\
                                'legs':legs,
                                'model':process_definition.get('model'),
                                'id': process_definition.get('id'),
                                'orders': process_definition.get('orders'),
                                'required_s_channels': \
                                   process_definition.get('required_s_channels'),
                                'forbidden_onsh_s_channels': \
                                   process_definition.get('forbidden_onsh_s_channels'),
                                'forbidden_s_channels': \
                                   process_definition.get('forbidden_s_channels'),
                                'forbidden_particles': \
                                   process_definition.get('forbidden_particles'),
                                'is_decay_chain': \
                                   process_definition.get('is_decay_chain'),
                                'perturbation_couplings': \
                                   process_definition.get('perturbation_couplings'),
                                'squared_orders': \
                                   process_definition.get('squared_orders'),
                                'sqorders_types': \
                                   process_definition.get('sqorders_types'),
                                'overall_orders': \
                                   process_definition.get('overall_orders'),
                                'has_born': \
                                   process_definition.get('has_born'),
                                'split_orders': \
                                   process_definition.get('split_orders')
                                })
                fast_proc = \
                          array.array('i',[leg.get('id') for leg in legs])
                if collect_mirror_procs and \
                                     process_definition.get_ninitial() == 2:
                    # Check if the mirrored process has already been
                    # generated; if so, just flag it.
                    mirror_proc = \
                              array.array('i', [fast_proc[1], fast_proc[0]] + \
                                          list(fast_proc[2:]))
                    try:
                        mirror_amp = \
                               amplitudes[non_permuted_procs.index(mirror_proc)]
                    except Exception:
                        # NOTE(review): broad catch -- a missing mirror
                        # raises ValueError from index(); best-effort lookup.
                        pass
                    else:
                        # Mirror process found: flag it and skip this one.
                        mirror_amp.set('has_mirror_process', True)
                        logger.info("Process %s added to mirror process %s" % \
                                    (process.base_string(),
                                     mirror_amp.get('process').base_string()))
                        continue

                # Look for a successful crossing, unless the process has
                # requirements that break crossing symmetry.
                if not process.get('required_s_channels') and \
                   not process.get('forbidden_onsh_s_channels') and \
                   not process.get('forbidden_s_channels') and \
                   not process.get('is_decay_chain'):
                    try:
                        crossed_index = success_procs.index(sorted_legs)
                    except ValueError:
                        # No crossing found; generate from scratch below.
                        pass
                    else:
                        # Crossing found: reuse its diagrams, renumbered.
                        amplitude = MultiProcess.cross_amplitude(\
                            amplitudes[crossed_index],
                            process,
                            permutations[crossed_index],
                            permutation)
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                        logger.info("Crossed process found for %s, reuse diagrams." % \
                                    process.base_string())
                        continue

                # Create a new amplitude (tree or loop, per subclass).
                amplitude = cls.get_amplitude_from_proc(process)

                try:
                    result = amplitude.generate_diagrams()
                except InvalidCmd as error:
                    failed_procs.append(sorted_legs)
                else:
                    # Succeeded in generating diagrams.
                    if amplitude.get('diagrams'):
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                    elif not result:
                        # Diagram generation failed for all crossings.
                        failed_procs.append(sorted_legs)

        # Raise an exception if no amplitude was produced at all.
        if not amplitudes:
            if len(failed_procs) == 1 and 'error' in locals():
                raise error
            else:
                raise InvalidCmd, \
            "No amplitudes generated from process %s. Please enter a valid process" % \
                  process_definition.nice_string()

        return amplitudes
1769
1770 @classmethod
1772 """ Return the correct amplitude type according to the characteristics of
1773 the process proc """
1774 return Amplitude({"process": proc})
1775
1776
1777 @staticmethod
1779 """Find the minimal WEIGHTED order for this set of processes.
1780
1781 The algorithm:
1782
1783 1) Check the coupling hierarchy of the model. Assign all
1784 particles to the different coupling hierarchies so that a
1785 particle is considered to be in the highest hierarchy (i.e.,
1786 with lowest value) where it has an interaction.
1787
1788 2) Pick out the legs in the multiprocess according to the
1789 highest hierarchy represented (so don't mix particles from
1790 different hierarchy classes in the same multiparticles!)
1791
1792 3) Find the starting maximum WEIGHTED order as the sum of the
1793 highest n-2 weighted orders
1794
1795 4) Pick out required s-channel particle hierarchies, and use
1796 the highest of the maximum WEIGHTED order from the legs and
1797 the minimum WEIGHTED order extracted from 2*s-channel
1798 hierarchys plus the n-2-2*(number of s-channels) lowest
1799 leg weighted orders.
1800
1801 5) Run process generation with the WEIGHTED order determined
1802 in 3)-4) - # final state gluons, with all gluons removed from
1803 the final state
1804
1805 6) If no process is found, increase WEIGHTED order by 1 and go
1806 back to 5), until we find a process which passes. Return that
1807 order.
1808
1809 7) Continue 5)-6) until we reach (n-2)*(highest hierarchy)-1.
1810 If still no process has passed, return
1811 WEIGHTED = (n-2)*(highest hierarchy)
1812 """
1813
1814 assert isinstance(process_definition, base_objects.ProcessDefinition), \
1815 "%s not valid ProcessDefinition object" % \
1816 repr(process_definition)
1817
1818 processes = base_objects.ProcessList()
1819 amplitudes = AmplitudeList()
1820
1821
1822 if process_definition.get('orders') or \
1823 process_definition.get('overall_orders') or \
1824 process_definition.get('NLO_mode')=='virt':
1825 return process_definition.get('orders')
1826
1827
1828 if process_definition.get_ninitial() == 1 and not \
1829 process_definition.get('is_decay_chain'):
1830 return process_definition.get('orders')
1831
1832 logger.info("Checking for minimal orders which gives processes.")
1833 logger.info("Please specify coupling orders to bypass this step.")
1834
1835
1836 max_order_now, particles, hierarchy = \
1837 process_definition.get_minimum_WEIGHTED()
1838 coupling = 'WEIGHTED'
1839
1840 model = process_definition.get('model')
1841
1842
1843 isids = [leg['ids'] for leg in \
1844 filter(lambda leg: leg['state'] == False, process_definition['legs'])]
1845 fsids = [leg['ids'] for leg in \
1846 filter(lambda leg: leg['state'] == True, process_definition['legs'])]
1847
1848 max_WEIGHTED_order = \
1849 (len(fsids + isids) - 2)*int(model.get_max_WEIGHTED())
1850
1851
1852
1853 while max_order_now < max_WEIGHTED_order:
1854
1855 logger.info("Trying coupling order WEIGHTED=%d" % max_order_now)
1856
1857 oldloglevel = logger.level
1858 logger.setLevel(logging.WARNING)
1859
1860
1861
1862 failed_procs = []
1863
1864
1865 for prod in apply(itertools.product, isids):
1866 islegs = [ base_objects.Leg({'id':id, 'state': False}) \
1867 for id in prod]
1868
1869
1870
1871
1872 red_fsidlist = []
1873
1874 for prod in apply(itertools.product, fsids):
1875
1876
1877 if tuple(sorted(prod)) in red_fsidlist:
1878 continue
1879
1880 red_fsidlist.append(tuple(sorted(prod)));
1881
1882
1883
1884 nglue = 0
1885 if 21 in particles[0]:
1886 nglue = len([id for id in prod if id == 21])
1887 prod = [id for id in prod if id != 21]
1888
1889
1890 leg_list = [copy.copy(leg) for leg in islegs]
1891
1892 leg_list.extend([\
1893 base_objects.Leg({'id':id, 'state': True}) \
1894 for id in prod])
1895
1896 legs = base_objects.LegList(leg_list)
1897
1898
1899
1900 coupling_orders_now = {coupling: max_order_now - \
1901 nglue * model['order_hierarchy']['QCD']}
1902
1903
1904 process = base_objects.Process({\
1905 'legs':legs,
1906 'model':model,
1907 'id': process_definition.get('id'),
1908 'orders': coupling_orders_now,
1909 'required_s_channels': \
1910 process_definition.get('required_s_channels'),
1911 'forbidden_onsh_s_channels': \
1912 process_definition.get('forbidden_onsh_s_channels'),
1913 'sqorders_types': \
1914 process_definition.get('sqorders_types'),
1915 'squared_orders': \
1916 process_definition.get('squared_orders'),
1917 'split_orders': \
1918 process_definition.get('split_orders'),
1919 'forbidden_s_channels': \
1920 process_definition.get('forbidden_s_channels'),
1921 'forbidden_particles': \
1922 process_definition.get('forbidden_particles'),
1923 'is_decay_chain': \
1924 process_definition.get('is_decay_chain'),
1925 'overall_orders': \
1926 process_definition.get('overall_orders'),
1927 'split_orders': \
1928 process_definition.get('split_orders')})
1929
1930
1931 process.check_expansion_orders()
1932
1933
1934 sorted_legs = sorted(legs.get_outgoing_id_list(model))
1935
1936
1937 if tuple(sorted_legs) in failed_procs:
1938 continue
1939
1940 amplitude = Amplitude({'process': process})
1941 try:
1942 amplitude.generate_diagrams()
1943 except InvalidCmd:
1944 failed_procs.append(tuple(sorted_legs))
1945 else:
1946 if amplitude.get('diagrams'):
1947
1948 logger.setLevel(oldloglevel)
1949 return {coupling: max_order_now}
1950 else:
1951 failed_procs.append(tuple(sorted_legs))
1952
1953
1954 max_order_now += 1
1955 logger.setLevel(oldloglevel)
1956
1957
1958 return {coupling: max_order_now}
1959
    @staticmethod
    def cross_amplitude(amplitude, process, org_perm, new_perm):
        """Return the amplitude crossed with the permutation new_perm.

        amplitude: the already-generated amplitude to reuse.
        process: the crossed process the new amplitude should carry.
        org_perm/new_perm: leg permutations of the original and the
            crossed process relative to the common sorted order.
        """
        # Map each original leg number to its crossed counterpart.
        perm_map = dict(zip(org_perm, new_perm))

        # Shallow copy: the diagrams are replaced below.
        new_amp = copy.copy(amplitude)

        # Renumber the legs of the crossed process sequentially in place.
        for i, leg in enumerate(process.get('legs')):
            leg.set('number', i+1)

        new_amp.set('process', process)

        # Replace the leg numbers in every diagram according to perm_map.
        diagrams = base_objects.DiagramList([d.renumber_legs(perm_map,
                                             process.get('legs'),) for \
                                             d in new_amp.get('diagrams')])
        new_amp.set('diagrams', diagrams)
        new_amp.trim_diagrams()

        # Make sure to reset the mirror-process flag on the copy.
        new_amp.set('has_mirror_process', False)

        return new_amp
1983
def expand_list(mylist):
    """Takes a list of lists and elements and returns a list of flat lists.
    Example: [[1,2], 3, [4,5]] -> [[1,3,4], [1,3,5], [2,3,4], [2,3,5]]
    """

    # Check that the argument is a list.
    assert isinstance(mylist, list), "Expand_list argument must be a list"

    res = []

    # Wrap bare elements so that every entry is a list.
    tmplist = []
    for item in mylist:
        if isinstance(item, list):
            tmplist.append(item)
        else:
            tmplist.append([item])

    # itertools.product(*tmplist) replaces the apply() builtin used by the
    # original, which was removed in Python 3; behavior is identical.
    for item in itertools.product(*tmplist):
        res.append(list(item))

    return res
2009
def expand_list_list(mylist):
    """Recursive function. Takes a list of lists and lists of lists
    and returns a list of flat lists.
    Example: [[1,2],[[4,5],[6,7]]] -> [[1,2,4,5], [1,2,6,7]]
    """

    # An empty argument (or a single empty entry) expands to one empty list.
    if not mylist or len(mylist) == 1 and not mylist[0]:
        return [[]]

    assert isinstance(mylist[0], list), \
           "Expand_list_list needs a list of lists and lists of lists"

    head, tail = mylist[0], mylist[1:]

    # Single entry left: a list of lists is unwrapped, a flat list is
    # returned wrapped as-is.
    if not tail:
        if isinstance(head[0], list):
            return head
        return mylist

    # The prefixes to combine with the expanded tail: head's sub-lists
    # when head is a list of lists, otherwise head itself.
    if isinstance(head[0], list):
        prefixes = head
    else:
        prefixes = [head]

    flattened = []
    for prefix in prefixes:
        for rest in expand_list_list(tail):
            combined = copy.copy(prefix)
            combined.extend(rest)
            flattened.append(combined)

    return flattened
2049