
Source Code for Module madgraph.core.diagram_generation

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Classes for diagram generation. Amplitude performs the diagram 
  16  generation, DecayChainAmplitude keeps track of processes with decay 
  17  chains, and MultiProcess allows generation of processes with 
   18  multiparticle definitions. DiagramTag makes it possible to identify diagrams 
  19  based on relevant properties. 
  20  """ 
  21   
  22  import array 
  23  import copy 
  24  import itertools 
  25  import logging 
  26   
  27  import madgraph.core.base_objects as base_objects 
  28  import madgraph.various.misc as misc 
  29  from madgraph import InvalidCmd, MadGraph5Error 
  30   
  31  logger = logging.getLogger('madgraph.diagram_generation') 
32 33 34 -class NoDiagramException(InvalidCmd): pass
35
36 #=============================================================================== 37 # DiagramTag mother class 38 #=============================================================================== 39 40 -class DiagramTag(object):
41 """Class to tag diagrams based on objects with some __lt__ measure, e.g. 42 PDG code/interaction id (for comparing diagrams from the same amplitude), 43 or Lorentz/coupling/mass/width (for comparing AMPs from different MEs). 44 Algorithm: Create chains starting from external particles: 45 1 \ / 6 46 2 /\______/\ 7 47 3_ / | \_ 8 48 4 / 5 \_ 9 49 \ 10 50 gives ((((9,10,id910),8,id9108),(6,7,id67),id910867) 51 (((1,2,id12),(3,4,id34)),id1234), 52 5,id91086712345) 53 where idN is the id of the corresponding interaction. The ordering within 54 chains is based on chain length (depth; here, 1234 has depth 3, 910867 has 55 depth 4, 5 has depht 0), and if equal on the ordering of the chain elements. 56 The determination of central vertex is based on minimizing the chain length 57 for the longest subchain. 58 This gives a unique tag which can be used to identify diagrams 59 (instead of symmetry), as well as identify identical matrix elements from 60 different processes.""" 61
62 - class DiagramTagError(Exception):
63 """Exception for any problems in DiagramTags""" 64 pass
65
66 - def __init__(self, diagram, model=None, ninitial=2):
67 """Initialize with a diagram. Create DiagramTagChainLinks according to 68 the diagram, and figure out if we need to shift the central vertex.""" 69 70 # wf_dict keeps track of the intermediate particles 71 leg_dict = {} 72 # Create the chain which will be the diagram tag 73 for vertex in diagram.get('vertices'): 74 # Only add incoming legs 75 legs = vertex.get('legs')[:-1] 76 lastvx = vertex == diagram.get('vertices')[-1] 77 if lastvx: 78 # If last vertex, all legs are incoming 79 legs = vertex.get('legs') 80 # Add links corresponding to the relevant legs 81 link = DiagramTagChainLink([leg_dict.setdefault(leg.get('number'), 82 DiagramTagChainLink(self.link_from_leg(leg, model))) \ 83 for leg in legs], 84 self.vertex_id_from_vertex(vertex, 85 lastvx, 86 model, 87 ninitial)) 88 # Add vertex to leg_dict if not last one 89 if not lastvx: 90 leg_dict[vertex.get('legs')[-1].get('number')] = link 91 92 # The resulting link is the hypothetical result 93 self.tag = link 94 95 # Now make sure to find the central vertex in the diagram, 96 # defined by the longest leg being as short as possible 97 done = max([l.depth for l in self.tag.links]) == 0 98 while not done: 99 # Identify the longest chain in the tag 100 longest_chain = self.tag.links[0] 101 # Create a new link corresponding to moving one step 102 new_link = DiagramTagChainLink(self.tag.links[1:], 103 self.flip_vertex(\ 104 self.tag.vertex_id, 105 longest_chain.vertex_id, 106 self.tag.links[1:])) 107 # Create a new final vertex in the direction of the longest link 108 other_links = list(longest_chain.links) + [new_link] 109 other_link = DiagramTagChainLink(other_links, 110 self.flip_vertex(\ 111 longest_chain.vertex_id, 112 self.tag.vertex_id, 113 other_links)) 114 115 if other_link.links[0] < self.tag.links[0]: 116 # Switch to new tag, continue search 117 self.tag = other_link 118 else: 119 # We have found the central vertex 120 done = True
121
122 - def get_external_numbers(self):
123 """Get the order of external particles in this tag""" 124 125 return self.tag.get_external_numbers()
126
127 - def diagram_from_tag(self, model):
128 """Output a diagram from a DiagramTag. Note that each daughter 129 class must implement the static functions id_from_vertex_id 130 (if the vertex id is something else than an integer) and 131 leg_from_link (to pass the correct info from an end link to a 132 leg).""" 133 134 # Create the vertices, starting from the final vertex 135 diagram = base_objects.Diagram({'vertices': \ 136 self.vertices_from_link(self.tag, 137 model, 138 True)}) 139 diagram.calculate_orders(model) 140 return diagram
141 142 @classmethod 177 178 @classmethod
179 - def legPDGs_from_vertex_id(cls, vertex_id,model):
180 """Returns the list of external PDGs of the interaction corresponding 181 to this vertex_id.""" 182 183 # In case we have to deal with a regular vertex, we return the list 184 # external PDGs as given by the model information on that integer 185 # vertex id. 186 if (len(vertex_id)>=3 and 'PDGs' in vertex_id[2]): 187 return vertex_id[2]['PDGs'] 188 else: 189 return [part.get_pdg_code() for part in model.get_interaction( 190 cls.id_from_vertex_id(vertex_id)).get('particles')]
191 192 @classmethod
193 - def leg_from_legs(cls,legs, vertex_id, model):
194 """Return a leg from a leg list and the model info""" 195 196 pdgs = list(cls.legPDGs_from_vertex_id(vertex_id, model)) 197 198 # Extract the resulting pdg code from the interaction pdgs 199 for pdg in [leg.get('id') for leg in legs]: 200 pdgs.remove(pdg) 201 202 assert len(pdgs) == 1 203 # Prepare the new leg properties 204 pdg = model.get_particle(pdgs[0]).get_anti_pdg_code() 205 number = min([l.get('number') for l in legs]) 206 # State is False for t-channel, True for s-channel 207 state = (len([l for l in legs if l.get('state') == False]) != 1) 208 # Note that this needs to be done before combining decay chains 209 onshell= False 210 211 return base_objects.Leg({'id': pdg, 212 'number': number, 213 'state': state, 214 'onshell': onshell})
215 216 @classmethod 229 230 @staticmethod 243 244 @staticmethod
245 - def id_from_vertex_id(vertex_id):
246 """Return the numerical vertex id from a link.vertex_id""" 247 248 return vertex_id[0][0]
249 250 @staticmethod
251 - def loop_info_from_vertex_id(vertex_id):
252 """Return the loop_info stored in this vertex id. Notice that the 253 IdentifyME tag does not store the loop_info, but should normally never 254 need access to it.""" 255 256 return vertex_id[2]
257 258 @staticmethod
259 - def reorder_permutation(perm, start_perm):
260 """Reorder a permutation with respect to start_perm. Note that 261 both need to start from 1.""" 262 if perm == start_perm: 263 return range(len(perm)) 264 order = [i for (p,i) in \ 265 sorted([(p,i) for (i,p) in enumerate(perm)])] 266 return [start_perm[i]-1 for i in order]
267 268 @staticmethod 279 280 @staticmethod
281 - def vertex_id_from_vertex(vertex, last_vertex, model, ninitial):
282 """Returns the default vertex id: just the interaction id 283 Note that in the vertex id, like the leg, only the first entry is 284 taken into account in the tag comparison, while the second is for 285 storing information that is not to be used in comparisons and the 286 third for additional info regarding the shrunk loop vertex.""" 287 288 if isinstance(vertex,base_objects.ContractedVertex): 289 # return (vertex.get('id'),(),{'PDGs':vertex.get('PDGs')}) 290 return ((vertex.get('id'),vertex.get('loop_tag')),(), 291 {'PDGs':vertex.get('PDGs')}) 292 else: 293 return ((vertex.get('id'),()),(),{})
294 295 @staticmethod
296 - def flip_vertex(new_vertex, old_vertex, links):
297 """Returns the default vertex flip: just the new_vertex""" 298 return new_vertex
299
300 - def __eq__(self, other):
301 """Equal if same tag""" 302 if type(self) != type(other): 303 return False 304 return self.tag == other.tag
305
306 - def __ne__(self, other):
307 return not self.__eq__(other)
308
309 - def __str__(self):
310 return str(self.tag)
311
312 - def __lt__(self, other):
313 return self.tag < other.tag
314
315 - def __gt__(self, other):
316 return self.tag > other.tag
317 318 __repr__ = __str__
319 405
406 #=============================================================================== 407 # Amplitude 408 #=============================================================================== 409 -class Amplitude(base_objects.PhysicsObject):
410 """Amplitude: process + list of diagrams (ordered) 411 Initialize with a process, then call generate_diagrams() to 412 generate the diagrams for the amplitude 413 """ 414
415 - def default_setup(self):
416 """Default values for all properties""" 417 418 self['process'] = base_objects.Process() 419 self['diagrams'] = None 420 # has_mirror_process is True if the same process but with the 421 # two incoming particles interchanged has been generated 422 self['has_mirror_process'] = False
423
424 - def __init__(self, argument=None):
425 """Allow initialization with Process""" 426 if isinstance(argument, base_objects.Process): 427 super(Amplitude, self).__init__() 428 self.set('process', argument) 429 self.generate_diagrams() 430 elif argument != None: 431 # call the mother routine 432 super(Amplitude, self).__init__(argument) 433 else: 434 # call the mother routine 435 super(Amplitude, self).__init__()
436
437 - def filter(self, name, value):
438 """Filter for valid amplitude property values.""" 439 440 if name == 'process': 441 if not isinstance(value, base_objects.Process): 442 raise self.PhysicsObjectError, \ 443 "%s is not a valid Process object" % str(value) 444 if name == 'diagrams': 445 if not isinstance(value, base_objects.DiagramList): 446 raise self.PhysicsObjectError, \ 447 "%s is not a valid DiagramList object" % str(value) 448 if name == 'has_mirror_process': 449 if not isinstance(value, bool): 450 raise self.PhysicsObjectError, \ 451 "%s is not a valid boolean" % str(value) 452 return True
453
454 - def get(self, name):
455 """Get the value of the property name.""" 456 457 if name == 'diagrams' and self[name] == None: 458 # Have not yet generated diagrams for this process 459 if self['process']: 460 self.generate_diagrams() 461 462 return super(Amplitude, self).get(name)
463 # return Amplitude.__bases__[0].get(self, name) #return the mother routine 464 465
466 - def get_sorted_keys(self):
467 """Return diagram property names as a nicely sorted list.""" 468 469 return ['process', 'diagrams', 'has_mirror_process']
470
471 - def get_number_of_diagrams(self):
472 """Returns number of diagrams for this amplitude""" 473 return len(self.get('diagrams'))
474
475 - def get_amplitudes(self):
476 """Return an AmplitudeList with just this amplitude. 477 Needed for DecayChainAmplitude.""" 478 479 return AmplitudeList([self])
480
481 - def nice_string(self, indent=0):
482 """Returns a nicely formatted string of the amplitude content.""" 483 return self.get('process').nice_string(indent) + "\n" + \ 484 self.get('diagrams').nice_string(indent)
485
486 - def nice_string_processes(self, indent=0):
487 """Returns a nicely formatted string of the amplitude process.""" 488 return self.get('process').nice_string(indent)
489
490 - def get_ninitial(self):
491 """Returns the number of initial state particles in the process.""" 492 return self.get('process').get_ninitial()
493
494 - def has_loop_process(self):
495 """ Returns wether this amplitude has a loop process.""" 496 497 return self.get('process').get('perturbation_couplings')
498
499 - def generate_diagrams(self, returndiag=False):
500 """Generate diagrams. Algorithm: 501 502 1. Define interaction dictionaries: 503 * 2->0 (identity), 3->0, 4->0, ... , maxlegs->0 504 * 2 -> 1, 3 -> 1, ..., maxlegs-1 -> 1 505 506 2. Set flag from_group=true for all external particles. 507 Flip particle/anti particle for incoming particles. 508 509 3. If there is a dictionary n->0 with n=number of external 510 particles, create if possible the combination [(1,2,3,4,...)] 511 with *at least two* from_group==true. This will give a 512 finished (set of) diagram(s) (done by reduce_leglist) 513 514 4. Create all allowed groupings of particles with at least one 515 from_group==true (according to dictionaries n->1): 516 [(1,2),3,4...],[1,(2,3),4,...],..., 517 [(1,2),(3,4),...],...,[(1,2,3),4,...],... 518 (done by combine_legs) 519 520 5. Replace each group with a (list of) new particle(s) with number 521 n = min(group numbers). Set from_group true for these 522 particles and false for all other particles. Store vertex info. 523 (done by merge_comb_legs) 524 525 6. Stop algorithm when at most 2 particles remain. 526 Return all diagrams (lists of vertices). 527 528 7. Repeat from 3 (recursion done by reduce_leglist) 529 530 8. Replace final p=p vertex 531 532 Be aware that the resulting vertices have all particles outgoing, 533 so need to flip for incoming particles when used. 534 535 SPECIAL CASE: For A>BC... processes which are legs in decay 536 chains, we need to ensure that BC... combine first, giving A=A 537 as a final vertex. This case is defined by the Process 538 property is_decay_chain = True. 539 This function can also be called by the generate_diagram function 540 of LoopAmplitudes, in which case the generated diagrams here must not 541 be directly assigned to the 'diagrams' attributed but returned as a 542 DiagramList by the function. This is controlled by the argument 543 returndiag. 544 """ 545 546 process = self.get('process') 547 model = process.get('model') 548 legs = process.get('legs') 549 # Make sure orders is the minimum of orders and overall_orders 550 for key in process.get('overall_orders').keys(): 551 try: 552 process.get('orders')[key] = \ 553 min(process.get('orders')[key], 554 process.get('overall_orders')[key]) 555 except KeyError: 556 process.get('orders')[key] = process.get('overall_orders')[key] 557 558 assert model.get('particles'), \ 559 "particles are missing in model: %s" % model.get('particles') 560 561 assert model.get('interactions'), \ 562 "interactions are missing in model" 563 564 565 res = base_objects.DiagramList() 566 # First check that the number of fermions is even 567 if len(filter(lambda leg: model.get('particle_dict')[\ 568 leg.get('id')].is_fermion(), legs)) % 2 == 1: 569 if not returndiag: 570 self['diagrams'] = res 571 raise InvalidCmd, 'The number of fermion is odd' 572 else: 573 return False, res 574 575 # Then check same number of incoming and outgoing fermions (if 576 # no Majorana particles in model) 577 if not model.get('got_majoranas') and \ 578 len(filter(lambda leg: leg.is_incoming_fermion(model), legs)) != \ 579 len(filter(lambda leg: leg.is_outgoing_fermion(model), legs)): 580 if not returndiag: 581 self['diagrams'] = res 582 raise InvalidCmd, 'The number of of incoming/outcoming fermions are different' 583 else: 584 return False, res 585 586 # Finally check that charge (conserve by all interactions) of the process 587 #is globally conserve for this process. 
588 for charge in model.get('conserved_charge'): 589 total = 0 590 for leg in legs: 591 part = model.get('particle_dict')[leg.get('id')] 592 try: 593 value = part.get(charge) 594 except (AttributeError, base_objects.PhysicsObject.PhysicsObjectError): 595 try: 596 value = getattr(part, charge) 597 except AttributeError: 598 value = 0 599 600 if (leg.get('id') != part['pdg_code']) != leg['state']: 601 total -= value 602 else: 603 total += value 604 605 if abs(total) > 1e-10: 606 if not returndiag: 607 self['diagrams'] = res 608 raise InvalidCmd, 'No %s conservation for this process ' % charge 609 return res 610 else: 611 raise InvalidCmd, 'No %s conservation for this process ' % charge 612 return res, res 613 614 if not returndiag: 615 logger.info("Trying %s " % process.nice_string().replace('Process', 'process')) 616 617 # Give numbers to legs in process 618 for i in range(0, len(process.get('legs'))): 619 # Make sure legs are unique 620 leg = copy.copy(process.get('legs')[i]) 621 process.get('legs')[i] = leg 622 if leg.get('number') == 0: 623 leg.set('number', i + 1) 624 625 # Copy leglist from process, so we can flip leg identities 626 # without affecting the original process 627 leglist = self.copy_leglist(process.get('legs')) 628 629 for leg in leglist: 630 631 # For the first step, ensure the tag from_group 632 # is true for all legs 633 leg.set('from_group', True) 634 635 # Need to flip part-antipart for incoming particles, 636 # so they are all outgoing 637 if leg.get('state') == False: 638 part = model.get('particle_dict')[leg.get('id')] 639 leg.set('id', part.get_anti_pdg_code()) 640 641 # Calculate the maximal multiplicity of n-1>1 configurations 642 # to restrict possible leg combinations 643 max_multi_to1 = max([len(key) for key in \ 644 model.get('ref_dict_to1').keys()]) 645 646 647 # Reduce the leg list and return the corresponding 648 # list of vertices 649 650 # For decay processes, generate starting from final-state 651 # combined only as the last particle. This allows to use these 652 # in decay chains later on. 653 is_decay_proc = process.get_ninitial() == 1 654 if is_decay_proc: 655 part = model.get('particle_dict')[leglist[0].get('id')] 656 # For decay chain legs, we want everything to combine to 657 # the initial leg. This is done by only allowing the 658 # initial leg to combine as a final identity. 659 ref_dict_to0 = {(part.get_pdg_code(),part.get_anti_pdg_code()):[0], 660 (part.get_anti_pdg_code(),part.get_pdg_code()):[0]} 661 # Need to set initial leg from_group to None, to make sure 662 # it can only be combined at the end. 663 leglist[0].set('from_group', None) 664 reduced_leglist = self.reduce_leglist(leglist, 665 max_multi_to1, 666 ref_dict_to0, 667 is_decay_proc, 668 process.get('orders')) 669 else: 670 reduced_leglist = self.reduce_leglist(leglist, 671 max_multi_to1, 672 model.get('ref_dict_to0'), 673 is_decay_proc, 674 process.get('orders')) 675 676 #In LoopAmplitude the function below is overloaded such that it 677 #converts back all DGLoopLegs to Legs. In the default tree-level 678 #diagram generation, this does nothing. 679 self.convert_dgleg_to_leg(reduced_leglist) 680 681 if reduced_leglist: 682 for vertex_list in reduced_leglist: 683 res.append(self.create_diagram(base_objects.VertexList(vertex_list))) 684 685 # Record whether or not we failed generation before required 686 # s-channel propagators are taken into account 687 failed_crossing = not res 688 689 # Required s-channels is a list of id-lists. 
Select the 690 # diagrams where all required s-channel propagators in any of 691 # the lists are present (i.e., the different lists correspond 692 # to "or", while the elements of the list correspond to 693 # "and"). 694 if process.get('required_s_channels') and \ 695 process.get('required_s_channels')[0]: 696 # We shouldn't look at the last vertex in each diagram, 697 # since that is the n->0 vertex 698 lastvx = -1 699 # For decay chain processes, there is an "artificial" 700 # extra vertex corresponding to particle 1=1, so we need 701 # to exclude the last two vertices. 702 if is_decay_proc: lastvx = -2 703 ninitial = len(filter(lambda leg: leg.get('state') == False, 704 process.get('legs'))) 705 # Check required s-channels for each list in required_s_channels 706 old_res = res 707 res = base_objects.DiagramList() 708 for id_list in process.get('required_s_channels'): 709 res_diags = filter(lambda diagram: \ 710 all([req_s_channel in \ 711 [vertex.get_s_channel_id(\ 712 process.get('model'), ninitial) \ 713 for vertex in diagram.get('vertices')[:lastvx]] \ 714 for req_s_channel in \ 715 id_list]), old_res) 716 # Add diagrams only if not already in res 717 res.extend([diag for diag in res_diags if diag not in res]) 718 719 # Remove all diagrams in which a forbidden s-channel propagator 720 # is present. 721 # Note that we shouldn't look at the last vertex in each 722 # diagram, since that is the n->0 vertex 723 if process.get('forbidden_s_channels'): 724 ninitial = len(filter(lambda leg: leg.get('state') == False, 725 process.get('legs'))) 726 if ninitial == 2: 727 res = base_objects.DiagramList(\ 728 filter(lambda diagram: \ 729 not any([vertex.get_s_channel_id(\ 730 process.get('model'), ninitial) \ 731 in process.get('forbidden_s_channels') 732 for vertex in diagram.get('vertices')[:-1]]), 733 res)) 734 else: 735 # Split the treatment, since we need to avoid forbidding the 736 # initial particle as well. 737 newres= [] 738 for diagram in res: 739 leg1 = 1 740 # Check the last vertex to see whether leg 1 is inside; if it 741 # is, we need to invert the look-up and allow the first s-channel 742 # of the associated particles. 743 vertex = diagram.get('vertices')[-1] 744 if any([l['number'] ==1 for l in vertex.get('legs')]): 745 leg1 = [l['number'] for l in vertex.get('legs') if l['number'] !=1][0] 746 to_loop = range(len(diagram.get('vertices'))-1) 747 if leg1 >1: 748 to_loop.reverse() 749 for i in to_loop: 750 vertex = diagram.get('vertices')[i] 751 if leg1: 752 if any([l['number'] ==leg1 for l in vertex.get('legs')]): 753 leg1 = 0 754 continue 755 if vertex.get_s_channel_id(process.get('model'), ninitial)\ 756 in process.get('forbidden_s_channels'): 757 break 758 else: 759 newres.append(diagram) 760 res = base_objects.DiagramList(newres) 761 762 763 # Mark forbidden (onshell) s-channel propagators, to forbid onshell 764 # generation. 
765 if process.get('forbidden_onsh_s_channels'): 766 ninitial = len(filter(lambda leg: leg.get('state') == False, 767 process.get('legs'))) 768 769 verts = base_objects.VertexList(sum([[vertex for vertex \ 770 in diagram.get('vertices')[:-1] 771 if vertex.get_s_channel_id(\ 772 process.get('model'), ninitial) \ 773 in process.get('forbidden_onsh_s_channels')] \ 774 for diagram in res], [])) 775 for vert in verts: 776 # Use onshell = False to indicate that this s-channel is forbidden 777 newleg = copy.copy(vert.get('legs').pop(-1)) 778 newleg.set('onshell', False) 779 vert.get('legs').append(newleg) 780 781 # Set actual coupling orders for each diagram 782 for diagram in res: 783 diagram.calculate_orders(model) 784 785 # Filter the diagrams according to the squared coupling order 786 # constraints and possibly the negative one. Remember that OrderName=-n 787 # means that the user wants to include everything up to the N^(n+1)LO 788 # contribution in that order and at most one order can be restricted 789 # in this way. We shall do this only if the diagrams are not asked to 790 # be returned, as is the case for NLO, because in this case the 791 # interferences are not necessarily among the diagrams generated here only. 792 if not returndiag and len(res)>0: 793 res = self.apply_squared_order_constraints(res) 794 795 # Replace final id=0 vertex if necessary 796 if not process.get('is_decay_chain'): 797 for diagram in res: 798 vertices = diagram.get('vertices') 799 if len(vertices) > 1 and vertices[-1].get('id') == 0: 800 # Need to "glue together" last and next-to-last 801 # vertex, by replacing the (incoming) last leg of the 802 # next-to-last vertex with the (outgoing) leg in the 803 # last vertex 804 vertices = copy.copy(vertices) 805 lastvx = vertices.pop() 806 nexttolastvertex = copy.copy(vertices.pop()) 807 legs = copy.copy(nexttolastvertex.get('legs')) 808 ntlnumber = legs[-1].get('number') 809 lastleg = filter(lambda leg: leg.get('number') != ntlnumber, 810 lastvx.get('legs'))[0] 811 # Reset onshell in case we have forbidden s-channels 812 if lastleg.get('onshell') == False: 813 lastleg.set('onshell', None) 814 # Replace the last leg of nexttolastvertex 815 legs[-1] = lastleg 816 nexttolastvertex.set('legs', legs) 817 vertices.append(nexttolastvertex) 818 diagram.set('vertices', vertices) 819 820 if res and not returndiag: 821 logger.info("Process has %d diagrams" % len(res)) 822 823 # Trim down number of legs and vertices used to save memory 824 self.trim_diagrams(diaglist=res) 825 826 # Sort process legs according to leg number 827 pertur = 'QCD' 828 if self.get('process')['perturbation_couplings']: 829 pertur = sorted(self.get('process')['perturbation_couplings'])[0] 830 self.get('process').get('legs').sort(pert=pertur) 831 832 # Set diagrams to res if not asked to be returned 833 if not returndiag: 834 self['diagrams'] = res 835 return not failed_crossing 836 else: 837 return not failed_crossing, res
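# Illustrative sketch (assumes a fully loaded base_objects.Model named
# `model`, e.g. the Standard Model): constructing an Amplitude from a
# Process runs generate_diagrams() via __init__.
#
#     myleglist = base_objects.LegList([
#         base_objects.Leg({'id': 2, 'state': False}),   # incoming u
#         base_objects.Leg({'id': -2, 'state': False}),  # incoming u~
#         base_objects.Leg({'id': 21, 'state': True}),   # outgoing g
#         base_objects.Leg({'id': 21, 'state': True})])  # outgoing g
#     myproc = base_objects.Process({'legs': myleglist, 'model': model})
#     myamp = Amplitude(myproc)
#     ndiags = len(myamp.get('diagrams'))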
838
839 - def apply_squared_order_constraints(self, diag_list):
840 """Applies the user specified squared order constraints on the diagram 841 list in argument.""" 842 843 res = copy.copy(diag_list) 844 845 # Apply the filtering on constrained amplitude (== and >) 846 # No need to iterate on this one 847 for name, (value, operator) in self['process'].get('constrained_orders').items(): 848 res.filter_constrained_orders(name, value, operator) 849 850 # Iterate the filtering since the applying the constraint on one 851 # type of coupling order can impact what the filtering on a previous 852 # one (relevant for the '==' type of constraint). 853 while True: 854 new_res = res.apply_positive_sq_orders(res, 855 self['process'].get('squared_orders'), 856 self['process']['sqorders_types']) 857 # Exit condition 858 if len(res)==len(new_res): 859 break 860 elif (len(new_res)>len(res)): 861 raise MadGraph5Error( 862 'Inconsistency in function apply_squared_order_constraints().') 863 # Actualizing the list of diagram for the next iteration 864 res = new_res 865 866 867 868 # Now treat the negative squared order constraint (at most one) 869 neg_orders = [(order, value) for order, value in \ 870 self['process'].get('squared_orders').items() if value<0] 871 if len(neg_orders)==1: 872 neg_order, neg_value = neg_orders[0] 873 # Now check any negative order constraint 874 res, target_order = res.apply_negative_sq_order(res, neg_order,\ 875 neg_value, self['process']['sqorders_types'][neg_order]) 876 # Substitute the negative value to this positive one so that 877 # the resulting computed constraints appears in the print out 878 # and at the output stage we no longer have to deal with 879 # negative valued target orders 880 self['process']['squared_orders'][neg_order]=target_order 881 elif len(neg_orders)>1: 882 raise InvalidCmd('At most one negative squared order constraint'+\ 883 ' can be specified, not %s.'%str(neg_orders)) 884 885 return res
886
887 - def create_diagram(self, vertexlist):
888 """ Return a Diagram created from the vertex list. This function can be 889 overloaded by daughter classes.""" 890 return base_objects.Diagram({'vertices':vertexlist})
891
892 - def convert_dgleg_to_leg(self, vertexdoublelist):
893 """ In LoopAmplitude, it converts back all DGLoopLegs into Legs. 894 In Amplitude, there is nothing to do. """ 895 896 return True
897
898 - def copy_leglist(self, legs):
899 """ Simply returns a copy of the leg list. This function is 900 overloaded in LoopAmplitude so that a DGLoopLeg list is returned. 901 The DGLoopLeg has some additional parameters only useful during 902 loop diagram generation""" 903 904 return base_objects.LegList(\ 905 [ copy.copy(leg) for leg in legs ])
906
907 - def reduce_leglist(self, curr_leglist, max_multi_to1, ref_dict_to0, 908 is_decay_proc = False, coupling_orders = None):
909 """Recursive function to reduce N LegList to N-1 910 For algorithm, see doc for generate_diagrams. 911 """ 912 913 # Result variable which is a list of lists of vertices 914 # to be added 915 res = [] 916 917 # Stop condition. If LegList is None, that means that this 918 # diagram must be discarded 919 if curr_leglist is None: 920 return None 921 922 # Extract ref dict information 923 model = self.get('process').get('model') 924 ref_dict_to1 = self.get('process').get('model').get('ref_dict_to1') 925 926 927 # If all legs can be combined in one single vertex, add this 928 # vertex to res and continue. 929 # Special treatment for decay chain legs 930 931 if curr_leglist.can_combine_to_0(ref_dict_to0, is_decay_proc): 932 # Extract the interaction id associated to the vertex 933 934 vertex_ids = self.get_combined_vertices(curr_leglist, 935 copy.copy(ref_dict_to0[tuple(sorted([leg.get('id') for \ 936 leg in curr_leglist]))])) 937 938 final_vertices = [base_objects.Vertex({'legs':curr_leglist, 939 'id':vertex_id}) for \ 940 vertex_id in vertex_ids] 941 # Check for coupling orders. If orders < 0, skip vertex 942 for final_vertex in final_vertices: 943 if self.reduce_orders(coupling_orders, model, 944 [final_vertex.get('id')]) != False: 945 res.append([final_vertex]) 946 # Stop condition 2: if the leglist contained exactly two particles, 947 # return the result, if any, and stop. 948 if len(curr_leglist) == 2: 949 if res: 950 return res 951 else: 952 return None 953 954 # Create a list of all valid combinations of legs 955 comb_lists = self.combine_legs(curr_leglist, 956 ref_dict_to1, max_multi_to1) 957 958 # Create a list of leglists/vertices by merging combinations 959 leg_vertex_list = self.merge_comb_legs(comb_lists, ref_dict_to1) 960 961 # Consider all the pairs 962 for leg_vertex_tuple in leg_vertex_list: 963 964 # Remove forbidden particles 965 if self.get('process').get('forbidden_particles') and \ 966 any([abs(vertex.get('legs')[-1].get('id')) in \ 967 self.get('process').get('forbidden_particles') \ 968 for vertex in leg_vertex_tuple[1]]): 969 continue 970 971 # Check for coupling orders. If couplings < 0, skip recursion. 972 new_coupling_orders = self.reduce_orders(coupling_orders, 973 model, 974 [vertex.get('id') for vertex in \ 975 leg_vertex_tuple[1]]) 976 if new_coupling_orders == False: 977 # Some coupling order < 0 978 continue 979 980 # This is where recursion happens 981 # First, reduce again the leg part 982 reduced_diagram = self.reduce_leglist(leg_vertex_tuple[0], 983 max_multi_to1, 984 ref_dict_to0, 985 is_decay_proc, 986 new_coupling_orders) 987 # If there is a reduced diagram 988 if reduced_diagram: 989 vertex_list_list = [list(leg_vertex_tuple[1])] 990 vertex_list_list.append(reduced_diagram) 991 expanded_list = expand_list_list(vertex_list_list) 992 res.extend(expanded_list) 993 994 return res
995
996 - def reduce_orders(self, coupling_orders, model, vertex_id_list):
997 """Return False if the coupling orders for any coupling is < 998 0, otherwise return the new coupling orders with the vertex 999 orders subtracted. If coupling_orders is not given, return 1000 None (which counts as success). 1001 WEIGHTED is a special order, which corresponds to the sum of 1002 order hierarchies for the couplings. 1003 We ignore negative constraints as these cannot be taken into 1004 account on the fly but only after generation.""" 1005 1006 if not coupling_orders: 1007 return None 1008 1009 present_couplings = copy.copy(coupling_orders) 1010 for id in vertex_id_list: 1011 # Don't check for identity vertex (id = 0) 1012 if not id: 1013 continue 1014 inter = model.get("interaction_dict")[id] 1015 for coupling in inter.get('orders').keys(): 1016 # Note that we don't consider a missing coupling as a 1017 # constraint 1018 if coupling in present_couplings and \ 1019 present_couplings[coupling]>=0: 1020 # Reduce the number of couplings that are left 1021 present_couplings[coupling] -= \ 1022 inter.get('orders')[coupling] 1023 if present_couplings[coupling] < 0: 1024 # We have too many couplings of this type 1025 return False 1026 # Now check for WEIGHTED, i.e. the sum of coupling hierarchy values 1027 if 'WEIGHTED' in present_couplings and \ 1028 present_couplings['WEIGHTED']>=0: 1029 weight = sum([model.get('order_hierarchy')[c]*n for \ 1030 (c,n) in inter.get('orders').items()]) 1031 present_couplings['WEIGHTED'] -= weight 1032 if present_couplings['WEIGHTED'] < 0: 1033 # Total coupling weight too large 1034 return False 1035 1036 return present_couplings
1037
1038 - def combine_legs(self, list_legs, ref_dict_to1, max_multi_to1):
1039 """Recursive function. Take a list of legs as an input, with 1040 the reference dictionary n-1->1, and output a list of list of 1041 tuples of Legs (allowed combinations) and Legs (rest). Algorithm: 1042 1043 1. Get all n-combinations from list [123456]: [12],..,[23],..,[123],.. 1044 1045 2. For each combination, say [34]. Check if combination is valid. 1046 If so: 1047 1048 a. Append [12[34]56] to result array 1049 1050 b. Split [123456] at index(first element in combination+1), 1051 i.e. [12],[456] and subtract combination from second half, 1052 i.e.: [456]-[34]=[56]. Repeat from 1. with this array 1053 1054 3. Take result array from call to 1. (here, [[56]]) and append 1055 (first half in step b - combination) + combination + (result 1056 from 1.) = [12[34][56]] to result array 1057 1058 4. After appending results from all n-combinations, return 1059 resulting array. Example, if [13] and [45] are valid 1060 combinations: 1061 [[[13]2456],[[13]2[45]6],[123[45]6]] 1062 """ 1063 1064 res = [] 1065 1066 # loop over possible combination lengths (+1 is for range convention!) 1067 for comb_length in range(2, max_multi_to1 + 1): 1068 1069 # Check the considered length is not longer than the list length 1070 if comb_length > len(list_legs): 1071 return res 1072 1073 # itertools.combinations returns all possible combinations 1074 # of comb_length elements from list_legs 1075 for comb in itertools.combinations(list_legs, comb_length): 1076 1077 # Check if the combination is valid 1078 if base_objects.LegList(comb).can_combine_to_1(ref_dict_to1): 1079 1080 # Identify the rest, create a list [comb,rest] and 1081 # add it to res 1082 res_list = copy.copy(list_legs) 1083 for leg in comb: 1084 res_list.remove(leg) 1085 res_list.insert(list_legs.index(comb[0]), comb) 1086 res.append(res_list) 1087 1088 # Now, deal with cases with more than 1 combination 1089 1090 # First, split the list into two, according to the 1091 # position of the first element in comb, and remove 1092 # all elements form comb 1093 res_list1 = list_legs[0:list_legs.index(comb[0])] 1094 res_list2 = list_legs[list_legs.index(comb[0]) + 1:] 1095 for leg in comb[1:]: 1096 res_list2.remove(leg) 1097 1098 # Create a list of type [comb,rest1,rest2(combined)] 1099 res_list = res_list1 1100 res_list.append(comb) 1101 # This is where recursion actually happens, 1102 # on the second part 1103 for item in self.combine_legs(res_list2, 1104 ref_dict_to1, 1105 max_multi_to1): 1106 final_res_list = copy.copy(res_list) 1107 final_res_list.extend(item) 1108 res.append(final_res_list) 1109 1110 return res
1111 1112
1113 - def merge_comb_legs(self, comb_lists, ref_dict_to1):
1114 """Takes a list of allowed leg combinations as an input and returns 1115 a set of lists where combinations have been properly replaced 1116 (one list per element in the ref_dict, so that all possible intermediate 1117 particles are included). For each list, give the list of vertices 1118 corresponding to the executed merging, group the two as a tuple. 1119 """ 1120 1121 res = [] 1122 1123 for comb_list in comb_lists: 1124 1125 reduced_list = [] 1126 vertex_list = [] 1127 1128 for entry in comb_list: 1129 1130 # Act on all leg combinations 1131 if isinstance(entry, tuple): 1132 1133 # Build the leg object which will replace the combination: 1134 # 1) leg ids is as given in the ref_dict 1135 leg_vert_ids = copy.copy(ref_dict_to1[\ 1136 tuple(sorted([leg.get('id') for leg in entry]))]) 1137 # 2) number is the minimum of leg numbers involved in the 1138 # combination 1139 number = min([leg.get('number') for leg in entry]) 1140 # 3) state is final, unless there is exactly one initial 1141 # state particle involved in the combination -> t-channel 1142 if len(filter(lambda leg: leg.get('state') == False, 1143 entry)) == 1: 1144 state = False 1145 else: 1146 state = True 1147 # 4) from_group is True, by definition 1148 1149 # Create and add the object. This is done by a 1150 # separate routine, to allow overloading by 1151 # daughter classes 1152 new_leg_vert_ids = [] 1153 if leg_vert_ids: 1154 new_leg_vert_ids = self.get_combined_legs(entry, 1155 leg_vert_ids, 1156 number, 1157 state) 1158 1159 reduced_list.append([l[0] for l in new_leg_vert_ids]) 1160 1161 1162 # Create and add the corresponding vertex 1163 # Extract vertex ids corresponding to the various legs 1164 # in mylegs 1165 vlist = base_objects.VertexList() 1166 for (myleg, vert_id) in new_leg_vert_ids: 1167 # Start with the considered combination... 1168 myleglist = base_objects.LegList(list(entry)) 1169 # ... and complete with legs after reducing 1170 myleglist.append(myleg) 1171 # ... and consider the correct vertex id 1172 vlist.append(base_objects.Vertex( 1173 {'legs':myleglist, 1174 'id':vert_id})) 1175 1176 vertex_list.append(vlist) 1177 1178 # If entry is not a combination, switch the from_group flag 1179 # and add it 1180 else: 1181 cp_entry = copy.copy(entry) 1182 # Need special case for from_group == None; this 1183 # is for initial state leg of decay chain process 1184 # (see Leg.can_combine_to_0) 1185 if cp_entry.get('from_group') != None: 1186 cp_entry.set('from_group', False) 1187 reduced_list.append(cp_entry) 1188 1189 # Flatten the obtained leg and vertex lists 1190 flat_red_lists = expand_list(reduced_list) 1191 flat_vx_lists = expand_list(vertex_list) 1192 1193 # Combine the two lists in a list of tuple 1194 for i in range(0, len(flat_vx_lists)): 1195 res.append((base_objects.LegList(flat_red_lists[i]), \ 1196 base_objects.VertexList(flat_vx_lists[i]))) 1197 1198 return res
1199
1200 - def get_combined_legs(self, legs, leg_vert_ids, number, state):
1201 """Create a set of new legs from the info given. This can be 1202 overloaded by daughter classes.""" 1203 1204 mylegs = [(base_objects.Leg({'id':leg_id, 1205 'number':number, 1206 'state':state, 1207 'from_group':True}), 1208 vert_id)\ 1209 for leg_id, vert_id in leg_vert_ids] 1210 1211 return mylegs
1212
1213 - def get_combined_vertices(self, legs, vert_ids):
1214 """Allow for selection of vertex ids. This can be 1215 overloaded by daughter classes.""" 1216 1217 return vert_ids
1218
1219 - def trim_diagrams(self, decay_ids=[], diaglist=None):
1220 """Reduce the number of legs and vertices used in memory. 1221 When called by a diagram generation initiated by LoopAmplitude, 1222 this function should not trim the diagrams in the attribute 'diagrams' 1223 but rather a given list in the 'diaglist' argument.""" 1224 1225 legs = [] 1226 vertices = [] 1227 1228 if diaglist is None: 1229 diaglist=self.get('diagrams') 1230 1231 # Flag decaying legs in the core process by onshell = True 1232 process = self.get('process') 1233 for leg in process.get('legs'): 1234 if leg.get('state') and leg.get('id') in decay_ids: 1235 leg.set('onshell', True) 1236 1237 for diagram in diaglist: 1238 # Keep track of external legs (leg numbers already used) 1239 leg_external = set() 1240 for ivx, vertex in enumerate(diagram.get('vertices')): 1241 for ileg, leg in enumerate(vertex.get('legs')): 1242 # Ensure that only external legs get decay flag 1243 if leg.get('state') and leg.get('id') in decay_ids and \ 1244 leg.get('number') not in leg_external: 1245 # Use onshell to indicate decaying legs, 1246 # i.e. legs that have decay chains 1247 leg = copy.copy(leg) 1248 leg.set('onshell', True) 1249 try: 1250 index = legs.index(leg) 1251 except ValueError: 1252 vertex.get('legs')[ileg] = leg 1253 legs.append(leg) 1254 else: # Found a leg 1255 vertex.get('legs')[ileg] = legs[index] 1256 leg_external.add(leg.get('number')) 1257 try: 1258 index = vertices.index(vertex) 1259 diagram.get('vertices')[ivx] = vertices[index] 1260 except ValueError: 1261 vertices.append(vertex)
1262
1263 #=============================================================================== 1264 # AmplitudeList 1265 #=============================================================================== 1266 -class AmplitudeList(base_objects.PhysicsObjectList):
1267 """List of Amplitude objects 1268 """ 1269
1270 - def has_any_loop_process(self):
1271 """ Check the content of all processes of the amplitudes in this list to 1272 see if there is any which defines perturbation couplings. """ 1273 1274 for amp in self: 1275 if amp.has_loop_process(): 1276 return True
1277
1278 - def is_valid_element(self, obj):
1279 """Test if object obj is a valid Amplitude for the list.""" 1280 1281 return isinstance(obj, Amplitude)
1282
1283 #=============================================================================== 1284 # DecayChainAmplitude 1285 #=============================================================================== 1286 -class DecayChainAmplitude(Amplitude):
1287 """A list of amplitudes + a list of decay chain amplitude lists; 1288 corresponding to a ProcessDefinition with a list of decay chains 1289 """ 1290
1291 - def default_setup(self):
1292 """Default values for all properties""" 1293 1294 self['amplitudes'] = AmplitudeList() 1295 self['decay_chains'] = DecayChainAmplitudeList()
1296
1297 - def __init__(self, argument = None, collect_mirror_procs = False, 1298 ignore_six_quark_processes = False, loop_filter=None):
1299 """Allow initialization with Process and with ProcessDefinition""" 1300 1301 if isinstance(argument, base_objects.Process): 1302 super(DecayChainAmplitude, self).__init__() 1303 from madgraph.loop.loop_diagram_generation import LoopMultiProcess 1304 if argument['perturbation_couplings']: 1305 MultiProcessClass=LoopMultiProcess 1306 else: 1307 MultiProcessClass=MultiProcess 1308 if isinstance(argument, base_objects.ProcessDefinition): 1309 self['amplitudes'].extend(\ 1310 MultiProcessClass.generate_multi_amplitudes(argument, 1311 collect_mirror_procs, 1312 ignore_six_quark_processes, 1313 loop_filter=loop_filter)) 1314 else: 1315 self['amplitudes'].append(\ 1316 MultiProcessClass.get_amplitude_from_proc(argument, 1317 loop_filter=loop_filter)) 1318 # Clean decay chains from process, since we haven't 1319 # combined processes with decay chains yet 1320 process = copy.copy(self.get('amplitudes')[0].get('process')) 1321 process.set('decay_chains', base_objects.ProcessList()) 1322 self['amplitudes'][0].set('process', process) 1323 1324 for process in argument.get('decay_chains'): 1325 if process.get('perturbation_couplings'): 1326 raise MadGraph5Error,\ 1327 "Decay processes can not be perturbed" 1328 process.set('overall_orders', argument.get('overall_orders')) 1329 if not process.get('is_decay_chain'): 1330 process.set('is_decay_chain',True) 1331 if not process.get_ninitial() == 1: 1332 raise InvalidCmd,\ 1333 "Decay chain process must have exactly one" + \ 1334 " incoming particle" 1335 self['decay_chains'].append(\ 1336 DecayChainAmplitude(process, collect_mirror_procs, 1337 ignore_six_quark_processes)) 1338 1339 # Flag decaying legs in the core diagrams by onshell = True 1340 decay_ids = sum([[a.get('process').get('legs')[0].get('id') \ 1341 for a in dec.get('amplitudes')] for dec in \ 1342 self['decay_chains']], []) 1343 decay_ids = set(decay_ids) 1344 for amp in self['amplitudes']: 1345 amp.trim_diagrams(decay_ids) 1346 1347 # Check that all decay ids are present in at least some process 1348 for amp in self['amplitudes']: 1349 for l in amp.get('process').get('legs'): 1350 if l.get('id') in decay_ids: 1351 decay_ids.remove(l.get('id')) 1352 1353 if decay_ids: 1354 model = amp.get('process').get('model') 1355 names = [model.get_particle(id).get('name') for id in decay_ids] 1356 1357 logger.warning( 1358 "$RED Decay without corresponding particle in core process found.\n" + \ 1359 "Decay information for particle(s) %s is discarded.\n" % ','.join(names) + \ 1360 "Please check your process definition carefully. 
\n" + \ 1361 "This warning usually means that you forgot parentheses in presence of subdecay.\n" + \ 1362 "Example of correct syntax: p p > t t~, ( t > w+ b, w+ > l+ vl)") 1363 1364 # Remove unused decays from the process list 1365 for dc in reversed(self['decay_chains']): 1366 for a in reversed(dc.get('amplitudes')): 1367 # Remove the amplitudes from this decay chain 1368 if a.get('process').get('legs')[0].get('id') in decay_ids: 1369 dc.get('amplitudes').remove(a) 1370 if not dc.get('amplitudes'): 1371 # If no amplitudes left, remove the decay chain 1372 self['decay_chains'].remove(dc) 1373 1374 # Finally, write a fat warning if any decay process has 1375 # the decaying particle (or its antiparticle) in the final state 1376 bad_procs = [] 1377 for dc in self['decay_chains']: 1378 for amp in dc.get('amplitudes'): 1379 legs = amp.get('process').get('legs') 1380 fs_parts = [abs(l.get('id')) for l in legs if 1381 l.get('state')] 1382 is_part = [l.get('id') for l in legs if not 1383 l.get('state')][0] 1384 if abs(is_part) in fs_parts: 1385 bad_procs.append(amp.get('process')) 1386 1387 if bad_procs: 1388 logger.warning( 1389 "$RED Decay(s) with particle decaying to itself:\n" + \ 1390 '\n'.join([p.nice_string() for p in bad_procs]) + \ 1391 "\nPlease check your process definition carefully. \n") 1392 1393 1394 elif argument != None: 1395 # call the mother routine 1396 super(DecayChainAmplitude, self).__init__(argument) 1397 else: 1398 # call the mother routine 1399 super(DecayChainAmplitude, self).__init__()
1400
1401 - def filter(self, name, value):
1402 """Filter for valid amplitude property values.""" 1403 1404 if name == 'amplitudes': 1405 if not isinstance(value, AmplitudeList): 1406 raise self.PhysicsObjectError, \ 1407 "%s is not a valid AmplitudeList" % str(value) 1408 if name == 'decay_chains': 1409 if not isinstance(value, DecayChainAmplitudeList): 1410 raise self.PhysicsObjectError, \ 1411 "%s is not a valid DecayChainAmplitudeList object" % \ 1412 str(value) 1413 return True
1414
1415 - def get_sorted_keys(self):
1416 """Return diagram property names as a nicely sorted list.""" 1417 1418 return ['amplitudes', 'decay_chains']
1419 1420 # Helper functions 1421
1422 - def get_number_of_diagrams(self):
1423 """Returns number of diagrams for this amplitude""" 1424 return sum(len(a.get('diagrams')) for a in self.get('amplitudes')) \ 1425 + sum(d.get_number_of_diagrams() for d in \ 1426 self.get('decay_chains'))
1427
1428 - def nice_string(self, indent = 0):
1429 """Returns a nicely formatted string of the amplitude content.""" 1430 mystr = "" 1431 for amplitude in self.get('amplitudes'): 1432 mystr = mystr + amplitude.nice_string(indent) + "\n" 1433 1434 if self.get('decay_chains'): 1435 mystr = mystr + " " * indent + "Decays:\n" 1436 for dec in self.get('decay_chains'): 1437 mystr = mystr + dec.nice_string(indent + 2) + "\n" 1438 1439 return mystr[:-1]
1440
1441 - def nice_string_processes(self, indent = 0):
1442 """Returns a nicely formatted string of the amplitude processes.""" 1443 mystr = "" 1444 for amplitude in self.get('amplitudes'): 1445 mystr = mystr + amplitude.nice_string_processes(indent) + "\n" 1446 1447 if self.get('decay_chains'): 1448 mystr = mystr + " " * indent + "Decays:\n" 1449 for dec in self.get('decay_chains'): 1450 mystr = mystr + dec.nice_string_processes(indent + 2) + "\n" 1451 1452 return mystr[:-1]
1453
1454 - def get_ninitial(self):
1455 """Returns the number of initial state particles in the process.""" 1456 return self.get('amplitudes')[0].get('process').get_ninitial()
1457
1458 - def get_decay_ids(self):
1459 """Returns a set of all particle ids for which a decay is defined""" 1460 1461 decay_ids = [] 1462 1463 # Get all amplitudes for the decay processes 1464 for amp in sum([dc.get('amplitudes') for dc \ 1465 in self['decay_chains']], []): 1466 # For each amplitude, find the initial state leg 1467 decay_ids.append(amp.get('process').get_initial_ids()[0]) 1468 1469 # Return a list with unique ids 1470 return list(set(decay_ids))
1471
1472 - def has_loop_process(self):
1473 """ Returns wether this amplitude has a loop process.""" 1474 return self['amplitudes'].has_any_loop_process()
1475
1476 - def get_amplitudes(self):
1477 """Recursive function to extract all amplitudes for this process""" 1478 1479 amplitudes = AmplitudeList() 1480 1481 amplitudes.extend(self.get('amplitudes')) 1482 for decay in self.get('decay_chains'): 1483 amplitudes.extend(decay.get_amplitudes()) 1484 1485 return amplitudes
1486
1487 1488 #=============================================================================== 1489 # DecayChainAmplitudeList 1490 #=============================================================================== 1491 -class DecayChainAmplitudeList(base_objects.PhysicsObjectList):
1492 """List of DecayChainAmplitude objects 1493 """ 1494
1495 - def is_valid_element(self, obj):
1496 """Test if object obj is a valid DecayChainAmplitude for the list.""" 1497 1498 return isinstance(obj, DecayChainAmplitude)
1499
1500 1501 #=============================================================================== 1502 # MultiProcess 1503 #=============================================================================== 1504 -class MultiProcess(base_objects.PhysicsObject):
1505 """MultiProcess: list of process definitions 1506 list of processes (after cleaning) 1507 list of amplitudes (after generation) 1508 """ 1509
1510 - def default_setup(self):
1511 """Default values for all properties""" 1512 1513 self['process_definitions'] = base_objects.ProcessDefinitionList() 1514 # self['amplitudes'] can be an AmplitudeList or a 1515 # DecayChainAmplitudeList, depending on whether there are 1516 # decay chains in the process definitions or not. 1517 self['amplitudes'] = AmplitudeList() 1518 # Flag for whether to combine IS mirror processes together 1519 self['collect_mirror_procs'] = False 1520 # List of quark flavors where we ignore processes with at 1521 # least 6 quarks (three quark lines) 1522 self['ignore_six_quark_processes'] = [] 1523 # Allow to use the model parameter numerical value for optimization. 1524 #This is currently use for 1->N generation(check mass). 1525 self['use_numerical'] = False
1526
1527 - def __init__(self, argument=None, collect_mirror_procs = False, 1528 ignore_six_quark_processes = [], optimize=False, 1529 loop_filter=None):
1530 """Allow initialization with ProcessDefinition or 1531 ProcessDefinitionList 1532 optimize allows to use param_card information. (usefull for 1-.N)""" 1533 1534 if isinstance(argument, base_objects.ProcessDefinition): 1535 super(MultiProcess, self).__init__() 1536 self['process_definitions'].append(argument) 1537 elif isinstance(argument, base_objects.ProcessDefinitionList): 1538 super(MultiProcess, self).__init__() 1539 self['process_definitions'] = argument 1540 elif argument != None: 1541 # call the mother routine 1542 super(MultiProcess, self).__init__(argument) 1543 else: 1544 # call the mother routine 1545 super(MultiProcess, self).__init__() 1546 1547 self['collect_mirror_procs'] = collect_mirror_procs 1548 self['ignore_six_quark_processes'] = ignore_six_quark_processes 1549 self['use_numerical'] = optimize 1550 self['loop_filter'] = loop_filter 1551 1552 if isinstance(argument, base_objects.ProcessDefinition) or \ 1553 isinstance(argument, base_objects.ProcessDefinitionList): 1554 # Generate the diagrams 1555 self.get('amplitudes')
1556 1557
1558 - def filter(self, name, value):
1559 """Filter for valid process property values.""" 1560 1561 if name == 'process_definitions': 1562 if not isinstance(value, base_objects.ProcessDefinitionList): 1563 raise self.PhysicsObjectError, \ 1564 "%s is not a valid ProcessDefinitionList object" % str(value) 1565 1566 if name == 'amplitudes': 1567 if not isinstance(value, AmplitudeList): 1568 raise self.PhysicsObjectError, \ 1569 "%s is not a valid AmplitudeList object" % str(value) 1570 1571 if name in ['collect_mirror_procs']: 1572 if not isinstance(value, bool): 1573 raise self.PhysicsObjectError, \ 1574 "%s is not a valid boolean" % str(value) 1575 1576 if name == 'ignore_six_quark_processes': 1577 if not isinstance(value, list): 1578 raise self.PhysicsObjectError, \ 1579 "%s is not a valid list" % str(value) 1580 1581 return True
1582
1583 - def get(self, name):
1584 """Get the value of the property name.""" 1585 1586 if (name == 'amplitudes') and not self[name]: 1587 for process_def in self.get('process_definitions'): 1588 if process_def.get('decay_chains'): 1589 # This is a decay chain process 1590 # Store amplitude(s) as DecayChainAmplitude 1591 self['amplitudes'].append(\ 1592 DecayChainAmplitude(process_def, 1593 self.get('collect_mirror_procs'), 1594 self.get('ignore_six_quark_processes'))) 1595 else: 1596 self['amplitudes'].extend(\ 1597 self.generate_multi_amplitudes(process_def, 1598 self.get('collect_mirror_procs'), 1599 self.get('ignore_six_quark_processes'), 1600 self['use_numerical'], 1601 loop_filter=self['loop_filter'])) 1602 1603 return MultiProcess.__bases__[0].get(self, name) # call the mother routine
1604
1605 - def get_sorted_keys(self):
1606 """Return process property names as a nicely sorted list.""" 1607 1608 return ['process_definitions', 'amplitudes']
1609 1610 @classmethod
1611 - def generate_multi_amplitudes(cls,process_definition, 1612 collect_mirror_procs = False, 1613 ignore_six_quark_processes = [], 1614 use_numerical=False, 1615 loop_filter=None):
1616 """Generate amplitudes in a semi-efficient way. 1617 Make use of crossing symmetry for processes that fail diagram 1618 generation, but not for processes that succeed diagram 1619 generation. Doing so will risk making it impossible to 1620 identify processes with identical amplitudes. 1621 """ 1622 assert isinstance(process_definition, base_objects.ProcessDefinition), \ 1623 "%s not valid ProcessDefinition object" % \ 1624 repr(process_definition) 1625 1626 # Set automatic coupling orders 1627 process_definition.set('orders', MultiProcess.\ 1628 find_optimal_process_orders(process_definition)) 1629 # Check for maximum orders from the model 1630 process_definition.check_expansion_orders() 1631 1632 processes = base_objects.ProcessList() 1633 amplitudes = AmplitudeList() 1634 1635 # failed_procs and success_procs are sorted processes that have 1636 # already failed/succeeded based on crossing symmetry 1637 failed_procs = [] 1638 success_procs = [] 1639 # Complete processes, for identification of mirror processes 1640 non_permuted_procs = [] 1641 # permutations keeps the permutations of the crossed processes 1642 permutations = [] 1643 1644 # Store the diagram tags for processes, to allow for 1645 # identifying identical matrix elements already at this stage. 1646 model = process_definition['model'] 1647 1648 isids = [leg['ids'] for leg in process_definition['legs'] \ 1649 if leg['state'] == False] 1650 fsids = [leg['ids'] for leg in process_definition['legs'] \ 1651 if leg['state'] == True] 1652 # Generate all combinations for the initial state 1653 1654 for prod in itertools.product(*isids): 1655 islegs = [\ 1656 base_objects.Leg({'id':id, 'state': False}) \ 1657 for id in prod] 1658 1659 # Generate all combinations for the final state, and make 1660 # sure to remove double counting 1661 1662 red_fsidlist = [] 1663 1664 for prod in itertools.product(*fsids): 1665 1666 # Remove double counting between final states 1667 if tuple(sorted(prod)) in red_fsidlist: 1668 continue 1669 1670 red_fsidlist.append(tuple(sorted(prod))); 1671 1672 # Generate leg list for process 1673 leg_list = [copy.copy(leg) for leg in islegs] 1674 1675 leg_list.extend([\ 1676 base_objects.Leg({'id':id, 'state': True}) \ 1677 for id in prod]) 1678 1679 legs = base_objects.LegList(leg_list) 1680 1681 # Check for crossed processes 1682 sorted_legs = sorted([(l,i+1) for (i,l) in \ 1683 enumerate(legs.get_outgoing_id_list(model))]) 1684 permutation = [l[1] for l in sorted_legs] 1685 sorted_legs = array.array('i', [l[0] for l in sorted_legs]) 1686 1687 # Check for six-quark processes 1688 if ignore_six_quark_processes and \ 1689 len([i for i in sorted_legs if abs(i) in \ 1690 ignore_six_quark_processes]) >= 6: 1691 continue 1692 1693 # Check if crossed process has already failed, 1694 # in that case don't check process 1695 if sorted_legs in failed_procs: 1696 continue 1697 1698 # If allowed check mass validity [assume 1->N] 1699 if use_numerical: 1700 # check that final state has lower mass than initial state 1701 initial_mass = abs(model['parameter_dict'][model.get_particle(legs[0].get('id')).get('mass')]) 1702 if initial_mass == 0: 1703 continue 1704 for leg in legs[1:]: 1705 m = model['parameter_dict'][model.get_particle(leg.get('id')).get('mass')] 1706 initial_mass -= abs(m) 1707 if initial_mass.real <= 0: 1708 continue 1709 1710 # Setup process 1711 process = process_definition.get_process_with_legs(legs) 1712 1713 fast_proc = \ 1714 array.array('i',[leg.get('id') for leg in legs]) 1715 if collect_mirror_procs and 
1716            process_definition.get_ninitial() == 2:
1717             # Check if mirrored process is already generated
1718             mirror_proc = \
1719                 array.array('i', [fast_proc[1], fast_proc[0]] + \
1720                             list(fast_proc[2:]))
1721             try:
1722                 mirror_amp = \
1723                     amplitudes[non_permuted_procs.index(mirror_proc)]
1724             except ValueError:
1725                 # Didn't find any mirror process
1726                 pass
1727             else:
1728                 # Mirror process found
1729                 mirror_amp.set('has_mirror_process', True)
1730                 logger.info("Process %s added to mirror process %s" % \
1731                             (process.base_string(),
1732                              mirror_amp.get('process').base_string()))
1733                 continue
1734 
1735         # Check for successful crossings, unless we have specified
1736         # properties that break crossing symmetry
1737         if not process.get('required_s_channels') and \
1738            not process.get('forbidden_onsh_s_channels') and \
1739            not process.get('forbidden_s_channels') and \
1740            not process.get('is_decay_chain'):
1741             try:
1742                 crossed_index = success_procs.index(sorted_legs)
1743                 # The relabeling of legs for loop amplitudes is cumbersome
1744                 # and does not save much time. It is disabled here; the
1745                 # key 'loop_diagrams' is used to decide whether this is
1746                 # an instance of LoopAmplitude.
1747                 if 'loop_diagrams' in amplitudes[crossed_index]:
1748                     raise ValueError
1749             except ValueError:
1750                 # No crossing found, just continue
1751                 pass
1752             else:
1753                 # Found crossing - reuse amplitude
1754                 amplitude = MultiProcess.cross_amplitude(\
1755                     amplitudes[crossed_index],
1756                     process,
1757                     permutations[crossed_index],
1758                     permutation)
1759                 amplitudes.append(amplitude)
1760                 success_procs.append(sorted_legs)
1761                 permutations.append(permutation)
1762                 non_permuted_procs.append(fast_proc)
1763                 logger.info("Crossed process found for %s, reuse diagrams." % \
1764                             process.base_string())
1765                 continue
1766 
1767         # Create new amplitude
1768         amplitude = cls.get_amplitude_from_proc(process,
1769                                                 loop_filter=loop_filter)
1770 
1771         try:
1772             result = amplitude.generate_diagrams()
1773         except InvalidCmd as error:
1774             failed_procs.append(sorted_legs)
1775         else:
1776             # Succeeded in generating diagrams
1777             if amplitude.get('diagrams'):
1778                 amplitudes.append(amplitude)
1779                 success_procs.append(sorted_legs)
1780                 permutations.append(permutation)
1781                 non_permuted_procs.append(fast_proc)
1782             elif not result:
1783                 # Diagram generation failed for all crossings
1784                 failed_procs.append(sorted_legs)
1785 
1786 # Raise exception if there are no amplitudes for this process
1787 if not amplitudes:
1788     if len(failed_procs) == 1 and 'error' in locals():
1789         raise error
1790     else:
1791         raise NoDiagramException(\
1792             "No amplitudes generated from process %s. Please enter a valid process" % \
1793             process_definition.nice_string())
1794 
1795 
1796 # Return the produced amplitudes
1797 return amplitudes
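# A minimal standalone sketch of the crossing bookkeeping used above
# (the helper below is hypothetical and not part of this module): two
# processes are crossings of each other exactly when their sorted
# outgoing-id arrays coincide, and the permutation that sorts the legs
# is what cross_amplitude needs to relabel the reused diagrams.
def _crossing_key(outgoing_ids):
    """Return (sorted id array, permutation) for a list of leg ids."""
    pairs = sorted((pdg, pos + 1) for pos, pdg in enumerate(outgoing_ids))
    permutation = [pos for (dummy, pos) in pairs]
    key = array.array('i', [pdg for (pdg, dummy) in pairs])
    return key, permutation
# Two orderings of the same leg ids share the key but not the
# permutation; the two permutations define the relabeling map:
_key1, _perm1 = _crossing_key([-2, 2, 21, 21])
_key2, _perm2 = _crossing_key([21, -2, 21, 2])
assert _key1 == _key2 and _perm1 != _perm2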
1798 1799 @classmethod
1800 - def get_amplitude_from_proc(cls,proc,**opts):
1801 """ Return the correct amplitude type according to the characteristics of 1802 the process proc. The only option that could be specified here is 1803 loop_filter and it is of course not relevant for a tree amplitude.""" 1804 1805 return Amplitude({"process": proc})
1806 1807 1808 @staticmethod
1809 - def find_optimal_process_orders(process_definition):
1810 """Find the minimal WEIGHTED order for this set of processes. 1811 1812 The algorithm: 1813 1814 1) Check the coupling hierarchy of the model. Assign all 1815 particles to the different coupling hierarchies so that a 1816 particle is considered to be in the highest hierarchy (i.e., 1817 with lowest value) where it has an interaction. 1818 1819 2) Pick out the legs in the multiprocess according to the 1820 highest hierarchy represented (so don't mix particles from 1821 different hierarchy classes in the same multiparticles!) 1822 1823 3) Find the starting maximum WEIGHTED order as the sum of the 1824 highest n-2 weighted orders 1825 1826 4) Pick out required s-channel particle hierarchies, and use 1827 the highest of the maximum WEIGHTED order from the legs and 1828 the minimum WEIGHTED order extracted from 2*s-channel 1829 hierarchys plus the n-2-2*(number of s-channels) lowest 1830 leg weighted orders. 1831 1832 5) Run process generation with the WEIGHTED order determined 1833 in 3)-4) - # final state gluons, with all gluons removed from 1834 the final state 1835 1836 6) If no process is found, increase WEIGHTED order by 1 and go 1837 back to 5), until we find a process which passes. Return that 1838 order. 1839 1840 7) Continue 5)-6) until we reach (n-2)*(highest hierarchy)-1. 1841 If still no process has passed, return 1842 WEIGHTED = (n-2)*(highest hierarchy) 1843 """ 1844 1845 assert isinstance(process_definition, base_objects.ProcessDefinition), \ 1846 "%s not valid ProcessDefinition object" % \ 1847 repr(process_definition) 1848 1849 processes = base_objects.ProcessList() 1850 amplitudes = AmplitudeList() 1851 1852 # If there are already couplings defined, return 1853 if process_definition.get('orders') or \ 1854 process_definition.get('overall_orders') or \ 1855 process_definition.get('NLO_mode')=='virt': 1856 return process_definition.get('orders') 1857 1858 # If this is a decay process (and not a decay chain), return 1859 if process_definition.get_ninitial() == 1 and not \ 1860 process_definition.get('is_decay_chain'): 1861 return process_definition.get('orders') 1862 1863 logger.info("Checking for minimal orders which gives processes.") 1864 logger.info("Please specify coupling orders to bypass this step.") 1865 1866 # Calculate minimum starting guess for WEIGHTED order 1867 max_order_now, particles, hierarchy = \ 1868 process_definition.get_minimum_WEIGHTED() 1869 coupling = 'WEIGHTED' 1870 1871 model = process_definition.get('model') 1872 1873 # Extract the initial and final leg ids 1874 isids = [leg['ids'] for leg in \ 1875 filter(lambda leg: leg['state'] == False, process_definition['legs'])] 1876 fsids = [leg['ids'] for leg in \ 1877 filter(lambda leg: leg['state'] == True, process_definition['legs'])] 1878 1879 max_WEIGHTED_order = \ 1880 (len(fsids + isids) - 2)*int(model.get_max_WEIGHTED()) 1881 1882 # get the definition of the WEIGHTED 1883 hierarchydef = process_definition['model'].get('order_hierarchy') 1884 tmp = [] 1885 hierarchy = hierarchydef.items() 1886 hierarchy.sort() 1887 for key, value in hierarchydef.items(): 1888 if value>1: 1889 tmp.append('%s*%s' % (value,key)) 1890 else: 1891 tmp.append('%s' % key) 1892 wgtdef = '+'.join(tmp) 1893 # Run diagram generation with increasing max_order_now until 1894 # we manage to get diagrams 1895 while max_order_now < max_WEIGHTED_order: 1896 logger.info("Trying coupling order WEIGHTED<=%d: WEIGTHED IS %s" % (max_order_now, wgtdef)) 1897 1898 oldloglevel = logger.level 1899 logger.setLevel(logging.WARNING) 1900 1901 
    # failed_procs are processes that have already failed
1902     # based on crossing symmetry
1903     failed_procs = []
1904 
1905     # Generate all combinations for the initial state
1906     for prod in itertools.product(*isids):
1907         islegs = [ base_objects.Leg({'id':id, 'state': False}) \
1908                    for id in prod]
1909 
1910         # Generate all combinations for the final state, and make
1911         # sure to remove double counting
1912 
1913         red_fsidlist = []
1914 
1915         for prod in itertools.product(*fsids):
1916 
1917             # Remove double counting between final states
1918             if tuple(sorted(prod)) in red_fsidlist:
1919                 continue
1920 
1921             red_fsidlist.append(tuple(sorted(prod)))
1922 
1923             # Remove gluons from final state if QCD is among
1924             # the highest coupling hierarchy
1925             nglue = 0
1926             if 21 in particles[0]:
1927                 nglue = len([id for id in prod if id == 21])
1928                 prod = [id for id in prod if id != 21]
1929 
1930             # Generate leg list for process
1931             leg_list = [copy.copy(leg) for leg in islegs]
1932 
1933             leg_list.extend([\
1934                 base_objects.Leg({'id':id, 'state': True}) \
1935                 for id in prod])
1936 
1937             legs = base_objects.LegList(leg_list)
1938 
1939             # Set summed coupling order according to max_order_now,
1940             # subtracting the removed gluons
1941             coupling_orders_now = {coupling: max_order_now - \
1942                                    nglue * model['order_hierarchy']['QCD']}
1943 
1944             # Setup process
1945             process = base_objects.Process({\
1946                 'legs':legs,
1947                 'model':model,
1948                 'id': process_definition.get('id'),
1949                 'orders': coupling_orders_now,
1950                 'required_s_channels': \
1951                     process_definition.get('required_s_channels'),
1952                 'forbidden_onsh_s_channels': \
1953                     process_definition.get('forbidden_onsh_s_channels'),
1954                 'sqorders_types': \
1955                     process_definition.get('sqorders_types'),
1956                 'squared_orders': \
1957                     process_definition.get('squared_orders'),
1958                 'split_orders': \
1959                     process_definition.get('split_orders'),
1960                 'forbidden_s_channels': \
1961                     process_definition.get('forbidden_s_channels'),
1962                 'forbidden_particles': \
1963                     process_definition.get('forbidden_particles'),
1964                 'is_decay_chain': \
1965                     process_definition.get('is_decay_chain'),
1966                 'overall_orders': \
1967                     process_definition.get('overall_orders')})
1970 
1971             # Check for couplings with given expansion orders
1972             process.check_expansion_orders()
1973 
1974             # Check for crossed processes
1975             sorted_legs = sorted(legs.get_outgoing_id_list(model))
1976             # Check if crossed process has already failed;
1977             # in that case don't check process
1978             if tuple(sorted_legs) in failed_procs:
1979                 continue
1980 
1981             amplitude = Amplitude({'process': process})
1982             try:
1983                 amplitude.generate_diagrams()
1984             except InvalidCmd:
1985                 failed_procs.append(tuple(sorted_legs))
1986             else:
1987                 if amplitude.get('diagrams'):
1988                     # We found a valid amplitude. Return this order number
1989                     logger.setLevel(oldloglevel)
1990                     return {coupling: max_order_now}
1991                 else:
1992                     failed_procs.append(tuple(sorted_legs))
1993 
1994     # No processes found, increase max_order_now
1995     max_order_now += 1
1996     logger.setLevel(oldloglevel)
1997 
1998 # No valid process found below the maximal WEIGHTED order; return it
1999 return {coupling: max_order_now}
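# A small worked example of how the WEIGHTED definition string wgtdef
# is assembled above (the hierarchy values below are hypothetical, not
# taken from a real model): orders with weight 1 appear bare, higher
# weights as 'weight*order'.
def _weighted_definition(order_hierarchy):
    """Mirror the wgtdef construction for a given order hierarchy."""
    tmp = []
    for key, value in sorted(order_hierarchy.items()):
        if value > 1:
            tmp.append('%s*%s' % (value, key))
        else:
            tmp.append('%s' % key)
    return '+'.join(tmp)
assert _weighted_definition({'QCD': 1, 'QED': 2}) == 'QCD+2*QED'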
2000 2001 @staticmethod
2002 - def cross_amplitude(amplitude, process, org_perm, new_perm):
2003 """Return the amplitude crossed with the permutation new_perm""" 2004 # Create dict from original leg numbers to new leg numbers 2005 perm_map = dict(zip(org_perm, new_perm)) 2006 # Initiate new amplitude 2007 new_amp = copy.copy(amplitude) 2008 # Number legs 2009 for i, leg in enumerate(process.get('legs')): 2010 leg.set('number', i+1) 2011 # Set process 2012 new_amp.set('process', process) 2013 # Now replace the leg numbers in the diagrams 2014 diagrams = base_objects.DiagramList([d.renumber_legs(perm_map, 2015 process.get('legs'),) for \ 2016 d in new_amp.get('diagrams')]) 2017 new_amp.set('diagrams', diagrams) 2018 new_amp.trim_diagrams() 2019 2020 # Make sure to reset mirror process 2021 new_amp.set('has_mirror_process', False) 2022 2023 return new_amp
2024
2025 #=============================================================================== 2026 # Global helper methods 2027 #=============================================================================== 2028 2029 -def expand_list(mylist):
2030 """Takes a list of lists and elements and returns a list of flat lists. 2031 Example: [[1,2], 3, [4,5]] -> [[1,3,4], [1,3,5], [2,3,4], [2,3,5]] 2032 """ 2033 2034 # Check that argument is a list 2035 assert isinstance(mylist, list), "Expand_list argument must be a list" 2036 2037 res = [] 2038 2039 tmplist = [] 2040 for item in mylist: 2041 if isinstance(item, list): 2042 tmplist.append(item) 2043 else: 2044 tmplist.append([item]) 2045 2046 for item in apply(itertools.product, tmplist): 2047 res.append(list(item)) 2048 2049 return res
2050
2051 -def expand_list_list(mylist):
2052 """Recursive function. Takes a list of lists and lists of lists 2053 and returns a list of flat lists. 2054 Example: [[1,2],[[4,5],[6,7]]] -> [[1,2,4,5], [1,2,6,7]] 2055 """ 2056 2057 res = [] 2058 2059 if not mylist or len(mylist) == 1 and not mylist[0]: 2060 return [[]] 2061 2062 # Check the first element is at least a list 2063 assert isinstance(mylist[0], list), \ 2064 "Expand_list_list needs a list of lists and lists of lists" 2065 2066 # Recursion stop condition, one single element 2067 if len(mylist) == 1: 2068 if isinstance(mylist[0][0], list): 2069 return mylist[0] 2070 else: 2071 return mylist 2072 2073 if isinstance(mylist[0][0], list): 2074 for item in mylist[0]: 2075 # Here the recursion happens, create lists starting with 2076 # each element of the first item and completed with 2077 # the rest expanded 2078 for rest in expand_list_list(mylist[1:]): 2079 reslist = copy.copy(item) 2080 reslist.extend(rest) 2081 res.append(reslist) 2082 else: 2083 for rest in expand_list_list(mylist[1:]): 2084 reslist = copy.copy(mylist[0]) 2085 reslist.extend(rest) 2086 res.append(reslist) 2087 2088 2089 return res
2090