Package madgraph :: Package core :: Module diagram_generation
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.core.diagram_generation

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Classes for diagram generation. Amplitude performs the diagram 
  16  generation, DecayChainAmplitude keeps track of processes with decay 
  17  chains, and MultiProcess allows generation of processes with 
  18  multiparticle definitions. DiagramTag allows to identify diagrams 
  19  based on relevant properties. 
  20  """ 
  21   
  22  from __future__ import absolute_import 
  23  from six.moves import filter 
  24  #force filter to be a generator # like in py3 
  25   
  26   
  27  import array 
  28  import copy 
  29  import itertools 
  30  import logging 
  31   
  32  import madgraph.core.base_objects as base_objects 
  33  import madgraph.various.misc as misc 
  34  from madgraph import InvalidCmd, MadGraph5Error 
  35  from six.moves import range 
  36  from six.moves import zip 
  37   
  38  logger = logging.getLogger('madgraph.diagram_generation') 
class NoDiagramException(InvalidCmd):
    """Raised when diagram generation produces no valid diagrams."""
    pass
42
43 #=============================================================================== 44 # DiagramTag mother class 45 #=============================================================================== 46 47 -class DiagramTag(object):
48 """Class to tag diagrams based on objects with some __lt__ measure, e.g. 49 PDG code/interaction id (for comparing diagrams from the same amplitude), 50 or Lorentz/coupling/mass/width (for comparing AMPs from different MEs). 51 Algorithm: Create chains starting from external particles: 52 1 \ / 6 53 2 /\______/\ 7 54 3_ / | \_ 8 55 4 / 5 \_ 9 56 \ 10 57 gives ((((9,10,id910),8,id9108),(6,7,id67),id910867) 58 (((1,2,id12),(3,4,id34)),id1234), 59 5,id91086712345) 60 where idN is the id of the corresponding interaction. The ordering within 61 chains is based on chain length (depth; here, 1234 has depth 3, 910867 has 62 depth 4, 5 has depth 0), and if equal on the ordering of the chain elements. 63 The determination of central vertex is based on minimizing the chain length 64 for the longest subchain. 65 This gives a unique tag which can be used to identify diagrams 66 (instead of symmetry), as well as identify identical matrix elements from 67 different processes.""" 68
69 - class DiagramTagError(Exception):
70 """Exception for any problems in DiagramTags""" 71 pass
72
    def __init__(self, diagram, model=None, ninitial=2):
        """Initialize with a diagram. Create DiagramTagChainLinks according to
        the diagram, and figure out if we need to shift the central vertex.

        diagram: base_objects.Diagram to be tagged
        model: model used by link_from_leg/vertex_id_from_vertex to
               translate legs and vertices into link content
        ninitial: number of initial-state legs (default 2)
        """

        # leg_dict maps leg number -> DiagramTagChainLink of the
        # intermediate particle carrying that number
        leg_dict = {}
        # Create the chain which will be the diagram tag
        for vertex in diagram.get('vertices'):
            # Only add incoming legs
            legs = vertex.get('legs')[:-1]
            lastvx = vertex == diagram.get('vertices')[-1]
            if lastvx:
                # If last vertex, all legs are incoming
                legs = vertex.get('legs')
            # Add links corresponding to the relevant legs; setdefault
            # reuses the link of an intermediate particle created earlier
            link = DiagramTagChainLink([leg_dict.setdefault(leg.get('number'),
                                        DiagramTagChainLink(self.link_from_leg(leg, model))) \
                                        for leg in legs],
                                       self.vertex_id_from_vertex(vertex,
                                                                  lastvx,
                                                                  model,
                                                                  ninitial))
            # Add vertex to leg_dict if not last one
            if not lastvx:
                leg_dict[vertex.get('legs')[-1].get('number')] = link

        # The resulting link is the hypothetical result
        self.tag = link

        # Now make sure to find the central vertex in the diagram,
        # defined by the longest leg being as short as possible.
        # Links are sorted, so links[0] is the deepest chain.
        done = max([l.depth for l in self.tag.links]) == 0
        while not done:
            # Identify the longest chain in the tag
            longest_chain = self.tag.links[0]
            # Create a new link corresponding to moving one step
            # towards the longest chain
            new_link = DiagramTagChainLink(self.tag.links[1:],
                                           self.flip_vertex(\
                                               self.tag.vertex_id,
                                               longest_chain.vertex_id,
                                               self.tag.links[1:]))
            # Create a new final vertex in the direction of the longest link
            other_links = list(longest_chain.links) + [new_link]
            other_link = DiagramTagChainLink(other_links,
                                             self.flip_vertex(\
                                                 longest_chain.vertex_id,
                                                 self.tag.vertex_id,
                                                 other_links))

            if other_link.links[0] < self.tag.links[0]:
                # Shifted tag has a shorter longest chain:
                # switch to new tag, continue search
                self.tag = other_link
            else:
                # We have found the central vertex
                done = True
128
129 - def get_external_numbers(self):
130 """Get the order of external particles in this tag""" 131 132 return self.tag.get_external_numbers()
133
134 - def diagram_from_tag(self, model):
135 """Output a diagram from a DiagramTag. Note that each daughter 136 class must implement the static functions id_from_vertex_id 137 (if the vertex id is something else than an integer) and 138 leg_from_link (to pass the correct info from an end link to a 139 leg).""" 140 141 # Create the vertices, starting from the final vertex 142 diagram = base_objects.Diagram({'vertices': \ 143 self.vertices_from_link(self.tag, 144 model, 145 True)}) 146 diagram.calculate_orders(model) 147 return diagram
148 149 @classmethod 184 185 @classmethod
186 - def legPDGs_from_vertex_id(cls, vertex_id,model):
187 """Returns the list of external PDGs of the interaction corresponding 188 to this vertex_id.""" 189 190 # In case we have to deal with a regular vertex, we return the list 191 # external PDGs as given by the model information on that integer 192 # vertex id. 193 if (len(vertex_id)>=3 and 'PDGs' in vertex_id[2]): 194 return vertex_id[2]['PDGs'] 195 else: 196 return [part.get_pdg_code() for part in model.get_interaction( 197 cls.id_from_vertex_id(vertex_id)).get('particles')]
198 199 @classmethod
200 - def leg_from_legs(cls,legs, vertex_id, model):
201 """Return a leg from a leg list and the model info""" 202 203 pdgs = list(cls.legPDGs_from_vertex_id(vertex_id, model)) 204 205 # Extract the resulting pdg code from the interaction pdgs 206 for pdg in [leg.get('id') for leg in legs]: 207 pdgs.remove(pdg) 208 209 assert len(pdgs) == 1 210 # Prepare the new leg properties 211 pdg = model.get_particle(pdgs[0]).get_anti_pdg_code() 212 number = min([l.get('number') for l in legs]) 213 # State is False for t-channel, True for s-channel 214 state = (len([l for l in legs if l.get('state') == False]) != 1) 215 # Note that this needs to be done before combining decay chains 216 onshell= False 217 218 return base_objects.Leg({'id': pdg, 219 'number': number, 220 'state': state, 221 'onshell': onshell})
222 223 @classmethod 236 237 @staticmethod 250 251 @staticmethod
252 - def id_from_vertex_id(vertex_id):
253 """Return the numerical vertex id from a link.vertex_id""" 254 255 return vertex_id[0][0]
256 257 @staticmethod
258 - def loop_info_from_vertex_id(vertex_id):
259 """Return the loop_info stored in this vertex id. Notice that the 260 IdentifyME tag does not store the loop_info, but should normally never 261 need access to it.""" 262 263 return vertex_id[2]
264 265 @staticmethod
266 - def reorder_permutation(perm, start_perm):
267 """Reorder a permutation with respect to start_perm. Note that 268 both need to start from 1.""" 269 if perm == start_perm: 270 return list(range(len(perm))) 271 order = [i for (p,i) in \ 272 sorted([(p,i) for (i,p) in enumerate(perm)])] 273 return [start_perm[i]-1 for i in order]
274 275 @staticmethod 286 287 @staticmethod
288 - def vertex_id_from_vertex(vertex, last_vertex, model, ninitial):
289 """Returns the default vertex id: just the interaction id 290 Note that in the vertex id, like the leg, only the first entry is 291 taken into account in the tag comparison, while the second is for 292 storing information that is not to be used in comparisons and the 293 third for additional info regarding the shrunk loop vertex.""" 294 295 if isinstance(vertex,base_objects.ContractedVertex): 296 # return (vertex.get('id'),(),{'PDGs':vertex.get('PDGs')}) 297 return ((vertex.get('id'),vertex.get('loop_tag')),(), 298 {'PDGs':vertex.get('PDGs')}) 299 else: 300 return ((vertex.get('id'),()),(),{})
301 302 @staticmethod
303 - def flip_vertex(new_vertex, old_vertex, links):
304 """Returns the default vertex flip: just the new_vertex""" 305 return new_vertex
306
307 - def __eq__(self, other):
308 """Equal if same tag""" 309 if type(self) != type(other): 310 return False 311 return self.tag == other.tag
312
313 - def __ne__(self, other):
314 return not self.__eq__(other)
315
316 - def __str__(self):
317 return str(self.tag)
318
319 - def __lt__(self, other):
320 return self.tag < other.tag
321
322 - def __gt__(self, other):
323 return self.tag > other.tag
324 325 __repr__ = __str__
326 417
418 #=============================================================================== 419 # Amplitude 420 #=============================================================================== 421 -class Amplitude(base_objects.PhysicsObject):
422 """Amplitude: process + list of diagrams (ordered) 423 Initialize with a process, then call generate_diagrams() to 424 generate the diagrams for the amplitude 425 """ 426
427 - def default_setup(self):
428 """Default values for all properties""" 429 430 self['process'] = base_objects.Process() 431 self['diagrams'] = None 432 # has_mirror_process is True if the same process but with the 433 # two incoming particles interchanged has been generated 434 self['has_mirror_process'] = False
435
436 - def __init__(self, argument=None):
437 """Allow initialization with Process""" 438 if isinstance(argument, base_objects.Process): 439 super(Amplitude, self).__init__() 440 self.set('process', argument) 441 self.generate_diagrams() 442 elif argument != None: 443 # call the mother routine 444 super(Amplitude, self).__init__(argument) 445 else: 446 # call the mother routine 447 super(Amplitude, self).__init__()
448
449 - def filter(self, name, value):
450 """Filter for valid amplitude property values.""" 451 452 if name == 'process': 453 if not isinstance(value, base_objects.Process): 454 raise self.PhysicsObjectError("%s is not a valid Process object" % str(value)) 455 if name == 'diagrams': 456 if not isinstance(value, base_objects.DiagramList): 457 raise self.PhysicsObjectError("%s is not a valid DiagramList object" % str(value)) 458 if name == 'has_mirror_process': 459 if not isinstance(value, bool): 460 raise self.PhysicsObjectError("%s is not a valid boolean" % str(value)) 461 return True
462
463 - def get(self, name):
464 """Get the value of the property name.""" 465 466 if name == 'diagrams' and self[name] == None: 467 # Have not yet generated diagrams for this process 468 if self['process']: 469 self.generate_diagrams() 470 471 return super(Amplitude, self).get(name)
472 # return Amplitude.__bases__[0].get(self, name) #return the mother routine 473 474
475 - def get_sorted_keys(self):
476 """Return diagram property names as a nicely sorted list.""" 477 478 return ['process', 'diagrams', 'has_mirror_process']
479
480 - def get_number_of_diagrams(self):
481 """Returns number of diagrams for this amplitude""" 482 return len(self.get('diagrams'))
483
484 - def get_amplitudes(self):
485 """Return an AmplitudeList with just this amplitude. 486 Needed for DecayChainAmplitude.""" 487 488 return AmplitudeList([self])
489
490 - def nice_string(self, indent=0):
491 """Returns a nicely formatted string of the amplitude content.""" 492 return self.get('process').nice_string(indent) + "\n" + \ 493 self.get('diagrams').nice_string(indent)
494
495 - def nice_string_processes(self, indent=0):
496 """Returns a nicely formatted string of the amplitude process.""" 497 return self.get('process').nice_string(indent)
498
499 - def get_ninitial(self):
500 """Returns the number of initial state particles in the process.""" 501 return self.get('process').get_ninitial()
502
503 - def has_loop_process(self):
504 """ Returns wether this amplitude has a loop process.""" 505 506 return self.get('process').get('perturbation_couplings')
507
    def generate_diagrams(self, returndiag=False, diagram_filter=False):
        """Generate diagrams. Algorithm:

        1. Define interaction dictionaries:
          * 2->0 (identity), 3->0, 4->0, ... , maxlegs->0
          * 2 -> 1, 3 -> 1, ..., maxlegs-1 -> 1

        2. Set flag from_group=true for all external particles.
           Flip particle/anti particle for incoming particles.

        3. If there is a dictionary n->0 with n=number of external
           particles, create if possible the combination [(1,2,3,4,...)]
           with *at least two* from_group==true. This will give a
           finished (set of) diagram(s) (done by reduce_leglist)

        4. Create all allowed groupings of particles with at least one
           from_group==true (according to dictionaries n->1):
           [(1,2),3,4...],[1,(2,3),4,...],...,
           [(1,2),(3,4),...],...,[(1,2,3),4,...],...
           (done by combine_legs)

        5. Replace each group with a (list of) new particle(s) with number
           n = min(group numbers). Set from_group true for these
           particles and false for all other particles. Store vertex info.
           (done by merge_comb_legs)

        6. Stop algorithm when at most 2 particles remain.
           Return all diagrams (lists of vertices).

        7. Repeat from 3 (recursion done by reduce_leglist)

        8. Replace final p=p vertex

        Be aware that the resulting vertices have all particles outgoing,
        so need to flip for incoming particles when used.

        SPECIAL CASE: For A>BC... processes which are legs in decay
        chains, we need to ensure that BC... combine first, giving A=A
        as a final vertex. This case is defined by the Process
        property is_decay_chain = True.
        This function can also be called by the generate_diagram function
        of LoopAmplitudes, in which case the generated diagrams here must not
        be directly assigned to the 'diagrams' attribute but returned as a
        DiagramList by the function. This is controlled by the argument
        returndiag.

        Returns not failed_crossing when returndiag is False, and the
        tuple (not failed_crossing, DiagramList) otherwise.
        """

        process = self.get('process')
        model = process.get('model')
        legs = process.get('legs')
        # Make sure orders is the minimum of orders and overall_orders
        for key in process.get('overall_orders').keys():
            try:
                process.get('orders')[key] = \
                                 min(process.get('orders')[key],
                                     process.get('overall_orders')[key])
            except KeyError:
                process.get('orders')[key] = process.get('overall_orders')[key]

        assert model.get('particles'), \
           "particles are missing in model: %s" % model.get('particles')

        assert model.get('interactions'), \
               "interactions are missing in model"

        res = base_objects.DiagramList()
        # First check that the number of fermions is even
        if len([leg for leg in legs if model.get('particle_dict')[\
                        leg.get('id')].is_fermion()]) % 2 == 1:
            if not returndiag:
                self['diagrams'] = res
                raise InvalidCmd('The number of fermion is odd')
            else:
                return False, res

        # Then check same number of incoming and outgoing fermions (if
        # no Majorana particles in model)
        if not model.get('got_majoranas') and \
           len([leg for leg in legs if leg.is_incoming_fermion(model)]) != \
           len([leg for leg in legs if leg.is_outgoing_fermion(model)]):
            if not returndiag:
                self['diagrams'] = res
                raise InvalidCmd('The number of of incoming/outcoming fermions are different')
            else:
                return False, res

        # Finally check that charge (conserved by all interactions) of the
        # process is globally conserved for this process.
        for charge in model.get('conserved_charge'):
            total = 0
            for leg in legs:
                part = model.get('particle_dict')[leg.get('id')]
                try:
                    value = part.get(charge)
                except (AttributeError, base_objects.PhysicsObject.PhysicsObjectError):
                    # Fall back to a plain attribute; default to 0 when
                    # the particle does not carry this charge at all
                    try:
                        value = getattr(part, charge)
                    except AttributeError:
                        value = 0

                # Antiparticles and incoming particles contribute with
                # opposite sign (XOR of "is antiparticle" and state)
                if (leg.get('id') != part['pdg_code']) != leg['state']:
                    total -= value
                else:
                    total += value

            if abs(total) > 1e-10:
                if not returndiag:
                    self['diagrams'] = res
                    raise InvalidCmd('No %s conservation for this process ' % charge)
                    # NOTE(review): unreachable after raise
                    return res
                else:
                    raise InvalidCmd('No %s conservation for this process ' % charge)
                    # NOTE(review): unreachable after raise
                    return res, res

        if not returndiag:
            logger.info("Trying %s " % process.nice_string().replace('Process', 'process'))

        # Give numbers to legs in process
        for i in range(0, len(process.get('legs'))):
            # Make sure legs are unique
            leg = copy.copy(process.get('legs')[i])
            process.get('legs')[i] = leg
            if leg.get('number') == 0:
                leg.set('number', i + 1)

        # Copy leglist from process, so we can flip leg identities
        # without affecting the original process
        leglist = self.copy_leglist(process.get('legs'))

        for leg in leglist:
            # For the first step, ensure the tag from_group
            # is true for all legs
            leg.set('from_group', True)

            # Need to flip part-antipart for incoming particles,
            # so they are all outgoing
            if leg.get('state') == False:
                part = model.get('particle_dict')[leg.get('id')]
                leg.set('id', part.get_anti_pdg_code())

        # Calculate the maximal multiplicity of n-1>1 configurations
        # to restrict possible leg combinations
        max_multi_to1 = max([len(key) for key in \
                             model.get('ref_dict_to1').keys()])

        # Reduce the leg list and return the corresponding
        # list of vertices

        # For decay processes, generate starting from final-state
        # combined only as the last particle. This allows to use these
        # in decay chains later on.
        is_decay_proc = process.get_ninitial() == 1
        if is_decay_proc:
            part = model.get('particle_dict')[leglist[0].get('id')]
            # For decay chain legs, we want everything to combine to
            # the initial leg. This is done by only allowing the
            # initial leg to combine as a final identity.
            ref_dict_to0 = {(part.get_pdg_code(), part.get_anti_pdg_code()):[0],
                            (part.get_anti_pdg_code(), part.get_pdg_code()):[0]}
            # Need to set initial leg from_group to None, to make sure
            # it can only be combined at the end.
            leglist[0].set('from_group', None)
            reduced_leglist = self.reduce_leglist(leglist,
                                                  max_multi_to1,
                                                  ref_dict_to0,
                                                  is_decay_proc,
                                                  process.get('orders'))
        else:
            reduced_leglist = self.reduce_leglist(leglist,
                                                  max_multi_to1,
                                                  model.get('ref_dict_to0'),
                                                  is_decay_proc,
                                                  process.get('orders'))

        # In LoopAmplitude the function below is overloaded such that it
        # converts back all DGLoopLegs to Legs. In the default tree-level
        # diagram generation, this does nothing.
        self.convert_dgleg_to_leg(reduced_leglist)

        if reduced_leglist:
            for vertex_list in reduced_leglist:
                res.append(self.create_diagram(base_objects.VertexList(vertex_list)))

        # Record whether or not we failed generation before required
        # s-channel propagators are taken into account
        failed_crossing = not res

        # Required s-channels is a list of id-lists. Select the
        # diagrams where all required s-channel propagators in any of
        # the lists are present (i.e., the different lists correspond
        # to "or", while the elements of the list correspond to
        # "and").
        if process.get('required_s_channels') and \
               process.get('required_s_channels')[0]:
            # We shouldn't look at the last vertex in each diagram,
            # since that is the n->0 vertex
            lastvx = -1
            # For decay chain processes, there is an "artificial"
            # extra vertex corresponding to particle 1=1, so we need
            # to exclude the two last vertexes.
            if is_decay_proc: lastvx = -2
            ninitial = len([leg for leg in process.get('legs') if leg.get('state') == False])
            # Check required s-channels for each list in required_s_channels
            old_res = res
            res = base_objects.DiagramList()
            for id_list in process.get('required_s_channels'):
                res_diags = [diagram for diagram in old_res if all([req_s_channel in \
                             [vertex.get_s_channel_id(\
                              process.get('model'), ninitial) \
                              for vertex in diagram.get('vertices')[:lastvx]] \
                             for req_s_channel in \
                             id_list])]
                # Add diagrams only if not already in res
                res.extend([diag for diag in res_diags if diag not in res])

        # Remove all diagrams where a forbidden s-channel propagator
        # is present.
        # Note that we shouldn't look at the last vertex in each
        # diagram, since that is the n->0 vertex
        if process.get('forbidden_s_channels'):
            ninitial = len([leg for leg in process.get('legs') if leg.get('state') == False])
            if ninitial == 2:
                res = base_objects.DiagramList(\
                    [diagram for diagram in res if not any([vertex.get_s_channel_id(\
                     process.get('model'), ninitial) \
                     in process.get('forbidden_s_channels')
                     for vertex in diagram.get('vertices')[:-1]])])
            else:
                # split since we need to avoid that the initial particle is
                # forbidden as well.
                newres = []
                for diagram in res:
                    leg1 = 1
                    # check the latest vertex to see if the leg 1 is inside; if
                    # it is we need to inverse the look-up and allow the first
                    # s-channel of the associate particles.
                    vertex = diagram.get('vertices')[-1]
                    if any([l['number'] == 1 for l in vertex.get('legs')]):
                        leg1 = [l['number'] for l in vertex.get('legs') if l['number'] != 1][0]
                    to_loop = list(range(len(diagram.get('vertices'))-1))
                    if leg1 > 1:
                        to_loop.reverse()
                    for i in to_loop:
                        vertex = diagram.get('vertices')[i]
                        if leg1:
                            # Skip (once) the vertex attached to leg 1
                            if any([l['number'] == leg1 for l in vertex.get('legs')]):
                                leg1 = 0
                                continue
                        if vertex.get_s_channel_id(process.get('model'), ninitial)\
                           in process.get('forbidden_s_channels'):
                            break
                    else:
                        # No forbidden s-channel encountered: keep diagram
                        newres.append(diagram)
                res = base_objects.DiagramList(newres)

        # Mark forbidden (onshell) s-channel propagators, to forbid onshell
        # generation.
        if process.get('forbidden_onsh_s_channels'):
            ninitial = len([leg for leg in process.get('legs') if leg.get('state') == False])

            verts = base_objects.VertexList(sum([[vertex for vertex \
                    in diagram.get('vertices')[:-1]
                    if vertex.get_s_channel_id(\
                    process.get('model'), ninitial) \
                    in process.get('forbidden_onsh_s_channels')] \
                    for diagram in res], []))
            for vert in verts:
                # Use onshell = False to indicate that this s-channel is
                # forbidden
                newleg = copy.copy(vert.get('legs').pop(-1))
                newleg.set('onshell', False)
                vert.get('legs').append(newleg)

        # Set actual coupling orders for each diagram
        for diagram in res:
            diagram.calculate_orders(model)

        # Filter the diagrams according to the squared coupling order
        # constraints and possibly the negative one. Remember that
        # OrderName=-n means that the user wants to include everything up to
        # the N^(n+1)LO contribution in that order and at most one order can
        # be restricted in this way. We shall do this only if the diagrams are
        # not asked to be returned, as is the case for NLO, because in this
        # case the interferences are not necessarily among the diagrams
        # generated here only.
        if not returndiag and len(res) > 0:
            res = self.apply_squared_order_constraints(res)

        if diagram_filter:
            res = self.apply_user_filter(res)

        # Replace final id=0 vertex if necessary
        if not process.get('is_decay_chain'):
            for diagram in res:
                vertices = diagram.get('vertices')
                if len(vertices) > 1 and vertices[-1].get('id') == 0:
                    # Need to "glue together" last and next-to-last
                    # vertex, by replacing the (incoming) last leg of the
                    # next-to-last vertex with the (outgoing) leg in the
                    # last vertex
                    vertices = copy.copy(vertices)
                    lastvx = vertices.pop()
                    nexttolastvertex = copy.copy(vertices.pop())
                    legs = copy.copy(nexttolastvertex.get('legs'))
                    ntlnumber = legs[-1].get('number')
                    lastleg = [leg for leg in lastvx.get('legs') if leg.get('number') != ntlnumber][0]
                    # Reset onshell in case we have forbidden s-channels
                    if lastleg.get('onshell') == False:
                        lastleg.set('onshell', None)
                    # Replace the last leg of nexttolastvertex
                    legs[-1] = lastleg
                    nexttolastvertex.set('legs', legs)
                    vertices.append(nexttolastvertex)
                    diagram.set('vertices', vertices)

        if res and not returndiag:
            logger.info("Process has %d diagrams" % len(res))

        # Trim down number of legs and vertices used to save memory
        self.trim_diagrams(diaglist=res)

        # Sort process legs according to leg number
        pertur = 'QCD'
        if self.get('process')['perturbation_couplings']:
            pertur = sorted(self.get('process')['perturbation_couplings'])[0]
        self.get('process').get('legs').sort(pert=pertur)

        # Set diagrams to res if not asked to be returned
        if not returndiag:
            self['diagrams'] = res
            return not failed_crossing
        else:
            return not failed_crossing, res
842
    def apply_squared_order_constraints(self, diag_list):
        """Applies the user specified squared order constraints on the diagram
        list in argument and returns the filtered DiagramList."""

        res = copy.copy(diag_list)

        # Apply the filtering on constrained amplitude (== and >)
        # No need to iterate on this one
        for name, (value, operator) in self['process'].get('constrained_orders').items():
            res.filter_constrained_orders(name, value, operator)

        # Iterate the filtering since applying the constraint on one
        # type of coupling order can impact the filtering on a previous
        # one (relevant for the '==' type of constraint).
        while True:
            new_res = res.apply_positive_sq_orders(res,
                                     self['process'].get('squared_orders'),
                                     self['process']['sqorders_types'])
            # Exit condition: fixed point reached
            if len(res) == len(new_res):
                break
            elif (len(new_res) > len(res)):
                # Filtering may only ever remove diagrams
                raise MadGraph5Error(
                    'Inconsistency in function apply_squared_order_constraints().')
            # Actualizing the list of diagram for the next iteration
            res = new_res

        # Now treat the negative squared order constraint (at most one)
        neg_orders = [(order, value) for order, value in \
                      self['process'].get('squared_orders').items() if value < 0]
        if len(neg_orders) == 1:
            neg_order, neg_value = neg_orders[0]
            # Now check any negative order constraint
            res, target_order = res.apply_negative_sq_order(res, neg_order,\
                                neg_value, self['process']['sqorders_types'][neg_order])
            # Substitute the negative value with this positive one so that
            # the resulting computed constraints appear in the print out
            # and at the output stage we no longer have to deal with
            # negative valued target orders
            self['process']['squared_orders'][neg_order] = target_order
        elif len(neg_orders) > 1:
            raise InvalidCmd('At most one negative squared order constraint' + \
                             ' can be specified, not %s.' % str(neg_orders))

        return res
890
891 - def apply_user_filter(self, diag_list):
892 """Applies the user specified squared order constraints on the diagram 893 list in argument.""" 894 895 if True: 896 remove_diag = misc.plugin_import('user_filter', 897 'user filter required to be defined in PLUGIN/user_filter.py with the function remove_diag(ONEDIAG) which returns True if the diagram has to be removed', 898 fcts=['remove_diag']) 899 else: 900 #example and simple tests 901 def remove_diag(diag, model=None): 902 for vertex in diag['vertices']: #last 903 if vertex['id'] == 0: #special final vertex 904 continue 905 if vertex['legs'][-1]['number'] < 3: #this means T-channel 906 if abs(vertex['legs'][-1]['id']) <6: 907 return True 908 return False
909 910 res = diag_list.__class__() 911 nb_removed = 0 912 model = self['process']['model'] 913 for diag in diag_list: 914 if remove_diag(diag, model): 915 nb_removed +=1 916 else: 917 res.append(diag) 918 919 if nb_removed: 920 logger.warning('Diagram filter is ON and removed %s diagrams for this subprocess.' % nb_removed) 921 922 return res
923 924 925
926 - def create_diagram(self, vertexlist):
927 """ Return a Diagram created from the vertex list. This function can be 928 overloaded by daughter classes.""" 929 return base_objects.Diagram({'vertices':vertexlist})
930
931 - def convert_dgleg_to_leg(self, vertexdoublelist):
932 """ In LoopAmplitude, it converts back all DGLoopLegs into Legs. 933 In Amplitude, there is nothing to do. """ 934 935 return True
936
937 - def copy_leglist(self, legs):
938 """ Simply returns a copy of the leg list. This function is 939 overloaded in LoopAmplitude so that a DGLoopLeg list is returned. 940 The DGLoopLeg has some additional parameters only useful during 941 loop diagram generation""" 942 943 return base_objects.LegList(\ 944 [ copy.copy(leg) for leg in legs ])
945
    def reduce_leglist(self, curr_leglist, max_multi_to1, ref_dict_to0,
                       is_decay_proc = False, coupling_orders = None):
        """Recursive function to reduce N LegList to N-1.
        For the algorithm, see the doc for generate_diagrams.

        Returns a list of vertex lists (one per generated diagram), or
        None when this branch must be discarded.
        """

        # Result variable which is a list of lists of vertices
        # to be added
        res = []

        # Stop condition. If LegList is None, that means that this
        # diagram must be discarded
        if curr_leglist is None:
            return None

        # Extract ref dict information
        model = self.get('process').get('model')
        ref_dict_to1 = self.get('process').get('model').get('ref_dict_to1')

        # If all legs can be combined in one single vertex, add this
        # vertex to res and continue.
        # Special treatment for decay chain legs
        if curr_leglist.can_combine_to_0(ref_dict_to0, is_decay_proc):
            # Extract the interaction id associated to the vertex
            vertex_ids = self.get_combined_vertices(curr_leglist,
                         copy.copy(ref_dict_to0[tuple(sorted([leg.get('id') for \
                                                       leg in curr_leglist]))]))

            final_vertices = [base_objects.Vertex({'legs': curr_leglist,
                                                   'id': vertex_id}) for \
                              vertex_id in vertex_ids]
            # Check for coupling orders. If orders < 0, skip vertex
            for final_vertex in final_vertices:
                if self.reduce_orders(coupling_orders, model,
                                      [final_vertex.get('id')]) != False:
                    res.append([final_vertex])
        # Stop condition 2: if the leglist contained exactly two particles,
        # return the result, if any, and stop.
        if len(curr_leglist) == 2:
            if res:
                return res
            else:
                return None

        # Create a list of all valid combinations of legs
        comb_lists = self.combine_legs(curr_leglist,
                                       ref_dict_to1, max_multi_to1)

        # Create a list of leglists/vertices by merging combinations
        leg_vertex_list = self.merge_comb_legs(comb_lists, ref_dict_to1)

        # Consider all the pairs (reduced leglist, vertices)
        for leg_vertex_tuple in leg_vertex_list:

            # Remove forbidden particles
            if self.get('process').get('forbidden_particles') and \
                any([abs(vertex.get('legs')[-1].get('id')) in \
                self.get('process').get('forbidden_particles') \
                for vertex in leg_vertex_tuple[1]]):
                    continue

            # Check for coupling orders. If couplings < 0, skip recursion.
            new_coupling_orders = self.reduce_orders(coupling_orders,
                                                     model,
                                                     [vertex.get('id') for vertex in \
                                                      leg_vertex_tuple[1]])
            if new_coupling_orders == False:
                # Some coupling order < 0
                continue

            # This is where recursion happens
            # First, reduce again the leg part
            reduced_diagram = self.reduce_leglist(leg_vertex_tuple[0],
                                                  max_multi_to1,
                                                  ref_dict_to0,
                                                  is_decay_proc,
                                                  new_coupling_orders)
            # If there is a reduced diagram, combine the vertices from
            # this step with every completion found by the recursion
            if reduced_diagram:
                vertex_list_list = [list(leg_vertex_tuple[1])]
                vertex_list_list.append(reduced_diagram)
                expanded_list = expand_list_list(vertex_list_list)
                res.extend(expanded_list)

        return res
1034
1035 - def reduce_orders(self, coupling_orders, model, vertex_id_list):
1036 """Return False if the coupling orders for any coupling is < 1037 0, otherwise return the new coupling orders with the vertex 1038 orders subtracted. If coupling_orders is not given, return 1039 None (which counts as success). 1040 WEIGHTED is a special order, which corresponds to the sum of 1041 order hierarchies for the couplings. 1042 We ignore negative constraints as these cannot be taken into 1043 account on the fly but only after generation.""" 1044 1045 if not coupling_orders: 1046 return None 1047 1048 present_couplings = copy.copy(coupling_orders) 1049 for id in vertex_id_list: 1050 # Don't check for identity vertex (id = 0) 1051 if not id: 1052 continue 1053 inter = model.get("interaction_dict")[id] 1054 for coupling in inter.get('orders').keys(): 1055 # Note that we don't consider a missing coupling as a 1056 # constraint 1057 if coupling in present_couplings and \ 1058 present_couplings[coupling]>=0: 1059 # Reduce the number of couplings that are left 1060 present_couplings[coupling] -= \ 1061 inter.get('orders')[coupling] 1062 if present_couplings[coupling] < 0: 1063 # We have too many couplings of this type 1064 return False 1065 # Now check for WEIGHTED, i.e. the sum of coupling hierarchy values 1066 if 'WEIGHTED' in present_couplings and \ 1067 present_couplings['WEIGHTED']>=0: 1068 weight = sum([model.get('order_hierarchy')[c]*n for \ 1069 (c,n) in inter.get('orders').items()]) 1070 present_couplings['WEIGHTED'] -= weight 1071 if present_couplings['WEIGHTED'] < 0: 1072 # Total coupling weight too large 1073 return False 1074 1075 return present_couplings
1076
1077 - def combine_legs(self, list_legs, ref_dict_to1, max_multi_to1):
1078 """Recursive function. Take a list of legs as an input, with 1079 the reference dictionary n-1->1, and output a list of list of 1080 tuples of Legs (allowed combinations) and Legs (rest). Algorithm: 1081 1082 1. Get all n-combinations from list [123456]: [12],..,[23],..,[123],.. 1083 1084 2. For each combination, say [34]. Check if combination is valid. 1085 If so: 1086 1087 a. Append [12[34]56] to result array 1088 1089 b. Split [123456] at index(first element in combination+1), 1090 i.e. [12],[456] and subtract combination from second half, 1091 i.e.: [456]-[34]=[56]. Repeat from 1. with this array 1092 1093 3. Take result array from call to 1. (here, [[56]]) and append 1094 (first half in step b - combination) + combination + (result 1095 from 1.) = [12[34][56]] to result array 1096 1097 4. After appending results from all n-combinations, return 1098 resulting array. Example, if [13] and [45] are valid 1099 combinations: 1100 [[[13]2456],[[13]2[45]6],[123[45]6]] 1101 """ 1102 1103 res = [] 1104 1105 # loop over possible combination lengths (+1 is for range convention!) 
1106 for comb_length in range(2, max_multi_to1 + 1): 1107 1108 # Check the considered length is not longer than the list length 1109 if comb_length > len(list_legs): 1110 return res 1111 1112 # itertools.combinations returns all possible combinations 1113 # of comb_length elements from list_legs 1114 for comb in itertools.combinations(list_legs, comb_length): 1115 1116 # Check if the combination is valid 1117 if base_objects.LegList(comb).can_combine_to_1(ref_dict_to1): 1118 1119 # Identify the rest, create a list [comb,rest] and 1120 # add it to res 1121 res_list = copy.copy(list_legs) 1122 for leg in comb: 1123 res_list.remove(leg) 1124 res_list.insert(list_legs.index(comb[0]), comb) 1125 res.append(res_list) 1126 1127 # Now, deal with cases with more than 1 combination 1128 1129 # First, split the list into two, according to the 1130 # position of the first element in comb, and remove 1131 # all elements form comb 1132 res_list1 = list_legs[0:list_legs.index(comb[0])] 1133 res_list2 = list_legs[list_legs.index(comb[0]) + 1:] 1134 for leg in comb[1:]: 1135 res_list2.remove(leg) 1136 1137 # Create a list of type [comb,rest1,rest2(combined)] 1138 res_list = res_list1 1139 res_list.append(comb) 1140 # This is where recursion actually happens, 1141 # on the second part 1142 for item in self.combine_legs(res_list2, 1143 ref_dict_to1, 1144 max_multi_to1): 1145 final_res_list = copy.copy(res_list) 1146 final_res_list.extend(item) 1147 res.append(final_res_list) 1148 1149 return res
1150 1151
    def merge_comb_legs(self, comb_lists, ref_dict_to1):
        """Takes a list of allowed leg combinations as an input and returns
        a set of lists where combinations have been properly replaced
        (one list per element in the ref_dict, so that all possible intermediate
        particles are included). For each list, give the list of vertices
        corresponding to the executed merging, group the two as a tuple.

        Returns a list of (LegList, VertexList) tuples, one per possible
        choice of intermediate particle for each combination.
        """

        res = []

        for comb_list in comb_lists:

            # reduced_list holds, per entry, either a plain Leg or the
            # list of alternative merged legs; vertex_list holds the
            # matching vertex alternatives, in the same order
            reduced_list = []
            vertex_list = []

            for entry in comb_list:

                # Act on all leg combinations (tuples produced by
                # combine_legs); plain legs are handled in the else branch
                if isinstance(entry, tuple):

                    # Build the leg object which will replace the combination:
                    # 1) leg ids is as given in the ref_dict
                    leg_vert_ids = copy.copy(ref_dict_to1[\
                        tuple(sorted([leg.get('id') for leg in entry]))])
                    # 2) number is the minimum of leg numbers involved in the
                    # combination
                    number = min([leg.get('number') for leg in entry])
                    # 3) state is final, unless there is exactly one initial
                    # state particle involved in the combination -> t-channel
                    if len([leg for leg in entry if leg.get('state') == False]) == 1:
                        state = False
                    else:
                        state = True
                    # 4) from_group is True, by definition

                    # Create and add the object. This is done by a
                    # separate routine, to allow overloading by
                    # daughter classes
                    new_leg_vert_ids = []
                    if leg_vert_ids:
                        new_leg_vert_ids = self.get_combined_legs(entry,
                                                                  leg_vert_ids,
                                                                  number,
                                                                  state)

                    reduced_list.append([l[0] for l in new_leg_vert_ids])

                    # Create and add the corresponding vertex
                    # Extract vertex ids corresponding to the various legs
                    # in mylegs
                    vlist = base_objects.VertexList()
                    for (myleg, vert_id) in new_leg_vert_ids:
                        # Start with the considered combination...
                        myleglist = base_objects.LegList(list(entry))
                        # ... and complete with legs after reducing
                        myleglist.append(myleg)
                        # ... and consider the correct vertex id
                        vlist.append(base_objects.Vertex(
                                         {'legs':myleglist,
                                          'id':vert_id}))

                    vertex_list.append(vlist)

                # If entry is not a combination, switch the from_group flag
                # and add it
                else:
                    cp_entry = copy.copy(entry)
                    # Need special case for from_group == None; this
                    # is for initial state leg of decay chain process
                    # (see Leg.can_combine_to_0)
                    if cp_entry.get('from_group') != None:
                        cp_entry.set('from_group', False)
                    reduced_list.append(cp_entry)

            # Flatten the obtained leg and vertex lists, expanding every
            # alternative-intermediate-particle choice into its own list
            flat_red_lists = expand_list(reduced_list)
            flat_vx_lists = expand_list(vertex_list)

            # Combine the two lists in a list of tuple
            for i in range(0, len(flat_vx_lists)):
                res.append((base_objects.LegList(flat_red_lists[i]), \
                            base_objects.VertexList(flat_vx_lists[i])))

        return res
1237
1238 - def get_combined_legs(self, legs, leg_vert_ids, number, state):
1239 """Create a set of new legs from the info given. This can be 1240 overloaded by daughter classes.""" 1241 1242 mylegs = [(base_objects.Leg({'id':leg_id, 1243 'number':number, 1244 'state':state, 1245 'from_group':True}), 1246 vert_id)\ 1247 for leg_id, vert_id in leg_vert_ids] 1248 1249 return mylegs
1250
    def get_combined_vertices(self, legs, vert_ids):
        """Allow for selection of vertex ids. This can be
        overloaded by daughter classes.

        Default implementation: no filtering, the candidate vertex ids
        are returned unchanged (legs is unused here)."""

        return vert_ids
1256
1257 - def trim_diagrams(self, decay_ids=[], diaglist=None):
1258 """Reduce the number of legs and vertices used in memory. 1259 When called by a diagram generation initiated by LoopAmplitude, 1260 this function should not trim the diagrams in the attribute 'diagrams' 1261 but rather a given list in the 'diaglist' argument.""" 1262 1263 legs = [] 1264 vertices = [] 1265 1266 if diaglist is None: 1267 diaglist=self.get('diagrams') 1268 1269 # Flag decaying legs in the core process by onshell = True 1270 process = self.get('process') 1271 for leg in process.get('legs'): 1272 if leg.get('state') and leg.get('id') in decay_ids: 1273 leg.set('onshell', True) 1274 1275 for diagram in diaglist: 1276 # Keep track of external legs (leg numbers already used) 1277 leg_external = set() 1278 for ivx, vertex in enumerate(diagram.get('vertices')): 1279 for ileg, leg in enumerate(vertex.get('legs')): 1280 # Ensure that only external legs get decay flag 1281 if leg.get('state') and leg.get('id') in decay_ids and \ 1282 leg.get('number') not in leg_external: 1283 # Use onshell to indicate decaying legs, 1284 # i.e. legs that have decay chains 1285 leg = copy.copy(leg) 1286 leg.set('onshell', True) 1287 try: 1288 index = legs.index(leg) 1289 except ValueError: 1290 vertex.get('legs')[ileg] = leg 1291 legs.append(leg) 1292 else: # Found a leg 1293 vertex.get('legs')[ileg] = legs[index] 1294 leg_external.add(leg.get('number')) 1295 try: 1296 index = vertices.index(vertex) 1297 diagram.get('vertices')[ivx] = vertices[index] 1298 except ValueError: 1299 vertices.append(vertex)
1300
#===============================================================================
# AmplitudeList
#===============================================================================
class AmplitudeList(base_objects.PhysicsObjectList):
    """List of Amplitude objects
    """

    def has_any_loop_process(self):
        """Check the content of all processes of the amplitudes in this
        list to see if there is any which defines perturbation couplings."""

        for amp in self:
            if amp.has_loop_process():
                return True
        # Explicit False instead of falling off the end (implicit None);
        # both are falsy, so truthiness-testing callers are unaffected.
        return False

    def is_valid_element(self, obj):
        """Test if object obj is a valid Amplitude for the list."""

        return isinstance(obj, Amplitude)
1320
#===============================================================================
# DecayChainAmplitude
#===============================================================================
class DecayChainAmplitude(Amplitude):
    """A list of amplitudes + a list of decay chain amplitude lists;
    corresponding to a ProcessDefinition with a list of decay chains
    """

    def default_setup(self):
        """Default values for all properties"""

        self['amplitudes'] = AmplitudeList()
        self['decay_chains'] = DecayChainAmplitudeList()

    def __init__(self, argument = None, collect_mirror_procs = False,
                 ignore_six_quark_processes = False, loop_filter=None,
                 diagram_filter=False):
        """Allow initialization with Process and with ProcessDefinition"""

        if isinstance(argument, base_objects.Process):
            super(DecayChainAmplitude, self).__init__()
            # Imported here to avoid a circular import at module level
            from madgraph.loop.loop_diagram_generation import LoopMultiProcess
            if argument['perturbation_couplings']:
                MultiProcessClass=LoopMultiProcess
            else:
                MultiProcessClass=MultiProcess
            # A ProcessDefinition (multi-process) expands into several
            # amplitudes; a plain Process gives exactly one
            if isinstance(argument, base_objects.ProcessDefinition):
                self['amplitudes'].extend(\
                    MultiProcessClass.generate_multi_amplitudes(argument,
                                                collect_mirror_procs,
                                                ignore_six_quark_processes,
                                                loop_filter=loop_filter,
                                                diagram_filter=diagram_filter))
            else:
                self['amplitudes'].append(\
                    MultiProcessClass.get_amplitude_from_proc(argument,
                                                 loop_filter=loop_filter,
                                                 diagram_filter=diagram_filter))
                # Clean decay chains from process, since we haven't
                # combined processes with decay chains yet
                process = copy.copy(self.get('amplitudes')[0].get('process'))
                process.set('decay_chains', base_objects.ProcessList())
                self['amplitudes'][0].set('process', process)

            # Recursively build one DecayChainAmplitude per decay chain
            for process in argument.get('decay_chains'):
                if process.get('perturbation_couplings'):
                    raise MadGraph5Error("Decay processes can not be perturbed")
                process.set('overall_orders', argument.get('overall_orders'))
                if not process.get('is_decay_chain'):
                    process.set('is_decay_chain',True)
                if not process.get_ninitial() == 1:
                    raise InvalidCmd("Decay chain process must have exactly one" + \
                                     " incoming particle")
                self['decay_chains'].append(\
                    DecayChainAmplitude(process, collect_mirror_procs,
                                        ignore_six_quark_processes,
                                        diagram_filter=diagram_filter))

            # Flag decaying legs in the core diagrams by onshell = True
            decay_ids = sum([[a.get('process').get('legs')[0].get('id') \
                              for a in dec.get('amplitudes')] for dec in \
                             self['decay_chains']], [])
            decay_ids = set(decay_ids)
            for amp in self['amplitudes']:
                amp.trim_diagrams(decay_ids)

            # Check that all decay ids are present in at least some process
            for amp in self['amplitudes']:
                for l in amp.get('process').get('legs'):
                    if l.get('id') in decay_ids:
                        decay_ids.remove(l.get('id'))

            # Anything left in decay_ids never appears in a core process
            if decay_ids:
                # NOTE: relies on `amp` leaking from the loop above
                model = amp.get('process').get('model')
                names = [model.get_particle(id).get('name') for id in decay_ids]

                logger.warning(
                    "$RED Decay without corresponding particle in core process found.\n" + \
                    "Decay information for particle(s) %s is discarded.\n" % ','.join(names) + \
                    "Please check your process definition carefully. \n" + \
                    "This warning usually means that you forgot parentheses in presence of subdecay.\n" + \
                    "Example of correct syntax: p p > t t~, ( t > w+ b, w+ > l+ vl)")

            # Remove unused decays from the process list
            # (iterate in reverse so removal does not skip elements)
            for dc in reversed(self['decay_chains']):
                for a in reversed(dc.get('amplitudes')):
                    # Remove the amplitudes from this decay chain
                    if a.get('process').get('legs')[0].get('id') in decay_ids:
                        dc.get('amplitudes').remove(a)
                if not dc.get('amplitudes'):
                    # If no amplitudes left, remove the decay chain
                    self['decay_chains'].remove(dc)

            # Finally, write a fat warning if any decay process has
            # the decaying particle (or its antiparticle) in the final state
            bad_procs = []
            for dc in self['decay_chains']:
                for amp in dc.get('amplitudes'):
                    legs = amp.get('process').get('legs')
                    fs_parts = [abs(l.get('id')) for l in legs if
                                l.get('state')]
                    is_part = [l.get('id') for l in legs if not
                               l.get('state')][0]
                    if abs(is_part) in fs_parts:
                        bad_procs.append(amp.get('process'))

            if bad_procs:
                logger.warning(
                    "$RED Decay(s) with particle decaying to itself:\n" + \
                    '\n'.join([p.nice_string() for p in bad_procs]) + \
                    "\nPlease check your process definition carefully. \n")

        elif argument != None:
            # call the mother routine
            super(DecayChainAmplitude, self).__init__(argument)
        else:
            # call the mother routine
            super(DecayChainAmplitude, self).__init__()

    def filter(self, name, value):
        """Filter for valid amplitude property values."""

        if name == 'amplitudes':
            if not isinstance(value, AmplitudeList):
                raise self.PhysicsObjectError("%s is not a valid AmplitudeList" % str(value))
        if name == 'decay_chains':
            if not isinstance(value, DecayChainAmplitudeList):
                raise self.PhysicsObjectError("%s is not a valid DecayChainAmplitudeList object" % \
                                              str(value))
        return True

    def get_sorted_keys(self):
        """Return diagram property names as a nicely sorted list."""

        return ['amplitudes', 'decay_chains']

    # Helper functions

    def get_number_of_diagrams(self):
        """Returns number of diagrams for this amplitude"""
        # Core diagrams plus, recursively, all decay-chain diagrams
        return sum(len(a.get('diagrams')) for a in self.get('amplitudes')) \
               + sum(d.get_number_of_diagrams() for d in \
                     self.get('decay_chains'))

    def nice_string(self, indent = 0):
        """Returns a nicely formatted string of the amplitude content."""
        mystr = ""
        for amplitude in self.get('amplitudes'):
            mystr = mystr + amplitude.nice_string(indent) + "\n"

        if self.get('decay_chains'):
            mystr = mystr + " " * indent + "Decays:\n"
            for dec in self.get('decay_chains'):
                mystr = mystr + dec.nice_string(indent + 2) + "\n"

        # Strip the trailing newline
        return mystr[:-1]

    def nice_string_processes(self, indent = 0):
        """Returns a nicely formatted string of the amplitude processes."""
        mystr = ""
        for amplitude in self.get('amplitudes'):
            mystr = mystr + amplitude.nice_string_processes(indent) + "\n"

        if self.get('decay_chains'):
            mystr = mystr + " " * indent + "Decays:\n"
            for dec in self.get('decay_chains'):
                mystr = mystr + dec.nice_string_processes(indent + 2) + "\n"

        # Strip the trailing newline
        return mystr[:-1]

    def get_ninitial(self):
        """Returns the number of initial state particles in the process."""
        return self.get('amplitudes')[0].get('process').get_ninitial()

    def get_decay_ids(self):
        """Returns a set of all particle ids for which a decay is defined"""

        decay_ids = []

        # Get all amplitudes for the decay processes
        for amp in sum([dc.get('amplitudes') for dc \
                        in self['decay_chains']], []):
            # For each amplitude, find the initial state leg
            decay_ids.append(amp.get('process').get_initial_ids()[0])

        # Return a list with unique ids
        return list(set(decay_ids))

    def has_loop_process(self):
        """ Returns wether this amplitude has a loop process."""
        return self['amplitudes'].has_any_loop_process()

    def get_amplitudes(self):
        """Recursive function to extract all amplitudes for this process"""

        amplitudes = AmplitudeList()

        amplitudes.extend(self.get('amplitudes'))
        for decay in self.get('decay_chains'):
            amplitudes.extend(decay.get_amplitudes())

        return amplitudes
1523
#===============================================================================
# DecayChainAmplitudeList
#===============================================================================
class DecayChainAmplitudeList(base_objects.PhysicsObjectList):
    """List of DecayChainAmplitude objects
    """

    def is_valid_element(self, obj):
        """Test if object obj is a valid DecayChainAmplitude for the list."""

        return isinstance(obj, DecayChainAmplitude)
1536
1537 1538 #=============================================================================== 1539 # MultiProcess 1540 #=============================================================================== 1541 -class MultiProcess(base_objects.PhysicsObject):
1542 """MultiProcess: list of process definitions 1543 list of processes (after cleaning) 1544 list of amplitudes (after generation) 1545 """ 1546
    def default_setup(self):
        """Default values for all properties"""

        self['process_definitions'] = base_objects.ProcessDefinitionList()
        # self['amplitudes'] can be an AmplitudeList or a
        # DecayChainAmplitudeList, depending on whether there are
        # decay chains in the process definitions or not.
        self['amplitudes'] = AmplitudeList()
        # Flag for whether to combine IS mirror processes together
        self['collect_mirror_procs'] = False
        # List of quark flavors where we ignore processes with at
        # least 6 quarks (three quark lines)
        self['ignore_six_quark_processes'] = []
        # Allow to use the model parameter numerical value for optimization.
        # This is currently used for 1->N generation (mass check).
        self['use_numerical'] = False
1563
1564 - def __init__(self, argument=None, collect_mirror_procs = False, 1565 ignore_six_quark_processes = [], optimize=False, 1566 loop_filter=None, diagram_filter=None):
1567 """Allow initialization with ProcessDefinition or 1568 ProcessDefinitionList 1569 optimize allows to use param_card information. (usefull for 1-.N)""" 1570 1571 if isinstance(argument, base_objects.ProcessDefinition): 1572 super(MultiProcess, self).__init__() 1573 self['process_definitions'].append(argument) 1574 elif isinstance(argument, base_objects.ProcessDefinitionList): 1575 super(MultiProcess, self).__init__() 1576 self['process_definitions'] = argument 1577 elif argument != None: 1578 # call the mother routine 1579 super(MultiProcess, self).__init__(argument) 1580 else: 1581 # call the mother routine 1582 super(MultiProcess, self).__init__() 1583 1584 self['collect_mirror_procs'] = collect_mirror_procs 1585 self['ignore_six_quark_processes'] = ignore_six_quark_processes 1586 self['use_numerical'] = optimize 1587 self['loop_filter'] = loop_filter 1588 self['diagram_filter'] = diagram_filter # only True/False so far 1589 1590 if isinstance(argument, base_objects.ProcessDefinition) or \ 1591 isinstance(argument, base_objects.ProcessDefinitionList): 1592 # Generate the diagrams 1593 self.get('amplitudes')
1594 1595
1596 - def filter(self, name, value):
1597 """Filter for valid process property values.""" 1598 1599 if name == 'process_definitions': 1600 if not isinstance(value, base_objects.ProcessDefinitionList): 1601 raise self.PhysicsObjectError("%s is not a valid ProcessDefinitionList object" % str(value)) 1602 1603 if name == 'amplitudes': 1604 if not isinstance(value, AmplitudeList): 1605 raise self.PhysicsObjectError("%s is not a valid AmplitudeList object" % str(value)) 1606 1607 if name in ['collect_mirror_procs']: 1608 if not isinstance(value, bool): 1609 raise self.PhysicsObjectError("%s is not a valid boolean" % str(value)) 1610 1611 if name == 'ignore_six_quark_processes': 1612 if not isinstance(value, list): 1613 raise self.PhysicsObjectError("%s is not a valid list" % str(value)) 1614 1615 return True
1616
    def get(self, name):
        """Get the value of the property name.

        'amplitudes' is generated lazily: on first access, diagrams are
        generated for every stored process definition."""

        if (name == 'amplitudes') and not self[name]:
            for process_def in self.get('process_definitions'):
                if process_def.get('decay_chains'):
                    # This is a decay chain process
                    # Store amplitude(s) as DecayChainAmplitude
                    self['amplitudes'].append(\
                        DecayChainAmplitude(process_def,
                                            self.get('collect_mirror_procs'),
                                            self.get('ignore_six_quark_processes'),
                                            diagram_filter=self['diagram_filter']))
                else:
                    self['amplitudes'].extend(\
                        self.generate_multi_amplitudes(process_def,
                                           self.get('collect_mirror_procs'),
                                           self.get('ignore_six_quark_processes'),
                                           self['use_numerical'],
                                           loop_filter=self['loop_filter'],
                                           diagram_filter=self['diagram_filter']))

        return MultiProcess.__bases__[0].get(self, name) # call the mother routine
1640
1641 - def get_sorted_keys(self):
1642 """Return process property names as a nicely sorted list.""" 1643 1644 return ['process_definitions', 'amplitudes']
1645
    def get_model(self):
        """Return the model of the first process definition."""

        return self['process_definitions'][0]['model']
    @classmethod
    def generate_multi_amplitudes(cls,process_definition,
                                  collect_mirror_procs = False,
                                  ignore_six_quark_processes = [],
                                  use_numerical=False,
                                  loop_filter=None,
                                  diagram_filter=False):
        """Generate amplitudes in a semi-efficient way.
        Make use of crossing symmetry for processes that fail diagram
        generation, but not for processes that succeed diagram
        generation. Doing so will risk making it impossible to
        identify processes with identical amplitudes.
        """
        assert isinstance(process_definition, base_objects.ProcessDefinition), \
                                    "%s not valid ProcessDefinition object" % \
                                    repr(process_definition)

        # Set automatic coupling orders
        process_definition.set('orders', MultiProcess.\
                               find_optimal_process_orders(process_definition,
                                                           diagram_filter))
        # Check for maximum orders from the model
        process_definition.check_expansion_orders()

        processes = base_objects.ProcessList()
        amplitudes = AmplitudeList()

        # failed_procs and success_procs are sorted processes that have
        # already failed/succeeded based on crossing symmetry
        failed_procs = []
        success_procs = []
        # Complete processes, for identification of mirror processes
        non_permuted_procs = []
        # permutations keeps the permutations of the crossed processes
        permutations = []

        # Store the diagram tags for processes, to allow for
        # identifying identical matrix elements already at this stage.
        model = process_definition['model']

        islegs = [leg for leg in process_definition['legs'] \
                  if leg['state'] == False]
        fslegs = [leg for leg in process_definition['legs'] \
                  if leg['state'] == True]

        # Multi-leg id lists: one list of candidate ids per leg
        isids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == False]
        fsids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == True]
        polids = [tuple(leg['polarization']) for leg in process_definition['legs'] \
                  if leg['state'] == True]

        # Generate all combinations for the initial state
        for prod in itertools.product(*isids):
            # NOTE(review): on the first pass islegs[i] refers to the
            # multi-legs above; afterwards it refers to the Legs built
            # here — works because only 'polarization' is read.
            islegs = [\
                    base_objects.Leg({'id':id, 'state': False,
                                      'polarization': islegs[i]['polarization']})
                    for i,id in enumerate(prod)]

            # Generate all combinations for the final state, and make
            # sure to remove double counting

            red_fsidlist = set()

            for prod in itertools.product(*fsids):
                tag = zip(prod, polids)
                tag = sorted(tag)
                # Remove double counting between final states
                if tuple(tag) in red_fsidlist:
                    continue

                red_fsidlist.add(tuple(tag))
                # Generate leg list for process
                leg_list = [copy.copy(leg) for leg in islegs]
                leg_list.extend([\
                        base_objects.Leg({'id':id, 'state': True, 'polarization': fslegs[i]['polarization']}) \
                        for id,i in zip(prod, range(len(prod)))] if False else [\
                        base_objects.Leg({'id':id, 'state': True, 'polarization': fslegs[i]['polarization']}) \
                        for i,id in enumerate(prod)])

                legs = base_objects.LegList(leg_list)

                # Check for crossed processes
                sorted_legs = sorted([(l,i+1) for (i,l) in \
                                     enumerate(legs.get_outgoing_id_list(model))])
                permutation = [l[1] for l in sorted_legs]

                sorted_legs = array.array('i', [l[0] for l in sorted_legs])

                # Check for six-quark processes
                if ignore_six_quark_processes and \
                       len([i for i in sorted_legs if abs(i) in \
                            ignore_six_quark_processes]) >= 6:
                    continue

                # Check if crossed process has already failed,
                # in that case don't check process
                if sorted_legs in failed_procs:
                    continue

                # If allowed check mass validity [assume 1->N]
                if use_numerical:
                    # check that final state has lower mass than initial state
                    initial_mass = abs(model['parameter_dict'][model.get_particle(legs[0].get('id')).get('mass')])
                    if initial_mass == 0:
                        continue
                    for leg in legs[1:]:
                        m = model['parameter_dict'][model.get_particle(leg.get('id')).get('mass')]
                        initial_mass -= abs(m)
                    if initial_mass.real <= 0:
                        continue

                # Setup process
                process = process_definition.get_process_with_legs(legs)

                fast_proc = \
                          array.array('i',[leg.get('id') for leg in legs])
                if collect_mirror_procs and \
                   process_definition.get_ninitial() == 2:
                    # Check if mirrored process is already generated
                    mirror_proc = \
                              array.array('i', [fast_proc[1], fast_proc[0]] + \
                                          list(fast_proc[2:]))
                    try:
                        mirror_amp = \
                                   amplitudes[non_permuted_procs.index(mirror_proc)]
                    except Exception:
                        # Didn't find any mirror process
                        pass
                    else:
                        # Mirror process found
                        mirror_amp.set('has_mirror_process', True)
                        logger.info("Process %s added to mirror process %s" % \
                                    (process.base_string(),
                                     mirror_amp.get('process').base_string()))
                        continue

                # Check for successful crossings, unless we have specified
                # properties that break crossing symmetry
                if not process.get('required_s_channels') and \
                   not process.get('forbidden_onsh_s_channels') and \
                   not process.get('forbidden_s_channels') and \
                   not process.get('is_decay_chain') and not diagram_filter:
                    try:
                        crossed_index = success_procs.index(sorted_legs)
                        # The relabeling of legs for loop amplitudes is cumbersome
                        # and does not save so much time. It is disable here and
                        # we use the key 'loop_diagrams' to decide whether
                        # it is an instance of LoopAmplitude.
                        if 'loop_diagrams' in amplitudes[crossed_index]:
                            raise ValueError
                    except ValueError:
                        # No crossing found, just continue
                        pass
                    else:
                        # Found crossing - reuse amplitude
                        amplitude = MultiProcess.cross_amplitude(\
                            amplitudes[crossed_index],
                            process,
                            permutations[crossed_index],
                            permutation)
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                        logger.info("Crossed process found for %s, reuse diagrams." % \
                                    process.base_string())
                        continue

                # Create new amplitude
                amplitude = cls.get_amplitude_from_proc(process,
                                                        loop_filter=loop_filter)

                try:
                    result = amplitude.generate_diagrams(diagram_filter=diagram_filter)
                except InvalidCmd as error:
                    failed_procs.append(sorted_legs)
                else:
                    # Succeeded in generating diagrams
                    if amplitude.get('diagrams'):
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                    elif not result:
                        # Diagram generation failed for all crossings
                        failed_procs.append(sorted_legs)

        # Raise exception if there are no amplitudes for this process
        if not amplitudes:
            if len(failed_procs) == 1 and 'error' in locals():
                raise error
            else:
                raise NoDiagramException("No amplitudes generated from process %s. Please enter a valid process" % \
                                         process_definition.nice_string())


        # Return the produced amplitudes
        return amplitudes
    @classmethod
    def get_amplitude_from_proc(cls,proc,**opts):
        """ Return the correct amplitude type according to the characteristics of
        the process proc. The only option that could be specified here is
        loop_filter and it is of course not relevant for a tree amplitude."""

        # opts are accepted for interface compatibility with loop-aware
        # daughter classes but ignored for a tree-level Amplitude.
        return Amplitude({"process": proc})
1854 1855 1856 @staticmethod
1857 - def find_optimal_process_orders(process_definition, diagram_filter=False):
1858 """Find the minimal WEIGHTED order for this set of processes. 1859 1860 The algorithm: 1861 1862 1) Check the coupling hierarchy of the model. Assign all 1863 particles to the different coupling hierarchies so that a 1864 particle is considered to be in the highest hierarchy (i.e., 1865 with lowest value) where it has an interaction. 1866 1867 2) Pick out the legs in the multiprocess according to the 1868 highest hierarchy represented (so don't mix particles from 1869 different hierarchy classes in the same multiparticles!) 1870 1871 3) Find the starting maximum WEIGHTED order as the sum of the 1872 highest n-2 weighted orders 1873 1874 4) Pick out required s-channel particle hierarchies, and use 1875 the highest of the maximum WEIGHTED order from the legs and 1876 the minimum WEIGHTED order extracted from 2*s-channel 1877 hierarchys plus the n-2-2*(number of s-channels) lowest 1878 leg weighted orders. 1879 1880 5) Run process generation with the WEIGHTED order determined 1881 in 3)-4) - # final state gluons, with all gluons removed from 1882 the final state 1883 1884 6) If no process is found, increase WEIGHTED order by 1 and go 1885 back to 5), until we find a process which passes. Return that 1886 order. 1887 1888 7) Continue 5)-6) until we reach (n-2)*(highest hierarchy)-1. 
1889 If still no process has passed, return 1890 WEIGHTED = (n-2)*(highest hierarchy) 1891 """ 1892 1893 assert isinstance(process_definition, base_objects.ProcessDefinition), \ 1894 "%s not valid ProcessDefinition object" % \ 1895 repr(process_definition) 1896 1897 processes = base_objects.ProcessList() 1898 amplitudes = AmplitudeList() 1899 1900 # If there are already couplings defined, return 1901 if process_definition.get('orders') or \ 1902 process_definition.get('overall_orders') or \ 1903 process_definition.get('NLO_mode')=='virt': 1904 return process_definition.get('orders') 1905 1906 # If this is a decay process (and not a decay chain), return 1907 if process_definition.get_ninitial() == 1 and not \ 1908 process_definition.get('is_decay_chain'): 1909 return process_definition.get('orders') 1910 1911 logger.info("Checking for minimal orders which gives processes.") 1912 logger.info("Please specify coupling orders to bypass this step.") 1913 1914 # Calculate minimum starting guess for WEIGHTED order 1915 max_order_now, particles, hierarchy = \ 1916 process_definition.get_minimum_WEIGHTED() 1917 coupling = 'WEIGHTED' 1918 1919 model = process_definition.get('model') 1920 1921 # Extract the initial and final leg ids 1922 isids = [leg['ids'] for leg in \ 1923 [leg for leg in process_definition['legs'] if leg['state'] == False]] 1924 fsids = [leg['ids'] for leg in \ 1925 [leg for leg in process_definition['legs'] if leg['state'] == True]] 1926 1927 max_WEIGHTED_order = \ 1928 (len(fsids + isids) - 2)*int(model.get_max_WEIGHTED()) 1929 # get the definition of the WEIGHTED 1930 hierarchydef = process_definition['model'].get('order_hierarchy') 1931 tmp = [] 1932 hierarchy = list(hierarchydef.items()) 1933 hierarchy.sort() 1934 for key, value in hierarchydef.items(): 1935 if value>1: 1936 tmp.append('%s*%s' % (value,key)) 1937 else: 1938 tmp.append('%s' % key) 1939 wgtdef = '+'.join(tmp) 1940 # Run diagram generation with increasing max_order_now until 1941 # we 
manage to get diagrams 1942 while max_order_now < max_WEIGHTED_order: 1943 logger.info("Trying coupling order WEIGHTED<=%d: WEIGTHED IS %s" % (max_order_now, wgtdef)) 1944 1945 oldloglevel = logger.level 1946 logger.setLevel(logging.WARNING) 1947 1948 # failed_procs are processes that have already failed 1949 # based on crossing symmetry 1950 failed_procs = [] 1951 # Generate all combinations for the initial state 1952 for prod in itertools.product(*isids): 1953 islegs = [ base_objects.Leg({'id':id, 'state': False}) \ 1954 for id in prod] 1955 1956 # Generate all combinations for the final state, and make 1957 # sure to remove double counting 1958 1959 red_fsidlist = [] 1960 1961 for prod in itertools.product(*fsids): 1962 1963 # Remove double counting between final states 1964 if tuple(sorted(prod)) in red_fsidlist: 1965 continue 1966 1967 red_fsidlist.append(tuple(sorted(prod))); 1968 1969 # Remove gluons from final state if QCD is among 1970 # the highest coupling hierarchy 1971 nglue = 0 1972 if 21 in particles[0]: 1973 nglue = len([id for id in prod if id == 21]) 1974 prod = [id for id in prod if id != 21] 1975 1976 # Generate leg list for process 1977 leg_list = [copy.copy(leg) for leg in islegs] 1978 1979 leg_list.extend([\ 1980 base_objects.Leg({'id':id, 'state': True}) \ 1981 for id in prod]) 1982 1983 legs = base_objects.LegList(leg_list) 1984 1985 # Set summed coupling order according to max_order_now 1986 # subtracting the removed gluons 1987 coupling_orders_now = {coupling: max_order_now - \ 1988 nglue * model['order_hierarchy']['QCD']} 1989 1990 # Setup process 1991 process = base_objects.Process({\ 1992 'legs':legs, 1993 'model':model, 1994 'id': process_definition.get('id'), 1995 'orders': coupling_orders_now, 1996 'required_s_channels': \ 1997 process_definition.get('required_s_channels'), 1998 'forbidden_onsh_s_channels': \ 1999 process_definition.get('forbidden_onsh_s_channels'), 2000 'sqorders_types': \ 2001 
process_definition.get('sqorders_types'), 2002 'squared_orders': \ 2003 process_definition.get('squared_orders'), 2004 'split_orders': \ 2005 process_definition.get('split_orders'), 2006 'forbidden_s_channels': \ 2007 process_definition.get('forbidden_s_channels'), 2008 'forbidden_particles': \ 2009 process_definition.get('forbidden_particles'), 2010 'is_decay_chain': \ 2011 process_definition.get('is_decay_chain'), 2012 'overall_orders': \ 2013 process_definition.get('overall_orders'), 2014 'split_orders': \ 2015 process_definition.get('split_orders')}) 2016 2017 # Check for couplings with given expansion orders 2018 process.check_expansion_orders() 2019 2020 # Check for crossed processes 2021 sorted_legs = sorted(legs.get_outgoing_id_list(model)) 2022 # Check if crossed process has already failed 2023 # In that case don't check process 2024 if tuple(sorted_legs) in failed_procs and not process_definition.get('forbidden_s_channels'): 2025 continue 2026 2027 amplitude = Amplitude({'process': process}) 2028 try: 2029 amplitude.generate_diagrams(diagram_filter=diagram_filter) 2030 except InvalidCmd as error: 2031 failed_procs.append(tuple(sorted_legs)) 2032 else: 2033 if amplitude.get('diagrams'): 2034 # We found a valid amplitude. Return this order number 2035 logger.setLevel(oldloglevel) 2036 return {coupling: max_order_now} 2037 else: 2038 failed_procs.append(tuple(sorted_legs)) 2039 # No processes found, increase max_order_now 2040 max_order_now += 1 2041 logger.setLevel(oldloglevel) 2042 2043 # If no valid processes found with nfinal-1 couplings, return maximal 2044 return {coupling: max_order_now}
2045 2046 @staticmethod
2047 - def cross_amplitude(amplitude, process, org_perm, new_perm):
2048 """Return the amplitude crossed with the permutation new_perm""" 2049 # Create dict from original leg numbers to new leg numbers 2050 perm_map = dict(list(zip(org_perm, new_perm))) 2051 # Initiate new amplitude 2052 new_amp = copy.copy(amplitude) 2053 # Number legs 2054 for i, leg in enumerate(process.get('legs')): 2055 leg.set('number', i+1) 2056 # Set process 2057 new_amp.set('process', process) 2058 # Now replace the leg numbers in the diagrams 2059 diagrams = base_objects.DiagramList([d.renumber_legs(perm_map, 2060 process.get('legs'),) for \ 2061 d in new_amp.get('diagrams')]) 2062 new_amp.set('diagrams', diagrams) 2063 new_amp.trim_diagrams() 2064 2065 # Make sure to reset mirror process 2066 new_amp.set('has_mirror_process', False) 2067 2068 return new_amp
2069
#===============================================================================
# Global helper methods
#===============================================================================

def expand_list(mylist):
    """Takes a list of lists and elements and returns a list of flat lists.
    Example: [[1,2], 3, [4,5]] -> [[1,3,4], [1,3,5], [2,3,4], [2,3,5]]
    """

    # Check that argument is a list
    assert isinstance(mylist, list), "Expand_list argument must be a list"

    # Wrap bare elements so every entry is a list, then take the
    # Cartesian product of all entries to enumerate the combinations.
    choices = [item if isinstance(item, list) else [item] for item in mylist]

    return [list(combination) for combination in itertools.product(*choices)]
2095
def expand_list_list(mylist):
    """Recursive function. Takes a list of lists and lists of lists
    and returns a list of flat lists.
    Example: [[1,2],[[4,5],[6,7]]] -> [[1,2,4,5], [1,2,6,7]]
    """

    # Empty input, or a single empty entry, expands to one empty list
    if not mylist or (len(mylist) == 1 and not mylist[0]):
        return [[]]

    # Check the first element is at least a list
    assert isinstance(mylist[0], list), \
        "Expand_list_list needs a list of lists and lists of lists"

    head, tail = mylist[0], mylist[1:]

    # Recursion stop condition: one single entry left.
    # A list of lists expands to its own members; a flat list stays as-is.
    if not tail:
        return head if isinstance(head[0], list) else mylist

    # Expand the remainder once, then prepend the head to each expansion
    expanded_tail = expand_list_list(tail)

    if isinstance(head[0], list):
        # head offers alternatives: combine each choice with each expansion
        return [choice + rest for choice in head for rest in expanded_tail]

    # head is a flat prefix common to every expansion of the tail
    return [head + rest for rest in expanded_tail]
2135