Package madgraph :: Package core :: Module diagram_generation
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.core.diagram_generation

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Classes for diagram generation. Amplitude performs the diagram 
  16  generation, DecayChainAmplitude keeps track of processes with decay 
  17  chains, and MultiProcess allows generation of processes with 
  18  multiparticle definitions. DiagramTag allows to identify diagrams 
  19  based on relevant properties. 
  20  """ 
  21   
  22  import array 
  23  import copy 
  24  import itertools 
  25  import logging 
  26   
  27  import madgraph.core.base_objects as base_objects 
  28  import madgraph.various.misc as misc 
  29  from madgraph import InvalidCmd 
  30  logger = logging.getLogger('madgraph.diagram_generation') 
31 32 #=============================================================================== 33 # DiagramTag mother class 34 #=============================================================================== 35 36 -class DiagramTag(object):
37 """Class to tag diagrams based on objects with some __lt__ measure, e.g. 38 PDG code/interaction id (for comparing diagrams from the same amplitude), 39 or Lorentz/coupling/mass/width (for comparing AMPs from different MEs). 40 Algorithm: Create chains starting from external particles: 41 1 \ / 6 42 2 /\______/\ 7 43 3_ / | \_ 8 44 4 / 5 \_ 9 45 \ 10 46 gives ((((9,10,id910),8,id9108),(6,7,id67),id910867) 47 (((1,2,id12),(3,4,id34)),id1234), 48 5,id91086712345) 49 where idN is the id of the corresponding interaction. The ordering within 50 chains is based on chain length (depth; here, 1234 has depth 3, 910867 has 51 depth 4, 5 has depht 0), and if equal on the ordering of the chain elements. 52 The determination of central vertex is based on minimizing the chain length 53 for the longest subchain. 54 This gives a unique tag which can be used to identify diagrams 55 (instead of symmetry), as well as identify identical matrix elements from 56 different processes.""" 57
58 - class DiagramTagError(Exception):
59 """Exception for any problems in DiagramTags""" 60 pass
61
    def __init__(self, diagram, model = None, ninitial = 2):
        """Initialize with a diagram. Create DiagramTagChainLinks according to
        the diagram, and figure out if we need to shift the central vertex.

        Parameters:
            diagram: base_objects.Diagram to be tagged
            model: model passed on to link_from_leg/vertex_id_from_vertex
            ninitial: number of initial-state particles (default 2)
        """

        # wf_dict keeps track of the intermediate particles:
        # maps leg number -> DiagramTagChainLink built so far
        leg_dict = {}
        # Create the chain which will be the diagram tag
        for vertex in diagram.get('vertices'):
            # Only add incoming legs (the last leg of a vertex is the
            # outgoing propagator)
            legs = vertex.get('legs')[:-1]
            lastvx = vertex == diagram.get('vertices')[-1]
            if lastvx:
                # If last vertex, all legs are incoming
                legs = vertex.get('legs')
            # Add links corresponding to the relevant legs: external legs
            # are created on first sight (setdefault), internal legs reuse
            # the link stored when their producing vertex was processed
            link = DiagramTagChainLink([leg_dict.setdefault(leg.get('number'),
                                        DiagramTagChainLink(self.link_from_leg(leg, model))) \
                                        for leg in legs],
                                       self.vertex_id_from_vertex(vertex,
                                                                  lastvx,
                                                                  model,
                                                                  ninitial))
            # Add vertex to leg_dict if not last one
            if not lastvx:
                leg_dict[vertex.get('legs')[-1].get('number')] = link

        # The resulting link is the hypothetical result
        self.tag = link

        # Now make sure to find the central vertex in the diagram,
        # defined by the longest leg being as short as possible
        # (per the class docstring, links are ordered so that links[0]
        # is the deepest chain)
        done = max([l.depth for l in self.tag.links]) == 0
        while not done:
            # Identify the longest chain in the tag
            longest_chain = self.tag.links[0]
            # Create a new link corresponding to moving one step
            # along the longest chain
            new_link = DiagramTagChainLink(self.tag.links[1:],
                                           self.flip_vertex(\
                                               self.tag.vertex_id,
                                               longest_chain.vertex_id,
                                               self.tag.links[1:]))
            # Create a new final vertex in the direction of the longest link
            other_links = list(longest_chain.links) + [new_link]
            other_link = DiagramTagChainLink(other_links,
                                             self.flip_vertex(\
                                                 longest_chain.vertex_id,
                                                 self.tag.vertex_id,
                                                 other_links))

            if other_link.links[0] < self.tag.links[0]:
                # Switch to new tag, continue search
                self.tag = other_link
            else:
                # We have found the central vertex
                done = True
117
118 - def get_external_numbers(self):
119 """Get the order of external particles in this tag""" 120 121 return self.tag.get_external_numbers()
122
    def diagram_from_tag(self, model):
        """Output a diagram from a DiagramTag. Note that each daughter
        class must implement the static functions id_from_vertex_id
        (if the vertex id is something else than an integer) and
        leg_from_link (to pass the correct info from an end link to a
        leg)."""

        # Create the vertices, starting from the final (central) vertex;
        # vertices_from_link unfolds the chain recursively
        diagram = base_objects.Diagram({'vertices': \
                                        self.vertices_from_link(self.tag,
                                                                model,
                                                                True)})
        # Fill in the coupling orders of the rebuilt diagram
        diagram.calculate_orders(model)
        return diagram
137 138 @classmethod 177 178 @staticmethod
179 - def leg_from_legs(legs, vertex_id, model):
180 """Return a leg from a leg list and the model info""" 181 182 pdgs = [part.get_pdg_code() for part in \ 183 model.get_interaction(vertex_id).get('particles')] 184 # Extract the resulting pdg code from the interaction pdgs 185 for pdg in [leg.get('id') for leg in legs]: 186 pdgs.remove(pdg) 187 188 assert len(pdgs) == 1 189 # Prepare the new leg properties 190 pdg = model.get_particle(pdgs[0]).get_anti_pdg_code() 191 number = min([l.get('number') for l in legs]) 192 # State is False for t-channel, True for s-channel 193 state = (len([l for l in legs if l.get('state') == False]) != 1) 194 # Note that this needs to be done before combining decay chains 195 onshell= False 196 197 return base_objects.Leg({'id': pdg, 198 'number': number, 199 'state': state, 200 'onshell': onshell})
201 202 @classmethod 208 209 @staticmethod 222 223 @staticmethod
224 - def id_from_vertex_id(vertex_id):
225 """Return the numerical vertex id from a link.vertex_id""" 226 return vertex_id[0]
227 228 @staticmethod
229 - def reorder_permutation(perm, start_perm):
230 """Reorder a permutation with respect to start_perm. Note that 231 both need to start from 1.""" 232 if perm == start_perm: 233 return range(len(perm)) 234 order = [i for (p,i) in \ 235 sorted([(p,i) for (i,p) in enumerate(perm)])] 236 return [start_perm[i]-1 for i in order]
237 238 @staticmethod 249 250 @staticmethod
251 - def vertex_id_from_vertex(vertex, last_vertex, model, ninitial):
252 """Returns the default vertex id: just the interaction id 253 Note that in the vertex id, like the leg, only the first entry is 254 taken into account in the tag comparison, while the second is for 255 storing information that is not to be used in comparisons.""" 256 257 return (vertex.get('id'),)
258 259 @staticmethod
260 - def flip_vertex(new_vertex, old_vertex, links):
261 """Returns the default vertex flip: just the new_vertex""" 262 return new_vertex
263
264 - def __eq__(self, other):
265 """Equal if same tag""" 266 if type(self) != type(other): 267 return False 268 return self.tag == other.tag
269
270 - def __ne__(self, other):
271 return not self.__eq__(other)
272
273 - def __str__(self):
274 return str(self.tag)
275
276 - def __lt__(self, other):
277 return self.tag < other.tag
278
279 - def __gt__(self, other):
280 return self.tag > other.tag
281 282 __repr__ = __str__
283 369
370 #=============================================================================== 371 # Amplitude 372 #=============================================================================== 373 -class Amplitude(base_objects.PhysicsObject):
374 """Amplitude: process + list of diagrams (ordered) 375 Initialize with a process, then call generate_diagrams() to 376 generate the diagrams for the amplitude 377 """ 378
379 - def default_setup(self):
380 """Default values for all properties""" 381 382 self['process'] = base_objects.Process() 383 self['diagrams'] = None 384 # has_mirror_process is True if the same process but with the 385 # two incoming particles interchanged has been generated 386 self['has_mirror_process'] = False
387
388 - def __init__(self, argument=None):
389 """Allow initialization with Process""" 390 if isinstance(argument, base_objects.Process): 391 super(Amplitude, self).__init__() 392 self.set('process', argument) 393 self.generate_diagrams() 394 elif argument != None: 395 # call the mother routine 396 super(Amplitude, self).__init__(argument) 397 else: 398 # call the mother routine 399 super(Amplitude, self).__init__()
400
401 - def filter(self, name, value):
402 """Filter for valid amplitude property values.""" 403 404 if name == 'process': 405 if not isinstance(value, base_objects.Process): 406 raise self.PhysicsObjectError, \ 407 "%s is not a valid Process object" % str(value) 408 if name == 'diagrams': 409 if not isinstance(value, base_objects.DiagramList): 410 raise self.PhysicsObjectError, \ 411 "%s is not a valid DiagramList object" % str(value) 412 if name == 'has_mirror_process': 413 if not isinstance(value, bool): 414 raise self.PhysicsObjectError, \ 415 "%s is not a valid boolean" % str(value) 416 return True
417
418 - def get(self, name):
419 """Get the value of the property name.""" 420 421 if name == 'diagrams' and self[name] == None: 422 # Have not yet generated diagrams for this process 423 if self['process']: 424 self.generate_diagrams() 425 426 return super(Amplitude, self).get(name)
427 # return Amplitude.__bases__[0].get(self, name) #return the mother routine 428 429
430 - def get_sorted_keys(self):
431 """Return diagram property names as a nicely sorted list.""" 432 433 return ['process', 'diagrams', 'has_mirror_process']
434
435 - def get_number_of_diagrams(self):
436 """Returns number of diagrams for this amplitude""" 437 return len(self.get('diagrams'))
438
439 - def get_amplitudes(self):
440 """Return an AmplitudeList with just this amplitude. 441 Needed for DecayChainAmplitude.""" 442 443 return AmplitudeList([self])
444
445 - def nice_string(self, indent=0):
446 """Returns a nicely formatted string of the amplitude content.""" 447 return self.get('process').nice_string(indent) + "\n" + \ 448 self.get('diagrams').nice_string(indent)
449
450 - def nice_string_processes(self, indent=0):
451 """Returns a nicely formatted string of the amplitude process.""" 452 return self.get('process').nice_string(indent)
453
454 - def get_ninitial(self):
455 """Returns the number of initial state particles in the process.""" 456 return self.get('process').get_ninitial()
457
458 - def has_loop_process(self):
459 """ Returns wether this amplitude has a loop process.""" 460 461 return self.get('process').get('perturbation_couplings')
462
    def generate_diagrams(self, returndiag=False):
        """Generate diagrams. Algorithm:

        1. Define interaction dictionaries:
          * 2->0 (identity), 3->0, 4->0, ... , maxlegs->0
          * 2 -> 1, 3 -> 1, ..., maxlegs-1 -> 1

        2. Set flag from_group=true for all external particles.
           Flip particle/anti particle for incoming particles.

        3. If there is a dictionary n->0 with n=number of external
           particles, create if possible the combination [(1,2,3,4,...)]
           with *at least two* from_group==true. This will give a
           finished (set of) diagram(s) (done by reduce_leglist)

        4. Create all allowed groupings of particles with at least one
           from_group==true (according to dictionaries n->1):
           [(1,2),3,4...],[1,(2,3),4,...],...,
           [(1,2),(3,4),...],...,[(1,2,3),4,...],...
           (done by combine_legs)

        5. Replace each group with a (list of) new particle(s) with number
           n = min(group numbers). Set from_group true for these
           particles and false for all other particles. Store vertex info.
           (done by merge_comb_legs)

        6. Stop algorithm when at most 2 particles remain.
           Return all diagrams (lists of vertices).

        7. Repeat from 3 (recursion done by reduce_leglist)

        8. Replace final p=p vertex

        Be aware that the resulting vertices have all particles outgoing,
        so need to flip for incoming particles when used.

        SPECIAL CASE: For A>BC... processes which are legs in decay
        chains, we need to ensure that BC... combine first, giving A=A
        as a final vertex. This case is defined by the Process
        property is_decay_chain = True.
        This function can also be called by the generate_diagram function
        of LoopAmplitudes, in which case the generated diagrams here must not
        be directly assigned to the 'diagrams' attributed but returned as a
        DiagramList by the function. This is controlled by the argument
        returndiag.

        NOTE(review): this module is Python 2 (raise-with-comma statements,
        len(filter(...)), list-returning range()); the notes below flag the
        spots that would need attention in a Python 3 port.
        """
        process = self.get('process')
        model = process.get('model')
        legs = process.get('legs')

        # Make sure orders is the minimum of orders and overall_orders
        for key in process.get('overall_orders').keys():
            try:
                process.get('orders')[key] = \
                                 min(process.get('orders')[key],
                                     process.get('overall_orders')[key])
            except KeyError:
                process.get('orders')[key] = process.get('overall_orders')[key]

        assert model.get('particles'), \
           "particles are missing in model: %s" % model.get('particles')

        assert model.get('interactions'), \
               "interactions are missing in model"

        res = base_objects.DiagramList()
        # First check that the number of fermions is even
        if len(filter(lambda leg: model.get('particle_dict')[\
                        leg.get('id')].is_fermion(), legs)) % 2 == 1:
            if not returndiag:
                self['diagrams'] = res
                raise InvalidCmd, 'The number of fermion is odd'
            else:
                raise InvalidCmd, 'The number of fermion is odd'

        # Then check same number of incoming and outgoing fermions (if
        # no Majorana particles in model)
        if not model.get('got_majoranas') and \
           len(filter(lambda leg: leg.is_incoming_fermion(model), legs)) != \
           len(filter(lambda leg: leg.is_outgoing_fermion(model), legs)):
            if not returndiag:
                self['diagrams'] = res
                raise InvalidCmd, 'The number of of incoming/outcoming fermions are different'
            else:
                raise InvalidCmd, 'The number of of incoming/outcoming fermions are different'

        # Finally check that charge (conserve by all interactions) of the process
        # is globally conserve for this process.
        for charge in model.get('conserved_charge'):
            total = 0
            for leg in legs:
                part = model.get('particle_dict')[leg.get('id')]
                try:
                    value = part.get(charge)
                except (AttributeError, base_objects.PhysicsObject.PhysicsObjectError):
                    try:
                        value = getattr(part, charge)
                    except AttributeError:
                        value = 0

                # Flip the sign for antiparticles and for the side of the
                # process the leg is on (incoming vs outgoing)
                if (leg.get('id') != part['pdg_code']) != leg['state']:
                    total -= value
                else:
                    total += value

            if abs(total) > 1e-10:
                if not returndiag:
                    self['diagrams'] = res
                    raise InvalidCmd, 'No %s conservation for this process ' % charge
                    # NOTE(review): unreachable, the raise above exits first
                    return res
                else:
                    raise InvalidCmd, 'No %s conservation for this process ' % charge
                    # NOTE(review): unreachable, the raise above exits first
                    return res, res

        if not returndiag:
            logger.info("Trying %s " % process.nice_string().replace('Process', 'process'))

        # Give numbers to legs in process
        for i in range(0, len(process.get('legs'))):
            # Make sure legs are unique
            leg = copy.copy(process.get('legs')[i])
            process.get('legs')[i] = leg
            if leg.get('number') == 0:
                leg.set('number', i + 1)

        # Copy leglist from process, so we can flip leg identities
        # without affecting the original process
        leglist = self.copy_leglist(process.get('legs'))

        for leg in leglist:

            # For the first step, ensure the tag from_group
            # is true for all legs
            leg.set('from_group', True)

            # Need to flip part-antipart for incoming particles,
            # so they are all outgoing
            if leg.get('state') == False:
                part = model.get('particle_dict')[leg.get('id')]
                leg.set('id', part.get_anti_pdg_code())

        # Calculate the maximal multiplicity of n-1>1 configurations
        # to restrict possible leg combinations
        max_multi_to1 = max([len(key) for key in \
                             model.get('ref_dict_to1').keys()])

        # Reduce the leg list and return the corresponding
        # list of vertices

        # For decay processes, generate starting from final-state
        # combined only as the last particle. This allows to use these
        # in decay chains later on.
        is_decay_proc = process.get_ninitial() == 1
        if is_decay_proc:
            part = model.get('particle_dict')[leglist[0].get('id')]
            # For decay chain legs, we want everything to combine to
            # the initial leg. This is done by only allowing the
            # initial leg to combine as a final identity.
            ref_dict_to0 = {(part.get_pdg_code(),part.get_anti_pdg_code()):[0],
                            (part.get_anti_pdg_code(),part.get_pdg_code()):[0]}
            # Need to set initial leg from_group to None, to make sure
            # it can only be combined at the end.
            leglist[0].set('from_group', None)
            reduced_leglist = self.reduce_leglist(leglist,
                                                  max_multi_to1,
                                                  ref_dict_to0,
                                                  is_decay_proc,
                                                  process.get('orders'))
        else:
            reduced_leglist = self.reduce_leglist(leglist,
                                                  max_multi_to1,
                                                  model.get('ref_dict_to0'),
                                                  is_decay_proc,
                                                  process.get('orders'))

        # In LoopAmplitude the function below is overloaded such that it
        # converts back all DGLoopLegs to Legs. In the default tree-level
        # diagram generation, this does nothing.
        self.convert_dgleg_to_leg(reduced_leglist)

        if reduced_leglist:
            for vertex_list in reduced_leglist:
                res.append(self.create_diagram(base_objects.VertexList(vertex_list)))

        # Record whether or not we failed generation before required
        # s-channel propagators are taken into account
        failed_crossing = not res

        # Required s-channels is a list of id-lists. Select the
        # diagrams where all required s-channel propagators in any of
        # the lists are present (i.e., the different lists correspond
        # to "or", while the elements of the list correspond to
        # "and").
        if process.get('required_s_channels') and \
               process.get('required_s_channels')[0]:
            # We shouldn't look at the last vertex in each diagram,
            # since that is the n->0 vertex
            lastvx = -1
            # For decay chain processes, there is an "artificial"
            # extra vertex corresponding to particle 1=1, so we need
            # to exclude the two last vertexes.
            if is_decay_proc: lastvx = -2
            ninitial = len(filter(lambda leg: leg.get('state') == False,
                                  process.get('legs')))
            # Check required s-channels for each list in required_s_channels
            old_res = res
            res = base_objects.DiagramList()
            for id_list in process.get('required_s_channels'):
                res_diags = filter(lambda diagram: \
                          all([req_s_channel in \
                               [vertex.get_s_channel_id(\
                               process.get('model'), ninitial) \
                               for vertex in diagram.get('vertices')[:lastvx]] \
                               for req_s_channel in \
                               id_list]), old_res)
                # Add diagrams only if not already in res
                res.extend([diag for diag in res_diags if diag not in res])

        # Remove all diagrams with a "double" forbidden s-channel propagator
        # is present.
        # Note that we shouldn't look at the last vertex in each
        # diagram, since that is the n->0 vertex
        if process.get('forbidden_s_channels'):
            ninitial = len(filter(lambda leg: leg.get('state') == False,
                                  process.get('legs')))
            if ninitial == 2:
                res = base_objects.DiagramList(\
                    filter(lambda diagram: \
                           not any([vertex.get_s_channel_id(\
                           process.get('model'), ninitial) \
                           in process.get('forbidden_s_channels')
                           for vertex in diagram.get('vertices')[:-1]]),
                           res))
            else:
                # split since we need to avoid that the initial particle is forbidden
                # as well.
                newres= []
                for diagram in res:
                    leg1 = 1
                    # check the latest vertex to see if the leg 1 is inside; if it
                    # is we need to inverse the look-up and allow the first s-channel
                    # of the associate particles.
                    vertex = diagram.get('vertices')[-1]
                    if any([l['number'] ==1 for l in vertex.get('legs')]):
                        leg1 = [l['number'] for l in vertex.get('legs') if l['number'] !=1][0]
                    # NOTE(review): Python-2-only — range() must return a list
                    # for the in-place reverse() below to work.
                    to_loop = range(len(diagram.get('vertices'))-1)
                    if leg1 >1:
                        to_loop.reverse()
                    for i in to_loop:
                        vertex = diagram.get('vertices')[i]
                        if leg1:
                            if any([l['number'] ==leg1 for l in vertex.get('legs')]):
                                leg1 = 0
                            continue
                        if vertex.get_s_channel_id(process.get('model'), ninitial)\
                                       in process.get('forbidden_s_channels'):
                            break
                    else:
                        newres.append(diagram)
                # NOTE(review): newres is built and wrapped here but never
                # assigned back to res, so the filtering in this branch
                # appears to have no effect — verify against upstream.
                newres = base_objects.DiagramList(newres)


        # Mark forbidden (onshell) s-channel propagators, to forbid onshell
        # generation.
        if process.get('forbidden_onsh_s_channels'):
            ninitial = len(filter(lambda leg: leg.get('state') == False,
                                  process.get('legs')))

            verts = base_objects.VertexList(sum([[vertex for vertex \
                                              in diagram.get('vertices')[:-1]
                                              if vertex.get_s_channel_id(\
                                              process.get('model'), ninitial) \
                                              in process.get('forbidden_onsh_s_channels')] \
                                              for diagram in res], []))
            for vert in verts:
                # Use onshell = False to indicate that this s-channel is forbidden
                newleg = copy.copy(vert.get('legs').pop(-1))
                newleg.set('onshell', False)
                vert.get('legs').append(newleg)

        # Set actual coupling orders for each diagram
        for diagram in res:
            diagram.calculate_orders(model)

        # Filter the diagrams according to the squared coupling order
        # constraints and possible the negative one. Remember that OrderName=-n
        # means that the user wants to include everything up to the N^(n+1)LO
        # contribution in that order and at most one order can be restricted
        # in this way. We shall do this only if the diagrams are not asked to
        # be returned, as it is the case for NLO because it this case the
        # interference are not necessarily among the diagrams generated here only.
        if not returndiag and len(res)>0:
            res = self.apply_squared_order_constraints(res)

        # Replace final id=0 vertex if necessary
        if not process.get('is_decay_chain'):
            for diagram in res:
                vertices = diagram.get('vertices')
                if len(vertices) > 1 and vertices[-1].get('id') == 0:
                    # Need to "glue together" last and next-to-last
                    # vertex, by replacing the (incoming) last leg of the
                    # next-to-last vertex with the (outgoing) leg in the
                    # last vertex
                    vertices = copy.copy(vertices)
                    lastvx = vertices.pop()
                    nexttolastvertex = copy.copy(vertices.pop())
                    legs = copy.copy(nexttolastvertex.get('legs'))
                    ntlnumber = legs[-1].get('number')
                    lastleg = filter(lambda leg: leg.get('number') != ntlnumber,
                                     lastvx.get('legs'))[0]
                    # Reset onshell in case we have forbidden s-channels
                    if lastleg.get('onshell') == False:
                        lastleg.set('onshell', None)
                    # Replace the last leg of nexttolastvertex
                    legs[-1] = lastleg
                    nexttolastvertex.set('legs', legs)
                    vertices.append(nexttolastvertex)
                    diagram.set('vertices', vertices)

        if res and not returndiag:
            logger.info("Process has %d diagrams" % len(res))

        # Trim down number of legs and vertices used to save memory
        self.trim_diagrams(diaglist=res)

        # Sort process legs according to leg number
        pertur = 'QCD'
        if self.get('process')['perturbation_couplings']:
            pertur = sorted(self.get('process')['perturbation_couplings'])[0]
        self.get('process').get('legs').sort(pert=pertur)

        # Set diagrams to res if not asked to be returned
        if not returndiag:
            self['diagrams'] = res
            return not failed_crossing
        else:
            return not failed_crossing, res
803
    def apply_squared_order_constraints(self, diag_list):
        """Applies the user specified squared order constraints on the diagram
        list in argument.

        Returns the filtered DiagramList. Also mutates
        self['process']['squared_orders'] when a negative constraint is
        resolved into its positive target order.
        """

        res = copy.copy(diag_list)

        # Iterate the filtering since the applying the constraint on one
        # type of coupling order can impact what the filtering on a previous
        # one (relevant for the '==' type of constraint).
        while True:
            new_res = res.apply_positive_sq_orders(res,
                                 self['process'].get('squared_orders'),
                                 self['process']['sqorders_types'])
            # Exit condition: a fixed point has been reached
            if len(res)==len(new_res):
                break
            elif (len(new_res)>len(res)):
                # Filtering can only ever remove diagrams, never add them.
                # NOTE(review): MadGraph5Error is not among the imports
                # visible in this chunk — presumably imported from the
                # madgraph package at the top of the file; verify.
                raise MadGraph5Error(
                  'Inconsistency in function apply_squared_order_constraints().')
            # Actualizing the list of diagram for the next iteration
            res = new_res

        # Now treat the negative squared order constraint (at most one)
        neg_orders = [(order, value) for order, value in \
                          self['process'].get('squared_orders').items() if value<0]
        if len(neg_orders)==1:
            neg_order, neg_value = neg_orders[0]
            # Now check any negative order constraint
            res, target_order = res.apply_negative_sq_order(res, neg_order,\
                           neg_value, self['process']['sqorders_types'][neg_order])
            # Substitute the negative value to this positive one so that
            # the resulting computed constraints appears in the print out
            # and at the output stage we no longer have to deal with
            # negative valued target orders
            self['process']['squared_orders'][neg_order]=target_order
        elif len(neg_orders)>1:
            raise InvalidCmd('At most one negative squared order constraint'+\
                                     ' can be specified, not %s.'%str(neg_orders))

        return res
844
845 - def create_diagram(self, vertexlist):
846 """ Return a Diagram created from the vertex list. This function can be 847 overloaded by daughter classes.""" 848 return base_objects.Diagram({'vertices':vertexlist})
849
850 - def convert_dgleg_to_leg(self, vertexdoublelist):
851 """ In LoopAmplitude, it converts back all DGLoopLegs into Legs. 852 In Amplitude, there is nothing to do. """ 853 854 return True
855
856 - def copy_leglist(self, legs):
857 """ Simply returns a copy of the leg list. This function is 858 overloaded in LoopAmplitude so that a DGLoopLeg list is returned. 859 The DGLoopLeg has some additional parameters only useful during 860 loop diagram generation""" 861 862 return base_objects.LegList(\ 863 [ copy.copy(leg) for leg in legs ])
864
    def reduce_leglist(self, curr_leglist, max_multi_to1, ref_dict_to0,
                       is_decay_proc = False, coupling_orders = None):
        """Recursive function to reduce N LegList to N-1.
        For algorithm, see doc for generate_diagrams.

        Returns a list of vertex lists (one per generated diagram), or
        None when this branch must be discarded.
        """

        # Result variable which is a list of lists of vertices
        # to be added
        res = []

        # Stop condition. If LegList is None, that means that this
        # diagram must be discarded
        if curr_leglist is None:
            return None

        # Extract ref dict information
        model = self.get('process').get('model')
        ref_dict_to1 = self.get('process').get('model').get('ref_dict_to1')

        # If all legs can be combined in one single vertex, add this
        # vertex to res and continue.
        # Special treatment for decay chain legs

        if curr_leglist.can_combine_to_0(ref_dict_to0, is_decay_proc):
            # Extract the interaction id associated to the vertex
            vertex_ids = self.get_combined_vertices(curr_leglist,
                       copy.copy(ref_dict_to0[tuple(sorted([leg.get('id') for \
                                               leg in curr_leglist]))]))

            final_vertices = [base_objects.Vertex({'legs':curr_leglist,
                                                   'id':vertex_id}) for \
                              vertex_id in vertex_ids]
            # Check for coupling orders. If orders < 0, skip vertex
            for final_vertex in final_vertices:
                if self.reduce_orders(coupling_orders, model,
                                      [final_vertex.get('id')]) != False:
                    res.append([final_vertex])
        # Stop condition 2: if the leglist contained exactly two particles,
        # return the result, if any, and stop.
        if len(curr_leglist) == 2:
            if res:
                return res
            else:
                return None

        # Create a list of all valid combinations of legs
        comb_lists = self.combine_legs(curr_leglist,
                                       ref_dict_to1, max_multi_to1)

        # Create a list of leglists/vertices by merging combinations
        leg_vertex_list = self.merge_comb_legs(comb_lists, ref_dict_to1)

        # Consider all the pairs (reduced leg list, vertices performed)
        for leg_vertex_tuple in leg_vertex_list:

            # Remove forbidden particles
            if self.get('process').get('forbidden_particles') and \
                any([abs(vertex.get('legs')[-1].get('id')) in \
                self.get('process').get('forbidden_particles') \
                for vertex in leg_vertex_tuple[1]]):
                    continue

            # Check for coupling orders. If couplings < 0, skip recursion.
            new_coupling_orders = self.reduce_orders(coupling_orders,
                                                     model,
                                                     [vertex.get('id') for vertex in \
                                                      leg_vertex_tuple[1]])
            if new_coupling_orders == False:
                # Some coupling order < 0
                continue

            # This is where recursion happens
            # First, reduce again the leg part
            reduced_diagram = self.reduce_leglist(leg_vertex_tuple[0],
                                                  max_multi_to1,
                                                  ref_dict_to0,
                                                  is_decay_proc,
                                                  new_coupling_orders)
            # If there is a reduced diagram, combine the vertices just
            # performed with every way of completing the diagram
            # (expand_list_list is a module-level helper)
            if reduced_diagram:
                vertex_list_list = [list(leg_vertex_tuple[1])]
                vertex_list_list.append(reduced_diagram)
                expanded_list = expand_list_list(vertex_list_list)
                res.extend(expanded_list)

        return res
953
954 - def reduce_orders(self, coupling_orders, model, vertex_id_list):
955 """Return False if the coupling orders for any coupling is < 956 0, otherwise return the new coupling orders with the vertex 957 orders subtracted. If coupling_orders is not given, return 958 None (which counts as success). 959 WEIGHTED is a special order, which corresponds to the sum of 960 order hierarchies for the couplings. 961 We ignore negative constraints as these cannot be taken into 962 account on the fly but only after generation.""" 963 964 if not coupling_orders: 965 return None 966 967 present_couplings = copy.copy(coupling_orders) 968 for id in vertex_id_list: 969 # Don't check for identity vertex (id = 0) 970 if not id: 971 continue 972 inter = model.get("interaction_dict")[id] 973 for coupling in inter.get('orders').keys(): 974 # Note that we don't consider a missing coupling as a 975 # constraint 976 if coupling in present_couplings and \ 977 present_couplings[coupling]>=0: 978 # Reduce the number of couplings that are left 979 present_couplings[coupling] -= \ 980 inter.get('orders')[coupling] 981 if present_couplings[coupling] < 0: 982 # We have too many couplings of this type 983 return False 984 # Now check for WEIGHTED, i.e. the sum of coupling hierarchy values 985 if 'WEIGHTED' in present_couplings and \ 986 present_couplings['WEIGHTED']>=0: 987 weight = sum([model.get('order_hierarchy')[c]*n for \ 988 (c,n) in inter.get('orders').items()]) 989 present_couplings['WEIGHTED'] -= weight 990 if present_couplings['WEIGHTED'] < 0: 991 # Total coupling weight too large 992 return False 993 994 return present_couplings
995
    def combine_legs(self, list_legs, ref_dict_to1, max_multi_to1):
        """Recursive function. Take a list of legs as an input, with
        the reference dictionary n-1->1, and output a list of list of
        tuples of Legs (allowed combinations) and Legs (rest). Algorithm:

        1. Get all n-combinations from list [123456]: [12],..,[23],..,[123],..

        2. For each combination, say [34]. Check if combination is valid.
           If so:

           a. Append [12[34]56] to result array

           b. Split [123456] at index(first element in combination+1),
              i.e. [12],[456] and subtract combination from second half,
              i.e.: [456]-[34]=[56]. Repeat from 1. with this array

        3. Take result array from call to 1. (here, [[56]]) and append
           (first half in step b - combination) + combination + (result
           from 1.) = [12[34][56]] to result array

        4. After appending results from all n-combinations, return
           resulting array. Example, if [13] and [45] are valid
           combinations:
           [[[13]2456],[[13]2[45]6],[123[45]6]]
        """

        res = []

        # loop over possible combination lengths (+1 is for range convention!)
        for comb_length in range(2, max_multi_to1 + 1):

            # Check the considered length is not longer than the list length
            if comb_length > len(list_legs):
                return res

            # itertools.combinations returns all possible combinations
            # of comb_length elements from list_legs
            for comb in itertools.combinations(list_legs, comb_length):

                # Check if the combination is valid
                if base_objects.LegList(comb).can_combine_to_1(ref_dict_to1):

                    # Identify the rest, create a list [comb,rest] and
                    # add it to res
                    res_list = copy.copy(list_legs)
                    for leg in comb:
                        res_list.remove(leg)
                    res_list.insert(list_legs.index(comb[0]), comb)
                    res.append(res_list)

                    # Now, deal with cases with more than 1 combination

                    # First, split the list into two, according to the
                    # position of the first element in comb, and remove
                    # all elements form comb
                    res_list1 = list_legs[0:list_legs.index(comb[0])]
                    res_list2 = list_legs[list_legs.index(comb[0]) + 1:]
                    for leg in comb[1:]:
                        res_list2.remove(leg)

                    # Create a list of type [comb,rest1,rest2(combined)]
                    # (note: res_list deliberately aliases res_list1 here,
                    # which is then extended below)
                    res_list = res_list1
                    res_list.append(comb)
                    # This is where recursion actually happens,
                    # on the second part
                    for item in self.combine_legs(res_list2,
                                                  ref_dict_to1,
                                                  max_multi_to1):
                        final_res_list = copy.copy(res_list)
                        final_res_list.extend(item)
                        res.append(final_res_list)

        return res
1069 1070
    def merge_comb_legs(self, comb_lists, ref_dict_to1):
        """Takes a list of allowed leg combinations as an input and returns
        a set of lists where combinations have been properly replaced
        (one list per element in the ref_dict, so that all possible intermediate
        particles are included). For each list, give the list of vertices
        corresponding to the executed merging, group the two as a tuple.

        Returns a list of (LegList, VertexList) tuples.
        """

        res = []

        for comb_list in comb_lists:

            reduced_list = []
            vertex_list = []

            for entry in comb_list:

                # Act on all leg combinations
                if isinstance(entry, tuple):

                    # Build the leg object which will replace the combination:
                    # 1) leg ids is as given in the ref_dict
                    leg_vert_ids = copy.copy(ref_dict_to1[\
                        tuple(sorted([leg.get('id') for leg in entry]))])
                    # 2) number is the minimum of leg numbers involved in the
                    # combination
                    number = min([leg.get('number') for leg in entry])
                    # 3) state is final, unless there is exactly one initial
                    # state particle involved in the combination -> t-channel
                    # NOTE(review): len(filter(...)) relies on Python 2
                    # filter() returning a list; under Python 3 this line
                    # would raise TypeError.
                    if len(filter(lambda leg: leg.get('state') == False,
                                  entry)) == 1:
                        state = False
                    else:
                        state = True
                    # 4) from_group is True, by definition

                    # Create and add the object. This is done by a
                    # separate routine, to allow overloading by
                    # daughter classes
                    new_leg_vert_ids = []
                    if leg_vert_ids:
                        new_leg_vert_ids = self.get_combined_legs(entry,
                                                                  leg_vert_ids,
                                                                  number,
                                                                  state)

                    reduced_list.append([l[0] for l in new_leg_vert_ids])

                    # Create and add the corresponding vertex
                    # Extract vertex ids corresponding to the various legs
                    # in mylegs
                    vlist = base_objects.VertexList()
                    for (myleg, vert_id) in new_leg_vert_ids:
                        # Start with the considered combination...
                        myleglist = base_objects.LegList(list(entry))
                        # ... and complete with legs after reducing
                        myleglist.append(myleg)
                        # ... and consider the correct vertex id
                        vlist.append(base_objects.Vertex(
                                         {'legs':myleglist,
                                          'id':vert_id}))

                    vertex_list.append(vlist)

                # If entry is not a combination, switch the from_group flag
                # and add it
                else:
                    cp_entry = copy.copy(entry)
                    # Need special case for from_group == None; this
                    # is for initial state leg of decay chain process
                    # (see Leg.can_combine_to_0)
                    if cp_entry.get('from_group') != None:
                        cp_entry.set('from_group', False)
                    reduced_list.append(cp_entry)

            # Flatten the obtained leg and vertex lists
            flat_red_lists = expand_list(reduced_list)
            flat_vx_lists = expand_list(vertex_list)

            # Combine the two lists in a list of tuple
            for i in range(0, len(flat_vx_lists)):
                res.append((base_objects.LegList(flat_red_lists[i]), \
                            base_objects.VertexList(flat_vx_lists[i])))

        return res
1157
1158 - def get_combined_legs(self, legs, leg_vert_ids, number, state):
1159 """Create a set of new legs from the info given. This can be 1160 overloaded by daughter classes.""" 1161 1162 mylegs = [(base_objects.Leg({'id':leg_id, 1163 'number':number, 1164 'state':state, 1165 'from_group':True}), 1166 vert_id)\ 1167 for leg_id, vert_id in leg_vert_ids] 1168 1169 return mylegs
1170
1171 - def get_combined_vertices(self, legs, vert_ids):
1172 """Allow for selection of vertex ids. This can be 1173 overloaded by daughter classes.""" 1174 1175 return vert_ids
1176
1177 - def trim_diagrams(self, decay_ids=[], diaglist=None):
1178 """Reduce the number of legs and vertices used in memory. 1179 When called by a diagram generation initiated by LoopAmplitude, 1180 this function should not trim the diagrams in the attribute 'diagrams' 1181 but rather a given list in the 'diaglist' argument.""" 1182 1183 legs = [] 1184 vertices = [] 1185 1186 if diaglist is None: 1187 diaglist=self.get('diagrams') 1188 1189 # Flag decaying legs in the core process by onshell = True 1190 process = self.get('process') 1191 for leg in process.get('legs'): 1192 if leg.get('state') and leg.get('id') in decay_ids: 1193 leg.set('onshell', True) 1194 1195 for diagram in diaglist: 1196 # Keep track of external legs (leg numbers already used) 1197 leg_external = set() 1198 for ivx, vertex in enumerate(diagram.get('vertices')): 1199 for ileg, leg in enumerate(vertex.get('legs')): 1200 # Ensure that only external legs get decay flag 1201 if leg.get('state') and leg.get('id') in decay_ids and \ 1202 leg.get('number') not in leg_external: 1203 # Use onshell to indicate decaying legs, 1204 # i.e. legs that have decay chains 1205 leg = copy.copy(leg) 1206 leg.set('onshell', True) 1207 try: 1208 index = legs.index(leg) 1209 except ValueError: 1210 vertex.get('legs')[ileg] = leg 1211 legs.append(leg) 1212 else: # Found a leg 1213 vertex.get('legs')[ileg] = legs[index] 1214 leg_external.add(leg.get('number')) 1215 try: 1216 index = vertices.index(vertex) 1217 diagram.get('vertices')[ivx] = vertices[index] 1218 except ValueError: 1219 vertices.append(vertex)
1220
#===============================================================================
# AmplitudeList
#===============================================================================
class AmplitudeList(base_objects.PhysicsObjectList):
    """List of Amplitude objects
    """

    def has_any_loop_process(self):
        """Check the content of all processes of the amplitudes in this
        list to see if there is any which defines perturbation couplings.

        Returns True as soon as one amplitude has a loop process,
        False otherwise. (The original implementation fell through and
        implicitly returned None; an explicit False is returned instead
        so callers always get a proper boolean.)"""

        for amp in self:
            if amp.has_loop_process():
                return True
        return False

    def is_valid_element(self, obj):
        """Test if object obj is a valid Amplitude for the list."""

        return isinstance(obj, Amplitude)
1240
#===============================================================================
# DecayChainAmplitude
#===============================================================================
class DecayChainAmplitude(Amplitude):
    """A list of amplitudes + a list of decay chain amplitude lists;
    corresponding to a ProcessDefinition with a list of decay chains
    """

    def default_setup(self):
        """Default values for all properties"""

        # Amplitudes of the core process (decay chains stripped off)
        self['amplitudes'] = AmplitudeList()
        # One DecayChainAmplitude per decay chain defined in the process
        self['decay_chains'] = DecayChainAmplitudeList()
1254
    def __init__(self, argument = None, collect_mirror_procs = False,
                 ignore_six_quark_processes = False):
        """Allow initialization with Process and with ProcessDefinition.

        When given a Process/ProcessDefinition, generates the core
        amplitudes, recursively builds DecayChainAmplitudes for each
        decay chain, flags decaying legs onshell, and prunes decays
        whose mother particle never appears in the core process.
        """

        if isinstance(argument, base_objects.Process):
            super(DecayChainAmplitude, self).__init__()
            from madgraph.loop.loop_diagram_generation import LoopMultiProcess
            if argument['perturbation_couplings']:
                MultiProcessClass=LoopMultiProcess
            else:
                MultiProcessClass=MultiProcess
            # NOTE(review): this branch assumes ProcessDefinition is a
            # subclass of Process, so multi-particle definitions are
            # dispatched here — confirm against base_objects.
            if isinstance(argument, base_objects.ProcessDefinition):
                self['amplitudes'].extend(\
                    MultiProcessClass.generate_multi_amplitudes(argument,
                                                    collect_mirror_procs,
                                                    ignore_six_quark_processes))
            else:
                self['amplitudes'].append(\
                    MultiProcessClass.get_amplitude_from_proc(argument))
                # Clean decay chains from process, since we haven't
                # combined processes with decay chains yet
                process = copy.copy(self.get('amplitudes')[0].get('process'))
                process.set('decay_chains', base_objects.ProcessList())
                self['amplitudes'][0].set('process', process)

            # Recursively build one DecayChainAmplitude per decay chain
            for process in argument.get('decay_chains'):
                if process.get('perturbation_couplings'):
                    raise MadGraph5Error,\
                          "Decay processes can not be perturbed"
                process.set('overall_orders', argument.get('overall_orders'))
                if not process.get('is_decay_chain'):
                    process.set('is_decay_chain',True)
                if not process.get_ninitial() == 1:
                    raise InvalidCmd,\
                          "Decay chain process must have exactly one" + \
                          " incoming particle"
                self['decay_chains'].append(\
                    DecayChainAmplitude(process, collect_mirror_procs,
                                        ignore_six_quark_processes))

            # Flag decaying legs in the core diagrams by onshell = True
            decay_ids = sum([[a.get('process').get('legs')[0].get('id') \
                              for a in dec.get('amplitudes')] for dec in \
                             self['decay_chains']], [])
            decay_ids = set(decay_ids)
            for amp in self['amplitudes']:
                amp.trim_diagrams(decay_ids)

            # Check that all decay ids are present in at least some process
            for amp in self['amplitudes']:
                for l in amp.get('process').get('legs'):
                    if l.get('id') in decay_ids:
                        decay_ids.remove(l.get('id'))

            # Any id left over has a decay defined but no corresponding
            # particle in the core process: warn and drop those decays
            if decay_ids:
                model = amp.get('process').get('model')
                names = [model.get_particle(id).get('name') for id in decay_ids]

                logger.warning(
                    "$RED Decay without corresponding particle in core process found.\n" + \
                    "Decay information for particle(s) %s is discarded.\n" % ','.join(names) + \
                    "Please check your process definition carefully. \n" + \
                    "This warning usually means that you forgot parentheses in presence of subdecay.\n" + \
                    "Example of correct syntax: p p > t t~, ( t > w+ b, w+ > l+ vl)")

                # Remove unused decays from the process list
                # (iterate reversed so removal is safe while looping)
                for dc in reversed(self['decay_chains']):
                    for a in reversed(dc.get('amplitudes')):
                        # Remove the amplitudes from this decay chain
                        if a.get('process').get('legs')[0].get('id') in decay_ids:
                            dc.get('amplitudes').remove(a)
                    if not dc.get('amplitudes'):
                        # If no amplitudes left, remove the decay chain
                        self['decay_chains'].remove(dc)

            # Finally, write a fat warning if any decay process has
            # the decaying particle (or its antiparticle) in the final state
            bad_procs = []
            for dc in self['decay_chains']:
                for amp in dc.get('amplitudes'):
                    legs = amp.get('process').get('legs')
                    fs_parts = [abs(l.get('id')) for l in legs if
                                l.get('state')]
                    is_part = [l.get('id') for l in legs if not
                               l.get('state')][0]
                    if abs(is_part) in fs_parts:
                        bad_procs.append(amp.get('process'))

            if bad_procs:
                logger.warning(
                    "$RED Decay(s) with particle decaying to itself:\n" + \
                    '\n'.join([p.nice_string() for p in bad_procs]) + \
                    "\nPlease check your process definition carefully. \n")

        elif argument != None:
            # call the mother routine
            super(DecayChainAmplitude, self).__init__(argument)
        else:
            # call the mother routine
            super(DecayChainAmplitude, self).__init__()
1356
    def filter(self, name, value):
        """Filter for valid amplitude property values.

        Raises PhysicsObjectError for a value of the wrong type;
        returns True otherwise."""

        if name == 'amplitudes':
            if not isinstance(value, AmplitudeList):
                raise self.PhysicsObjectError, \
                      "%s is not a valid AmplitudeList" % str(value)
        if name == 'decay_chains':
            if not isinstance(value, DecayChainAmplitudeList):
                raise self.PhysicsObjectError, \
                      "%s is not a valid DecayChainAmplitudeList object" % \
                      str(value)
        return True
1370
    def get_sorted_keys(self):
        """Return amplitude property names as a nicely sorted list."""

        return ['amplitudes', 'decay_chains']
1375 1376 # Helper functions 1377
1378 - def get_number_of_diagrams(self):
1379 """Returns number of diagrams for this amplitude""" 1380 return sum(len(a.get('diagrams')) for a in self.get('amplitudes')) \ 1381 + sum(d.get_number_of_diagrams() for d in \ 1382 self.get('decay_chains'))
1383
1384 - def nice_string(self, indent = 0):
1385 """Returns a nicely formatted string of the amplitude content.""" 1386 mystr = "" 1387 for amplitude in self.get('amplitudes'): 1388 mystr = mystr + amplitude.nice_string(indent) + "\n" 1389 1390 if self.get('decay_chains'): 1391 mystr = mystr + " " * indent + "Decays:\n" 1392 for dec in self.get('decay_chains'): 1393 mystr = mystr + dec.nice_string(indent + 2) + "\n" 1394 1395 return mystr[:-1]
1396
1397 - def nice_string_processes(self, indent = 0):
1398 """Returns a nicely formatted string of the amplitude processes.""" 1399 mystr = "" 1400 for amplitude in self.get('amplitudes'): 1401 mystr = mystr + amplitude.nice_string_processes(indent) + "\n" 1402 1403 if self.get('decay_chains'): 1404 mystr = mystr + " " * indent + "Decays:\n" 1405 for dec in self.get('decay_chains'): 1406 mystr = mystr + dec.nice_string_processes(indent + 2) + "\n" 1407 1408 return mystr[:-1]
1409
1410 - def get_ninitial(self):
1411 """Returns the number of initial state particles in the process.""" 1412 return self.get('amplitudes')[0].get('process').get_ninitial()
1413
1414 - def get_decay_ids(self):
1415 """Returns a set of all particle ids for which a decay is defined""" 1416 1417 decay_ids = [] 1418 1419 # Get all amplitudes for the decay processes 1420 for amp in sum([dc.get('amplitudes') for dc \ 1421 in self['decay_chains']], []): 1422 # For each amplitude, find the initial state leg 1423 decay_ids.append(amp.get('process').get_initial_ids()[0]) 1424 1425 # Return a list with unique ids 1426 return list(set(decay_ids))
1427
    def has_loop_process(self):
        """Return whether this amplitude has a loop process
        (delegates to the core amplitude list)."""
        return self['amplitudes'].has_any_loop_process()
1431
1432 - def get_amplitudes(self):
1433 """Recursive function to extract all amplitudes for this process""" 1434 1435 amplitudes = AmplitudeList() 1436 1437 amplitudes.extend(self.get('amplitudes')) 1438 for decay in self.get('decay_chains'): 1439 amplitudes.extend(decay.get_amplitudes()) 1440 1441 return amplitudes
1442
#===============================================================================
# DecayChainAmplitudeList
#===============================================================================
class DecayChainAmplitudeList(base_objects.PhysicsObjectList):
    """List of DecayChainAmplitude objects
    """

    def is_valid_element(self, obj):
        """Test if object obj is a valid DecayChainAmplitude for the list."""

        return isinstance(obj, DecayChainAmplitude)
1455
#===============================================================================
# MultiProcess
#===============================================================================
class MultiProcess(base_objects.PhysicsObject):
    """MultiProcess: list of process definitions
                     list of processes (after cleaning)
                     list of amplitudes (after generation)
    """

    def default_setup(self):
        """Default values for all properties"""

        self['process_definitions'] = base_objects.ProcessDefinitionList()
        # self['amplitudes'] can be an AmplitudeList or a
        # DecayChainAmplitudeList, depending on whether there are
        # decay chains in the process definitions or not.
        self['amplitudes'] = AmplitudeList()
        # Flag for whether to combine IS mirror processes together
        self['collect_mirror_procs'] = False
        # List of quark flavors where we ignore processes with at
        # least 6 quarks (three quark lines)
        self['ignore_six_quark_processes'] = []
        # Allow to use the model parameter numerical value for optimization.
        # This is currently used for 1->N generation (mass check).
        self['use_numerical'] = False
1482
1483 - def __init__(self, argument=None, collect_mirror_procs = False, 1484 ignore_six_quark_processes = [], optimize=False):
1485 """Allow initialization with ProcessDefinition or 1486 ProcessDefinitionList 1487 optimize allows to use param_card information. (usefull for 1-.N)""" 1488 1489 if isinstance(argument, base_objects.ProcessDefinition): 1490 super(MultiProcess, self).__init__() 1491 self['process_definitions'].append(argument) 1492 elif isinstance(argument, base_objects.ProcessDefinitionList): 1493 super(MultiProcess, self).__init__() 1494 self['process_definitions'] = argument 1495 elif argument != None: 1496 # call the mother routine 1497 super(MultiProcess, self).__init__(argument) 1498 else: 1499 # call the mother routine 1500 super(MultiProcess, self).__init__() 1501 1502 self['collect_mirror_procs'] = collect_mirror_procs 1503 self['ignore_six_quark_processes'] = ignore_six_quark_processes 1504 self['use_numerical'] = optimize 1505 1506 if isinstance(argument, base_objects.ProcessDefinition) or \ 1507 isinstance(argument, base_objects.ProcessDefinitionList): 1508 # Generate the diagrams 1509 self.get('amplitudes')
1510 1511
1512 - def filter(self, name, value):
1513 """Filter for valid process property values.""" 1514 1515 if name == 'process_definitions': 1516 if not isinstance(value, base_objects.ProcessDefinitionList): 1517 raise self.PhysicsObjectError, \ 1518 "%s is not a valid ProcessDefinitionList object" % str(value) 1519 1520 if name == 'amplitudes': 1521 if not isinstance(value, diagram_generation.AmplitudeList): 1522 raise self.PhysicsObjectError, \ 1523 "%s is not a valid AmplitudeList object" % str(value) 1524 1525 if name in ['collect_mirror_procs']: 1526 if not isinstance(value, bool): 1527 raise self.PhysicsObjectError, \ 1528 "%s is not a valid boolean" % str(value) 1529 1530 if name == 'ignore_six_quark_processes': 1531 if not isinstance(value, list): 1532 raise self.PhysicsObjectError, \ 1533 "%s is not a valid list" % str(value) 1534 1535 return True
1536
    def get(self, name):
        """Get the value of the property name.

        The 'amplitudes' property is generated lazily: the first time it
        is requested (while still empty), diagrams are generated for all
        stored process definitions — as DecayChainAmplitudes when the
        definition carries decay chains, as plain Amplitudes otherwise —
        and the result is cached in self['amplitudes']."""

        if (name == 'amplitudes') and not self[name]:
            for process_def in self.get('process_definitions'):
                if process_def.get('decay_chains'):
                    # This is a decay chain process
                    # Store amplitude(s) as DecayChainAmplitude
                    self['amplitudes'].append(\
                        DecayChainAmplitude(process_def,
                                        self.get('collect_mirror_procs'),
                                        self.get('ignore_six_quark_processes')))
                else:
                    self['amplitudes'].extend(\
                        self.generate_multi_amplitudes(process_def,
                                        self.get('collect_mirror_procs'),
                                        self.get('ignore_six_quark_processes'),
                                        self['use_numerical']))

        return MultiProcess.__bases__[0].get(self, name) # call the mother routine
1557
    def get_sorted_keys(self):
        """Return process property names as a nicely sorted list."""

        # Order matters for the canonical string representation
        return ['process_definitions', 'amplitudes']
    @classmethod
    def generate_multi_amplitudes(cls, process_definition,
                                  collect_mirror_procs = False,
                                  ignore_six_quark_processes = [],
                                  use_numerical=False):
        """Generate amplitudes in a semi-efficient way.
        Make use of crossing symmetry for processes that fail diagram
        generation, but not for processes that succeed diagram
        generation. Doing so will risk making it impossible to
        identify processes with identical amplitudes.

        process_definition -- ProcessDefinition with (multi)particle legs.
        collect_mirror_procs -- if True, 2->N processes differing only by
            the exchange of the two initial legs share one amplitude.
        ignore_six_quark_processes -- quark ids; processes containing
            six or more such (anti)quarks are skipped.
        use_numerical -- use numerical mass values to discard 1->N
            processes that are kinematically closed.

        Returns an AmplitudeList; raises InvalidCmd if no process in the
        definition yields any diagram.
        """
        assert isinstance(process_definition, base_objects.ProcessDefinition), \
                                    "%s not valid ProcessDefinition object" % \
                                    repr(process_definition)

        # Set automatic coupling orders
        process_definition.set('orders', MultiProcess.\
                               find_optimal_process_orders(process_definition))
        # Check for maximum orders from the model
        process_definition.check_expansion_orders()

        processes = base_objects.ProcessList()
        amplitudes = AmplitudeList()

        # failed_procs and success_procs are sorted processes that have
        # already failed/succeeded based on crossing symmetry
        failed_procs = []
        success_procs = []
        # Complete processes, for identification of mirror processes
        non_permuted_procs = []
        # permutations keeps the permutations of the crossed processes
        permutations = []

        # Store the diagram tags for processes, to allow for
        # identifying identical matrix elements already at this stage.
        model = process_definition['model']

        isids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == False]
        fsids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == True]

        # Generate all combinations for the initial state
        for prod in itertools.product(*isids):
            islegs = [\
                    base_objects.Leg({'id':id, 'state': False}) \
                    for id in prod]

            # Generate all combinations for the final state, and make
            # sure to remove double counting

            red_fsidlist = []

            for prod in itertools.product(*fsids):

                # Remove double counting between final states
                if tuple(sorted(prod)) in red_fsidlist:
                    continue

                red_fsidlist.append(tuple(sorted(prod)));

                # Generate leg list for process
                leg_list = [copy.copy(leg) for leg in islegs]

                leg_list.extend([\
                        base_objects.Leg({'id':id, 'state': True}) \
                        for id in prod])

                legs = base_objects.LegList(leg_list)

                # Check for crossed processes: sort the outgoing ids and
                # remember the permutation that restores the given order
                sorted_legs = sorted([(l,i+1) for (i,l) in \
                                enumerate(legs.get_outgoing_id_list(model))])
                permutation = [l[1] for l in sorted_legs]
                sorted_legs = array.array('i', [l[0] for l in sorted_legs])

                # Check for six-quark processes
                if ignore_six_quark_processes and \
                       len([i for i in sorted_legs if abs(i) in \
                            ignore_six_quark_processes]) >= 6:
                    continue

                # Check if crossed process has already failed,
                # in that case don't check process
                if sorted_legs in failed_procs:
                    continue

                # If allowed check mass validity [assume 1->N]
                if use_numerical:
                    # check that final state has lower mass than initial state
                    initial_mass = abs(model['parameter_dict'][model.get_particle(legs[0].get('id')).get('mass')])
                    if initial_mass == 0:
                        continue
                    for leg in legs[1:]:
                        m = model['parameter_dict'][model.get_particle(leg.get('id')).get('mass')]
                        initial_mass -= abs(m)
                    if initial_mass.real <= 0:
                        continue

                # Setup process
                process = base_objects.Process({\
                                'legs':legs,
                                'model':process_definition.get('model'),
                                'id': process_definition.get('id'),
                                'orders': process_definition.get('orders'),
                                'required_s_channels': \
                                   process_definition.get('required_s_channels'),
                                'forbidden_onsh_s_channels': \
                                   process_definition.get('forbidden_onsh_s_channels'),
                                'forbidden_s_channels': \
                                   process_definition.get('forbidden_s_channels'),
                                'forbidden_particles': \
                                   process_definition.get('forbidden_particles'),
                                'is_decay_chain': \
                                   process_definition.get('is_decay_chain'),
                                'perturbation_couplings': \
                                   process_definition.get('perturbation_couplings'),
                                'squared_orders': \
                                   process_definition.get('squared_orders'),
                                'sqorders_types': \
                                   process_definition.get('sqorders_types'),
                                'overall_orders': \
                                   process_definition.get('overall_orders'),
                                'has_born': \
                                   process_definition.get('has_born'),
                                'split_orders': \
                                   process_definition.get('split_orders')
                                })

                fast_proc = \
                          array.array('i',[leg.get('id') for leg in legs])
                if collect_mirror_procs and \
                       process_definition.get_ninitial() == 2:
                    # Check if mirrored process is already generated
                    mirror_proc = \
                              array.array('i', [fast_proc[1], fast_proc[0]] + \
                                          list(fast_proc[2:]))
                    try:
                        mirror_amp = \
                               amplitudes[non_permuted_procs.index(mirror_proc)]
                    except Exception:
                        # Didn't find any mirror process
                        pass
                    else:
                        # Mirror process found
                        mirror_amp.set('has_mirror_process', True)
                        logger.info("Process %s added to mirror process %s" % \
                                    (process.base_string(),
                                     mirror_amp.get('process').base_string()))
                        continue

                # Check for successful crossings, unless we have specified
                # properties that break crossing symmetry
                if not process.get('required_s_channels') and \
                   not process.get('forbidden_onsh_s_channels') and \
                   not process.get('forbidden_s_channels') and \
                   not process.get('is_decay_chain'):
                    try:
                        crossed_index = success_procs.index(sorted_legs)
                    except ValueError:
                        # No crossing found, just continue
                        pass
                    else:
                        # Found crossing - reuse amplitude
                        amplitude = MultiProcess.cross_amplitude(\
                            amplitudes[crossed_index],
                            process,
                            permutations[crossed_index],
                            permutation)
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                        logger.info("Crossed process found for %s, reuse diagrams." % \
                                    process.base_string())
                        continue

                # Create new amplitude
                amplitude = cls.get_amplitude_from_proc(process)

                try:
                    result = amplitude.generate_diagrams()
                except InvalidCmd as error:
                    failed_procs.append(sorted_legs)
                else:
                    # Succeeded in generating diagrams
                    if amplitude.get('diagrams'):
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                    elif not result:
                        # Diagram generation failed for all crossings
                        failed_procs.append(sorted_legs)

        # Raise exception if there are no amplitudes for this process
        if not amplitudes:
            if len(failed_procs) == 1 and 'error' in locals():
                # Re-raise the single recorded generation error verbatim
                raise error
            else:
                raise InvalidCmd, \
                  "No amplitudes generated from process %s. Please enter a valid process" % \
                  process_definition.nice_string()

        # Return the produced amplitudes
        return amplitudes
1769 1770 @classmethod
1771 - def get_amplitude_from_proc(cls,proc):
1772 """ Return the correct amplitude type according to the characteristics of 1773 the process proc """ 1774 return Amplitude({"process": proc})
1775 1776 1777 @staticmethod
1778 - def find_optimal_process_orders(process_definition):
1779 """Find the minimal WEIGHTED order for this set of processes. 1780 1781 The algorithm: 1782 1783 1) Check the coupling hierarchy of the model. Assign all 1784 particles to the different coupling hierarchies so that a 1785 particle is considered to be in the highest hierarchy (i.e., 1786 with lowest value) where it has an interaction. 1787 1788 2) Pick out the legs in the multiprocess according to the 1789 highest hierarchy represented (so don't mix particles from 1790 different hierarchy classes in the same multiparticles!) 1791 1792 3) Find the starting maximum WEIGHTED order as the sum of the 1793 highest n-2 weighted orders 1794 1795 4) Pick out required s-channel particle hierarchies, and use 1796 the highest of the maximum WEIGHTED order from the legs and 1797 the minimum WEIGHTED order extracted from 2*s-channel 1798 hierarchys plus the n-2-2*(number of s-channels) lowest 1799 leg weighted orders. 1800 1801 5) Run process generation with the WEIGHTED order determined 1802 in 3)-4) - # final state gluons, with all gluons removed from 1803 the final state 1804 1805 6) If no process is found, increase WEIGHTED order by 1 and go 1806 back to 5), until we find a process which passes. Return that 1807 order. 1808 1809 7) Continue 5)-6) until we reach (n-2)*(highest hierarchy)-1. 
1810 If still no process has passed, return 1811 WEIGHTED = (n-2)*(highest hierarchy) 1812 """ 1813 1814 assert isinstance(process_definition, base_objects.ProcessDefinition), \ 1815 "%s not valid ProcessDefinition object" % \ 1816 repr(process_definition) 1817 1818 processes = base_objects.ProcessList() 1819 amplitudes = AmplitudeList() 1820 1821 # If there are already couplings defined, return 1822 if process_definition.get('orders') or \ 1823 process_definition.get('overall_orders') or \ 1824 process_definition.get('NLO_mode')=='virt': 1825 return process_definition.get('orders') 1826 1827 # If this is a decay process (and not a decay chain), return 1828 if process_definition.get_ninitial() == 1 and not \ 1829 process_definition.get('is_decay_chain'): 1830 return process_definition.get('orders') 1831 1832 logger.info("Checking for minimal orders which gives processes.") 1833 logger.info("Please specify coupling orders to bypass this step.") 1834 1835 # Calculate minimum starting guess for WEIGHTED order 1836 max_order_now, particles, hierarchy = \ 1837 process_definition.get_minimum_WEIGHTED() 1838 coupling = 'WEIGHTED' 1839 1840 model = process_definition.get('model') 1841 1842 # Extract the initial and final leg ids 1843 isids = [leg['ids'] for leg in \ 1844 filter(lambda leg: leg['state'] == False, process_definition['legs'])] 1845 fsids = [leg['ids'] for leg in \ 1846 filter(lambda leg: leg['state'] == True, process_definition['legs'])] 1847 1848 max_WEIGHTED_order = \ 1849 (len(fsids + isids) - 2)*int(model.get_max_WEIGHTED()) 1850 1851 # Run diagram generation with increasing max_order_now until 1852 # we manage to get diagrams 1853 while max_order_now < max_WEIGHTED_order: 1854 1855 logger.info("Trying coupling order WEIGHTED=%d" % max_order_now) 1856 1857 oldloglevel = logger.level 1858 logger.setLevel(logging.WARNING) 1859 1860 # failed_procs are processes that have already failed 1861 # based on crossing symmetry 1862 failed_procs = [] 1863 1864 # 
Generate all combinations for the initial state 1865 for prod in apply(itertools.product, isids): 1866 islegs = [ base_objects.Leg({'id':id, 'state': False}) \ 1867 for id in prod] 1868 1869 # Generate all combinations for the final state, and make 1870 # sure to remove double counting 1871 1872 red_fsidlist = [] 1873 1874 for prod in apply(itertools.product, fsids): 1875 1876 # Remove double counting between final states 1877 if tuple(sorted(prod)) in red_fsidlist: 1878 continue 1879 1880 red_fsidlist.append(tuple(sorted(prod))); 1881 1882 # Remove gluons from final state if QCD is among 1883 # the highest coupling hierarchy 1884 nglue = 0 1885 if 21 in particles[0]: 1886 nglue = len([id for id in prod if id == 21]) 1887 prod = [id for id in prod if id != 21] 1888 1889 # Generate leg list for process 1890 leg_list = [copy.copy(leg) for leg in islegs] 1891 1892 leg_list.extend([\ 1893 base_objects.Leg({'id':id, 'state': True}) \ 1894 for id in prod]) 1895 1896 legs = base_objects.LegList(leg_list) 1897 1898 # Set summed coupling order according to max_order_now 1899 # subtracting the removed gluons 1900 coupling_orders_now = {coupling: max_order_now - \ 1901 nglue * model['order_hierarchy']['QCD']} 1902 1903 # Setup process 1904 process = base_objects.Process({\ 1905 'legs':legs, 1906 'model':model, 1907 'id': process_definition.get('id'), 1908 'orders': coupling_orders_now, 1909 'required_s_channels': \ 1910 process_definition.get('required_s_channels'), 1911 'forbidden_onsh_s_channels': \ 1912 process_definition.get('forbidden_onsh_s_channels'), 1913 'sqorders_types': \ 1914 process_definition.get('sqorders_types'), 1915 'squared_orders': \ 1916 process_definition.get('squared_orders'), 1917 'split_orders': \ 1918 process_definition.get('split_orders'), 1919 'forbidden_s_channels': \ 1920 process_definition.get('forbidden_s_channels'), 1921 'forbidden_particles': \ 1922 process_definition.get('forbidden_particles'), 1923 'is_decay_chain': \ 1924 
process_definition.get('is_decay_chain'), 1925 'overall_orders': \ 1926 process_definition.get('overall_orders'), 1927 'split_orders': \ 1928 process_definition.get('split_orders')}) 1929 1930 # Check for couplings with given expansion orders 1931 process.check_expansion_orders() 1932 1933 # Check for crossed processes 1934 sorted_legs = sorted(legs.get_outgoing_id_list(model)) 1935 # Check if crossed process has already failed 1936 # In that case don't check process 1937 if tuple(sorted_legs) in failed_procs: 1938 continue 1939 1940 amplitude = Amplitude({'process': process}) 1941 try: 1942 amplitude.generate_diagrams() 1943 except InvalidCmd: 1944 failed_procs.append(tuple(sorted_legs)) 1945 else: 1946 if amplitude.get('diagrams'): 1947 # We found a valid amplitude. Return this order number 1948 logger.setLevel(oldloglevel) 1949 return {coupling: max_order_now} 1950 else: 1951 failed_procs.append(tuple(sorted_legs)) 1952 1953 # No processes found, increase max_order_now 1954 max_order_now += 1 1955 logger.setLevel(oldloglevel) 1956 1957 # If no valid processes found with nfinal-1 couplings, return maximal 1958 return {coupling: max_order_now}
1959 1960 @staticmethod
1961 - def cross_amplitude(amplitude, process, org_perm, new_perm):
1962 """Return the amplitude crossed with the permutation new_perm""" 1963 # Create dict from original leg numbers to new leg numbers 1964 perm_map = dict(zip(org_perm, new_perm)) 1965 # Initiate new amplitude 1966 new_amp = copy.copy(amplitude) 1967 # Number legs 1968 for i, leg in enumerate(process.get('legs')): 1969 leg.set('number', i+1) 1970 # Set process 1971 new_amp.set('process', process) 1972 # Now replace the leg numbers in the diagrams 1973 diagrams = base_objects.DiagramList([d.renumber_legs(perm_map, 1974 process.get('legs'),) for \ 1975 d in new_amp.get('diagrams')]) 1976 new_amp.set('diagrams', diagrams) 1977 new_amp.trim_diagrams() 1978 1979 # Make sure to reset mirror process 1980 new_amp.set('has_mirror_process', False) 1981 1982 return new_amp
1983
#===============================================================================
# Global helper methods
#===============================================================================

def expand_list(mylist):
    """Takes a list of lists and elements and returns a list of flat lists.
    Example: [[1,2], 3, [4,5]] -> [[1,3,4], [1,3,5], [2,3,4], [2,3,5]]
    """

    # Check that argument is a list
    assert isinstance(mylist, list), "Expand_list argument must be a list"

    # Wrap bare elements so that every entry is a list of alternatives
    tmplist = [item if isinstance(item, list) else [item] for item in mylist]

    # NOTE: the apply() builtin was removed in Python 3; star-unpacking the
    # argument list is the portable equivalent with identical semantics.
    return [list(item) for item in itertools.product(*tmplist)]
2009
def expand_list_list(mylist):
    """Recursive function. Takes a list of lists and lists of lists
    and returns a list of flat lists.
    Example: [[1,2],[[4,5],[6,7]]] -> [[1,2,4,5], [1,2,6,7]]
    """

    # Base case: nothing to expand (or a single empty entry) gives one
    # empty flat list
    if not mylist or (len(mylist) == 1 and not mylist[0]):
        return [[]]

    # The first element must at least be a list
    assert isinstance(mylist[0], list), \
           "Expand_list_list needs a list of lists and lists of lists"

    head, tail = mylist[0], mylist[1:]
    # A head whose first entry is itself a list contributes several
    # alternative prefixes; otherwise it is a single fixed prefix
    head_is_alternatives = isinstance(head[0], list)

    # Recursion stop condition: a single element left
    if not tail:
        return head if head_is_alternatives else mylist

    prefixes = head if head_is_alternatives else [head]

    # Prefix every alternative of the head onto every expansion of the
    # tail (the recursion happens here)
    return [prefix + rest
            for prefix in prefixes
            for rest in expand_list_list(tail)]
2049