Package madgraph :: Package core :: Module diagram_generation
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.core.diagram_generation

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Classes for diagram generation. Amplitude performs the diagram 
  16  generation, DecayChainAmplitude keeps track of processes with decay 
  17  chains, and MultiProcess allows generation of processes with 
  18  multiparticle definitions. DiagramTag allows to identify diagrams 
  19  based on relevant properties. 
  20  """ 
  21   
  22  from __future__ import absolute_import 
  23  from six.moves import filter 
  24  #force filter to be a generator # like in py3 
  25   
  26   
  27  import array 
  28  import copy 
  29  import itertools 
  30  import logging 
  31   
  32  import madgraph.core.base_objects as base_objects 
  33  import madgraph.various.misc as misc 
  34  from madgraph import InvalidCmd, MadGraph5Error 
  35  from six.moves import range 
  36  from six.moves import zip 
  37   
  38  logger = logging.getLogger('madgraph.diagram_generation') 
class NoDiagramException(InvalidCmd):
    """Raised when diagram generation produces no diagrams for a process."""
    pass
42
43 #=============================================================================== 44 # DiagramTag mother class 45 #=============================================================================== 46 47 -class DiagramTag(object):
48 """Class to tag diagrams based on objects with some __lt__ measure, e.g. 49 PDG code/interaction id (for comparing diagrams from the same amplitude), 50 or Lorentz/coupling/mass/width (for comparing AMPs from different MEs). 51 Algorithm: Create chains starting from external particles: 52 1 \ / 6 53 2 /\______/\ 7 54 3_ / | \_ 8 55 4 / 5 \_ 9 56 \ 10 57 gives ((((9,10,id910),8,id9108),(6,7,id67),id910867) 58 (((1,2,id12),(3,4,id34)),id1234), 59 5,id91086712345) 60 where idN is the id of the corresponding interaction. The ordering within 61 chains is based on chain length (depth; here, 1234 has depth 3, 910867 has 62 depth 4, 5 has depht 0), and if equal on the ordering of the chain elements. 63 The determination of central vertex is based on minimizing the chain length 64 for the longest subchain. 65 This gives a unique tag which can be used to identify diagrams 66 (instead of symmetry), as well as identify identical matrix elements from 67 different processes.""" 68
69 - class DiagramTagError(Exception):
70 """Exception for any problems in DiagramTags""" 71 pass
72
    def __init__(self, diagram, model=None, ninitial=2):
        """Initialize with a diagram. Create DiagramTagChainLinks according to
        the diagram, and figure out if we need to shift the central vertex.

        diagram: a base_objects.Diagram whose vertices are walked in order.
        model: passed through to link_from_leg/vertex_id_from_vertex
            (daughter classes decide what information they extract from it).
        ninitial: number of initial-state legs (default 2).
        """

        # wf_dict keeps track of the intermediate particles:
        # maps leg number -> DiagramTagChainLink built so far for that leg
        leg_dict = {}
        # Create the chain which will be the diagram tag
        for vertex in diagram.get('vertices'):
            # Only add incoming legs (the last leg of a vertex is its output)
            legs = vertex.get('legs')[:-1]
            lastvx = vertex == diagram.get('vertices')[-1]
            if lastvx:
                # If last vertex, all legs are incoming
                legs = vertex.get('legs')
            # Add links corresponding to the relevant legs: reuse the link
            # already built for an internal leg, or create an end link for
            # an external one
            link = DiagramTagChainLink([leg_dict.setdefault(leg.get('number'),
                                        DiagramTagChainLink(self.link_from_leg(leg, model))) \
                                        for leg in legs],
                                       self.vertex_id_from_vertex(vertex,
                                                                  lastvx,
                                                                  model,
                                                                  ninitial))
            # Add vertex to leg_dict if not last one, so later vertices can
            # pick up this subchain through the produced leg's number
            if not lastvx:
                leg_dict[vertex.get('legs')[-1].get('number')] = link

        # The resulting link is the hypothetical result
        self.tag = link

        # Now make sure to find the central vertex in the diagram,
        # defined by the longest leg being as short as possible
        done = max([l.depth for l in self.tag.links]) == 0
        while not done:
            # Identify the longest chain in the tag
            # (links are kept sorted, deepest first)
            longest_chain = self.tag.links[0]
            # Create a new link corresponding to moving one step
            # towards the longest chain
            new_link = DiagramTagChainLink(self.tag.links[1:],
                                           self.flip_vertex(\
                                               self.tag.vertex_id,
                                               longest_chain.vertex_id,
                                               self.tag.links[1:]))
            # Create a new final vertex in the direction of the longest link
            other_links = list(longest_chain.links) + [new_link]
            other_link = DiagramTagChainLink(other_links,
                                             self.flip_vertex(\
                                                 longest_chain.vertex_id,
                                                 self.tag.vertex_id,
                                                 other_links))

            if other_link.links[0] < self.tag.links[0]:
                # Shifting reduced the longest subchain:
                # switch to new tag, continue search
                self.tag = other_link
            else:
                # We have found the central vertex
                done = True
128
129 - def get_external_numbers(self):
130 """Get the order of external particles in this tag""" 131 132 return self.tag.get_external_numbers()
133
134 - def diagram_from_tag(self, model):
135 """Output a diagram from a DiagramTag. Note that each daughter 136 class must implement the static functions id_from_vertex_id 137 (if the vertex id is something else than an integer) and 138 leg_from_link (to pass the correct info from an end link to a 139 leg).""" 140 141 # Create the vertices, starting from the final vertex 142 diagram = base_objects.Diagram({'vertices': \ 143 self.vertices_from_link(self.tag, 144 model, 145 True)}) 146 diagram.calculate_orders(model) 147 return diagram
148 149 @classmethod 184 185 @classmethod
186 - def legPDGs_from_vertex_id(cls, vertex_id,model):
187 """Returns the list of external PDGs of the interaction corresponding 188 to this vertex_id.""" 189 190 # In case we have to deal with a regular vertex, we return the list 191 # external PDGs as given by the model information on that integer 192 # vertex id. 193 if (len(vertex_id)>=3 and 'PDGs' in vertex_id[2]): 194 return vertex_id[2]['PDGs'] 195 else: 196 return [part.get_pdg_code() for part in model.get_interaction( 197 cls.id_from_vertex_id(vertex_id)).get('particles')]
198 199 @classmethod
200 - def leg_from_legs(cls,legs, vertex_id, model):
201 """Return a leg from a leg list and the model info""" 202 203 pdgs = list(cls.legPDGs_from_vertex_id(vertex_id, model)) 204 205 # Extract the resulting pdg code from the interaction pdgs 206 for pdg in [leg.get('id') for leg in legs]: 207 pdgs.remove(pdg) 208 209 assert len(pdgs) == 1 210 # Prepare the new leg properties 211 pdg = model.get_particle(pdgs[0]).get_anti_pdg_code() 212 number = min([l.get('number') for l in legs]) 213 # State is False for t-channel, True for s-channel 214 state = (len([l for l in legs if l.get('state') == False]) != 1) 215 # Note that this needs to be done before combining decay chains 216 onshell= False 217 218 return base_objects.Leg({'id': pdg, 219 'number': number, 220 'state': state, 221 'onshell': onshell})
222 223 @classmethod 236 237 @staticmethod 250 251 @staticmethod
252 - def id_from_vertex_id(vertex_id):
253 """Return the numerical vertex id from a link.vertex_id""" 254 255 return vertex_id[0][0]
256 257 @staticmethod
258 - def loop_info_from_vertex_id(vertex_id):
259 """Return the loop_info stored in this vertex id. Notice that the 260 IdentifyME tag does not store the loop_info, but should normally never 261 need access to it.""" 262 263 return vertex_id[2]
264 265 @staticmethod
266 - def reorder_permutation(perm, start_perm):
267 """Reorder a permutation with respect to start_perm. Note that 268 both need to start from 1.""" 269 if perm == start_perm: 270 return list(range(len(perm))) 271 order = [i for (p,i) in \ 272 sorted([(p,i) for (i,p) in enumerate(perm)])] 273 return [start_perm[i]-1 for i in order]
274 275 @staticmethod 286 287 @staticmethod
288 - def vertex_id_from_vertex(vertex, last_vertex, model, ninitial):
289 """Returns the default vertex id: just the interaction id 290 Note that in the vertex id, like the leg, only the first entry is 291 taken into account in the tag comparison, while the second is for 292 storing information that is not to be used in comparisons and the 293 third for additional info regarding the shrunk loop vertex.""" 294 295 if isinstance(vertex,base_objects.ContractedVertex): 296 # return (vertex.get('id'),(),{'PDGs':vertex.get('PDGs')}) 297 return ((vertex.get('id'),vertex.get('loop_tag')),(), 298 {'PDGs':vertex.get('PDGs')}) 299 else: 300 return ((vertex.get('id'),()),(),{})
301 302 @staticmethod
303 - def flip_vertex(new_vertex, old_vertex, links):
304 """Returns the default vertex flip: just the new_vertex""" 305 return new_vertex
306
307 - def __eq__(self, other):
308 """Equal if same tag""" 309 if type(self) != type(other): 310 return False 311 return self.tag == other.tag
312
313 - def __ne__(self, other):
314 return not self.__eq__(other)
315
316 - def __str__(self):
317 return str(self.tag)
318
319 - def __lt__(self, other):
320 return self.tag < other.tag
321
322 - def __gt__(self, other):
323 return self.tag > other.tag
324 325 __repr__ = __str__
326 428
429 #=============================================================================== 430 # Amplitude 431 #=============================================================================== 432 -class Amplitude(base_objects.PhysicsObject):
433 """Amplitude: process + list of diagrams (ordered) 434 Initialize with a process, then call generate_diagrams() to 435 generate the diagrams for the amplitude 436 """ 437
438 - def default_setup(self):
439 """Default values for all properties""" 440 441 self['process'] = base_objects.Process() 442 self['diagrams'] = None 443 # has_mirror_process is True if the same process but with the 444 # two incoming particles interchanged has been generated 445 self['has_mirror_process'] = False
446
447 - def __init__(self, argument=None):
448 """Allow initialization with Process""" 449 if isinstance(argument, base_objects.Process): 450 super(Amplitude, self).__init__() 451 self.set('process', argument) 452 self.generate_diagrams() 453 elif argument != None: 454 # call the mother routine 455 super(Amplitude, self).__init__(argument) 456 else: 457 # call the mother routine 458 super(Amplitude, self).__init__()
459
460 - def filter(self, name, value):
461 """Filter for valid amplitude property values.""" 462 463 if name == 'process': 464 if not isinstance(value, base_objects.Process): 465 raise self.PhysicsObjectError("%s is not a valid Process object" % str(value)) 466 if name == 'diagrams': 467 if not isinstance(value, base_objects.DiagramList): 468 raise self.PhysicsObjectError("%s is not a valid DiagramList object" % str(value)) 469 if name == 'has_mirror_process': 470 if not isinstance(value, bool): 471 raise self.PhysicsObjectError("%s is not a valid boolean" % str(value)) 472 return True
473
474 - def get(self, name):
475 """Get the value of the property name.""" 476 477 if name == 'diagrams' and self[name] == None: 478 # Have not yet generated diagrams for this process 479 if self['process']: 480 self.generate_diagrams() 481 482 return super(Amplitude, self).get(name)
483 # return Amplitude.__bases__[0].get(self, name) #return the mother routine 484 485
486 - def get_sorted_keys(self):
487 """Return diagram property names as a nicely sorted list.""" 488 489 return ['process', 'diagrams', 'has_mirror_process']
490
491 - def get_number_of_diagrams(self):
492 """Returns number of diagrams for this amplitude""" 493 return len(self.get('diagrams'))
494
495 - def get_amplitudes(self):
496 """Return an AmplitudeList with just this amplitude. 497 Needed for DecayChainAmplitude.""" 498 499 return AmplitudeList([self])
500
501 - def nice_string(self, indent=0):
502 """Returns a nicely formatted string of the amplitude content.""" 503 return self.get('process').nice_string(indent) + "\n" + \ 504 self.get('diagrams').nice_string(indent)
505
506 - def nice_string_processes(self, indent=0):
507 """Returns a nicely formatted string of the amplitude process.""" 508 return self.get('process').nice_string(indent)
509
510 - def get_ninitial(self):
511 """Returns the number of initial state particles in the process.""" 512 return self.get('process').get_ninitial()
513
514 - def has_loop_process(self):
515 """ Returns wether this amplitude has a loop process.""" 516 517 return self.get('process').get('perturbation_couplings')
518
    def generate_diagrams(self, returndiag=False, diagram_filter=False):
        """Generate diagrams. Algorithm:

        1. Define interaction dictionaries:
          * 2->0 (identity), 3->0, 4->0, ... , maxlegs->0
          * 2 -> 1, 3 -> 1, ..., maxlegs-1 -> 1

        2. Set flag from_group=true for all external particles.
           Flip particle/anti particle for incoming particles.

        3. If there is a dictionary n->0 with n=number of external
           particles, create if possible the combination [(1,2,3,4,...)]
           with *at least two* from_group==true. This will give a
           finished (set of) diagram(s) (done by reduce_leglist)

        4. Create all allowed groupings of particles with at least one
           from_group==true (according to dictionaries n->1):
           [(1,2),3,4...],[1,(2,3),4,...],...,
           [(1,2),(3,4),...],...,[(1,2,3),4,...],...
           (done by combine_legs)

        5. Replace each group with a (list of) new particle(s) with number
           n = min(group numbers). Set from_group true for these
           particles and false for all other particles. Store vertex info.
           (done by merge_comb_legs)

        6. Stop algorithm when at most 2 particles remain.
           Return all diagrams (lists of vertices).

        7. Repeat from 3 (recursion done by reduce_leglist)

        8. Replace final p=p vertex

        Be aware that the resulting vertices have all particles outgoing,
        so need to flip for incoming particles when used.

        SPECIAL CASE: For A>BC... processes which are legs in decay
        chains, we need to ensure that BC... combine first, giving A=A
        as a final vertex. This case is defined by the Process
        property is_decay_chain = True.
        This function can also be called by the generate_diagram function
        of LoopAmplitudes, in which case the generated diagrams here must not
        be directly assigned to the 'diagrams' attribute but returned as a
        DiagramList by the function. This is controlled by the argument
        returndiag.
        """

        process = self.get('process')
        model = process.get('model')
        legs = process.get('legs')
        # Make sure orders is the minimum of orders and overall_orders
        for key in process.get('overall_orders').keys():
            try:
                process.get('orders')[key] = \
                                 min(process.get('orders')[key],
                                     process.get('overall_orders')[key])
            except KeyError:
                process.get('orders')[key] = process.get('overall_orders')[key]

        assert model.get('particles'), \
           "particles are missing in model: %s" % model.get('particles')

        assert model.get('interactions'), \
               "interactions are missing in model"

        res = base_objects.DiagramList()
        # First check that the number of fermions is even
        if len([leg for leg in legs if model.get('particle_dict')[\
                        leg.get('id')].is_fermion()]) % 2 == 1:
            if not returndiag:
                self['diagrams'] = res
                raise InvalidCmd('The number of fermion is odd')
            else:
                return False, res

        # Then check same number of incoming and outgoing fermions (if
        # no Majorana particles in model)
        if not model.get('got_majoranas') and \
           len([leg for leg in legs if leg.is_incoming_fermion(model)]) != \
           len([leg for leg in legs if leg.is_outgoing_fermion(model)]):
            if not returndiag:
                self['diagrams'] = res
                raise InvalidCmd('The number of of incoming/outcoming fermions are different')
            else:
                return False, res

        # Finally check that charge (conserved by all interactions) of the
        # process is globally conserved for this process.
        for charge in model.get('conserved_charge'):
            total = 0
            for leg in legs:
                part = model.get('particle_dict')[leg.get('id')]
                try:
                    value = part.get(charge)
                except (AttributeError, base_objects.PhysicsObject.PhysicsObjectError):
                    try:
                        value = getattr(part, charge)
                    except AttributeError:
                        value = 0

                # Incoming particles (and outgoing antiparticles) count
                # with opposite sign
                if (leg.get('id') != part['pdg_code']) != leg['state']:
                    total -= value
                else:
                    total += value

            if abs(total) > 1e-10:
                if not returndiag:
                    self['diagrams'] = res
                    raise InvalidCmd('No %s conservation for this process ' % charge)
                    # NOTE(review): unreachable — the raise above exits first
                    return res
                else:
                    raise InvalidCmd('No %s conservation for this process ' % charge)
                    # NOTE(review): unreachable — the raise above exits first
                    return res, res

        if not returndiag:
            logger.info("Trying %s " % process.nice_string().replace('Process', 'process'))

        # Give numbers to legs in process
        for i in range(0, len(process.get('legs'))):
            # Make sure legs are unique
            leg = copy.copy(process.get('legs')[i])
            process.get('legs')[i] = leg
            if leg.get('number') == 0:
                leg.set('number', i + 1)

        # Copy leglist from process, so we can flip leg identities
        # without affecting the original process
        leglist = self.copy_leglist(process.get('legs'))

        for leg in leglist:
            # For the first step, ensure the tag from_group
            # is true for all legs
            leg.set('from_group', True)

            # Need to flip part-antipart for incoming particles,
            # so they are all outgoing
            if leg.get('state') == False:
                part = model.get('particle_dict')[leg.get('id')]
                leg.set('id', part.get_anti_pdg_code())

        # Calculate the maximal multiplicity of n-1>1 configurations
        # to restrict possible leg combinations
        max_multi_to1 = max([len(key) for key in \
                             model.get('ref_dict_to1').keys()])

        # Reduce the leg list and return the corresponding
        # list of vertices

        # For decay processes, generate starting from final-state
        # combined only as the last particle. This allows to use these
        # in decay chains later on.
        is_decay_proc = process.get_ninitial() == 1
        if is_decay_proc:
            part = model.get('particle_dict')[leglist[0].get('id')]
            # For decay chain legs, we want everything to combine to
            # the initial leg. This is done by only allowing the
            # initial leg to combine as a final identity.
            ref_dict_to0 = {(part.get_pdg_code(),part.get_anti_pdg_code()):[0],
                            (part.get_anti_pdg_code(),part.get_pdg_code()):[0]}
            # Need to set initial leg from_group to None, to make sure
            # it can only be combined at the end.
            leglist[0].set('from_group', None)
            reduced_leglist = self.reduce_leglist(leglist,
                                                  max_multi_to1,
                                                  ref_dict_to0,
                                                  is_decay_proc,
                                                  process.get('orders'))
        else:
            reduced_leglist = self.reduce_leglist(leglist,
                                                  max_multi_to1,
                                                  model.get('ref_dict_to0'),
                                                  is_decay_proc,
                                                  process.get('orders'))

        # In LoopAmplitude the function below is overloaded such that it
        # converts back all DGLoopLegs to Legs. In the default tree-level
        # diagram generation, this does nothing.
        self.convert_dgleg_to_leg(reduced_leglist)

        if reduced_leglist:
            for vertex_list in reduced_leglist:
                res.append(self.create_diagram(base_objects.VertexList(vertex_list)))

        # Record whether or not we failed generation before required
        # s-channel propagators are taken into account
        failed_crossing = not res

        # Required s-channels is a list of id-lists. Select the
        # diagrams where all required s-channel propagators in any of
        # the lists are present (i.e., the different lists correspond
        # to "or", while the elements of the list correspond to
        # "and").
        if process.get('required_s_channels') and \
               process.get('required_s_channels')[0]:
            # We shouldn't look at the last vertex in each diagram,
            # since that is the n->0 vertex
            lastvx = -1
            # For decay chain processes, there is an "artificial"
            # extra vertex corresponding to particle 1=1, so we need
            # to exclude the two last vertexes.
            if is_decay_proc: lastvx = -2
            ninitial = len([leg for leg in process.get('legs') if leg.get('state') == False])
            # Check required s-channels for each list in required_s_channels
            old_res = res
            res = base_objects.DiagramList()
            for id_list in process.get('required_s_channels'):
                res_diags = [diagram for diagram in old_res if all([req_s_channel in \
                             [vertex.get_s_channel_id(\
                              process.get('model'), ninitial) \
                              for vertex in diagram.get('vertices')[:lastvx]] \
                              for req_s_channel in \
                              id_list])]
                # Add diagrams only if not already in res
                res.extend([diag for diag in res_diags if diag not in res])

        # Remove all diagrams with a "double" forbidden s-channel propagator
        # is present.
        # Note that we shouldn't look at the last vertex in each
        # diagram, since that is the n->0 vertex
        if process.get('forbidden_s_channels'):
            ninitial = len([leg for leg in process.get('legs') if leg.get('state') == False])
            if ninitial == 2:
                res = base_objects.DiagramList(\
                    [diagram for diagram in res if not any([vertex.get_s_channel_id(\
                     process.get('model'), ninitial) \
                     in process.get('forbidden_s_channels')
                     for vertex in diagram.get('vertices')[:-1]])])
            else:
                # split since we need to avoid that the initial particle is
                # forbidden as well.
                newres= []
                for diagram in res:
                    leg1 = 1
                    # check the latest vertex to see if the leg 1 is inside;
                    # if it is we need to inverse the look-up and allow the
                    # first s-channel of the associated particles.
                    vertex = diagram.get('vertices')[-1]
                    if any([l['number'] ==1 for l in vertex.get('legs')]):
                        leg1 = [l['number'] for l in vertex.get('legs') if l['number'] !=1][0]
                    to_loop = list(range(len(diagram.get('vertices'))-1))
                    if leg1 >1:
                        to_loop.reverse()
                    for i in to_loop:
                        vertex = diagram.get('vertices')[i]
                        if leg1:
                            if any([l['number'] ==leg1 for l in vertex.get('legs')]):
                                leg1 = 0
                                continue
                        if vertex.get_s_channel_id(process.get('model'), ninitial)\
                           in process.get('forbidden_s_channels'):
                            break
                    else:
                        # no forbidden s-channel found: keep the diagram
                        newres.append(diagram)
                res = base_objects.DiagramList(newres)

        # Mark forbidden (onshell) s-channel propagators, to forbid onshell
        # generation.
        if process.get('forbidden_onsh_s_channels'):
            ninitial = len([leg for leg in process.get('legs') if leg.get('state') == False])

            verts = base_objects.VertexList(sum([[vertex for vertex \
                    in diagram.get('vertices')[:-1]
                    if vertex.get_s_channel_id(\
                    process.get('model'), ninitial) \
                    in process.get('forbidden_onsh_s_channels')] \
                    for diagram in res], []))
            for vert in verts:
                # Use onshell = False to indicate that this s-channel is forbidden
                newleg = copy.copy(vert.get('legs').pop(-1))
                newleg.set('onshell', False)
                vert.get('legs').append(newleg)

        # Set actual coupling orders for each diagram
        for diagram in res:
            diagram.calculate_orders(model)

        # Filter the diagrams according to the squared coupling order
        # constraints and possibly the negative one. Remember that OrderName=-n
        # means that the user wants to include everything up to the N^(n+1)LO
        # contribution in that order and at most one order can be restricted
        # in this way. We shall do this only if the diagrams are not asked to
        # be returned, as is the case for NLO, because in this case the
        # interferences are not necessarily among the diagrams generated here
        # only.
        if not returndiag and len(res)>0:
            res = self.apply_squared_order_constraints(res)

        if diagram_filter:
            res = self.apply_user_filter(res)

        # Replace final id=0 vertex if necessary
        if not process.get('is_decay_chain'):
            for diagram in res:
                vertices = diagram.get('vertices')
                if len(vertices) > 1 and vertices[-1].get('id') == 0:
                    # Need to "glue together" last and next-to-last
                    # vertex, by replacing the (incoming) last leg of the
                    # next-to-last vertex with the (outgoing) leg in the
                    # last vertex
                    vertices = copy.copy(vertices)
                    lastvx = vertices.pop()
                    nexttolastvertex = copy.copy(vertices.pop())
                    legs = copy.copy(nexttolastvertex.get('legs'))
                    ntlnumber = legs[-1].get('number')
                    lastleg = [leg for leg in lastvx.get('legs') if leg.get('number') != ntlnumber][0]
                    # Reset onshell in case we have forbidden s-channels
                    if lastleg.get('onshell') == False:
                        lastleg.set('onshell', None)
                    # Replace the last leg of nexttolastvertex
                    legs[-1] = lastleg
                    nexttolastvertex.set('legs', legs)
                    vertices.append(nexttolastvertex)
                    diagram.set('vertices', vertices)

        if res and not returndiag:
            logger.info("Process has %d diagrams" % len(res))

        # Trim down number of legs and vertices used to save memory
        self.trim_diagrams(diaglist=res)

        # Sort process legs according to leg number
        pertur = 'QCD'
        if self.get('process')['perturbation_couplings']:
            pertur = sorted(self.get('process')['perturbation_couplings'])[0]
        self.get('process').get('legs').sort(pert=pertur)

        # Set diagrams to res if not asked to be returned
        if not returndiag:
            self['diagrams'] = res
            return not failed_crossing
        else:
            return not failed_crossing, res
853
    def apply_squared_order_constraints(self, diag_list):
        """Applies the user specified squared order constraints on the diagram
        list in argument and return the filtered DiagramList."""

        res = copy.copy(diag_list)

        # Apply the filtering on constrained amplitude (== and >)
        # No need to iterate on this one
        for name, (value, operator) in self['process'].get('constrained_orders').items():
            res.filter_constrained_orders(name, value, operator)

        # Iterate the filtering, since applying the constraint on one
        # type of coupling order can impact the filtering on a previous
        # one (relevant for the '==' type of constraint).
        while True:
            new_res = res.apply_positive_sq_orders(res,
                               self['process'].get('squared_orders'),
                               self['process']['sqorders_types'])
            # Exit condition: fixed point reached, nothing removed anymore
            if len(res)==len(new_res):
                break
            elif (len(new_res)>len(res)):
                # Filtering can only ever shrink the list
                raise MadGraph5Error(
                      'Inconsistency in function apply_squared_order_constraints().')
            # Actualizing the list of diagrams for the next iteration
            res = new_res

        # Now treat the negative squared order constraint (at most one)
        neg_orders = [(order, value) for order, value in \
                      self['process'].get('squared_orders').items() if value<0]
        if len(neg_orders)==1:
            neg_order, neg_value = neg_orders[0]
            # Now check any negative order constraint
            res, target_order = res.apply_negative_sq_order(res, neg_order,\
                                neg_value, self['process']['sqorders_types'][neg_order])
            # Substitute the negative value with this positive one so that
            # the resulting computed constraint appears in the print out
            # and at the output stage we no longer have to deal with
            # negative valued target orders
            self['process']['squared_orders'][neg_order]=target_order
        elif len(neg_orders)>1:
            raise InvalidCmd('At most one negative squared order constraint'+\
                             ' can be specified, not %s.'%str(neg_orders))

        return res
901
    def apply_user_filter(self, diag_list):
        """Applies the user specified diagram filter on the diagram list in
        argument and return the filtered DiagramList.

        The filter function remove_diag(diag, model) is loaded from
        PLUGIN/user_filter.py and must return True for diagrams that are
        to be discarded."""

        if True:
            remove_diag = misc.plugin_import('user_filter',
                'user filter required to be defined in PLUGIN/user_filter.py with the function remove_diag(ONEDIAG) which returns True if the diagram has to be removed',
                fcts=['remove_diag'])
        else:
            # Developer toggle: flip the condition above to False to use
            # this example implementation for simple tests.
            def remove_diag(diag, model=None):
                for vertex in diag['vertices']: #last
                    if vertex['id'] == 0: #special final vertex
                        continue
                    if vertex['legs'][-1]['number'] < 3: #this means T-channel
                        if abs(vertex['legs'][-1]['id']) <6:
                            return True
                return False

        res = diag_list.__class__()
        nb_removed = 0
        model = self['process']['model']
        for diag in diag_list:
            if remove_diag(diag, model):
                nb_removed +=1
            else:
                res.append(diag)

        if nb_removed:
            logger.warning('Diagram filter is ON and removed %s diagrams for this subprocess.' % nb_removed)

        return res
934 935 936
937 - def create_diagram(self, vertexlist):
938 """ Return a Diagram created from the vertex list. This function can be 939 overloaded by daughter classes.""" 940 return base_objects.Diagram({'vertices':vertexlist})
941
942 - def convert_dgleg_to_leg(self, vertexdoublelist):
943 """ In LoopAmplitude, it converts back all DGLoopLegs into Legs. 944 In Amplitude, there is nothing to do. """ 945 946 return True
947
948 - def copy_leglist(self, legs):
949 """ Simply returns a copy of the leg list. This function is 950 overloaded in LoopAmplitude so that a DGLoopLeg list is returned. 951 The DGLoopLeg has some additional parameters only useful during 952 loop diagram generation""" 953 954 return base_objects.LegList(\ 955 [ copy.copy(leg) for leg in legs ])
956
    def reduce_leglist(self, curr_leglist, max_multi_to1, ref_dict_to0,
                       is_decay_proc = False, coupling_orders = None):
        """Recursive function to reduce N LegList to N-1.
        For the algorithm, see the docstring of generate_diagrams.

        curr_leglist: current LegList (None means: discard this diagram).
        max_multi_to1: maximal multiplicity of n->1 combinations.
        ref_dict_to0: dictionary of allowed final (n->0) combinations.
        is_decay_proc: True for 1->N decay processes.
        coupling_orders: remaining coupling-order budget, or None.

        Returns a list of vertex lists, or None when no valid reduction
        exists.
        """

        # Result variable which is a list of lists of vertices
        # to be added
        res = []

        # Stop condition. If LegList is None, that means that this
        # diagram must be discarded
        if curr_leglist is None:
            return None

        # Extract ref dict information
        model = self.get('process').get('model')
        ref_dict_to1 = self.get('process').get('model').get('ref_dict_to1')

        # If all legs can be combined in one single vertex, add this
        # vertex to res and continue.
        # Special treatment for decay chain legs

        if curr_leglist.can_combine_to_0(ref_dict_to0, is_decay_proc):
            # Extract the interaction id associated to the vertex
            vertex_ids = self.get_combined_vertices(curr_leglist,
                         copy.copy(ref_dict_to0[tuple(sorted([leg.get('id') for \
                                                 leg in curr_leglist]))]))

            final_vertices = [base_objects.Vertex({'legs':curr_leglist,
                                                   'id':vertex_id}) for \
                              vertex_id in vertex_ids]
            # Check for coupling orders. If orders < 0, skip vertex
            for final_vertex in final_vertices:
                if self.reduce_orders(coupling_orders, model,
                                      [final_vertex.get('id')]) != False:
                    res.append([final_vertex])
        # Stop condition 2: if the leglist contained exactly two particles,
        # return the result, if any, and stop.
        if len(curr_leglist) == 2:
            if res:
                return res
            else:
                return None

        # Create a list of all valid combinations of legs
        comb_lists = self.combine_legs(curr_leglist,
                                       ref_dict_to1, max_multi_to1)

        # Create a list of leglists/vertices by merging combinations
        leg_vertex_list = self.merge_comb_legs(comb_lists, ref_dict_to1)

        # Consider all the pairs (reduced leglist, vertex list)
        for leg_vertex_tuple in leg_vertex_list:

            # Remove forbidden particles
            if self.get('process').get('forbidden_particles') and \
                any([abs(vertex.get('legs')[-1].get('id')) in \
                self.get('process').get('forbidden_particles') \
                for vertex in leg_vertex_tuple[1]]):
                continue

            # Check for coupling orders. If couplings < 0, skip recursion.
            new_coupling_orders = self.reduce_orders(coupling_orders,
                                                     model,
                                                     [vertex.get('id') for vertex in \
                                                      leg_vertex_tuple[1]])
            if new_coupling_orders == False:
                # Some coupling order < 0
                continue

            # This is where recursion happens
            # First, reduce again the leg part
            reduced_diagram = self.reduce_leglist(leg_vertex_tuple[0],
                                                  max_multi_to1,
                                                  ref_dict_to0,
                                                  is_decay_proc,
                                                  new_coupling_orders)
            # If there is a reduced diagram, combine its vertex lists
            # with the vertices of this step
            if reduced_diagram:
                vertex_list_list = [list(leg_vertex_tuple[1])]
                vertex_list_list.append(reduced_diagram)
                expanded_list = expand_list_list(vertex_list_list)
                res.extend(expanded_list)

        return res
1045
1046 - def reduce_orders(self, coupling_orders, model, vertex_id_list):
1047 """Return False if the coupling orders for any coupling is < 1048 0, otherwise return the new coupling orders with the vertex 1049 orders subtracted. If coupling_orders is not given, return 1050 None (which counts as success). 1051 WEIGHTED is a special order, which corresponds to the sum of 1052 order hierarchies for the couplings. 1053 We ignore negative constraints as these cannot be taken into 1054 account on the fly but only after generation.""" 1055 1056 if not coupling_orders: 1057 return None 1058 1059 present_couplings = copy.copy(coupling_orders) 1060 for id in vertex_id_list: 1061 # Don't check for identity vertex (id = 0) 1062 if not id: 1063 continue 1064 inter = model.get("interaction_dict")[id] 1065 for coupling in inter.get('orders').keys(): 1066 # Note that we don't consider a missing coupling as a 1067 # constraint 1068 if coupling in present_couplings and \ 1069 present_couplings[coupling]>=0: 1070 # Reduce the number of couplings that are left 1071 present_couplings[coupling] -= \ 1072 inter.get('orders')[coupling] 1073 if present_couplings[coupling] < 0: 1074 # We have too many couplings of this type 1075 return False 1076 # Now check for WEIGHTED, i.e. the sum of coupling hierarchy values 1077 if 'WEIGHTED' in present_couplings and \ 1078 present_couplings['WEIGHTED']>=0: 1079 weight = sum([model.get('order_hierarchy')[c]*n for \ 1080 (c,n) in inter.get('orders').items()]) 1081 present_couplings['WEIGHTED'] -= weight 1082 if present_couplings['WEIGHTED'] < 0: 1083 # Total coupling weight too large 1084 return False 1085 1086 return present_couplings
1087
    def combine_legs(self, list_legs, ref_dict_to1, max_multi_to1):
        """Recursive function. Take a list of legs as an input, with
        the reference dictionary n-1->1, and output a list of list of
        tuples of Legs (allowed combinations) and Legs (rest).

        list_legs:      list of Leg objects still to be combined
        ref_dict_to1:   model dictionary mapping sorted tuples of n-1
                        particle ids to the possible resulting particle
        max_multi_to1:  largest number of legs a single vertex may combine

        Algorithm:

        1. Get all n-combinations from list [123456]: [12],..,[23],..,[123],..

        2. For each combination, say [34]. Check if combination is valid.
           If so:

           a. Append [12[34]56] to result array

           b. Split [123456] at index(first element in combination+1),
              i.e. [12],[456] and subtract combination from second half,
              i.e.: [456]-[34]=[56]. Repeat from 1. with this array

        3. Take result array from call to 1. (here, [[56]]) and append
           (first half in step b - combination) + combination + (result
           from 1.) = [12[34][56]] to result array

        4. After appending results from all n-combinations, return
           resulting array. Example, if [13] and [45] are valid
           combinations:
           [[[13]2456],[[13]2[45]6],[123[45]6]]
        """

        res = []

        # loop over possible combination lengths (+1 is for range convention!)
        for comb_length in range(2, max_multi_to1 + 1):

            # Check the considered length is not longer than the list length;
            # longer lengths can only be longer still, so stop here
            if comb_length > len(list_legs):
                return res

            # itertools.combinations returns all possible combinations
            # of comb_length elements from list_legs
            for comb in itertools.combinations(list_legs, comb_length):

                # Check if the combination is valid (i.e. some vertex
                # in the model can merge these legs into one)
                if base_objects.LegList(comb).can_combine_to_1(ref_dict_to1):

                    # Identify the rest, create a list [comb,rest] and
                    # add it to res.  The tuple comb replaces its first
                    # member's position so leg ordering is preserved.
                    res_list = copy.copy(list_legs)
                    for leg in comb:
                        res_list.remove(leg)
                    res_list.insert(list_legs.index(comb[0]), comb)
                    res.append(res_list)

                    # Now, deal with cases with more than 1 combination

                    # First, split the list into two, according to the
                    # position of the first element in comb, and remove
                    # all elements from comb
                    res_list1 = list_legs[0:list_legs.index(comb[0])]
                    res_list2 = list_legs[list_legs.index(comb[0]) + 1:]
                    for leg in comb[1:]:
                        res_list2.remove(leg)

                    # Create a list of type [comb,rest1,rest2(combined)]
                    # NOTE: res_list deliberately aliases res_list1 (a fresh
                    # slice), so appending comb here is safe
                    res_list = res_list1
                    res_list.append(comb)
                    # This is where recursion actually happens,
                    # on the second part
                    for item in self.combine_legs(res_list2,
                                                  ref_dict_to1,
                                                  max_multi_to1):
                        final_res_list = copy.copy(res_list)
                        final_res_list.extend(item)
                        res.append(final_res_list)

        return res
1161 1162
    def merge_comb_legs(self, comb_lists, ref_dict_to1):
        """Takes a list of allowed leg combinations as an input and returns
        a set of lists where combinations have been properly replaced
        (one list per element in the ref_dict, so that all possible intermediate
        particles are included). For each list, give the list of vertices
        corresponding to the executed merging, group the two as a tuple.

        comb_lists:   output of combine_legs - lists mixing Leg objects and
                      tuples of Legs (the combinations to merge)
        ref_dict_to1: model dictionary mapping sorted tuples of particle ids
                      to the possible (leg id, vertex id) results

        Returns a list of (LegList, VertexList) tuples.
        """

        res = []

        for comb_list in comb_lists:

            reduced_list = []
            vertex_list = []

            for entry in comb_list:

                # Act on all leg combinations
                if isinstance(entry, tuple):

                    # Build the leg object which will replace the combination:
                    # 1) leg ids is as given in the ref_dict
                    leg_vert_ids = copy.copy(ref_dict_to1[\
                        tuple(sorted([leg.get('id') for leg in entry]))])
                    # 2) number is the minimum of leg numbers involved in the
                    # combination
                    number = min([leg.get('number') for leg in entry])
                    # 3) state is final, unless there is exactly one initial
                    # state particle involved in the combination -> t-channel
                    if len([leg for leg in entry if leg.get('state') == False]) == 1:
                        state = False
                    else:
                        state = True
                    # 4) from_group is True, by definition

                    # Create and add the object. This is done by a
                    # separate routine, to allow overloading by
                    # daughter classes
                    new_leg_vert_ids = []
                    if leg_vert_ids:
                        new_leg_vert_ids = self.get_combined_legs(entry,
                                                                  leg_vert_ids,
                                                                  number,
                                                                  state)

                    # One candidate merged leg per allowed intermediate particle
                    reduced_list.append([l[0] for l in new_leg_vert_ids])


                    # Create and add the corresponding vertex
                    # Extract vertex ids corresponding to the various legs
                    # in mylegs
                    vlist = base_objects.VertexList()
                    for (myleg, vert_id) in new_leg_vert_ids:
                        # Start with the considered combination...
                        myleglist = base_objects.LegList(list(entry))
                        # ... and complete with legs after reducing
                        myleglist.append(myleg)
                        # ... and consider the correct vertex id
                        vlist.append(base_objects.Vertex(
                                         {'legs':myleglist,
                                          'id':vert_id}))

                    vertex_list.append(vlist)

                # If entry is not a combination, switch the from_group flag
                # and add it
                else:
                    cp_entry = copy.copy(entry)
                    # Need special case for from_group == None; this
                    # is for initial state leg of decay chain process
                    # (see Leg.can_combine_to_0)
                    if cp_entry.get('from_group') != None:
                        cp_entry.set('from_group', False)
                    reduced_list.append(cp_entry)

            # Flatten the obtained leg and vertex lists: one entry per
            # choice of intermediate particle for every combination
            flat_red_lists = expand_list(reduced_list)
            flat_vx_lists = expand_list(vertex_list)

            # Combine the two lists in a list of tuple
            for i in range(0, len(flat_vx_lists)):
                res.append((base_objects.LegList(flat_red_lists[i]), \
                            base_objects.VertexList(flat_vx_lists[i])))

        return res
1248
1249 - def get_combined_legs(self, legs, leg_vert_ids, number, state):
1250 """Create a set of new legs from the info given. This can be 1251 overloaded by daughter classes.""" 1252 1253 mylegs = [(base_objects.Leg({'id':leg_id, 1254 'number':number, 1255 'state':state, 1256 'from_group':True}), 1257 vert_id)\ 1258 for leg_id, vert_id in leg_vert_ids] 1259 1260 return mylegs
1261
1262 - def get_combined_vertices(self, legs, vert_ids):
1263 """Allow for selection of vertex ids. This can be 1264 overloaded by daughter classes.""" 1265 1266 return vert_ids
1267
1268 - def trim_diagrams(self, decay_ids=[], diaglist=None):
1269 """Reduce the number of legs and vertices used in memory. 1270 When called by a diagram generation initiated by LoopAmplitude, 1271 this function should not trim the diagrams in the attribute 'diagrams' 1272 but rather a given list in the 'diaglist' argument.""" 1273 1274 legs = [] 1275 vertices = [] 1276 1277 if diaglist is None: 1278 diaglist=self.get('diagrams') 1279 1280 # Flag decaying legs in the core process by onshell = True 1281 process = self.get('process') 1282 for leg in process.get('legs'): 1283 if leg.get('state') and leg.get('id') in decay_ids: 1284 leg.set('onshell', True) 1285 1286 for diagram in diaglist: 1287 # Keep track of external legs (leg numbers already used) 1288 leg_external = set() 1289 for ivx, vertex in enumerate(diagram.get('vertices')): 1290 for ileg, leg in enumerate(vertex.get('legs')): 1291 # Ensure that only external legs get decay flag 1292 if leg.get('state') and leg.get('id') in decay_ids and \ 1293 leg.get('number') not in leg_external: 1294 # Use onshell to indicate decaying legs, 1295 # i.e. legs that have decay chains 1296 leg = copy.copy(leg) 1297 leg.set('onshell', True) 1298 try: 1299 index = legs.index(leg) 1300 except ValueError: 1301 vertex.get('legs')[ileg] = leg 1302 legs.append(leg) 1303 else: # Found a leg 1304 vertex.get('legs')[ileg] = legs[index] 1305 leg_external.add(leg.get('number')) 1306 try: 1307 index = vertices.index(vertex) 1308 diagram.get('vertices')[ivx] = vertices[index] 1309 except ValueError: 1310 vertices.append(vertex)
1311
#===============================================================================
# AmplitudeList
#===============================================================================
class AmplitudeList(base_objects.PhysicsObjectList):
    """List of Amplitude objects
    """

    def has_any_loop_process(self):
        """Check the content of all processes of the amplitudes in this list
        to see if there is any which defines perturbation couplings.

        Returns True if one is found, False otherwise.  (Fix: the original
        fell off the end and returned None; the explicit False keeps the
        same truthiness for existing callers.)"""

        for amp in self:
            if amp.has_loop_process():
                return True
        return False

    def is_valid_element(self, obj):
        """Test if object obj is a valid Amplitude for the list."""

        return isinstance(obj, Amplitude)
#===============================================================================
# DecayChainAmplitude
#===============================================================================
class DecayChainAmplitude(Amplitude):
    """A list of amplitudes + a list of decay chain amplitude lists;
    corresponding to a ProcessDefinition with a list of decay chains
    """

    def default_setup(self):
        """Default values for all properties"""

        # Amplitudes of the core (production) process
        self['amplitudes'] = AmplitudeList()
        # One DecayChainAmplitude per decay chain attached to the core process
        self['decay_chains'] = DecayChainAmplitudeList()

    def __init__(self, argument = None, collect_mirror_procs = False,
                 ignore_six_quark_processes = False, loop_filter=None, diagram_filter=False):
        """Allow initialization with Process and with ProcessDefinition.

        When given a Process/ProcessDefinition, generate the core amplitudes,
        recursively build a DecayChainAmplitude for every decay chain, flag
        the decaying legs in the core diagrams, and prune decays that do not
        correspond to any final-state particle of the core process.
        """

        if isinstance(argument, base_objects.Process):
            super(DecayChainAmplitude, self).__init__()
            # Imported here to avoid a circular import at module load time
            from madgraph.loop.loop_diagram_generation import LoopMultiProcess
            if argument['perturbation_couplings']:
                MultiProcessClass=LoopMultiProcess
            else:
                MultiProcessClass=MultiProcess
            if isinstance(argument, base_objects.ProcessDefinition):
                # A multiprocess definition: expand it into all amplitudes
                self['amplitudes'].extend(\
                    MultiProcessClass.generate_multi_amplitudes(argument,
                                                  collect_mirror_procs,
                                                  ignore_six_quark_processes,
                                                  loop_filter=loop_filter,
                                                  diagram_filter=diagram_filter))
            else:
                # A single process: one amplitude
                self['amplitudes'].append(\
                    MultiProcessClass.get_amplitude_from_proc(argument,
                                                  loop_filter=loop_filter,
                                                  diagram_filter=diagram_filter))
                # Clean decay chains from process, since we haven't
                # combined processes with decay chains yet
                process = copy.copy(self.get('amplitudes')[0].get('process'))
                process.set('decay_chains', base_objects.ProcessList())
                self['amplitudes'][0].set('process', process)

            # Recursively build the decay chain amplitudes
            for process in argument.get('decay_chains'):
                if process.get('perturbation_couplings'):
                    raise MadGraph5Error("Decay processes can not be perturbed")
                process.set('overall_orders', argument.get('overall_orders'))
                if not process.get('is_decay_chain'):
                    process.set('is_decay_chain',True)
                if not process.get_ninitial() == 1:
                    raise InvalidCmd("Decay chain process must have exactly one" + \
                                     " incoming particle")
                self['decay_chains'].append(\
                    DecayChainAmplitude(process, collect_mirror_procs,
                                        ignore_six_quark_processes,
                                        diagram_filter=diagram_filter))

            # Flag decaying legs in the core diagrams by onshell = True
            # (the decaying particle is the initial leg of each decay)
            decay_ids = sum([[a.get('process').get('legs')[0].get('id') \
                              for a in dec.get('amplitudes')] for dec in \
                             self['decay_chains']], [])
            decay_ids = set(decay_ids)
            for amp in self['amplitudes']:
                amp.trim_diagrams(decay_ids)

            # Check that all decay ids are present in at least some process;
            # ids found in a core process are removed from decay_ids, so
            # whatever remains afterwards is unused
            for amp in self['amplitudes']:
                for l in amp.get('process').get('legs'):
                    if l.get('id') in decay_ids:
                        decay_ids.remove(l.get('id'))

            if decay_ids:
                model = amp.get('process').get('model')
                names = [model.get_particle(id).get('name') for id in decay_ids]

                logger.warning(
                    "$RED Decay without corresponding particle in core process found.\n" + \
                    "Decay information for particle(s) %s is discarded.\n" % ','.join(names) + \
                    "Please check your process definition carefully. \n" + \
                    "This warning usually means that you forgot parentheses in presence of subdecay.\n" + \
                    "Example of correct syntax: p p > t t~, ( t > w+ b, w+ > l+ vl)")

            # Remove unused decays from the process list
            # (iterate reversed so in-place removal is safe)
            for dc in reversed(self['decay_chains']):
                for a in reversed(dc.get('amplitudes')):
                    # Remove the amplitudes from this decay chain
                    if a.get('process').get('legs')[0].get('id') in decay_ids:
                        dc.get('amplitudes').remove(a)
                if not dc.get('amplitudes'):
                    # If no amplitudes left, remove the decay chain
                    self['decay_chains'].remove(dc)

            # Finally, write a fat warning if any decay process has
            # the decaying particle (or its antiparticle) in the final state
            bad_procs = []
            for dc in self['decay_chains']:
                for amp in dc.get('amplitudes'):
                    legs = amp.get('process').get('legs')
                    fs_parts = [abs(l.get('id')) for l in legs if
                                l.get('state')]
                    is_part = [l.get('id') for l in legs if not
                               l.get('state')][0]
                    if abs(is_part) in fs_parts:
                        bad_procs.append(amp.get('process'))

            if bad_procs:
                logger.warning(
                    "$RED Decay(s) with particle decaying to itself:\n" + \
                    '\n'.join([p.nice_string() for p in bad_procs]) + \
                    "\nPlease check your process definition carefully. \n")


        elif argument != None:
            # call the mother routine
            super(DecayChainAmplitude, self).__init__(argument)
        else:
            # call the mother routine
            super(DecayChainAmplitude, self).__init__()

    def filter(self, name, value):
        """Filter for valid amplitude property values."""

        if name == 'amplitudes':
            if not isinstance(value, AmplitudeList):
                raise self.PhysicsObjectError("%s is not a valid AmplitudeList" % str(value))
        if name == 'decay_chains':
            if not isinstance(value, DecayChainAmplitudeList):
                raise self.PhysicsObjectError("%s is not a valid DecayChainAmplitudeList object" % \
                                              str(value))
        return True

    def get_sorted_keys(self):
        """Return diagram property names as a nicely sorted list."""

        return ['amplitudes', 'decay_chains']

    # Helper functions

    def get_number_of_diagrams(self):
        """Returns number of diagrams for this amplitude,
        including all decay chain diagrams"""
        return sum(len(a.get('diagrams')) for a in self.get('amplitudes')) \
               + sum(d.get_number_of_diagrams() for d in \
                     self.get('decay_chains'))

    def nice_string(self, indent = 0):
        """Returns a nicely formatted string of the amplitude content."""
        mystr = ""
        for amplitude in self.get('amplitudes'):
            mystr = mystr + amplitude.nice_string(indent) + "\n"

        if self.get('decay_chains'):
            mystr = mystr + " " * indent + "Decays:\n"
            for dec in self.get('decay_chains'):
                mystr = mystr + dec.nice_string(indent + 2) + "\n"

        # Strip the trailing newline
        return mystr[:-1]

    def nice_string_processes(self, indent = 0):
        """Returns a nicely formatted string of the amplitude processes."""
        mystr = ""
        for amplitude in self.get('amplitudes'):
            mystr = mystr + amplitude.nice_string_processes(indent) + "\n"

        if self.get('decay_chains'):
            mystr = mystr + " " * indent + "Decays:\n"
            for dec in self.get('decay_chains'):
                mystr = mystr + dec.nice_string_processes(indent + 2) + "\n"

        # Strip the trailing newline
        return mystr[:-1]

    def get_ninitial(self):
        """Returns the number of initial state particles in the process."""
        return self.get('amplitudes')[0].get('process').get_ninitial()

    def get_decay_ids(self):
        """Returns a set of all particle ids for which a decay is defined"""

        decay_ids = []

        # Get all amplitudes for the decay processes
        for amp in sum([dc.get('amplitudes') for dc \
                        in self['decay_chains']], []):
            # For each amplitude, find the initial state leg
            decay_ids.append(amp.get('process').get_initial_ids()[0])

        # Return a list with unique ids
        return list(set(decay_ids))

    def has_loop_process(self):
        """ Returns whether this amplitude has a loop process."""
        return self['amplitudes'].has_any_loop_process()

    def get_amplitudes(self):
        """Recursive function to extract all amplitudes for this process,
        including those of all decay chains"""

        amplitudes = AmplitudeList()

        amplitudes.extend(self.get('amplitudes'))
        for decay in self.get('decay_chains'):
            amplitudes.extend(decay.get_amplitudes())

        return amplitudes
#===============================================================================
# DecayChainAmplitudeList
#===============================================================================
class DecayChainAmplitudeList(base_objects.PhysicsObjectList):
    """A list holding DecayChainAmplitude objects."""

    def is_valid_element(self, obj):
        """A valid element is any DecayChainAmplitude instance."""

        valid = isinstance(obj, DecayChainAmplitude)
        return valid
1548 1549 #=============================================================================== 1550 # MultiProcess 1551 #=============================================================================== 1552 -class MultiProcess(base_objects.PhysicsObject):
1553 """MultiProcess: list of process definitions 1554 list of processes (after cleaning) 1555 list of amplitudes (after generation) 1556 """ 1557
    def default_setup(self):
        """Default values for all properties"""

        # The input ProcessDefinition objects to generate amplitudes for
        self['process_definitions'] = base_objects.ProcessDefinitionList()
        # self['amplitudes'] can be an AmplitudeList or a
        # DecayChainAmplitudeList, depending on whether there are
        # decay chains in the process definitions or not.
        self['amplitudes'] = AmplitudeList()
        # Flag for whether to combine IS mirror processes together
        self['collect_mirror_procs'] = False
        # List of quark flavors where we ignore processes with at
        # least 6 quarks (three quark lines)
        self['ignore_six_quark_processes'] = []
        # Allow to use the model parameter numerical value for optimization.
        # This is currently used for 1->N generation (mass check).
        self['use_numerical'] = False
1574
1575 - def __init__(self, argument=None, collect_mirror_procs = False, 1576 ignore_six_quark_processes = [], optimize=False, 1577 loop_filter=None, diagram_filter=None):
1578 """Allow initialization with ProcessDefinition or 1579 ProcessDefinitionList 1580 optimize allows to use param_card information. (usefull for 1-.N)""" 1581 1582 if isinstance(argument, base_objects.ProcessDefinition): 1583 super(MultiProcess, self).__init__() 1584 self['process_definitions'].append(argument) 1585 elif isinstance(argument, base_objects.ProcessDefinitionList): 1586 super(MultiProcess, self).__init__() 1587 self['process_definitions'] = argument 1588 elif argument != None: 1589 # call the mother routine 1590 super(MultiProcess, self).__init__(argument) 1591 else: 1592 # call the mother routine 1593 super(MultiProcess, self).__init__() 1594 1595 self['collect_mirror_procs'] = collect_mirror_procs 1596 self['ignore_six_quark_processes'] = ignore_six_quark_processes 1597 self['use_numerical'] = optimize 1598 self['loop_filter'] = loop_filter 1599 self['diagram_filter'] = diagram_filter # only True/False so far 1600 1601 if isinstance(argument, base_objects.ProcessDefinition) or \ 1602 isinstance(argument, base_objects.ProcessDefinitionList): 1603 # Generate the diagrams 1604 self.get('amplitudes')
1605 1606
1607 - def filter(self, name, value):
1608 """Filter for valid process property values.""" 1609 1610 if name == 'process_definitions': 1611 if not isinstance(value, base_objects.ProcessDefinitionList): 1612 raise self.PhysicsObjectError("%s is not a valid ProcessDefinitionList object" % str(value)) 1613 1614 if name == 'amplitudes': 1615 if not isinstance(value, AmplitudeList): 1616 raise self.PhysicsObjectError("%s is not a valid AmplitudeList object" % str(value)) 1617 1618 if name in ['collect_mirror_procs']: 1619 if not isinstance(value, bool): 1620 raise self.PhysicsObjectError("%s is not a valid boolean" % str(value)) 1621 1622 if name == 'ignore_six_quark_processes': 1623 if not isinstance(value, list): 1624 raise self.PhysicsObjectError("%s is not a valid list" % str(value)) 1625 1626 return True
1627
    def get(self, name):
        """Get the value of the property name.

        The 'amplitudes' property is generated lazily: on first access it is
        filled from the process definitions, using DecayChainAmplitude for
        definitions with decay chains and generate_multi_amplitudes
        otherwise."""

        if (name == 'amplitudes') and not self[name]:
            for process_def in self.get('process_definitions'):
                if process_def.get('decay_chains'):
                    # This is a decay chain process
                    # Store amplitude(s) as DecayChainAmplitude
                    self['amplitudes'].append(\
                        DecayChainAmplitude(process_def,
                                        self.get('collect_mirror_procs'),
                                        self.get('ignore_six_quark_processes'),
                                        diagram_filter=self['diagram_filter']))
                else:
                    self['amplitudes'].extend(\
                        self.generate_multi_amplitudes(process_def,
                                        self.get('collect_mirror_procs'),
                                        self.get('ignore_six_quark_processes'),
                                        self['use_numerical'],
                                        loop_filter=self['loop_filter'],
                                        diagram_filter=self['diagram_filter']))

        return MultiProcess.__bases__[0].get(self, name) # call the mother routine
1651
1652 - def get_sorted_keys(self):
1653 """Return process property names as a nicely sorted list.""" 1654 1655 return ['process_definitions', 'amplitudes']
1656
1657 - def get_model(self):
1658 1659 return self['process_definitions'][0]['model']
    @classmethod
    def generate_multi_amplitudes(cls,process_definition,
                                  collect_mirror_procs = False,
                                  ignore_six_quark_processes = [],
                                  use_numerical=False,
                                  loop_filter=None,
                                  diagram_filter=False):
        """Generate amplitudes in a semi-efficient way.
        Make use of crossing symmetry for processes that fail diagram
        generation, but not for processes that succeed diagram
        generation. Doing so will risk making it impossible to
        identify processes with identical amplitudes.

        Returns an AmplitudeList with one amplitude per distinct process;
        raises NoDiagramException (or the last InvalidCmd) if none of the
        processes yields any diagram.
        """
        assert isinstance(process_definition, base_objects.ProcessDefinition), \
                                    "%s not valid ProcessDefinition object" % \
                                    repr(process_definition)

        # Set automatic coupling orders
        process_definition.set('orders', MultiProcess.\
                               find_optimal_process_orders(process_definition,
                               diagram_filter))
        # Check for maximum orders from the model
        process_definition.check_expansion_orders()

        processes = base_objects.ProcessList()
        amplitudes = AmplitudeList()

        # failed_procs and success_procs are sorted processes that have
        # already failed/succeeded based on crossing symmetry
        failed_procs = []
        success_procs = []
        # Complete processes, for identification of mirror processes
        non_permuted_procs = []
        # permutations keeps the permutations of the crossed processes
        permutations = []

        # Store the diagram tags for processes, to allow for
        # identifying identical matrix elements already at this stage.
        model = process_definition['model']

        islegs = [leg for leg in process_definition['legs'] \
                  if leg['state'] == False]
        fslegs = [leg for leg in process_definition['legs'] \
                  if leg['state'] == True]

        # Multiparticle id lists for initial/final legs
        isids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == False]
        fsids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == True]
        polids = [tuple(leg['polarization']) for leg in process_definition['legs'] \
                 if leg['state'] == True]
        # Generate all combinations for the initial state
        for prod in itertools.product(*isids):
            islegs = [\
                    base_objects.Leg({'id':id, 'state': False,
                                      'polarization': islegs[i]['polarization']})
                    for i,id in enumerate(prod)]

            # Generate all combinations for the final state, and make
            # sure to remove double counting

            red_fsidlist = set()

            for prod in itertools.product(*fsids):
                # Tag on (id, polarization) so differently polarized final
                # states are not collapsed together
                tag = zip(prod, polids)
                tag = sorted(tag)
                # Remove double counting between final states
                if tuple(tag) in red_fsidlist:
                    continue

                red_fsidlist.add(tuple(tag))
                # Generate leg list for process
                leg_list = [copy.copy(leg) for leg in islegs]
                leg_list.extend([\
                        base_objects.Leg({'id':id, 'state': True, 'polarization': fslegs[i]['polarization']}) \
                        for i,id in enumerate(prod)])

                legs = base_objects.LegList(leg_list)

                # Check for crossed processes: a canonical (sorted) id list
                # plus the permutation that restores the original order
                sorted_legs = sorted([(l,i+1) for (i,l) in \
                                     enumerate(legs.get_outgoing_id_list(model))])
                permutation = [l[1] for l in sorted_legs]

                sorted_legs = array.array('i', [l[0] for l in sorted_legs])

                # Check for six-quark processes
                if ignore_six_quark_processes and \
                       len([i for i in sorted_legs if abs(i) in \
                            ignore_six_quark_processes]) >= 6:
                    continue

                # Check if crossed process has already failed,
                # in that case don't check process
                if sorted_legs in failed_procs:
                    continue

                # If allowed check mass validity [assume 1->N]
                if use_numerical:
                    # check that final state has lower mass than initial state
                    initial_mass = abs(model['parameter_dict'][model.get_particle(legs[0].get('id')).get('mass')])
                    if initial_mass == 0:
                        continue
                    for leg in legs[1:]:
                        m = model['parameter_dict'][model.get_particle(leg.get('id')).get('mass')]
                        initial_mass -= abs(m)
                    if initial_mass.real <= 0:
                        continue

                # Setup process
                process = process_definition.get_process_with_legs(legs)

                fast_proc = \
                          array.array('i',[leg.get('id') for leg in legs])
                if collect_mirror_procs and \
                        process_definition.get_ninitial() == 2:
                    # Check if mirrored process is already generated
                    mirror_proc = \
                              array.array('i', [fast_proc[1], fast_proc[0]] + \
                                          list(fast_proc[2:]))
                    try:
                        mirror_amp = \
                                   amplitudes[non_permuted_procs.index(mirror_proc)]
                    except Exception:
                        # Didn't find any mirror process
                        pass
                    else:
                        # Mirror process found
                        mirror_amp.set('has_mirror_process', True)
                        logger.info("Process %s added to mirror process %s" % \
                                    (process.base_string(),
                                     mirror_amp.get('process').base_string()))
                        continue

                # Check for successful crossings, unless we have specified
                # properties that break crossing symmetry
                if not process.get('required_s_channels') and \
                   not process.get('forbidden_onsh_s_channels') and \
                   not process.get('forbidden_s_channels') and \
                   not process.get('is_decay_chain') and not diagram_filter:
                    try:
                        crossed_index = success_procs.index(sorted_legs)
                        # The relabeling of legs for loop amplitudes is cumbersome
                        # and does not save so much time. It is disable here and
                        # we use the key 'loop_diagrams' to decide whether
                        # it is an instance of LoopAmplitude.
                        if 'loop_diagrams' in amplitudes[crossed_index]:
                            raise ValueError
                    except ValueError:
                        # No crossing found, just continue
                        pass
                    else:
                        # Found crossing - reuse amplitude
                        amplitude = MultiProcess.cross_amplitude(\
                            amplitudes[crossed_index],
                            process,
                            permutations[crossed_index],
                            permutation)
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                        logger.info("Crossed process found for %s, reuse diagrams." % \
                                    process.base_string())
                        continue

                # Create new amplitude
                amplitude = cls.get_amplitude_from_proc(process,
                                                        loop_filter=loop_filter)

                try:
                    result = amplitude.generate_diagrams(diagram_filter=diagram_filter)
                except InvalidCmd as error:
                    failed_procs.append(sorted_legs)
                else:
                    # Succeeded in generating diagrams
                    if amplitude.get('diagrams'):
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                    elif not result:
                        # Diagram generation failed for all crossings
                        failed_procs.append(sorted_legs)

        # Raise exception if there are no amplitudes for this process
        if not amplitudes:
            if len(failed_procs) == 1 and 'error' in locals():
                # Re-raise the single InvalidCmd that caused the failure
                raise error
            else:
                raise NoDiagramException("No amplitudes generated from process %s. Please enter a valid process" % \
                                         process_definition.nice_string())


        # Return the produced amplitudes
        return amplitudes
1857 1858 @classmethod
1859 - def get_amplitude_from_proc(cls,proc,**opts):
1860 """ Return the correct amplitude type according to the characteristics of 1861 the process proc. The only option that could be specified here is 1862 loop_filter and it is of course not relevant for a tree amplitude.""" 1863 1864 return Amplitude({"process": proc})
1865 1866 1867 @staticmethod
1868 - def find_optimal_process_orders(process_definition, diagram_filter=False):
1869 """Find the minimal WEIGHTED order for this set of processes. 1870 1871 The algorithm: 1872 1873 1) Check the coupling hierarchy of the model. Assign all 1874 particles to the different coupling hierarchies so that a 1875 particle is considered to be in the highest hierarchy (i.e., 1876 with lowest value) where it has an interaction. 1877 1878 2) Pick out the legs in the multiprocess according to the 1879 highest hierarchy represented (so don't mix particles from 1880 different hierarchy classes in the same multiparticles!) 1881 1882 3) Find the starting maximum WEIGHTED order as the sum of the 1883 highest n-2 weighted orders 1884 1885 4) Pick out required s-channel particle hierarchies, and use 1886 the highest of the maximum WEIGHTED order from the legs and 1887 the minimum WEIGHTED order extracted from 2*s-channel 1888 hierarchys plus the n-2-2*(number of s-channels) lowest 1889 leg weighted orders. 1890 1891 5) Run process generation with the WEIGHTED order determined 1892 in 3)-4) - # final state gluons, with all gluons removed from 1893 the final state 1894 1895 6) If no process is found, increase WEIGHTED order by 1 and go 1896 back to 5), until we find a process which passes. Return that 1897 order. 1898 1899 7) Continue 5)-6) until we reach (n-2)*(highest hierarchy)-1. 
1900 If still no process has passed, return 1901 WEIGHTED = (n-2)*(highest hierarchy) 1902 """ 1903 1904 assert isinstance(process_definition, base_objects.ProcessDefinition), \ 1905 "%s not valid ProcessDefinition object" % \ 1906 repr(process_definition) 1907 1908 processes = base_objects.ProcessList() 1909 amplitudes = AmplitudeList() 1910 1911 # If there are already couplings defined, return 1912 if process_definition.get('orders') or \ 1913 process_definition.get('overall_orders') or \ 1914 process_definition.get('NLO_mode')=='virt': 1915 return process_definition.get('orders') 1916 1917 # If this is a decay process (and not a decay chain), return 1918 if process_definition.get_ninitial() == 1 and not \ 1919 process_definition.get('is_decay_chain'): 1920 return process_definition.get('orders') 1921 1922 logger.info("Checking for minimal orders which gives processes.") 1923 logger.info("Please specify coupling orders to bypass this step.") 1924 1925 # Calculate minimum starting guess for WEIGHTED order 1926 max_order_now, particles, hierarchy = \ 1927 process_definition.get_minimum_WEIGHTED() 1928 coupling = 'WEIGHTED' 1929 1930 model = process_definition.get('model') 1931 1932 # Extract the initial and final leg ids 1933 isids = [leg['ids'] for leg in \ 1934 [leg for leg in process_definition['legs'] if leg['state'] == False]] 1935 fsids = [leg['ids'] for leg in \ 1936 [leg for leg in process_definition['legs'] if leg['state'] == True]] 1937 1938 max_WEIGHTED_order = \ 1939 (len(fsids + isids) - 2)*int(model.get_max_WEIGHTED()) 1940 # get the definition of the WEIGHTED 1941 hierarchydef = process_definition['model'].get('order_hierarchy') 1942 tmp = [] 1943 hierarchy = list(hierarchydef.items()) 1944 hierarchy.sort() 1945 for key, value in hierarchydef.items(): 1946 if value>1: 1947 tmp.append('%s*%s' % (value,key)) 1948 else: 1949 tmp.append('%s' % key) 1950 wgtdef = '+'.join(tmp) 1951 # Run diagram generation with increasing max_order_now until 1952 # we 
manage to get diagrams 1953 while max_order_now < max_WEIGHTED_order: 1954 logger.info("Trying coupling order WEIGHTED<=%d: WEIGTHED IS %s" % (max_order_now, wgtdef)) 1955 1956 oldloglevel = logger.level 1957 logger.setLevel(logging.WARNING) 1958 1959 # failed_procs are processes that have already failed 1960 # based on crossing symmetry 1961 failed_procs = [] 1962 # Generate all combinations for the initial state 1963 for prod in itertools.product(*isids): 1964 islegs = [ base_objects.Leg({'id':id, 'state': False}) \ 1965 for id in prod] 1966 1967 # Generate all combinations for the final state, and make 1968 # sure to remove double counting 1969 1970 red_fsidlist = [] 1971 1972 for prod in itertools.product(*fsids): 1973 1974 # Remove double counting between final states 1975 if tuple(sorted(prod)) in red_fsidlist: 1976 continue 1977 1978 red_fsidlist.append(tuple(sorted(prod))); 1979 1980 # Remove gluons from final state if QCD is among 1981 # the highest coupling hierarchy 1982 nglue = 0 1983 if 21 in particles[0]: 1984 nglue = len([id for id in prod if id == 21]) 1985 prod = [id for id in prod if id != 21] 1986 1987 # Generate leg list for process 1988 leg_list = [copy.copy(leg) for leg in islegs] 1989 1990 leg_list.extend([\ 1991 base_objects.Leg({'id':id, 'state': True}) \ 1992 for id in prod]) 1993 1994 legs = base_objects.LegList(leg_list) 1995 1996 # Set summed coupling order according to max_order_now 1997 # subtracting the removed gluons 1998 coupling_orders_now = {coupling: max_order_now - \ 1999 nglue * model['order_hierarchy']['QCD']} 2000 2001 # Setup process 2002 process = base_objects.Process({\ 2003 'legs':legs, 2004 'model':model, 2005 'id': process_definition.get('id'), 2006 'orders': coupling_orders_now, 2007 'required_s_channels': \ 2008 process_definition.get('required_s_channels'), 2009 'forbidden_onsh_s_channels': \ 2010 process_definition.get('forbidden_onsh_s_channels'), 2011 'sqorders_types': \ 2012 
process_definition.get('sqorders_types'), 2013 'squared_orders': \ 2014 process_definition.get('squared_orders'), 2015 'split_orders': \ 2016 process_definition.get('split_orders'), 2017 'forbidden_s_channels': \ 2018 process_definition.get('forbidden_s_channels'), 2019 'forbidden_particles': \ 2020 process_definition.get('forbidden_particles'), 2021 'is_decay_chain': \ 2022 process_definition.get('is_decay_chain'), 2023 'overall_orders': \ 2024 process_definition.get('overall_orders'), 2025 'split_orders': \ 2026 process_definition.get('split_orders')}) 2027 2028 # Check for couplings with given expansion orders 2029 process.check_expansion_orders() 2030 2031 # Check for crossed processes 2032 sorted_legs = sorted(legs.get_outgoing_id_list(model)) 2033 # Check if crossed process has already failed 2034 # In that case don't check process 2035 if tuple(sorted_legs) in failed_procs and not process_definition.get('forbidden_s_channels'): 2036 continue 2037 2038 amplitude = Amplitude({'process': process}) 2039 try: 2040 amplitude.generate_diagrams(diagram_filter=diagram_filter) 2041 except InvalidCmd as error: 2042 failed_procs.append(tuple(sorted_legs)) 2043 else: 2044 if amplitude.get('diagrams'): 2045 # We found a valid amplitude. Return this order number 2046 logger.setLevel(oldloglevel) 2047 return {coupling: max_order_now} 2048 else: 2049 failed_procs.append(tuple(sorted_legs)) 2050 # No processes found, increase max_order_now 2051 max_order_now += 1 2052 logger.setLevel(oldloglevel) 2053 2054 # If no valid processes found with nfinal-1 couplings, return maximal 2055 return {coupling: max_order_now}
2056 2057 @staticmethod
2058 - def cross_amplitude(amplitude, process, org_perm, new_perm):
2059 """Return the amplitude crossed with the permutation new_perm""" 2060 # Create dict from original leg numbers to new leg numbers 2061 perm_map = dict(list(zip(org_perm, new_perm))) 2062 # Initiate new amplitude 2063 new_amp = copy.copy(amplitude) 2064 # Number legs 2065 for i, leg in enumerate(process.get('legs')): 2066 leg.set('number', i+1) 2067 # Set process 2068 new_amp.set('process', process) 2069 # Now replace the leg numbers in the diagrams 2070 diagrams = base_objects.DiagramList([d.renumber_legs(perm_map, 2071 process.get('legs'),) for \ 2072 d in new_amp.get('diagrams')]) 2073 new_amp.set('diagrams', diagrams) 2074 new_amp.trim_diagrams() 2075 2076 # Make sure to reset mirror process 2077 new_amp.set('has_mirror_process', False) 2078 2079 return new_amp
2080
#===============================================================================
# Global helper methods
#===============================================================================

def expand_list(mylist):
    """Takes a list of lists and elements and returns a list of flat lists.
    Example: [[1,2], 3, [4,5]] -> [[1,3,4], [1,3,5], [2,3,4], [2,3,5]]
    """

    # Check that argument is a list
    assert isinstance(mylist, list), "Expand_list argument must be a list"

    # Wrap every bare (non-list) element in a one-item list so that all
    # entries are lists, then take the Cartesian product over them.
    wrapped = [entry if isinstance(entry, list) else [entry]
               for entry in mylist]

    return [list(combo) for combo in itertools.product(*wrapped)]
2106
def expand_list_list(mylist):
    """Recursive function. Takes a list of lists and lists of lists
    and returns a list of flat lists.
    Example: [[1,2],[[4,5],[6,7]]] -> [[1,2,4,5], [1,2,6,7]]
    """

    # An empty argument (or a single empty entry) expands to one empty list
    if not mylist or (len(mylist) == 1 and not mylist[0]):
        return [[]]

    # Check the first element is at least a list
    assert isinstance(mylist[0], list), \
        "Expand_list_list needs a list of lists and lists of lists"

    # Recursion stop condition: a single entry left. A list of lists is
    # returned as-is; a flat list stays wrapped in the outer list.
    if len(mylist) == 1:
        return mylist[0] if isinstance(mylist[0][0], list) else mylist

    # The prefixes contributed by the first entry: each of its sub-lists
    # when it is a list of lists, otherwise the entry itself.
    if isinstance(mylist[0][0], list):
        prefixes = mylist[0]
    else:
        prefixes = [mylist[0]]

    # Expand the remainder once, then glue every prefix to every tail
    tails = expand_list_list(mylist[1:])
    result = []
    for prefix in prefixes:
        for tail in tails:
            result.append(list(prefix) + list(tail))
    return result
2146