1
2
3
4
5
6
7
8
9
10
11
12
13
14
15 """Classes for diagram generation. Amplitude performs the diagram
16 generation, DecayChainAmplitude keeps track of processes with decay
17 chains, and MultiProcess allows generation of processes with
18 multiparticle definitions. DiagramTag allows to identify diagrams
19 based on relevant properties.
20 """
21
22 from __future__ import absolute_import
23 from six.moves import filter
24
25
26 import array
27 import copy
28 import itertools
29 import logging
30
31 import madgraph.core.base_objects as base_objects
32 import madgraph.various.misc as misc
33 from madgraph import InvalidCmd, MadGraph5Error
34 from six.moves import range
35 from six.moves import zip
36 from six.moves import filter
37
38 logger = logging.getLogger('madgraph.diagram_generation')
42
48 """Class to tag diagrams based on objects with some __lt__ measure, e.g.
49 PDG code/interaction id (for comparing diagrams from the same amplitude),
50 or Lorentz/coupling/mass/width (for comparing AMPs from different MEs).
51 Algorithm: Create chains starting from external particles:
52 1 \ / 6
53 2 /\______/\ 7
54 3_ / | \_ 8
55 4 / 5 \_ 9
56 \ 10
57 gives ((((9,10,id910),8,id9108),(6,7,id67),id910867)
58 (((1,2,id12),(3,4,id34)),id1234),
59 5,id91086712345)
60 where idN is the id of the corresponding interaction. The ordering within
61 chains is based on chain length (depth; here, 1234 has depth 3, 910867 has
62 depth 4, 5 has depth 0), and if equal on the ordering of the chain elements.
63 The determination of central vertex is based on minimizing the chain length
64 for the longest subchain.
65 This gives a unique tag which can be used to identify diagrams
66 (instead of symmetry), as well as identify identical matrix elements from
67 different processes."""
68
class DiagramTagError(Exception):
    """Exception raised for any problem occurring inside DiagramTag."""
    pass
72
def __init__(self, diagram, model=None, ninitial=2):
    """Initialize with a diagram. Create DiagramTagChainLinks according to
    the diagram, and figure out if we need to shift the central vertex.

    Walks the diagram's vertices, building one chain link per vertex out
    of the links of its daughter legs, then repeatedly re-roots the tag
    so that the longest sub-chain is minimized (unique central vertex).
    """
    # Maps leg number -> chain link whose chain ends in that leg
    leg_dict = {}

    all_vertices = diagram.get('vertices')
    for vertex in all_vertices:
        is_last = vertex == all_vertices[-1]
        # For every vertex but the last, the final leg is the one produced
        # by the vertex and is excluded from the daughter links.
        daughters = vertex.get('legs') if is_last else vertex.get('legs')[:-1]
        link = DiagramTagChainLink(
            [leg_dict.setdefault(leg.get('number'),
                                 DiagramTagChainLink(self.link_from_leg(leg, model)))
             for leg in daughters],
            self.vertex_id_from_vertex(vertex, is_last, model, ninitial))
        if not is_last:
            # Register the produced leg so later vertices can pick it up
            leg_dict[vertex.get('legs')[-1].get('number')] = link

    # The link from the last vertex is the (provisional) full tag
    self.tag = link

    # Shift the central vertex as long as re-rooting at the deepest
    # sub-chain produces a "smaller" tag (minimizes longest chain).
    done = max(l.depth for l in self.tag.links) == 0
    while not done:
        longest_chain = self.tag.links[0]
        # Collapse everything except the longest chain into one new link
        new_link = DiagramTagChainLink(self.tag.links[1:],
                                       self.flip_vertex(self.tag.vertex_id,
                                                        longest_chain.vertex_id,
                                                        self.tag.links[1:]))
        # Candidate tag rooted one step down the longest chain
        other_links = list(longest_chain.links) + [new_link]
        other_link = DiagramTagChainLink(other_links,
                                         self.flip_vertex(longest_chain.vertex_id,
                                                          self.tag.vertex_id,
                                                          other_links))
        if other_link.links[0] < self.tag.links[0]:
            # The candidate has a shorter longest chain: keep shifting
            self.tag = other_link
        else:
            done = True
128
133
def diagram_from_tag(self, model):
    """Output a diagram from this DiagramTag.

    Each daughter class must implement the static functions
    id_from_vertex_id (if the vertex id is something other than an
    integer) and leg_from_link (to pass the correct info from an end
    link to a leg).
    """
    new_diagram = base_objects.Diagram(
        {'vertices': self.vertices_from_link(self.tag, model, True)})
    new_diagram.calculate_orders(model)
    return new_diagram
148
@classmethod
def vertices_from_link(cls, link, model, first_vertex=False):
    """Recursively return the leg corresponding to this link together
    with the list of all vertices from all previous links."""

    if link.end_link:
        # External leg: no vertices below it
        return cls.leg_from_link(link), []

    # Recurse into the daughter links first
    leg_vertices = [cls.vertices_from_link(l, model) for l in link.links]

    # Daughter legs, sorted by descending leg number
    legs = base_objects.LegList(sorted([l for l, v in leg_vertices],
                                       key=lambda l: l.get('number'),
                                       reverse=True))

    # All vertices collected from the daughter chains, flattened
    vertices = base_objects.VertexList(sum([v for l, v in leg_vertices], []))

    if not first_vertex:
        # Determine the leg produced by this vertex and append it
        last_leg = cls.leg_from_legs(legs, link.vertex_id, model)
        legs.append(last_leg)

    # The vertex corresponding to this link itself
    vertices.append(cls.vertex_from_link(legs, link.vertex_id, model))

    if first_vertex:
        # The outermost call returns only the vertex list
        return vertices
    return last_leg, vertices
184
@classmethod
def legPDGs_from_vertex_id(cls, vertex_id, model):
    """Return the list of external PDGs of the interaction corresponding
    to this vertex_id."""

    # Shrunk loop vertices carry their PDG list directly in the third
    # (loop-info) entry of the vertex id.
    if len(vertex_id) >= 3 and 'PDGs' in vertex_id[2]:
        return vertex_id[2]['PDGs']
    # Otherwise look the interaction up in the model
    return [part.get_pdg_code() for part in
            model.get_interaction(cls.id_from_vertex_id(vertex_id)).get('particles')]
198
@classmethod
def leg_from_legs(cls, legs, vertex_id, model):
    """Return the leg produced by a vertex, given the daughter leg list
    and the model info."""

    pdgs = list(cls.legPDGs_from_vertex_id(vertex_id, model))

    # Remove the PDGs of the daughter legs; the single PDG left over
    # belongs to the produced particle.
    for pdg in [leg.get('id') for leg in legs]:
        pdgs.remove(pdg)
    assert len(pdgs) == 1

    # The produced leg carries the antiparticle of the remaining PDG
    new_pdg = model.get_particle(pdgs[0]).get_anti_pdg_code()
    # The new leg inherits the smallest daughter leg number
    new_number = min([l.get('number') for l in legs])
    # t-channel (state False) exactly when one daughter is incoming
    new_state = len([l for l in legs if l.get('state') == False]) != 1

    return base_objects.Leg({'id': new_pdg,
                             'number': new_number,
                             'state': new_state,
                             'onshell': False})
222
223 @classmethod
236
@staticmethod
def leg_from_link(link):
    """Return a leg from an end link.

    End links store [((id, state_info), number)], see link_from_leg.
    """
    if link.end_link:
        return base_objects.Leg({'number': link.links[0][1],
                                 'id': link.links[0][0][0],
                                 # state_info 0 encodes a final-state leg
                                 'state': (link.links[0][0][1] == 0),
                                 'onshell': False})
    # Must never be called on an internal link
    assert False
250
251 @staticmethod
253 """Return the numerical vertex id from a link.vertex_id"""
254
255 return vertex_id[0][0]
256
257 @staticmethod
259 """Return the loop_info stored in this vertex id. Notice that the
260 IdentifyME tag does not store the loop_info, but should normally never
261 need access to it."""
262
263 return vertex_id[2]
264
265 @staticmethod
267 """Reorder a permutation with respect to start_perm. Note that
268 both need to start from 1."""
269 if perm == start_perm:
270 return list(range(len(perm)))
271 order = [i for (p,i) in \
272 sorted([(p,i) for (i,p) in enumerate(perm)])]
273 return [start_perm[i]-1 for i in order]
274
275 @staticmethod
277 """Returns the default end link for a leg: ((id, state), number).
278 Note that the number is not taken into account if tag comparison,
279 but is used only to extract leg permutations."""
280 if leg.get('state'):
281
282 return [((leg.get('id'), 0), leg.get('number'))]
283 else:
284
285 return [((leg.get('id'), leg.get('number')), leg.get('number'))]
286
@staticmethod
def vertex_id_from_vertex(vertex, last_vertex, model, ninitial):
    """Return the default vertex id: just the interaction id.

    As for legs, only the first entry of the id is used in tag
    comparisons; the second stores non-compared information and the
    third holds additional info about a shrunk loop vertex.
    """
    if isinstance(vertex, base_objects.ContractedVertex):
        # Shrunk loop vertex: keep the loop tag and its PDGs around
        return ((vertex.get('id'), vertex.get('loop_tag')), (),
                {'PDGs': vertex.get('PDGs')})
    return ((vertex.get('id'), ()), (), {})
301
302 @staticmethod
304 """Returns the default vertex flip: just the new_vertex"""
305 return new_vertex
306
def __eq__(self, other):
    """Two DiagramTags are equal when they are of the same type and
    hold identical tags."""
    if type(self) != type(other):
        return False
    return self.tag == other.tag

def __ne__(self, other):
    return not self.__eq__(other)

def __lt__(self, other):
    # Ordering delegates entirely to the chain-link ordering
    return self.tag < other.tag

def __gt__(self, other):
    return self.tag > other.tag
324
325 __repr__ = __str__
326
328 """Chain link for a DiagramTag. A link is a tuple + vertex id + depth,
329 with a comparison operator defined"""
330
def __init__(self, objects, vertex_id=None):
    """Initialize either with a tuple of DiagramTagChainLinks plus a
    vertex_id (defined by DiagramTag.vertex_id_from_vertex), or with an
    external-leg object (end link) defined by DiagramTag.link_from_leg."""

    if vertex_id is None:
        # External leg: this link terminates a chain
        self.links = tuple(objects)
        self.vertex_id = (0,)
        self.depth = 0
        self.end_link = True
        return

    # Internal link: daughters are stored sorted, largest first,
    # so comparisons are order-independent.
    self.links = tuple(sorted(tuple(objects), reverse=True))
    self.vertex_id = vertex_id
    # Depth: daughter depths plus this vertex's own weight
    # (at least 1; n-1 for an n-daughter vertex)
    self.depth = sum((l.depth for l in self.links),
                     max(1, len(self.links) - 1))
    self.end_link = False
352
def get_external_numbers(self):
    """Return the permutation of external leg numbers (stored as the
    second entry in the end-link tuples), depth first."""
    if self.end_link:
        return [self.links[0][1]]
    # Concatenate the numbers collected from every daughter chain
    return sum([l.get_external_numbers() for l in self.links], [])
361
def __lt__(self, other):
    """Compare self with other in the order:
    1. depth 2. len(links) 3. vertex id 4. measure of links"""

    if self == other:
        return False

    if self.depth != other.depth:
        return self.depth < other.depth

    if len(self.links) != len(other.links):
        return len(self.links) < len(other.links)

    if self.vertex_id[0] != other.vertex_id[0]:
        # Python 3 cannot order mixed types: explicitly rank plain
        # int/str ids before tuple ids.
        if isinstance(self.vertex_id[0], int) and isinstance(other.vertex_id[0], tuple):
            return True
        elif isinstance(self.vertex_id[0], tuple) and isinstance(other.vertex_id[0], int):
            return False
        elif isinstance(self.vertex_id[0], str) and isinstance(other.vertex_id[0], tuple):
            return True
        elif isinstance(self.vertex_id[0], tuple) and isinstance(other.vertex_id[0], str):
            return False
        else:
            try:
                return self.vertex_id[0] < other.vertex_id[0]
            except TypeError as error:
                # BUG FIX: error.args is a tuple, so the original
                # comparison `error.args == "..."` was always False and
                # this branch always returned True. Compare the message
                # itself (args[0]) instead.
                if error.args and error.args[0] == \
                        "'<' not supported between instances of 'tuple' and 'str'":
                    return False
                else:
                    return True

    # Same depth, arity and vertex id: compare daughter links pairwise
    for i, link in enumerate(self.links):
        if i > len(other.links) - 1:
            return False
        if link != other.links[i]:
            return link < other.links[i]
399
401 return self != other and not self.__lt__(other)
402
def __eq__(self, other):
    """For end links, consider equal if self.links[0][0] equals
    other.links[0][0], i.e. ignore the leg number in links[0][1]."""

    if self.end_link and other.end_link and self.depth == other.depth \
       and self.vertex_id == other.vertex_id:
        return self.links[0][0] == other.links[0][0]

    # Internal links: compare kind, depth, comparable part of the
    # vertex id, and all daughter links.
    return self.end_link == other.end_link \
           and self.depth == other.depth \
           and self.vertex_id[0] == other.vertex_id[0] \
           and self.links == other.links

def __ne__(self, other):
    return not self.__eq__(other)
418
419
def __str__(self):
    """Readable representation: links, vertex id and depth."""
    if self.end_link:
        return str(self.links)
    return "%s, %s; %d" % (str(self.links),
                           str(self.vertex_id),
                           self.depth)

__repr__ = __str__
428
429
430
431
432 -class Amplitude(base_objects.PhysicsObject):
433 """Amplitude: process + list of diagrams (ordered)
434 Initialize with a process, then call generate_diagrams() to
435 generate the diagrams for the amplitude
436 """
437
def default_setup(self):
    """Default values for all properties."""
    self['process'] = base_objects.Process()
    # Diagrams are only filled in by generate_diagrams()
    self['diagrams'] = None
    # Whether the mirror process contributes as well
    self['has_mirror_process'] = False
446
459
460 - def filter(self, name, value):
473
474 - def get(self, name):
483
484
485
def get_sorted_keys(self):
    """Return diagram property names as a nicely sorted list."""
    return ['process', 'diagrams', 'has_mirror_process']
490
def get_number_of_diagrams(self):
    """Return the number of diagrams of this amplitude."""
    return len(self.get('diagrams'))
494
def get_amplitudes(self):
    """Return an AmplitudeList containing just this amplitude.
    Needed for DecayChainAmplitude."""
    return AmplitudeList([self])
500
def nice_string(self, indent=0):
    """Return a nicely formatted string of the amplitude content:
    the process followed by its diagrams."""
    return self.get('process').nice_string(indent) + "\n" + \
           self.get('diagrams').nice_string(indent)
505
def nice_string_processes(self, indent=0):
    """Return a nicely formatted string of the amplitude process only."""
    return self.get('process').nice_string(indent)
509
def get_ninitial(self):
    """Return the number of initial state particles in the process."""
    return self.get('process').get_ninitial()
513
def has_loop_process(self):
    """Return whether this amplitude has a loop process (truthy when
    the process defines perturbation couplings)."""
    return self.get('process').get('perturbation_couplings')
518
520 """Generate diagrams. Algorithm:
521
522 1. Define interaction dictionaries:
523 * 2->0 (identity), 3->0, 4->0, ... , maxlegs->0
524 * 2 -> 1, 3 -> 1, ..., maxlegs-1 -> 1
525
526 2. Set flag from_group=true for all external particles.
527 Flip particle/anti particle for incoming particles.
528
529 3. If there is a dictionary n->0 with n=number of external
530 particles, create if possible the combination [(1,2,3,4,...)]
531 with *at least two* from_group==true. This will give a
532 finished (set of) diagram(s) (done by reduce_leglist)
533
534 4. Create all allowed groupings of particles with at least one
535 from_group==true (according to dictionaries n->1):
536 [(1,2),3,4...],[1,(2,3),4,...],...,
537 [(1,2),(3,4),...],...,[(1,2,3),4,...],...
538 (done by combine_legs)
539
540 5. Replace each group with a (list of) new particle(s) with number
541 n = min(group numbers). Set from_group true for these
542 particles and false for all other particles. Store vertex info.
543 (done by merge_comb_legs)
544
545 6. Stop algorithm when at most 2 particles remain.
546 Return all diagrams (lists of vertices).
547
548 7. Repeat from 3 (recursion done by reduce_leglist)
549
550 8. Replace final p=p vertex
551
552 Be aware that the resulting vertices have all particles outgoing,
553 so need to flip for incoming particles when used.
554
555 SPECIAL CASE: For A>BC... processes which are legs in decay
556 chains, we need to ensure that BC... combine first, giving A=A
557 as a final vertex. This case is defined by the Process
558 property is_decay_chain = True.
559 This function can also be called by the generate_diagram function
560 of LoopAmplitudes, in which case the generated diagrams here must not
561 be directly assigned to the 'diagrams' attributed but returned as a
562 DiagramList by the function. This is controlled by the argument
563 returndiag.
564 """
565
566 process = self.get('process')
567 model = process.get('model')
568 legs = process.get('legs')
569
570 for key in process.get('overall_orders').keys():
571 try:
572 process.get('orders')[key] = \
573 min(process.get('orders')[key],
574 process.get('overall_orders')[key])
575 except KeyError:
576 process.get('orders')[key] = process.get('overall_orders')[key]
577
578 assert model.get('particles'), \
579 "particles are missing in model: %s" % model.get('particles')
580
581 assert model.get('interactions'), \
582 "interactions are missing in model"
583
584
585 res = base_objects.DiagramList()
586
587 if len([leg for leg in legs if model.get('particle_dict')[\
588 leg.get('id')].is_fermion()]) % 2 == 1:
589 if not returndiag:
590 self['diagrams'] = res
591 raise InvalidCmd('The number of fermion is odd')
592 else:
593 return False, res
594
595
596
597 if not model.get('got_majoranas') and \
598 len([leg for leg in legs if leg.is_incoming_fermion(model)]) != \
599 len([leg for leg in legs if leg.is_outgoing_fermion(model)]):
600 if not returndiag:
601 self['diagrams'] = res
602 raise InvalidCmd('The number of of incoming/outcoming fermions are different')
603 else:
604 return False, res
605
606
607
608 for charge in model.get('conserved_charge'):
609 total = 0
610 for leg in legs:
611 part = model.get('particle_dict')[leg.get('id')]
612 try:
613 value = part.get(charge)
614 except (AttributeError, base_objects.PhysicsObject.PhysicsObjectError):
615 try:
616 value = getattr(part, charge)
617 except AttributeError:
618 value = 0
619
620 if (leg.get('id') != part['pdg_code']) != leg['state']:
621 total -= value
622 else:
623 total += value
624
625 if abs(total) > 1e-10:
626 if not returndiag:
627 self['diagrams'] = res
628 raise InvalidCmd('No %s conservation for this process ' % charge)
629 return res
630 else:
631 raise InvalidCmd('No %s conservation for this process ' % charge)
632 return res, res
633
634 if not returndiag:
635 logger.info("Trying %s " % process.nice_string().replace('Process', 'process'))
636
637
638 for i in range(0, len(process.get('legs'))):
639
640 leg = copy.copy(process.get('legs')[i])
641 process.get('legs')[i] = leg
642 if leg.get('number') == 0:
643 leg.set('number', i + 1)
644
645
646
647 leglist = self.copy_leglist(process.get('legs'))
648
649 for leg in leglist:
650
651
652 leg.set('from_group', True)
653
654
655
656 if leg.get('state') == False:
657 part = model.get('particle_dict')[leg.get('id')]
658 leg.set('id', part.get_anti_pdg_code())
659
660
661
662 max_multi_to1 = max([len(key) for key in \
663 model.get('ref_dict_to1').keys()])
664
665
666
667
668
669
670
671
672 is_decay_proc = process.get_ninitial() == 1
673 if is_decay_proc:
674 part = model.get('particle_dict')[leglist[0].get('id')]
675
676
677
678 ref_dict_to0 = {(part.get_pdg_code(),part.get_anti_pdg_code()):[0],
679 (part.get_anti_pdg_code(),part.get_pdg_code()):[0]}
680
681
682 leglist[0].set('from_group', None)
683 reduced_leglist = self.reduce_leglist(leglist,
684 max_multi_to1,
685 ref_dict_to0,
686 is_decay_proc,
687 process.get('orders'))
688 else:
689 reduced_leglist = self.reduce_leglist(leglist,
690 max_multi_to1,
691 model.get('ref_dict_to0'),
692 is_decay_proc,
693 process.get('orders'))
694
695
696
697
698 self.convert_dgleg_to_leg(reduced_leglist)
699
700 if reduced_leglist:
701 for vertex_list in reduced_leglist:
702 res.append(self.create_diagram(base_objects.VertexList(vertex_list)))
703
704
705
706 failed_crossing = not res
707
708
709
710
711
712
713 if process.get('required_s_channels') and \
714 process.get('required_s_channels')[0]:
715
716
717 lastvx = -1
718
719
720
721 if is_decay_proc: lastvx = -2
722 ninitial = len([leg for leg in process.get('legs') if leg.get('state') == False])
723
724 old_res = res
725 res = base_objects.DiagramList()
726 for id_list in process.get('required_s_channels'):
727 res_diags = [diagram for diagram in old_res if all([req_s_channel in \
728 [vertex.get_s_channel_id(\
729 process.get('model'), ninitial) \
730 for vertex in diagram.get('vertices')[:lastvx]] \
731 for req_s_channel in \
732 id_list])]
733
734 res.extend([diag for diag in res_diags if diag not in res])
735
736
737
738
739
740 if process.get('forbidden_s_channels'):
741 ninitial = len([leg for leg in process.get('legs') if leg.get('state') == False])
742 if ninitial == 2:
743 res = base_objects.DiagramList(\
744 [diagram for diagram in res if not any([vertex.get_s_channel_id(\
745 process.get('model'), ninitial) \
746 in process.get('forbidden_s_channels')
747 for vertex in diagram.get('vertices')[:-1]])])
748 else:
749
750
751 newres= []
752 for diagram in res:
753 leg1 = 1
754
755
756
757 vertex = diagram.get('vertices')[-1]
758 if any([l['number'] ==1 for l in vertex.get('legs')]):
759 leg1 = [l['number'] for l in vertex.get('legs') if l['number'] !=1][0]
760 to_loop = list(range(len(diagram.get('vertices'))-1))
761 if leg1 >1:
762 to_loop.reverse()
763 for i in to_loop:
764 vertex = diagram.get('vertices')[i]
765 if leg1:
766 if any([l['number'] ==leg1 for l in vertex.get('legs')]):
767 leg1 = 0
768 continue
769 if vertex.get_s_channel_id(process.get('model'), ninitial)\
770 in process.get('forbidden_s_channels'):
771 break
772 else:
773 newres.append(diagram)
774 res = base_objects.DiagramList(newres)
775
776
777
778
779 if process.get('forbidden_onsh_s_channels'):
780 ninitial = len([leg for leg in process.get('legs') if leg.get('state') == False])
781
782 verts = base_objects.VertexList(sum([[vertex for vertex \
783 in diagram.get('vertices')[:-1]
784 if vertex.get_s_channel_id(\
785 process.get('model'), ninitial) \
786 in process.get('forbidden_onsh_s_channels')] \
787 for diagram in res], []))
788 for vert in verts:
789
790 newleg = copy.copy(vert.get('legs').pop(-1))
791 newleg.set('onshell', False)
792 vert.get('legs').append(newleg)
793
794
795 for diagram in res:
796 diagram.calculate_orders(model)
797
798
799
800
801
802
803
804
805 if not returndiag and len(res)>0:
806 res = self.apply_squared_order_constraints(res)
807
808 if diagram_filter:
809 res = self.apply_user_filter(res)
810
811
812 if not process.get('is_decay_chain'):
813 for diagram in res:
814 vertices = diagram.get('vertices')
815 if len(vertices) > 1 and vertices[-1].get('id') == 0:
816
817
818
819
820 vertices = copy.copy(vertices)
821 lastvx = vertices.pop()
822 nexttolastvertex = copy.copy(vertices.pop())
823 legs = copy.copy(nexttolastvertex.get('legs'))
824 ntlnumber = legs[-1].get('number')
825 lastleg = [leg for leg in lastvx.get('legs') if leg.get('number') != ntlnumber][0]
826
827 if lastleg.get('onshell') == False:
828 lastleg.set('onshell', None)
829
830 legs[-1] = lastleg
831 nexttolastvertex.set('legs', legs)
832 vertices.append(nexttolastvertex)
833 diagram.set('vertices', vertices)
834
835 if res and not returndiag:
836 logger.info("Process has %d diagrams" % len(res))
837
838
839 self.trim_diagrams(diaglist=res)
840
841
842 pertur = 'QCD'
843 if self.get('process')['perturbation_couplings']:
844 pertur = sorted(self.get('process')['perturbation_couplings'])[0]
845 self.get('process').get('legs').sort(pert=pertur)
846
847
848 if not returndiag:
849 self['diagrams'] = res
850 return not failed_crossing
851 else:
852 return not failed_crossing, res
853
855 """Applies the user specified squared order constraints on the diagram
856 list in argument."""
857
858 res = copy.copy(diag_list)
859
860
861
862 for name, (value, operator) in self['process'].get('constrained_orders').items():
863 res.filter_constrained_orders(name, value, operator)
864
865
866
867
868 while True:
869 new_res = res.apply_positive_sq_orders(res,
870 self['process'].get('squared_orders'),
871 self['process']['sqorders_types'])
872
873 if len(res)==len(new_res):
874 break
875 elif (len(new_res)>len(res)):
876 raise MadGraph5Error(
877 'Inconsistency in function apply_squared_order_constraints().')
878
879 res = new_res
880
881
882
883
884 neg_orders = [(order, value) for order, value in \
885 self['process'].get('squared_orders').items() if value<0]
886 if len(neg_orders)==1:
887 neg_order, neg_value = neg_orders[0]
888
889 res, target_order = res.apply_negative_sq_order(res, neg_order,\
890 neg_value, self['process']['sqorders_types'][neg_order])
891
892
893
894
895 self['process']['squared_orders'][neg_order]=target_order
896 elif len(neg_orders)>1:
897 raise InvalidCmd('At most one negative squared order constraint'+\
898 ' can be specified, not %s.'%str(neg_orders))
899
900 return res
901
903 """Applies the user specified squared order constraints on the diagram
904 list in argument."""
905
906 if True:
907 remove_diag = misc.plugin_import('user_filter',
908 'user filter required to be defined in PLUGIN/user_filter.py with the function remove_diag(ONEDIAG) which returns True if the diagram has to be removed',
909 fcts=['remove_diag'])
910 else:
911
912 def remove_diag(diag, model=None):
913 for vertex in diag['vertices']:
914 if vertex['id'] == 0:
915 continue
916 if vertex['legs'][-1]['number'] < 3:
917 if abs(vertex['legs'][-1]['id']) <6:
918 return True
919 return False
920
921 res = diag_list.__class__()
922 nb_removed = 0
923 model = self['process']['model']
924 for diag in diag_list:
925 if remove_diag(diag, model):
926 nb_removed +=1
927 else:
928 res.append(diag)
929
930 if nb_removed:
931 logger.warning('Diagram filter is ON and removed %s diagrams for this subprocess.' % nb_removed)
932
933 return res
934
935
936
def create_diagram(self, vertexlist):
    """Return a Diagram created from the vertex list. Can be
    overloaded by daughter classes."""
    return base_objects.Diagram({'vertices': vertexlist})

def convert_dgleg_to_leg(self, reduced_leglist):
    """In LoopAmplitude this converts all DGLoopLegs back into Legs;
    in Amplitude there is nothing to do."""
    return True

def copy_leglist(self, legs):
    """Return a shallow copy of the leg list. Overloaded in
    LoopAmplitude so that a DGLoopLeg list (with extra parameters used
    during loop diagram generation) is returned instead."""
    return base_objects.LegList([copy.copy(leg) for leg in legs])
956
def reduce_leglist(self, curr_leglist, max_multi_to1, ref_dict_to0,
                   is_decay_proc=False, coupling_orders=None):
    """Recursive function to reduce an N LegList to N-1.
    For the algorithm, see the docstring of generate_diagrams."""

    res = []

    # A failed combination upstream propagates as None
    if curr_leglist is None:
        return None

    model = self.get('process').get('model')
    ref_dict_to1 = model.get('ref_dict_to1')

    # If all legs can combine to nothing, we have candidate final
    # vertices (complete diagrams).
    if curr_leglist.can_combine_to_0(ref_dict_to0, is_decay_proc):
        key = tuple(sorted([leg.get('id') for leg in curr_leglist]))
        vertex_ids = self.get_combined_vertices(curr_leglist,
                                                copy.copy(ref_dict_to0[key]))
        final_vertices = [base_objects.Vertex({'legs': curr_leglist,
                                               'id': vid})
                          for vid in vertex_ids]
        # Keep only final vertices compatible with the coupling orders
        for final_vertex in final_vertices:
            if self.reduce_orders(coupling_orders, model,
                                  [final_vertex.get('id')]) != False:
                res.append([final_vertex])

    # Stop condition: only 2 legs left, no further combining possible
    if len(curr_leglist) == 2:
        return res if res else None

    # All valid groupings of legs (n -> 1 combinations)
    comb_lists = self.combine_legs(curr_leglist, ref_dict_to1,
                                   max_multi_to1)

    # Replace each grouping by the possible intermediate particles
    leg_vertex_list = self.merge_comb_legs(comb_lists, ref_dict_to1)

    for leg_vertex_tuple in leg_vertex_list:

        # Drop vertices producing a forbidden particle
        if self.get('process').get('forbidden_particles') and \
           any([abs(vertex.get('legs')[-1].get('id')) in
                self.get('process').get('forbidden_particles')
                for vertex in leg_vertex_tuple[1]]):
            continue

        # Subtract the new vertices' orders from the budget
        new_coupling_orders = self.reduce_orders(
            coupling_orders, model,
            [vertex.get('id') for vertex in leg_vertex_tuple[1]])
        if new_coupling_orders == False:
            # Some coupling order went negative
            continue

        # Recurse on the reduced leg list
        reduced_diagram = self.reduce_leglist(leg_vertex_tuple[0],
                                              max_multi_to1,
                                              ref_dict_to0,
                                              is_decay_proc,
                                              new_coupling_orders)

        if reduced_diagram:
            # Combine the new vertices with every completion found
            vertex_list_list = [list(leg_vertex_tuple[1])]
            vertex_list_list.append(reduced_diagram)
            res.extend(expand_list_list(vertex_list_list))

    return res
1045
def reduce_orders(self, coupling_orders, model, vertex_id_list):
    """Return False if the coupling order for any coupling would drop
    below 0 after subtracting the orders of the given vertices;
    otherwise return the updated coupling orders. If coupling_orders
    is not given, return None (which counts as success).
    WEIGHTED is a special order corresponding to the hierarchy-weighted
    sum of the couplings. Negative constraints are ignored here: they
    can only be applied after generation."""

    if not coupling_orders:
        return None

    present = copy.copy(coupling_orders)
    for vertex_id in vertex_id_list:
        # id 0 is the identity vertex and carries no couplings
        if not vertex_id:
            continue
        inter = model.get("interaction_dict")[vertex_id]
        for coupling, power in inter.get('orders').items():
            # Only enforce non-negative (positive) constraints here
            if coupling in present and present[coupling] >= 0:
                present[coupling] -= power
                if present[coupling] < 0:
                    return False
        if 'WEIGHTED' in present and present['WEIGHTED'] >= 0:
            # Hierarchy-weighted total order of this interaction
            weight = sum([model.get('order_hierarchy')[c] * n
                          for (c, n) in inter.get('orders').items()])
            present['WEIGHTED'] -= weight
            if present['WEIGHTED'] < 0:
                return False

    return present
1087
def combine_legs(self, list_legs, ref_dict_to1, max_multi_to1):
    """Recursive function. Take a list of legs as input, together with
    the reference dictionary n-1 -> 1, and output a list of lists of
    tuples of Legs (allowed combinations) and Legs (the rest).

    Algorithm:
    1. Form all n-combinations from the list [123456]: [12],..,[123],..
    2. For each valid combination, e.g. [34]:
       a. Append [12[34]56] to the result.
       b. Split the list after the first combined element, subtract the
          combination from the second half ([456]-[34]=[56]) and recurse
          on that remainder.
    3. Prepend (first half + combination) to every result of the
       recursion and append those to the result as well.
    """

    res = []

    for comb_length in range(2, max_multi_to1 + 1):

        # Not enough legs left for combinations of this size
        if comb_length > len(list_legs):
            return res

        for comb in itertools.combinations(list_legs, comb_length):

            # Only keep combinations allowed by the n -> 1 dictionary
            if not base_objects.LegList(comb).can_combine_to_1(ref_dict_to1):
                continue

            # a) Replace the combination in place in a copy of the list
            replaced = copy.copy(list_legs)
            for leg in comb:
                replaced.remove(leg)
            replaced.insert(list_legs.index(comb[0]), comb)
            res.append(replaced)

            # b) Split at the first combined element and subtract the
            # rest of the combination from the tail
            first_index = list_legs.index(comb[0])
            head = list_legs[0:first_index]
            tail = list_legs[first_index + 1:]
            for leg in comb[1:]:
                tail.remove(leg)

            # head now ends with the combination itself
            head.append(comb)

            # 3. Extend with every further combination of the tail
            for item in self.combine_legs(tail, ref_dict_to1,
                                          max_multi_to1):
                candidate = copy.copy(head)
                candidate.extend(item)
                res.append(candidate)

    return res
1161
1162
def merge_comb_legs(self, comb_lists, ref_dict_to1):
    """Take a list of allowed leg combinations as input and return a
    set of (LegList, VertexList) tuples where each combination has been
    properly replaced (one list per entry in ref_dict_to1, so that all
    possible intermediate particles are included)."""

    res = []

    for comb_list in comb_lists:

        reduced_list = []
        vertex_list = []

        for entry in comb_list:

            if isinstance(entry, tuple):
                # A leg combination: look up the possible
                # (new leg id, vertex id) pairs
                key = tuple(sorted([leg.get('id') for leg in entry]))
                leg_vert_ids = copy.copy(ref_dict_to1[key])

                # The new leg inherits the smallest participating number
                number = min([leg.get('number') for leg in entry])

                # t-channel (state False) exactly when one participating
                # leg is incoming
                if len([leg for leg in entry
                        if leg.get('state') == False]) == 1:
                    state = False
                else:
                    state = True

                new_leg_vert_ids = []
                if leg_vert_ids:
                    new_leg_vert_ids = self.get_combined_legs(entry,
                                                              leg_vert_ids,
                                                              number,
                                                              state)

                reduced_list.append([l[0] for l in new_leg_vert_ids])

                # One vertex per possible new leg
                vlist = base_objects.VertexList()
                for (myleg, vert_id) in new_leg_vert_ids:
                    myleglist = base_objects.LegList(list(entry))
                    myleglist.append(myleg)
                    vlist.append(base_objects.Vertex({'legs': myleglist,
                                                      'id': vert_id}))
                vertex_list.append(vlist)

            else:
                # A plain leg: keep it, resetting its from_group flag
                cp_entry = copy.copy(entry)
                if cp_entry.get('from_group') != None:
                    cp_entry.set('from_group', False)
                reduced_list.append(cp_entry)

        # Expand all alternatives for legs and vertices in parallel
        flat_red_lists = expand_list(reduced_list)
        flat_vx_lists = expand_list(vertex_list)

        for i in range(0, len(flat_vx_lists)):
            res.append((base_objects.LegList(flat_red_lists[i]),
                        base_objects.VertexList(flat_vx_lists[i])))

    return res
1248
def get_combined_legs(self, legs, leg_vert_ids, number, state):
    """Create a set of new (Leg, vertex id) pairs from the given info.
    Can be overloaded by daughter classes."""
    return [(base_objects.Leg({'id': leg_id,
                               'number': number,
                               'state': state,
                               'from_group': True}),
             vert_id)
            for leg_id, vert_id in leg_vert_ids]
1261
def get_combined_vertices(self, legs, vert_ids):
    """Allow daughter classes to select/veto vertex ids; the default
    keeps them all."""
    return vert_ids
1267
def trim_diagrams(self, decay_ids=(), diaglist=None):
    """Reduce the number of Leg and Vertex objects used in memory by
    sharing identical ones between diagrams. When called by a diagram
    generation initiated by LoopAmplitude, trim the list passed as
    'diaglist' instead of the 'diagrams' attribute.

    decay_ids: ids of particles that will be decayed; their final-state
    legs are flagged onshell. (NOTE(review): the original signature is
    elided in this dump; an immutable empty default is used here, which
    is safe since decay_ids is only used for membership tests.)
    """
    legs = []       # pool of unique Leg objects
    vertices = []   # pool of unique Vertex objects

    if diaglist is None:
        diaglist = self.get('diagrams')

    # Flag process legs that correspond to decaying particles
    process = self.get('process')
    for leg in process.get('legs'):
        if leg.get('state') and leg.get('id') in decay_ids:
            leg.set('onshell', True)

    for diagram in diaglist:
        # External leg numbers already handled in this diagram
        leg_external = set()
        for ivx, vertex in enumerate(diagram.get('vertices')):
            for ileg, leg in enumerate(vertex.get('legs')):
                if leg.get('state') and leg.get('id') in decay_ids and \
                   leg.get('number') not in leg_external:
                    # Copy before mutating: the leg may be shared
                    leg = copy.copy(leg)
                    leg.set('onshell', True)
                try:
                    index = legs.index(leg)
                except ValueError:
                    # First occurrence: keep it and add to the pool
                    vertex.get('legs')[ileg] = leg
                    legs.append(leg)
                else:
                    # Reuse the pooled equal leg
                    vertex.get('legs')[ileg] = legs[index]
                leg_external.add(leg.get('number'))
            try:
                index = vertices.index(vertex)
                diagram.get('vertices')[ivx] = vertices[index]
            except ValueError:
                vertices.append(vertex)
1311
1312
1313
1314
class AmplitudeList(base_objects.PhysicsObjectList):
    """List of Amplitude objects
    """

    def has_any_loop_process(self):
        """ Check the content of all processes of the amplitudes in this list to
        see if there is any which defines perturbation couplings. """

        for amp in self:
            if amp.has_loop_process():
                return True
        # Fix: previously the loop fell through and the method implicitly
        # returned None; return an explicit boolean instead (same
        # truthiness, clearer contract).
        return False

    def is_valid_element(self, obj):
        """Test if object obj is a valid Amplitude for the list."""

        return isinstance(obj, Amplitude)
1331
1336 """A list of amplitudes + a list of decay chain amplitude lists;
1337 corresponding to a ProcessDefinition with a list of decay chains
1338 """
1339
1345
    def __init__(self, argument = None, collect_mirror_procs = False,
                 ignore_six_quark_processes = False, loop_filter=None, diagram_filter=False):
        """Allow initialization with Process and with ProcessDefinition.

        argument: a Process/ProcessDefinition (full construction), any other
        non-None value (passed to the mother class), or None (empty object).
        The remaining flags are forwarded to the amplitude generation.
        """

        if isinstance(argument, base_objects.Process):
            super(DecayChainAmplitude, self).__init__()
            # Local import to avoid a circular dependency with the loop
            # diagram generation module.
            from madgraph.loop.loop_diagram_generation import LoopMultiProcess
            if argument['perturbation_couplings']:
                MultiProcessClass=LoopMultiProcess
            else:
                MultiProcessClass=MultiProcess
            # A ProcessDefinition expands into (possibly) many amplitudes,
            # a plain Process gives exactly one.
            if isinstance(argument, base_objects.ProcessDefinition):
                self['amplitudes'].extend(\
                    MultiProcessClass.generate_multi_amplitudes(argument,
                                                collect_mirror_procs,
                                                ignore_six_quark_processes,
                                                loop_filter=loop_filter,
                                                diagram_filter=diagram_filter))
            else:
                self['amplitudes'].append(\
                    MultiProcessClass.get_amplitude_from_proc(argument,
                                                 loop_filter=loop_filter,
                                                 diagram_filter=diagram_filter))
                # Clean the decay chains from the process, since the decays
                # have not been combined into the core process yet.  Work on
                # a copy so the caller's Process object is left untouched.
                process = copy.copy(self.get('amplitudes')[0].get('process'))
                process.set('decay_chains', base_objects.ProcessList())
                self['amplitudes'][0].set('process', process)

            for process in argument.get('decay_chains'):
                if process.get('perturbation_couplings'):
                    raise MadGraph5Error("Decay processes can not be perturbed")
                process.set('overall_orders', argument.get('overall_orders'))
                if not process.get('is_decay_chain'):
                    process.set('is_decay_chain',True)
                if not process.get_ninitial() == 1:
                    raise InvalidCmd("Decay chain process must have exactly one" + \
                                     " incoming particle")
                # Decay chains are built recursively: each decay may itself
                # carry further decay chains.
                self['decay_chains'].append(\
                    DecayChainAmplitude(process, collect_mirror_procs,
                                        ignore_six_quark_processes,
                                        diagram_filter=diagram_filter))

            # Collect the ids of all particles for which a decay has been
            # defined (the single initial particle of each decay amplitude).
            decay_ids = sum([[a.get('process').get('legs')[0].get('id') \
                              for a in dec.get('amplitudes')] for dec in \
                             self['decay_chains']], [])
            decay_ids = set(decay_ids)
            # Mark decaying legs as on-shell and share leg/vertex objects
            # to save memory.
            for amp in self['amplitudes']:
                amp.trim_diagrams(decay_ids)

            # Remove from decay_ids every id that actually appears in some
            # core process; whatever is left over has no matching leg.
            for amp in self['amplitudes']:
                for l in amp.get('process').get('legs'):
                    if l.get('id') in decay_ids:
                        decay_ids.remove(l.get('id'))

            if decay_ids:
                model = amp.get('process').get('model')
                names = [model.get_particle(id).get('name') for id in decay_ids]

                logger.warning(
                 "$RED Decay without corresponding particle in core process found.\n" + \
                 "Decay information for particle(s) %s is discarded.\n" % ','.join(names) + \
                 "Please check your process definition carefully. \n" + \
                 "This warning usually means that you forgot parentheses in presence of subdecay.\n" + \
                 "Example of correct syntax: p p > t t~, ( t > w+ b, w+ > l+ vl)")

                # Drop the orphan decays; iterate in reverse so removal
                # while looping stays safe.
                for dc in reversed(self['decay_chains']):
                    for a in reversed(dc.get('amplitudes')):
                        if a.get('process').get('legs')[0].get('id') in decay_ids:
                            dc.get('amplitudes').remove(a)
                    if not dc.get('amplitudes'):
                        # No amplitudes left: remove the whole decay chain
                        self['decay_chains'].remove(dc)

            # Warn about decays where the decaying particle reappears in its
            # own final state (particle "decaying to itself").
            bad_procs = []
            for dc in self['decay_chains']:
                for amp in dc.get('amplitudes'):
                    legs = amp.get('process').get('legs')
                    fs_parts = [abs(l.get('id')) for l in legs if
                                l.get('state')]
                    is_part = [l.get('id') for l in legs if not
                               l.get('state')][0]
                    if abs(is_part) in fs_parts:
                        bad_procs.append(amp.get('process'))

            if bad_procs:
                logger.warning(
                 "$RED Decay(s) with particle decaying to itself:\n" + \
                 '\n'.join([p.nice_string() for p in bad_procs]) + \
                 "\nPlease check your process definition carefully. \n")

        elif argument != None:
            # Initialization from a dictionary-like argument: delegate to
            # the mother routine.
            super(DecayChainAmplitude, self).__init__(argument)
        else:
            # Empty object: delegate to the mother routine.
            super(DecayChainAmplitude, self).__init__()
1450
1451 - def filter(self, name, value):
1452 """Filter for valid amplitude property values."""
1453
1454 if name == 'amplitudes':
1455 if not isinstance(value, AmplitudeList):
1456 raise self.PhysicsObjectError("%s is not a valid AmplitudeList" % str(value))
1457 if name == 'decay_chains':
1458 if not isinstance(value, DecayChainAmplitudeList):
1459 raise self.PhysicsObjectError("%s is not a valid DecayChainAmplitudeList object" % \
1460 str(value))
1461 return True
1462
        """Return diagram property names as a nicely sorted list."""

        # Fixed ordering used when displaying/serializing the object.
        return ['amplitudes', 'decay_chains']
1467
1468
1469
        """Returns number of diagrams for this amplitude"""
        # Core-process diagrams plus, recursively, those of every decay chain.
        return sum(len(a.get('diagrams')) for a in self.get('amplitudes')) \
               + sum(d.get_number_of_diagrams() for d in \
                     self.get('decay_chains'))
1475
        """Returns a nicely formatted string of the amplitude content."""
        mystr = ""
        for amplitude in self.get('amplitudes'):
            mystr = mystr + amplitude.nice_string(indent) + "\n"

        if self.get('decay_chains'):
            # Decays are printed with two extra spaces of indentation.
            mystr = mystr + " " * indent + "Decays:\n"
            for dec in self.get('decay_chains'):
                mystr = mystr + dec.nice_string(indent + 2) + "\n"

        # Strip the trailing newline added by the last iteration.
        return mystr[:-1]
1488
        """Returns a nicely formatted string of the amplitude processes."""
        mystr = ""
        for amplitude in self.get('amplitudes'):
            mystr = mystr + amplitude.nice_string_processes(indent) + "\n"

        if self.get('decay_chains'):
            # Decays are printed with two extra spaces of indentation.
            mystr = mystr + " " * indent + "Decays:\n"
            for dec in self.get('decay_chains'):
                mystr = mystr + dec.nice_string_processes(indent + 2) + "\n"

        # Strip the trailing newline added by the last iteration.
        return mystr[:-1]
1501
        """Returns the number of initial state particles in the process."""
        # All amplitudes share the same initial state; use the first one.
        return self.get('amplitudes')[0].get('process').get_ninitial()
1505
        """Returns a set of all particle ids for which a decay is defined"""

        decay_ids = []

        # Loop over the amplitudes of every decay chain and record the id of
        # the (single) incoming particle of each decay process.
        for amp in sum([dc.get('amplitudes') for dc \
                        in self['decay_chains']], []):
            decay_ids.append(amp.get('process').get_initial_ids()[0])

        # Return a duplicate-free list.
        return list(set(decay_ids))
1519
        """ Returns whether this amplitude has a loop process."""
        # Delegate to the AmplitudeList of core amplitudes.
        return self['amplitudes'].has_any_loop_process()
1523
        """Recursive function to extract all amplitudes for this process"""

        amplitudes = AmplitudeList()

        # Core amplitudes first, then (recursively) those of every decay.
        amplitudes.extend(self.get('amplitudes'))
        for decay in self.get('decay_chains'):
            amplitudes.extend(decay.get_amplitudes())

        return amplitudes
1534
    """List of DecayChainAmplitude objects
    """

        """Test if object obj is a valid DecayChainAmplitude for the list."""

        # Only DecayChainAmplitude instances may be stored in this list.
        return isinstance(obj, DecayChainAmplitude)
1547
1548
1549
1550
1551
1552 -class MultiProcess(base_objects.PhysicsObject):
1553 """MultiProcess: list of process definitions
1554 list of processes (after cleaning)
1555 list of amplitudes (after generation)
1556 """
1557
        """Default values for all properties"""

        self['process_definitions'] = base_objects.ProcessDefinitionList()
        # Amplitudes are generated lazily on first access (see get()).
        self['amplitudes'] = AmplitudeList()
        # When True, q qbar and qbar q initial states are combined into a
        # single process with a mirror flag.
        self['collect_mirror_procs'] = False
        # List of quark PDG codes: processes with six or more of these
        # (anti)quarks are skipped during generation.
        self['ignore_six_quark_processes'] = []
        # When True, use numerical mass values to discard kinematically
        # forbidden 1 -> N processes.
        self['use_numerical'] = False
1574
1575 - def __init__(self, argument=None, collect_mirror_procs = False,
1576 ignore_six_quark_processes = [], optimize=False,
1577 loop_filter=None, diagram_filter=None):
1605
1606
1607 - def filter(self, name, value):
1608 """Filter for valid process property values."""
1609
1610 if name == 'process_definitions':
1611 if not isinstance(value, base_objects.ProcessDefinitionList):
1612 raise self.PhysicsObjectError("%s is not a valid ProcessDefinitionList object" % str(value))
1613
1614 if name == 'amplitudes':
1615 if not isinstance(value, AmplitudeList):
1616 raise self.PhysicsObjectError("%s is not a valid AmplitudeList object" % str(value))
1617
1618 if name in ['collect_mirror_procs']:
1619 if not isinstance(value, bool):
1620 raise self.PhysicsObjectError("%s is not a valid boolean" % str(value))
1621
1622 if name == 'ignore_six_quark_processes':
1623 if not isinstance(value, list):
1624 raise self.PhysicsObjectError("%s is not a valid list" % str(value))
1625
1626 return True
1627
    def get(self, name):
        """Get the value of the property name.

        The 'amplitudes' property is generated lazily: the first time it is
        requested (and found empty) the amplitudes are built from the stored
        process definitions."""

        if (name == 'amplitudes') and not self[name]:
            for process_def in self.get('process_definitions'):
                if process_def.get('decay_chains'):
                    # Decay chain processes are wrapped in a
                    # DecayChainAmplitude.
                    self['amplitudes'].append(\
                        DecayChainAmplitude(process_def,
                                        self.get('collect_mirror_procs'),
                                        self.get('ignore_six_quark_processes'),
                                        diagram_filter=self['diagram_filter']))
                else:
                    # NOTE(review): 'loop_filter'/'diagram_filter' keys are
                    # presumably set in __init__ (not visible here) — verify.
                    self['amplitudes'].extend(\
                        self.generate_multi_amplitudes(process_def,
                                       self.get('collect_mirror_procs'),
                                       self.get('ignore_six_quark_processes'),
                                       self['use_numerical'],
                                       loop_filter=self['loop_filter'],
                                       diagram_filter=self['diagram_filter']))

        # Delegate the actual lookup to the PhysicsObject base class.
        return MultiProcess.__bases__[0].get(self, name)
1651
        """Return process property names as a nicely sorted list."""

        # Fixed ordering used when displaying/serializing the object.
        return ['process_definitions', 'amplitudes']
1656
1658
        # All process definitions share the same model; take the first one.
        return self['process_definitions'][0]['model']
1660
    @classmethod
    def generate_multi_amplitudes(cls,process_definition,
                                  collect_mirror_procs = False,
                                  ignore_six_quark_processes = [],
                                  use_numerical=False,
                                  loop_filter=None,
                                  diagram_filter=False):
        """Generate amplitudes in a semi-efficient way.
        Make use of crossing symmetry for processes that fail diagram
        generation, but not for processes that succeed diagram
        generation. Doing so will risk making it impossible to
        identify processes with identical amplitudes.
        """
        # NOTE(review): ignore_six_quark_processes has a mutable default
        # ([]); harmless here since it is only read, never mutated.
        assert isinstance(process_definition, base_objects.ProcessDefinition), \
                                    "%s not valid ProcessDefinition object" % \
                                    repr(process_definition)

        # Determine automatic coupling orders unless squared orders were
        # given explicitly.
        if not process_definition['born_sq_orders']:
            process_definition.set('orders', MultiProcess.\
                                   find_optimal_process_orders(process_definition,
                                                               diagram_filter))
        # Check for maximum orders from the model
        process_definition.check_expansion_orders()

        processes = base_objects.ProcessList()
        amplitudes = AmplitudeList()

        # failed_procs / success_procs hold sorted outgoing-id signatures of
        # processes already tried, so crossings can be reused or skipped.
        failed_procs = []
        success_procs = []
        # Complete (unsorted) id lists, for identification of mirror processes
        non_permuted_procs = []
        # Leg permutations of the successfully generated processes
        permutations = []

        model = process_definition['model']

        islegs = [leg for leg in process_definition['legs'] \
                  if leg['state'] == False]
        fslegs = [leg for leg in process_definition['legs'] \
                  if leg['state'] == True]

        # Multiparticle id lists and final-state polarizations
        isids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == False]
        fsids = [leg['ids'] for leg in process_definition['legs'] \
                 if leg['state'] == True]
        polids = [tuple(leg['polarization']) for leg in process_definition['legs'] \
                  if leg['state'] == True]

        # Loop over every combination of initial-state ids
        for prod in itertools.product(*isids):
            islegs = [\
                    base_objects.Leg({'id':id, 'state': False,
                                      'polarization': islegs[i]['polarization']})
                    for i,id in enumerate(prod)]

            # Loop over final-state combinations, removing double counting
            # of identical (id, polarization) multisets.
            red_fsidlist = set()

            for prod in itertools.product(*fsids):
                tag = zip(prod, polids)
                tag = sorted(tag)
                if tuple(tag) in red_fsidlist:
                    continue

                red_fsidlist.add(tuple(tag))

                # Build the leg list for this concrete process
                leg_list = [copy.copy(leg) for leg in islegs]
                leg_list.extend([\
                        base_objects.Leg({'id':id, 'state': True, 'polarization': fslegs[i]['polarization']}) \
                        for i,id in enumerate(prod)])

                legs = base_objects.LegList(leg_list)

                # Sorted outgoing-id signature plus the permutation that
                # produced it, used for crossing identification.
                sorted_legs = sorted([(l,i+1) for (i,l) in \
                                     enumerate(legs.get_outgoing_id_list(model))])
                permutation = [l[1] for l in sorted_legs]

                sorted_legs = array.array('i', [l[0] for l in sorted_legs])

                # Skip processes with six or more of the flagged quarks
                if ignore_six_quark_processes and \
                       len([i for i in sorted_legs if abs(i) in \
                            ignore_six_quark_processes]) >= 6:
                    continue

                # If a crossing of this process already failed, skip it
                if sorted_legs in failed_procs:
                    continue

                # Numerical check that a 1 -> N decay is kinematically open
                if use_numerical:
                    initial_mass = abs(model['parameter_dict'][model.get_particle(legs[0].get('id')).get('mass')])
                    if initial_mass == 0:
                        continue
                    for leg in legs[1:]:
                        m = model['parameter_dict'][model.get_particle(leg.get('id')).get('mass')]
                        initial_mass -= abs(m)
                    if initial_mass.real <= 0:
                        continue

                process = process_definition.get_process_with_legs(legs)

                fast_proc = \
                          array.array('i',[leg.get('id') for leg in legs])
                if collect_mirror_procs and \
                        process_definition.get_ninitial() == 2:
                    # Check if the initial-state-swapped process was already
                    # generated; if so, just flag it as having a mirror.
                    mirror_proc = \
                              array.array('i', [fast_proc[1], fast_proc[0]] + \
                                          list(fast_proc[2:]))
                    try:
                        mirror_amp = \
                                   amplitudes[non_permuted_procs.index(mirror_proc)]
                    except Exception:
                        # No mirror process found; generate normally
                        pass
                    else:
                        mirror_amp.set('has_mirror_process', True)
                        logger.info("Process %s added to mirror process %s" % \
                                    (process.base_string(),
                                     mirror_amp.get('process').base_string()))
                        continue

                # Try to reuse a successful crossing, unless options that
                # break crossing symmetry are present.
                if not process.get('required_s_channels') and \
                   not process.get('forbidden_onsh_s_channels') and \
                   not process.get('forbidden_s_channels') and \
                   not process.get('is_decay_chain') and not diagram_filter:
                    try:
                        crossed_index = success_procs.index(sorted_legs)
                        # Relabeling legs of loop amplitudes is cumbersome;
                        # force a regeneration in that case.
                        if 'loop_diagrams' in amplitudes[crossed_index]:
                            raise ValueError
                    except ValueError:
                        # No usable crossing found; generate from scratch
                        pass
                    else:
                        # Crossing found: reuse its diagrams
                        amplitude = MultiProcess.cross_amplitude(\
                            amplitudes[crossed_index],
                            process,
                            permutations[crossed_index],
                            permutation)
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                        logger.info("Crossed process found for %s, reuse diagrams." % \
                                    process.base_string())
                        continue

                # Generate a brand new amplitude for this process
                amplitude = cls.get_amplitude_from_proc(process,
                                                        loop_filter=loop_filter)

                try:
                    result = amplitude.generate_diagrams(diagram_filter=diagram_filter)
                except InvalidCmd as error:
                    failed_procs.append(sorted_legs)
                else:
                    if amplitude.get('diagrams'):
                        amplitudes.append(amplitude)
                        success_procs.append(sorted_legs)
                        permutations.append(permutation)
                        non_permuted_procs.append(fast_proc)
                    elif not result:
                        # Diagram generation failed for all crossings
                        failed_procs.append(sorted_legs)

        # No amplitude at all: re-raise the single failure, or report
        if not amplitudes:
            if len(failed_procs) == 1 and 'error' in locals():
                raise error
            else:
                raise NoDiagramException("No amplitudes generated from process %s. Please enter a valid process" % \
                                             process_definition.nice_string())

        return amplitudes
1859
    @classmethod
        """ Return the correct amplitude type according to the characteristics of
        the process proc. The only option that could be specified here is
        loop_filter and it is of course not relevant for a tree amplitude."""

        # Tree-level default; loop-capable subclasses override this hook.
        return Amplitude({"process": proc})
1867
1868
    @staticmethod
        """Find the minimal WEIGHTED order for this set of processes.

        The algorithm:

        1) Check the coupling hierarchy of the model. Assign all
        particles to the different coupling hierarchies so that a
        particle is considered to be in the highest hierarchy (i.e.,
        with lowest value) where it has an interaction.

        2) Pick out the legs in the multiprocess according to the
        highest hierarchy represented (so don't mix particles from
        different hierarchy classes in the same multiparticles!)

        3) Find the starting maximum WEIGHTED order as the sum of the
        highest n-2 weighted orders

        4) Pick out required s-channel particle hierarchies, and use
        the highest of the maximum WEIGHTED order from the legs and
        the minimum WEIGHTED order extracted from 2*s-channel
        hierarchys plus the n-2-2*(number of s-channels) lowest
        leg weighted orders.

        5) Run process generation with the WEIGHTED order determined
        in 3)-4) - # final state gluons, with all gluons removed from
        the final state

        6) If no process is found, increase WEIGHTED order by 1 and go
        back to 5), until we find a process which passes. Return that
        order.

        7) Continue 5)-6) until we reach (n-2)*(highest hierarchy)-1.
        If still no process has passed, return
        WEIGHTED = (n-2)*(highest hierarchy)
        """

        assert isinstance(process_definition, base_objects.ProcessDefinition), \
                                    "%s not valid ProcessDefinition object" % \
                                    repr(process_definition)

        processes = base_objects.ProcessList()
        amplitudes = AmplitudeList()

        # If coupling orders are already specified, nothing to do
        if process_definition.get('orders') or \
           process_definition.get('overall_orders') or \
           process_definition.get('NLO_mode')=='virt':
            return process_definition.get('orders')

        # A decay process (not a decay chain) keeps its given orders
        if process_definition.get_ninitial() == 1 and not \
           process_definition.get('is_decay_chain'):
            return process_definition.get('orders')

        logger.info("Checking for minimal orders which gives processes.")
        logger.info("Please specify coupling orders to bypass this step.")

        # Starting guess for the WEIGHTED order (steps 1-4)
        max_order_now, particles, hierarchy = \
                                   process_definition.get_minimum_WEIGHTED()
        coupling = 'WEIGHTED'

        model = process_definition.get('model')

        # Initial- and final-state multiparticle id lists
        isids = [leg['ids'] for leg in \
                 [leg for leg in process_definition['legs'] if leg['state'] == False]]
        fsids = [leg['ids'] for leg in \
                 [leg for leg in process_definition['legs'] if leg['state'] == True]]

        # Absolute upper bound for the scan (step 7)
        max_WEIGHTED_order = \
                        (len(fsids + isids) - 2)*int(model.get_max_WEIGHTED())

        # Human-readable definition of WEIGHTED for the log message
        hierarchydef = process_definition['model'].get('order_hierarchy')
        tmp = []
        hierarchy = list(hierarchydef.items())
        hierarchy.sort()
        for key, value in hierarchydef.items():
            if value>1:
                tmp.append('%s*%s' % (value,key))
            else:
                tmp.append('%s' % key)
        wgtdef = '+'.join(tmp)

        # Increase max_order_now until diagram generation succeeds
        while max_order_now < max_WEIGHTED_order:
            logger.info("Trying coupling order WEIGHTED<=%d: WEIGTHED IS %s" % (max_order_now, wgtdef))

            # Silence diagram generation while scanning
            oldloglevel = logger.level
            logger.setLevel(logging.WARNING)

            # Sorted signatures of processes that already failed, so
            # crossings are not retried.
            failed_procs = []

            for prod in itertools.product(*isids):
                islegs = [ base_objects.Leg({'id':id, 'state': False}) \
                        for id in prod]

                # Loop over final states, removing double counting
                red_fsidlist = []

                for prod in itertools.product(*fsids):

                    if tuple(sorted(prod)) in red_fsidlist:
                        continue

                    red_fsidlist.append(tuple(sorted(prod)));

                    # Remove final-state gluons (step 5); each removed gluon
                    # lowers the scanned order by one unit of QCD.
                    nglue = 0
                    if 21 in particles[0]:
                        nglue = len([id for id in prod if id == 21])
                        prod = [id for id in prod if id != 21]

                    # Build the leg list for this trial process
                    leg_list = [copy.copy(leg) for leg in islegs]

                    leg_list.extend([\
                            base_objects.Leg({'id':id, 'state': True}) \
                            for id in prod])

                    legs = base_objects.LegList(leg_list)

                    # Order budget for this trial, corrected for the
                    # removed gluons.
                    coupling_orders_now = {coupling: max_order_now - \
                                           nglue * model['order_hierarchy']['QCD']}

                    # Trial process inheriting all constraints from the
                    # process definition.
                    process = base_objects.Process({\
                                    'legs':legs,
                                    'model':model,
                                    'id': process_definition.get('id'),
                                    'orders': coupling_orders_now,
                                    'required_s_channels': \
                                      process_definition.get('required_s_channels'),
                                    'forbidden_onsh_s_channels': \
                                      process_definition.get('forbidden_onsh_s_channels'),
                                    'sqorders_types': \
                                      process_definition.get('sqorders_types'),
                                    'squared_orders': \
                                      process_definition.get('squared_orders'),
                                    'split_orders': \
                                      process_definition.get('split_orders'),
                                    'forbidden_s_channels': \
                                      process_definition.get('forbidden_s_channels'),
                                    'forbidden_particles': \
                                      process_definition.get('forbidden_particles'),
                                    'is_decay_chain': \
                                      process_definition.get('is_decay_chain'),
                                    'overall_orders': \
                                      process_definition.get('overall_orders'),
                                    'split_orders': \
                                      process_definition.get('split_orders')})

                    process.check_expansion_orders()

                    sorted_legs = sorted(legs.get_outgoing_id_list(model))

                    # Skip crossings of already-failed processes (unless
                    # forbidden s-channels break crossing symmetry).
                    if tuple(sorted_legs) in failed_procs and not process_definition.get('forbidden_s_channels'):
                        continue

                    amplitude = Amplitude({'process': process})
                    try:
                        amplitude.generate_diagrams(diagram_filter=diagram_filter)
                    except InvalidCmd as error:
                        failed_procs.append(tuple(sorted_legs))
                    else:
                        if amplitude.get('diagrams'):
                            # Valid amplitude found: this order suffices
                            logger.setLevel(oldloglevel)
                            return {coupling: max_order_now}
                        else:
                            failed_procs.append(tuple(sorted_legs))
            # Try a higher order (step 6)
            max_order_now += 1
            logger.setLevel(oldloglevel)

        # Nothing succeeded below the bound: return the maximal order
        return {coupling: max_order_now}
2058
    @staticmethod
        """Return the amplitude crossed with the permutation new_perm"""

        # Map leg numbers of the original ordering onto the new ordering
        perm_map = dict(list(zip(org_perm, new_perm)))
        # Shallow copy: diagrams are replaced below, the rest is shared
        new_amp = copy.copy(amplitude)
        # Renumber the legs of the target process consecutively
        for i, leg in enumerate(process.get('legs')):
            leg.set('number', i+1)
        # Set the process
        new_amp.set('process', process)
        # Renumber every diagram's legs according to the permutation map
        diagrams = base_objects.DiagramList([d.renumber_legs(perm_map,
                                            process.get('legs'),) for \
                                            d in new_amp.get('diagrams')])
        new_amp.set('diagrams', diagrams)
        new_amp.trim_diagrams()

        # Mirror flag is decided per process; reset it for the crossed copy
        new_amp.set('has_mirror_process', False)

        return new_amp
2082
2088 """Takes a list of lists and elements and returns a list of flat lists.
2089 Example: [[1,2], 3, [4,5]] -> [[1,3,4], [1,3,5], [2,3,4], [2,3,5]]
2090 """
2091
2092
2093 assert isinstance(mylist, list), "Expand_list argument must be a list"
2094
2095 res = []
2096
2097 tmplist = []
2098 for item in mylist:
2099 if isinstance(item, list):
2100 tmplist.append(item)
2101 else:
2102 tmplist.append([item])
2103
2104 for item in itertools.product(*tmplist):
2105 res.append(list(item))
2106
2107 return res
2108
    """Recursive function. Takes a list of lists and lists of lists
    and returns a list of flat lists.
    Example: [[1,2],[[4,5],[6,7]]] -> [[1,2,4,5], [1,2,6,7]]
    """

    res = []

    # An empty list, or a single empty inner list, expands to one empty
    # combination.
    if not mylist or len(mylist) == 1 and not mylist[0]:
        return [[]]

    assert isinstance(mylist[0], list), \
           "Expand_list_list needs a list of lists and lists of lists"

    # Recursion stop case: only one element left
    if len(mylist) == 1:
        if isinstance(mylist[0][0], list):
            return mylist[0]
        else:
            return mylist

    if isinstance(mylist[0][0], list):
        # First element is a list of lists: combine each of its inner
        # lists with every expansion of the remainder.
        for item in mylist[0]:
            for rest in expand_list_list(mylist[1:]):
                reslist = copy.copy(item)
                reslist.extend(rest)
                res.append(reslist)
    else:
        # First element is a flat list: prepend it to every expansion of
        # the remainder.
        for rest in expand_list_list(mylist[1:]):
            reslist = copy.copy(mylist[0])
            reslist.extend(rest)
            res.append(reslist)

    return res
2148