15 from __future__ import division
16 from __future__ import absolute_import
17 import cmath
18 import copy
19 import operator
20 import six.moves.cPickle
21 import glob
22 import logging
23 import numbers
24 import os
25 import re
26 import shutil
27 import sys
28 import time
30
31 from six.moves import range
32 from six.moves import zip
33
34 root_path = os.path.split(os.path.dirname(os.path.realpath( __file__ )))[0]
35 sys.path.append(root_path)
36 from aloha.aloha_object import *
37 import aloha
38 import aloha.aloha_writers as aloha_writers
39 import aloha.aloha_lib as aloha_lib
40 import aloha.aloha_object as aloha_object
41 import aloha.aloha_parsers as aloha_parsers
42 import aloha.aloha_fct as aloha_fct
43 import models
44 try:
45 import madgraph.iolibs.files as files
46 import madgraph.various.misc as misc
47 except Exception:
48 import aloha.files as files
49 import aloha.misc as misc
50
51
52 aloha_path = os.path.dirname(os.path.realpath(__file__))
53 logger = logging.getLogger('ALOHA')
54
55 _conjugate_gap = 50
56 _spin2_mult = 1000
57
58 pjoin = os.path.join
59
60 ALOHAERROR = aloha.ALOHAERROR
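# Minimal usage sketch of this module (assumption: a UFO model named 'sm' is
# importable from the models/ directory; adjust the name and the output path):
#
#     generator = AbstractALOHAModel('sm')
#     generator.compute_all(save=False)
#     generator.write('/tmp/aloha_output', 'Fortran')
#
# This is roughly what the __main__ block at the end of this file drives.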
63 """ store the result of the computation of Helicity Routine
64 this is use for storing and passing to writer """
65
def __init__(self, expr, outgoing, spins, name, infostr, denom=None):
67 """ store the information """
68
69 self.spins = spins
70 self.expr = expr
71 self.denominator = denom
72 self.name = name
73 self.outgoing = outgoing
74 self.infostr = infostr
75 self.symmetries = []
76 self.combined = []
77 self.tag = []
78 self.contracted = {}
79
80
81
83 """ add an outgoing """
84
85 if not outgoing in self.symmetries:
86 self.symmetries.append(outgoing)
87
89 """add a combine rule """
90
91 if lor_list not in self.combined:
92 self.combined.append(lor_list)
93
def write(self, output_dir, language='Fortran', mode='self', combine=True, **opt):
95 """ write the content of the object """
96 writer = aloha_writers.WriterFactory(self, language, output_dir, self.tag)
97 text = writer.write(mode=mode, **opt)
98 if combine:
99 for grouped in self.combined:
100 if isinstance(text, tuple):
101 text = tuple([old.__add__(new) for old, new in zip(text,
102 writer.write_combined(grouped, mode=mode+'no_include', **opt))])
103 else:
104 text += writer.write_combined(grouped, mode=mode+'no_include', **opt)
105 if aloha.mp_precision and 'MP' not in self.tag:
106 self.tag.append('MP')
107 text += self.write(output_dir, language, mode, **opt)
108 return text
109
111 """return some information on the routine
112 """
113 if info == "rank":
114 assert isinstance(self.expr, aloha_lib.SplitCoefficient)
115 rank= 1
116 for coeff in self.expr:
117 rank = max(sum(coeff), rank)
118 return rank -1
119 else:
120 raise ALOHAERROR('%s is not a valid piece of information that can be computed' % info)
121
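# Example (sketch): for a loop routine whose expression is a SplitCoefficient,
# the rank is derived from the largest total degree of the coefficient keys
# (max(sum(key)) - 1):
#
#     rank = abstract_routine.get_info('rank')
#
# Any other keyword raises ALOHAERROR.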
124 """ Launch the creation of the Helicity Routine"""
125
126 prop_lib = {}
127 counter = 0
128
130 """ An error class for ALOHA"""
131
def __init__(self, lorentz, model=None):
133 """ initialize the run
134 lorentz: the lorentz information analyzed (UFO format)
135 language: define in which language we write the output
136 modes: 0 for all incoming particles
137 >0 defines the outgoing part (start to count at 1)
138 """
139
140 self.spins = [s for s in lorentz.spins]
141 self.name = lorentz.name
142 self.conjg = []
143 self.tag = []
144 self.outgoing = None
145 self.lorentz_expr = lorentz.structure
146 self.routine_kernel = None
147 self.spin2_massless = False
148 self.spin32_massless = False
149 self.contracted = {}
150 self.fct = {}
151 self.model = model
152 self.denominator = None
153
154
155 self.lastprint = 0
156
157 if hasattr(lorentz, 'formfactors') and lorentz.formfactors:
158 for formf in lorentz.formfactors:
159 pat = re.compile(r'\b%s\b' % formf.name)
160 self.lorentz_expr = pat.sub('(%s)' % formf.value, self.lorentz_expr)
161
171
173 """ return the full set of AbstractRoutineBuilder linked to fermion
174 clash"""
175
176 solution = []
177
178 for i, pair in enumerate(pair_list):
179 new_builder = self.define_conjugate_builder(pair)
180 solution.append(new_builder)
181 solution += new_builder.define_all_conjugate_builder(pair_list[i+1:])
182 return solution
183
185 """ return a AbstractRoutineBuilder for the conjugate operation.
186 If they are more than one pair of fermion. Then use pair to claim which
187 one is conjugated"""
188
189 new_builder = copy.copy(self)
190 new_builder.conjg = self.conjg[:]
191 try:
192 for index in pairs:
193 new_builder.apply_conjugation(index)
194 except TypeError:
195 new_builder.apply_conjugation(pairs)
196 return new_builder
197
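# Example (sketch): conjugating the first fermion pair returns a copy of the
# builder whose routines are later tagged with a 'C1' suffix:
#
#     conj_builder = builder.define_conjugate_builder(pairs=(1,))
#     # conj_builder.conjg == [1]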
199 """ apply conjugation on self object"""
200
201 nb_fermion = len([1 for s in self.spins if s % 2 == 0])
202 if isinstance(pair, tuple):
203 if len(pair) ==1 :
204 pair = pair[0]
205 else:
206 raise Exception
207
208
209 if (pair > 1 or nb_fermion >2) and not self.conjg:
210
211 data = aloha_fct.get_fermion_flow(self.lorentz_expr, nb_fermion)
212 target = dict([(2*i+1,2*i+2) for i in range(nb_fermion//2)])
213 if not data == target:
214 text = """Unable to deal with 4(or more) point interactions
215 in presence of majorana particle/flow violation"""
216 raise ALOHAERROR(text)
217
218 old_id = 2 * pair - 1
219 new_id = _conjugate_gap + old_id
220
221 self.kernel_tag = set()
222 aloha_lib.KERNEL.use_tag = set()
223 if not self.routine_kernel or isinstance(self.routine_kernel, str):
224 self.routine_kernel = eval(self.parse_expression(self.lorentz_expr))
225 self.kernel_tag = aloha_lib.KERNEL.use_tag
226
227
228 self.routine_kernel = \
229 C(new_id, old_id + 1) * self.routine_kernel * C(new_id + 1, old_id)
230
231 self.lorentz_expr = '('+self.lorentz_expr+') * C(%s,%s) * C(%s,%s)' % \
232 (new_id, old_id + 1, new_id + 1, old_id )
233
234 self.conjg.append(pair)
235
236
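# Sketch of what apply_conjugation does for pair=1 (so old_id=1 and, with
# _conjugate_gap=50, new_id=51): the kernel L is wrapped as
#
#     C(51, 2) * L * C(52, 1)
#
# i.e. charge-conjugation matrices are attached on both sides of the first
# fermion pair, and the pair index is recorded in self.conjg.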
256
258 """change the sign of P for outcoming fermion in order to
259 correct the mismatch convention between HELAS and FR"""
260
261 if not expr:
262 expr = self.lorentz_expr
263
264 if need_P_sign:
265 expr = re.sub(r'\b(P|PSlash)\(', r'-\1(', expr)
266
267 calc = aloha_parsers.ALOHAExpressionParser()
268 lorentz_expr = calc.parse(expr)
269 return lorentz_expr
270
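# Example (sketch): with need_P_sign=True every P(...) or PSlash(...) call gets
# a leading minus sign before being handed to the ALOHA expression parser:
#
#     "P(-1,2)*Gamma(-1,1,2)"  ->  "-P(-1,2)*Gamma(-1,1,2)"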
272 """compute the abstract routine associate to this mode """
273
274
275 aloha_lib.KERNEL.use_tag=set()
276
277 nb_spinor = 0
278 outgoing = self.outgoing
279 if (outgoing + 1) // 2 in self.conjg:
280
281 outgoing = outgoing + outgoing % 2 - (outgoing +1) % 2
282
283 if not self.routine_kernel:
284 AbstractRoutineBuilder.counter += 1
285 if self.tag == []:
286 logger.debug( 'aloha creates %s routines', self.name)
287 else:
288 logger.debug('aloha creates %s set of routines with options: %s' \
289 % (self.name, ','.join(self.tag)) )
290 try:
291 lorentz = self.parse_expression()
292 self.routine_kernel = lorentz
293 lorentz = eval(lorentz)
294 except NameError as error:
295 logger.error('unknown type in Lorentz evaluation: %s' % str(error))
296 raise ALOHAERROR('unknown type in Lorentz evaluation: %s' % str(error))
297 else:
298 self.kernel_tag = set(aloha_lib.KERNEL.use_tag)
299 elif isinstance(self.routine_kernel,str):
300 lorentz = eval(self.routine_kernel)
301 aloha_lib.KERNEL.use_tag = set(self.kernel_tag)
302 else:
303 lorentz = copy.copy(self.routine_kernel)
304 aloha_lib.KERNEL.use_tag = set(self.kernel_tag)
305 for (i, spin ) in enumerate(self.spins):
306 id = i + 1
307
308 if id == outgoing:
309
310
311 propa = [t[1:] for t in self.tag if t.startswith('P')]
312 if propa == ['0']:
313 if spin == 3 and aloha.unitary_gauge == 2:
314 lorentz *= complex(0,1) * self.get_custom_propa('1PS', spin, id)
315 continue
316 else:
317 massless = True
318 self.denominator = None
319 elif propa == []:
320 massless = False
321 self.denominator = None
322 else:
323 lorentz *= complex(0,1) * self.get_custom_propa(propa[0], spin, id)
324 continue
325
326
327
328 if spin in [1,-1]:
329 lorentz *= complex(0,1)
330 elif spin == 2:
331
332 if (id + 1) // 2 in self.conjg:
333 id += _conjugate_gap + id % 2 - (id +1) % 2
334 if (id % 2):
335
336 lorentz *= complex(0,1) * SpinorPropagatorout(id, 'I2', outgoing)
337 else:
338
339 lorentz *= complex(0,1) * SpinorPropagatorin('I2', id, outgoing)
340 elif spin == 3 :
341 if massless or not aloha.unitary_gauge:
342 lorentz *= VectorPropagatorMassless(id, 'I2', id)
343 else:
344 lorentz *= VectorPropagator(id, 'I2', id)
345 elif spin == 4:
346
347 if (id + 1) // 2 in self.conjg:
348 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
349 else:
350 spin_id = id
351 nb_spinor += 1
352 if not massless and (spin_id % 2):
353 lorentz *= complex(0,1) * Spin3halfPropagatorout(id, 'I2', spin_id,'I3', outgoing)
354 elif not massless and not (spin_id % 2):
355 lorentz *= complex(0,1) * Spin3halfPropagatorin('I2', id , 'I3', spin_id, outgoing)
356 elif spin_id %2:
357 lorentz *= complex(0,1) * Spin3halfPropagatorMasslessOut(id, 'I2', spin_id,'I3', outgoing)
358 else :
359 lorentz *= complex(0,1) * Spin3halfPropagatorMasslessIn('I2', id, 'I3', spin_id, outgoing)
360
361 elif spin == 5 :
362
363 if massless:
364 lorentz *= complex(0,1) * Spin2masslessPropagator(_spin2_mult + id, \
365 2 * _spin2_mult + id,'I2','I3')
366 else:
367 lorentz *= complex(0,1) * Spin2Propagator(_spin2_mult + id, \
368 2 * _spin2_mult + id,'I2','I3', id)
369 else:
370 raise self.AbstractALOHAError(
371 'The spin value %s (2s+1) is not supported yet' % spin)
372 else:
373
374 if spin in [1,-1]:
375 lorentz *= Scalar(id)
376 elif spin == 2:
377
378 if (id+1) // 2 in self.conjg:
379 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
380 else:
381 spin_id = id
382 lorentz *= Spinor(spin_id, id)
383 elif spin == 3:
384 lorentz *= Vector(id, id)
385 elif spin == 4:
386
387 if (id+1) // 2 in self.conjg:
388 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
389 else:
390 spin_id = id
391 nb_spinor += 1
392 lorentz *= Spin3Half(id, spin_id, id)
393 elif spin == 5:
394 lorentz *= Spin2(1 * _spin2_mult + id, 2 * _spin2_mult + id, id)
395 else:
396 raise self.AbstractALOHAError(
397 'The spin value %s (2s+1) is not supported yet' % spin)
398
399
400 if not outgoing:
401 lorentz *= complex(0,-1)
402
403
404 lorentz = lorentz.simplify()
405
406
407 if any((tag.startswith('L') for tag in self.tag if len(tag)>1)):
408 return self.compute_loop_coefficient(lorentz, outgoing)
409
410 lorentz = lorentz.expand()
411 lorentz = lorentz.simplify()
412
413 if factorize:
414 lorentz = lorentz.factorize()
415
416 lorentz.tag = set(aloha_lib.KERNEL.use_tag)
417 return lorentz
418
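# Summary sketch of the loop above: for the off-shell leg (id == outgoing) the
# appropriate propagator object is multiplied in (SpinorPropagator* for spin 1/2,
# VectorPropagator(Massless) for spin 1, Spin3half*/Spin2* for higher spins, or a
# custom one when a 'P...' tag is present); on-shell legs simply get their external
# wavefunction (Scalar, Spinor, Vector, Spin3Half, Spin2). A factor of (0,-1) is
# applied when computing the amplitude itself (outgoing == 0).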
419 @staticmethod
421 """Change the index of the propagator to match the current need"""
422
423 data = re.split(r'(\b[a-zA-Z]\w*?)\(([\'\w,\s\"\+\-]*?)\)',text)
424 to_change = {}
425 for old, new in tag.items():
426 if isinstance(new, str):
427 new='\'%s\'' % new
428 else:
429 new = str(new)
430 to_change[r'%s' % old] = new
431 pos=-2
432 while pos +3 < len(data):
433 pos = pos+3
434 ltype = data[pos]
435 if ltype != 'complex':
436 data[pos+1] = re.sub(r'\b(?<!-)(%s)\b' % '|'.join(to_change),
437 lambda x: to_change[x.group()], data[pos+1])
438 data[pos+1] = '(%s)' % data[pos+1]
439 text=''.join(data)
440 return text
441
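# Example (sketch of the substitution above): with tag = {'1': 1001, '2': 'I2', 'id': 1}
#
#     mod_propagator_expression(tag, "P(1,id)*Metric(1,2)")
#
# gives roughly "P(1001,1)*Metric(1001,'I2')": integer replacements are inserted
# as-is, string replacements are quoted, and each argument list is re-wrapped in
# parentheses.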
443 """Return the ALOHA object associated to the user define propagator"""
444
445 if not propa.startswith('1'):
446 propagator = getattr(self.model.propagators, propa)
447 numerator = propagator.numerator
448 denominator = propagator.denominator
449 elif propa == "1L":
450 numerator = "EPSL(1,id) * EPSL(2,id)"
451 denominator = "-1*PVec(-2,id)*PVec(-2,id)*P(-3,id)*P(-3,id) * (P(-1,id)**2 - Mass(id) * Mass(id) + complex(0,1) * Mass(id) * Width(id))"
452 elif propa == "1T":
453 numerator = "-1*PVec(-2,id)*PVec(-2,id) * EPST2(1,id)*EPST2(2,id) + EPST1(1,id)*EPST1(2,id)"
454 denominator = "PVec(-2,id)*PVec(-2,id) * PT(-3,id)*PT(-3,id) * (P(-1,id)**2 - Mass(id) * Mass(id) + complex(0,1) * Mass(id) * Width(id))"
455 elif propa == "1A":
456 numerator = "(P(-2,id)**2 - Mass(id)**2) * P(1,id) * P(2,id)"
457 denominator = "P(-2,id)**2 * Mass(id)**2 * (P(-1,id)**2 - Mass(id) * Mass(id) + complex(0,1) * Mass(id) * Width(id))"
458 elif propa in ["1P"]:
459
460 spin_id = id
461 if (id + 1) // 2 in self.conjg:
462 spin_id += _conjugate_gap + id % 2 - (id +1) % 2
463 if (spin_id % 2):
464 numerator = "UFP(1,id)*UFPC(2,id)"
465 else:
466 numerator = "VFP(1,id)*VFPC(2,id)"
467
468 denominator = "(2*Tnorm(id)*TnormZ(id))*(P(-1,id)*P(-1,id) - Mass(id) * Mass(id) + complex(0,1) * Mass(id) * Width(id))"
469
470 elif propa == "1M":
471
472 spin_id = id
473 if (id + 1) // 2 in self.conjg:
474 spin_id += _conjugate_gap + id % 2 - (id +1) % 2
475 if (spin_id % 2):
476 numerator = "UFM(1,id)*UFMC(2,id)"
477 else:
478 numerator = "VFM(1,id)*VFMC(2,id)"
479 denominator = "(2*Tnorm(id)*TnormZ(id))*(P(-1,id)*P(-1,id) - Mass(id) * Mass(id) + complex(0,1) * Mass(id) * Width(id))"
480 elif propa == "1PS":
481 numerator = "(-1*(P(-1,id)*PBar(-1,id)) * Metric(1, 2) + P(1,id)*PBar(2,id) + PBar(1,id)*P(2,id))"
482 denominator = "(P(-3,id)*PBar(-3,id))*P(-2,id)**2"
483 elif propa == "1N":
484 if spin == 3:
485 numerator = '-1*IdentityL(1,2)'
486 else:
487 numerator = "-1"
488 denominator = "1"
489 else:
490 raise Exception
491
492
493 needPflipping = False
494 if spin in [1,-1]:
495 tag = {'id': id}
496 elif spin == 2:
497
498 if (id + 1) // 2 in self.conjg:
499 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
500 else:
501 spin_id = id
502 if (spin_id % 2):
503
504 needPflipping = True
505 tag ={'1': spin_id, '2': 'I2', 'id': id}
506 else:
507 tag ={'1': 'I2', '2': spin_id, 'id': id}
508 elif spin == 3 :
509 tag ={'1': id, '2': 'I2', 'id': id}
510 elif spin == 4:
511 delta = lambda i,j: aloha_object.Identity(i,j)
512 deltaL = lambda i,j: aloha_object.IdentityL(i,j)
513
514 if (id + 1) // 2 in self.conjg:
515 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
516 else:
517 spin_id = id
518 tag = {'1': 'pr1', '2': 'pr2', 'id':id}
519 if spin_id % 2:
520 needPflipping = True
521
522 propaR = deltaL('pr1',id) * deltaL('pr2', 'I2') * delta('pr1', spin_id) * delta('pr2', 'I3')
523 else:
524 propaR = deltaL('pr1',id) * deltaL('pr2', 'I2') * delta('pr2', spin_id) * delta('pr1', 'I3')
525
526 elif spin == 5 :
527 tag = {'1': _spin2_mult + id, '2': 'I2',
528 '51': 2 * _spin2_mult + id, '52': 'I3', 'id':id}
529
530 numerator = self.mod_propagator_expression(tag, numerator)
531 if denominator:
532 denominator = self.mod_propagator_expression(tag, denominator)
533
534 numerator = self.parse_expression(numerator, needPflipping)
535
536 if denominator:
537 self.denominator = self.parse_expression(denominator, needPflipping)
538 self.denominator = eval(self.denominator)
539 if not isinstance(self.denominator, numbers.Number):
540 self.denominator = self.denominator.simplify().expand().simplify().get((0,))
541 needPflipping = False
542 if spin ==4:
543 return eval(numerator) * propaR
544 else:
545 return eval(numerator)
546
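# Sketch of how custom propagators are resolved: a tag like 'P1N' (or a hypothetical
# 'PMyProp') on the off-shell leg has its leading 'P' stripped and lands here as
# propa='1N' / 'MyProp'. Codes starting with '1' are the built-in special cases
# above; anything else is looked up as a propagator object of the UFO model
# (model.propagators.MyProp), whose numerator and denominator strings are then
# specialized via mod_propagator_expression.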
547
548
549
550
def compute_loop_coefficient(self, lorentz, outgoing):
552
553
554 l_in = [int(tag[1:]) for tag in self.tag if tag.startswith('L')][0]
555 if (l_in + 1) // 2 in self.conjg:
556
557 l_in = l_in + l_in % 2 - (l_in +1) % 2
558 assert l_in != outgoing, 'the incoming Open Loop leg cannot be the outgoing one'
559
560
561
562 Pdep = [aloha_lib.KERNEL.get(P) for P in lorentz.get_all_var_names()
563 if P.startswith('_P')]
564
565 Pdep = set([P for P in Pdep if P.particle in [outgoing, l_in]])
566 for P in Pdep:
567 if P.particle == l_in:
568 sign = 1
569 else:
570 sign = -1
571 id = P.id
572 lorentz_ind = P.lorentz_ind[0]
573 P_Lid = aloha_object.P(lorentz_ind, 'L')
574 P_obj = aloha_object.P(lorentz_ind, P.particle)
575 new_expr = sign*(P_Lid + P_obj)
576 lorentz = lorentz.replace(id, new_expr)
577
578
579 var_veto = ['PL_0', 'PL_1', 'PL_2', 'PL_3']
580 spin = aloha_writers.WriteALOHA.type_to_variable[abs(self.spins[l_in-1])]
581 size = aloha_writers.WriteALOHA.type_to_size[spin]-1
582 var_veto += ['%s%s_%s' % (spin,l_in,i) for i in range(1,size)]
583
584 veto_ids = aloha_lib.KERNEL.get_ids(var_veto)
585
586 lorentz = lorentz.expand(veto = veto_ids)
587 lorentz = lorentz.simplify()
588 coeff_expr = lorentz.split(veto_ids)
589
590 for key, expr in coeff_expr.items():
591 expr = expr.simplify()
592 coeff_expr[key] = expr.factorize()
593 coeff_expr.tag = set(aloha_lib.KERNEL.use_tag)
594
595 return coeff_expr
596
598 """Define the expression"""
599
600 self.expr = lorentz_expr
601
603 """Define the kernel at low level"""
604
605 if not lorentz:
606 logger.info('compute kernel %s' % self.counter)
607 AbstractRoutineBuilder.counter += 1
608 lorentz = eval(self.lorentz_expr)
609
610 if isinstance(lorentz, numbers.Number):
611 self.routine_kernel = lorentz
612 return lorentz
613 lorentz = lorentz.simplify()
614 lorentz = lorentz.expand()
615 lorentz = lorentz.simplify()
616
617 self.routine_kernel = lorentz
618 return lorentz
619
620
621 @staticmethod
623 """return the name of the """
624
625 name = '%s_%s' % (name, outgoing)
626 return name
627
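# Example: get_routine_name('FFV1', 3) -> 'FFV1_3' (the routine for leg 3
# off-shell; outgoing = 0 corresponds to the amplitude itself).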
628 @classmethod
635
638 """A special builder for combine routine if needed to write those
639 explicitely.
640 """
def __init__(self, l_lorentz, model=None):
642 """ initialize the run
643 l_lorentz: list of lorentz information analyzed (UFO format)
644 language: define in which language we write the output
645 modes: 0 for all incoming particles
646 >0 defines the outgoing part (start to count at 1)
647 """
648 AbstractRoutineBuilder.__init__(self,l_lorentz[0], model)
649 lorentz = l_lorentz[0]
650 self.spins = lorentz.spins
651 l_name = [l.name for l in l_lorentz]
652 self.name = aloha_writers.combine_name(l_name[0], l_name[1:], None)
653 self.conjg = []
654 self.tag = []
655 self.outgoing = None
656 self.lorentz_expr = []
657 for i, lor in enumerate(l_lorentz):
658 self.lorentz_expr.append( 'Coup(%s) * (%s)' % (i+1, lor.structure))
659 self.lorentz_expr = ' + '.join(self.lorentz_expr)
660 self.routine_kernel = None
661 self.contracted = {}
662 self.fct = {}
663
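# Example (sketch, assuming two UFO lorentz objects FFV1 and FFV2 with the same
# spins): CombineRoutineBuilder([FFV1, FFV2]) builds a single routine whose kernel is
#
#     'Coup(1) * (<FFV1.structure>) + Coup(2) * (<FFV2.structure>)'
#
# and whose name is the combined one returned by aloha_writers.combine_name.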
665 """ A class to build and store the full set of Abstract ALOHA Routine"""
666
667 lastprint = 0
668
def __init__(self, model_name, write_dir=None, format='Fortran',
670 explicit_combine=False):
671 """ load the UFO model and init the dictionary """
672
673
674 self.explicit_combine = explicit_combine
675
676 model_name_pattern = re.compile(r"^(?P<name>.+)-(?P<rest>[\w\d_]+)$")
677 model_name_re = model_name_pattern.match(model_name)
678 if model_name_re:
679 name = model_name_re.group('name')
680 rest = model_name_re.group("rest")
681 if rest == 'full' or \
682 os.path.isfile(os.path.join(root_path, "models", name,
683 "restrict_%s.dat" % rest)):
684 model_name = model_name_re.group("name")
685
686 self.model = models.load_model(model_name)
687
688
689
690
691
692
693
694
695
696 self.model_pos = os.path.dirname(self.model.__file__)
697
698
699 self.external_routines = []
700
701
702 dict.__init__(self)
703 self.symmetries = {}
704 self.multiple_lor = {}
705
706 if write_dir:
707 self.main(write_dir,format=format)
708
def main(self, output_dir, format='Fortran'):
710 """ Compute if not already compute.
711 Write file in models/MY_MODEL/MY_FORMAT.
712 copy the file to output_dir
713 """
714 ext = {'Fortran':'f','Python':'py','CPP':'h'}
715
716
717
718 if not self.load():
719 self.compute_all()
720 logger.info(' %s aloha routines' % len(self))
721
722
723 if not output_dir:
724 output_dir = os.path.join(self.model_pos, format.lower())
725 logger.debug('aloha output dir is %s' % output_dir)
726 if not os.path.exists(output_dir):
727 os.mkdir(output_dir)
728
729
730 for (name, outgoing), abstract in self.items():
731 routine_name = AbstractRoutineBuilder.get_routine_name(name, outgoing)
732 if not os.path.exists(os.path.join(output_dir, routine_name) + '.' + ext[format]):
733 abstract.write(output_dir, format)
734 else:
735 logger.info('File for %s already present, skipping the writing of this file' % routine_name)
736
737
def save(self, filepos=None):
739 """ save the current model in a pkl file """
740
741 logger.info('save the aloha abstract routine in a pickle file')
742 if not filepos:
743 filepos = os.path.join(self.model_pos,'aloha.pkl')
744
745 fsock = open(filepos, 'wb')
746 t = dict(self)
747 try:
748 six.moves.cPickle.dump(dict(self), fsock)
749 except Exception:
750 logger.info('aloha not saved')
751
def load(self, filepos=None):
753 """ reload the pickle file """
754 return False  # early return: reloading from the pickle cache below is currently disabled
755 if not filepos:
756 filepos = os.path.join(self.model_pos,'aloha.pkl')
757 if os.path.exists(filepos):
758 fsock = open(filepos, 'rb')
759 self.update(six.moves.cPickle.load(fsock))
760 return True
761 else:
762 return False
763
def get(self, lorentzname, outgoing):
765 """ return the AbstractRoutine with a given lorentz name, and for a given
766 outgoing particle """
767
768 try:
769 return self[(lorentzname, outgoing)]
770 except Exception:
771 logger.warning('(%s, %s) is not a valid key' %
772 (lorentzname, outgoing) )
773 return None
774
def get_info(self, info, lorentzname, outgoing, tag, cached=False):
776 """return some information about the aloha routine
777 - "rank": return the rank of the loop function
778 If the cached option is set to true, then the result is stored and
779 recycled if possible.
780 """
781
782 if not aloha.loop_mode and any(t.startswith('L') for t in tag):
783 aloha.loop_mode = True
784
785
786 returned_dict = {}
787
788 if isinstance(info, str):
789 infos = [info]
790 else:
791 infos = info
792
793
794 if hasattr(self, 'cached_interaction_infos'):
795
796 for info_key in infos:
797 all_done = True
798 try:
799 returned_dict[info_key] = self.cached_interaction_infos[\
800 (lorentzname,outgoing,tuple(tag),info_key)]
801 except KeyError:
802
803
804 all_done = False
805 pass
806 if all_done:
807 if isinstance(info, str):
808 return returned_dict[info]
809 else:
810 return returned_dict
811 elif cached:
812 self.cached_interaction_infos = {}
813
814 init = False
815 for info_key in infos:
816 if info_key in returned_dict:
817 continue
818 elif not init:
819
820 lorentz = eval('self.model.lorentz.%s' % lorentzname)
821 abstract = AbstractRoutineBuilder(lorentz)
822 routine = abstract.compute_routine(outgoing, tag, factorize=False)
823 init = True
824
825 assert 'routine' in locals()
826 returned_dict[info_key] = routine.get_info(info_key)
827 if cached:
828
829 self.cached_interaction_infos[\
830 (lorentzname,outgoing,tuple(tag),info_key)]=returned_dict[info_key]
831
832 if isinstance(info, str):
833 return returned_dict[info]
834 else:
835 return returned_dict
836
def set(self, lorentzname, outgoing, abstract_routine):
838 """ add in the dictionary """
839
840 self[(lorentzname, outgoing)] = abstract_routine
841
def compute_all(self, save=True, wanted_lorentz=[], custom_propa=False):
843 """ define all the AbstractRoutine linked to a model """
844
845
846
847 self.look_for_symmetries()
848 conjugate_list = self.look_for_conjugate()
849 self.look_for_multiple_lorentz_interactions()
850
851 if not wanted_lorentz:
852 wanted_lorentz = [l.name for l in self.model.all_lorentz]
853 for lorentz in self.model.all_lorentz:
854 if not lorentz.name in wanted_lorentz:
855
856 continue
857
858 if -1 in lorentz.spins:
859
860 continue
861
862 if lorentz.structure == 'external':
863 for i in range(len(lorentz.spins)):
864 self.external_routines.append('%s_%s' % (lorentz.name, i))
865 continue
866
867
868 routines = [(i,[]) for i in range(len(lorentz.spins)+1)]
869
870 if custom_propa:
871 for vertex in self.model.all_vertices:
872 if lorentz in vertex.lorentz:
873 for i,part in enumerate(vertex.particles):
874 new_prop = False
875 if hasattr(part, 'propagator') and part.propagator:
876 new_prop = ['P%s' % part.propagator.name]
877 elif part.mass.name.lower() == 'zero':
878 new_prop = ['P0']
879 if new_prop and (i+1, new_prop) not in routines:
880 routines.append((i+1, new_prop))
881
882 builder = AbstractRoutineBuilder(lorentz, self.model)
883 self.compute_aloha(builder, routines=routines)
884
885 if lorentz.name in self.multiple_lor:
886 for m in self.multiple_lor[lorentz.name]:
887 for outgoing in range(len(lorentz.spins)+1):
888 try:
889 self[(lorentz.name, outgoing)].add_combine(m)
890 except Exception:
891 pass
892
893
894 if lorentz.name in conjugate_list:
895 conjg_builder_list= builder.define_all_conjugate_builder(\
896 conjugate_list[lorentz.name])
897 for conjg_builder in conjg_builder_list:
898
899 assert conjg_builder_list.count(conjg_builder) == 1
900 self.compute_aloha(conjg_builder, lorentz.name)
901 if lorentz.name in self.multiple_lor:
902 for m in self.multiple_lor[lorentz.name]:
903 for outgoing in range(len(lorentz.spins)+1):
904 realname = conjg_builder.name + ''.join(['C%s' % pair for pair in conjg_builder.conjg])
905 try:
906 self[(realname, outgoing)].add_combine(m)
907 except Exception as error:
908 self[(realname, self.symmetries[lorentz.name][outgoing])].add_combine(m)
909
910 if save:
911 self.save()
912
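# Note on custom_propa above: when enabled, every particle of a vertex that carries
# a UFO 'propagator' attribute gets an extra routine with tag ['P<propagator name>'],
# and massless particles get ['P0'], on top of the default set of routines.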
914 """add a series of Lorentz structure created dynamically"""
915
916 for lor in lorentzlist:
917 if not hasattr(self.model.lorentz, lor.name):
918 setattr(self.model.lorentz, lor.name, lor)
919
921 """ create the requested ALOHA routine.
922 data should be a list of tuple (lorentz, tag, outgoing)
923 tag should be the list of special tag (like conjugation on pair)
924 to apply on the object """
925
926 logger.info('aloha starts to compute helicity amplitudes')
927 start = time.time()
928
929
930 self.look_for_symmetries()
931
932
933 aloha.loop_mode = False
934
935 request = {}
936
937
938
939 for list_l_name, tag, outgoing in data[:]:
940 if outgoing == -1:
941 data.remove((list_l_name, tag, outgoing))
942 l_name = list_l_name[0]
943 lorentz = eval('self.model.lorentz.%s' % l_name)
944 for i in range(len(lorentz.spins)):
945 data.append((list_l_name, tag, i+1))
946
947
948
949 for list_l_name, tag, outgoing in data:
950
951
952 all_tag = tag[:]
953 conjugate = [i for i in tag if isinstance(i, int)]
954
955 tag = [i for i in tag if isinstance(i, str) and not i.startswith('P')]
956 tag = tag + ['C%s'%i for i in conjugate]
957 tag = tag + [i for i in all_tag if isinstance(i, str) and i.startswith('P')]
958
959 conjugate = tuple([int(float(c[1:])) for c in tag if c.startswith('C')])
960 loop = any((t.startswith('L') for t in tag))
961 if loop:
962 aloha.loop_mode = True
963 self.explicit_combine = True
964
965 for l_name in list_l_name:
966 try:
967 request[l_name][conjugate].append((outgoing,tag))
968 except Exception:
969 try:
970 request[l_name][conjugate] = [(outgoing,tag)]
971 except Exception:
972 request[l_name] = {conjugate: [(outgoing,tag)]}
973
974
975 for l_name in request:
976 lorentz = eval('self.model.lorentz.%s' % l_name)
977 if lorentz.structure == 'external':
978 for tmp in request[l_name]:
979 for outgoing, tag in request[l_name][tmp]:
980 name = aloha_writers.get_routine_name(lorentz.name,outgoing=outgoing,tag=tag)
981 if name not in self.external_routines:
982 self.external_routines.append(name)
983 continue
984
985 builder = AbstractRoutineBuilder(lorentz, self.model)
986
987
988 for conjg in request[l_name]:
989
990 def sorting(a,b):
991 if a[0] < b[0]: return -1
992 else: return 1
993 routines = request[l_name][conjg]
994 routines.sort(key=misc.cmp_to_key(sorting))
995 if not conjg:
996
997 self.compute_aloha(builder, routines=routines)
998 else:
999
1000 conjg_builder = builder.define_conjugate_builder(conjg)
1001
1002 self.compute_aloha(conjg_builder, symmetry=lorentz.name,
1003 routines=routines)
1004
1005
1006
1007 for list_l_name, tag, outgoing in data:
1008 if len(list_l_name) ==1:
1009 continue
1010
1011 conjugate = [i for i in tag if isinstance(i, int)]
1012 all_tag = tag[:]
1013 tag = [i for i in tag if isinstance(i, str) and not i.startswith('P')]
1014 tag = tag + ['C%s'%i for i in conjugate]
1015 tag = tag + [i for i in all_tag if isinstance(i, str) and i.startswith('P')]
1016
1017 if not self.explicit_combine:
1018 lorentzname = list_l_name[0]
1019 lorentzname += ''.join(tag)
1020 if (lorentzname, outgoing) in self:
1021 self[(lorentzname, outgoing)].add_combine(list_l_name[1:])
1022 else:
1023 lorentz = eval('self.model.lorentz.%s' % list_l_name[0])
1024 assert lorentz.structure == 'external'
1025 else:
1026 l_lorentz = []
1027 for l_name in list_l_name:
1028 l_lorentz.append(eval('self.model.lorentz.%s' % l_name))
1029 builder = CombineRoutineBuilder(l_lorentz)
1030
1031 for conjg in request[list_l_name[0]]:
1032
1033 def sorting(a,b):
1034 if a[0] < b[0]: return -1
1035 else: return 1
1036 routines = request[list_l_name[0]][conjg]
1037 routines.sort(key=operator.itemgetter(0))
1038 if not conjg:
1039
1040 self.compute_aloha(builder, routines=routines)
1041 else:
1042
1043 conjg_builder = builder.define_conjugate_builder(conjg)
1044
1045 self.compute_aloha(conjg_builder, symmetry=lorentz.name,
1046 routines=routines)
1047
1048 logger.info("aloha creates %s routines in %0.3f s", AbstractRoutineBuilder.counter, time.time()-start)
1049
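# Example of the `data` argument handled above (sketch, with typical UFO lorentz
# names used purely as an illustration):
#
#     data = [ (('FFV1',), [1], 2),          # conjugate fermion pair 1, leg 2 off-shell
#              (('VVV1', 'VVV2'), [], 0) ]   # combined VVV1+VVV2 amplitude routine
#
# entries with outgoing == -1 are expanded above into one entry per leg.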
def compute_aloha(self, builder, symmetry=None, routines=None, tag=[]):
1051 """ define all the AbstractRoutine linked to a given lorentz structure
1052 symmetry allows using the symmetries of another lorentz structure.
1053 routines defines only a subset of the routines."""
1054
1055 name = builder.name
1056 if not symmetry:
1057 symmetry = name
1058 if not routines:
1059 if not tag:
1060 tag = ['C%s' % i for i in builder.conjg]
1061 else:
1062 addon = ['C%s' % i for i in builder.conjg]
1063 tag = [(i,addon +onetag) for i,onetag in tag]
1064 routines = [ tuple([i,tag]) for i in range(len(builder.spins) + 1 )]
1065
1066
1067 for outgoing, tag in routines:
1068 symmetric = self.has_symmetries(symmetry, outgoing, valid_output=routines)
1069 realname = name + ''.join(tag)
1070 if (realname, outgoing) in self:
1071 continue
1072
1073 if symmetric:
1074 self.get(realname, symmetric).add_symmetry(outgoing)
1075 else:
1076 wavefunction = builder.compute_routine(outgoing, tag)
1077
1078 self.set(realname, outgoing, wavefunction)
1079
1080
1082 """define all the AbstractRoutine linked to a given lorentz structure
1083 symmetry authorizes to use the symmetry of anoter lorentz structure.
1084 routines to define only a subset of the routines.
1085 Compare to compute_aloha, each routines are computed independently.
1086 """
1087
1088 name = builder.name
1089 if not routines:
1090 routines = [ tuple([i,[]]) for i in range(len(builder.spins) + 1 )]
1091
1092 for outgoing, tag in routines:
1093 builder.routine_kernel = None
1094 wavefunction = builder.compute_routine(outgoing, tag)
1095 self.set(name, outgoing, wavefunction)
1096
1097
def write(self, output_dir, language):
1099 """ write the full set of Helicity Routine in output_dir"""
1100 for abstract_routine in self.values():
1101 abstract_routine.write(output_dir, language)
1102
1103 for routine in self.external_routines:
1104 self.locate_external(routine, language, output_dir)
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1118
1119
1120
1121
1122
1123
1124
1125
1126
1128 """search a valid external file and copy it to output_dir directory"""
1129
1130 language_to_ext = {'Python': 'py',
1131 'Fortran' : 'f',
1132 'CPP': 'C'}
1133 ext = language_to_ext[language]
1134 paths = [os.path.join(self.model_pos, language), self.model_pos,
1135 os.path.join(root_path, 'aloha', 'template_files', )]
1136
1137 ext_files = []
1138 for path in paths:
1139 base, amp = name.rsplit('_',1)
1140 ext_files = misc.glob('%s*_%s.%s' % (base,amp, ext), path)
1141 if ext_files:
1142 break
1143 else:
1144
1145 raise ALOHAERROR('No external routine \"%s.%s\" in directories\n %s' % \
1146 (name, ext, '\n'.join(paths)))
1147
1148 if output_dir:
1149 for filepath in ext_files:
1150
1151 files.cp(filepath, output_dir)
1152
1153 return ext_files
1154
1155
1156
1158 """Search some symmetries in the vertices.
1159 We search if some identical particles are in a vertices in order
1160 to avoid to compute symmetrical contributions"""
1161
1162 for vertex in self.model.all_vertices:
1163 for i, part1 in enumerate(vertex.particles):
1164 for j in range(i-1,-1,-1):
1165 part2 = vertex.particles[j]
1166 if part1.pdg_code == part2.pdg_code and part1.color == 1:
1167 if part1.spin == 2 and (i % 2 != j % 2 ):
1168 continue
1169 for lorentz in vertex.lorentz:
1170 if lorentz.name in self.symmetries:
1171 if i+1 in self.symmetries[lorentz.name]:
1172 self.symmetries[lorentz.name][i+1] = max(self.symmetries[lorentz.name][i+1], j+1)
1173 else:
1174 self.symmetries[lorentz.name][i+1] = j+1
1175 else:
1176 self.symmetries[lorentz.name] = {i+1:j+1}
1177 break
1178
1180 """Search the interaction associate with more than one lorentz structure.
1181 If those lorentz structure have the same order and the same color then
1182 associate a multiple lorentz routines to ALOHA """
1183
1184 orders = {}
1185 for coup in self.model.all_couplings:
1186 orders[coup.name] = str(coup.order)
1187
1188 for vertex in self.model.all_vertices:
1189 if len(vertex.lorentz) == 1:
1190 continue
1191
1192
1193
1194
1195
1196 combine = {}
1197 for (id_col, id_lor), coups in vertex.couplings.items():
1198 if not isinstance(coups, list):
1199 coups = [coups]
1200 for coup in coups:
1201 order = orders[coup.name]
1202 key = (id_col, order)
1203 if key in combine:
1204 combine[key].append(id_lor)
1205 else:
1206 combine[key] = [id_lor]
1207
1208
1209 for list_lor in combine.values():
1210 if len(list_lor) == 1:
1211 continue
1212 list_lor.sort()
1213 main = vertex.lorentz[list_lor[0]].name
1214 if main not in self.multiple_lor:
1215 self.multiple_lor[main] = []
1216
1217 info = tuple([vertex.lorentz[id].name for id in list_lor[1:]])
1218 if info not in self.multiple_lor[main]:
1219 self.multiple_lor[main].append(info)
1220
1221
def has_symmetries(self, l_name, outgoing, out=None, valid_output=None):
1223 """ This returns out if no symmetries are available, otherwise it finds
1224 the lowest equivalent outgoing by recursivally calling this function.
1225 auth is a list of authorize output, if define"""
1226
1227 try:
1228 equiv = self.symmetries[l_name][outgoing]
1229 except Exception:
1230 return out
1231 else:
1232 if not valid_output or equiv in valid_output:
1233 return self.has_symmetries(l_name, equiv, out=equiv,
1234 valid_output=valid_output)
1235 else:
1236 return self.has_symmetries(l_name, equiv, out=out,
1237 valid_output=valid_output)
1238
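# Example (sketch): with self.symmetries == {'VVV1': {3: 2, 2: 1}},
# has_symmetries('VVV1', 3) walks 3 -> 2 -> 1 and returns 1, so the routine for
# leg 3 can simply reuse (via add_symmetry) the one computed for leg 1.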
1240 """ create a list for the routine needing to be conjugate """
1241
1242
1243 need = False
1244 for particle in self.model.all_particles:
1245 if particle.spin == 2 and particle.selfconjugate:
1246 need = True
1247 break
1248
1249 if not need:
1250 for interaction in self.model.all_vertices:
1251 fermions = [p for p in interaction.particles if p.spin == 2]
1252 for i in range(0, len(fermions), 2):
1253 if fermions[i].pdg_code * fermions[i+1].pdg_code > 0:
1254
1255 need = True
1256 break
1257
1258
1259 if not need:
1260 return {}
1261
1262 conjugate_request = {}
1263
1264 for vertex in self.model.all_vertices:
1265 for i in range(0, len(vertex.particles), 2):
1266 part1 = vertex.particles[i]
1267 if part1.spin !=2:
1268
1269 break
1270
1271 if part1.selfconjugate:
1272 continue
1273 part2 = vertex.particles[i + 1]
1274 if part2.selfconjugate:
1275 continue
1276
1277
1278 for lorentz in vertex.lorentz:
1279 try:
1280 conjugate_request[lorentz.name].add(i//2+1)
1281 except Exception:
1282 conjugate_request[lorentz.name] = set([i//2+1])
1283
1284 for elem in conjugate_request:
1285 conjugate_request[elem] = list(conjugate_request[elem])
1286
1287 return conjugate_request
1288
1292 """find the list of Helicity routine in the directory and create a list
1293 of those files (but with compile extension)"""
1294
1295 aloha_files = []
1296
1297
1298 alohafile_pattern = re.compile(r'''_\d%s''' % file_ext)
1299 for filename in os.listdir(aloha_dir):
1300 if os.path.isfile(os.path.join(aloha_dir, filename)):
1301 if alohafile_pattern.search(filename):
1302 aloha_files.append(filename.replace(file_ext, comp_ext))
1303
1304 if os.path.exists(pjoin(aloha_dir, 'additional_aloha_function.f')):
1305 aloha_files.append('additional_aloha_function.o')
1306
1307 text="ALOHARoutine = "
1308 text += ' '.join(aloha_files)
1309 text +='\n'
1310
1311
1312 open(os.path.join(aloha_dir, 'aloha_file.inc'), 'w').write(text)
1313
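# Example of the generated aloha_file.inc (sketch, Fortran case with file_ext='.f'
# and comp_ext='.o'; routine names are illustrative):
#
#     ALOHARoutine = FFV1_0.o FFV1_3.o additional_aloha_function.o
#
# i.e. one object file per matched helicity-routine source, plus the optional
# additional_aloha_function.o when that source file is present.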
1317
def create_prop_library(tag, lib={}):
1318 def create(obj):
1319 """simplify, expand, and simplify again the given ALOHA object"""
1320 obj= obj.simplify()
1321 obj = obj.expand()
1322 obj = obj.simplify()
1323 return obj
1324
1325
1326 old_tag = set(aloha_lib.KERNEL.use_tag)
1327 name, i = tag
1328 if name == "Spin2Prop":
1329 lib[('Spin2Prop',i)] = create( Spin2Propagator(_spin2_mult + i, \
1330 2 * _spin2_mult + i,'I2','I3', i) )
1331 elif name == "Spin2PropMassless":
1332 lib[('Spin2PropMassless',i)] = create( Spin2masslessPropagator(
1333 _spin2_mult + i, 2 * _spin2_mult + i,'I2','I3'))
1334
1335 aloha_lib.KERNEL.use_tag = old_tag
1336 return lib
1337
1338
1339 if '__main__' == __name__:
1340 logging.basicConfig(level=0)
1341
1342 import profile
1343
1344
1345 start = time.time()
def write(alohagenerator):
1352 alohagenerator = main()
1353 logger.info('done in %s s' % (time.time()-start))
1354 write(alohagenerator)
1355
1356
1357 stop = time.time()
1358 logger.info('done in %s s' % (stop-start))
1359