1
2
3
4
5
6
7
8
9
10
11
12
13
14
15 from __future__ import division
16 from __future__ import absolute_import
17 import cmath
18 import copy
19 import operator
20 import six.moves.cPickle
21 import glob
22 import logging
23 import numbers
24 import os
25 import re
26 import shutil
27 import sys
28 import time
29 from madgraph.interface.tutorial_text import output
30
31 from six.moves import range
32 from six.moves import zip
33
34 root_path = os.path.split(os.path.dirname(os.path.realpath( __file__ )))[0]
35 sys.path.append(root_path)
36 from aloha.aloha_object import *
37 import aloha
38 import aloha.aloha_writers as aloha_writers
39 import aloha.aloha_lib as aloha_lib
40 import aloha.aloha_object as aloha_object
41 import aloha.aloha_parsers as aloha_parsers
42 import aloha.aloha_fct as aloha_fct
43 import models
44 try:
45 import madgraph.iolibs.files as files
46 import madgraph.various.misc as misc
47 except Exception:
48 import aloha.files as files
49 import aloha.misc as misc
50
51
52 aloha_path = os.path.dirname(os.path.realpath(__file__))
53 logger = logging.getLogger('ALOHA')
54
55 _conjugate_gap = 50
56 _spin2_mult = 1000
57
58 pjoin = os.path.join
59
60 ALOHAERROR = aloha.ALOHAERROR
63 """ store the result of the computation of Helicity Routine
64 this is use for storing and passing to writer """
65
def __init__(self, expr, outgoing, spins, name, infostr, denom=None):
    """Store the result of the computation of a Helicity Routine.

    expr: analytic expression of the routine (passed later to the writer)
    outgoing: which leg is off-shell (0 means all legs incoming)
    spins: list of the (2s+1) spin values of the external particles
    name: name of the lorentz structure
    infostr: information string kept for the writer
    denom: optional denominator (propagator) expression
    """

    self.spins = spins
    self.expr = expr
    self.denominator = denom
    self.name = name
    self.outgoing = outgoing
    self.infostr = infostr
    self.symmetries = []     # equivalent outgoing configurations (see add_symmetry)
    self.combined = []       # combine rules added via add_combine
    self.tag = []            # special tags (conjugation, loop, propagator, MP, ...)
    self.contracted = {}
80
81
83 """ add an outgoing """
84
85 if not outgoing in self.symmetries:
86 self.symmetries.append(outgoing)
87
89 """add a combine rule """
90
91 if lor_list not in self.combined:
92 self.combined.append(lor_list)
93
def write(self, output_dir, language='Fortran', mode='self', combine=True,**opt):
    """Write the content of the object through the aloha writer.

    Returns the produced text (a tuple of texts for some writers).
    """
    writer = aloha_writers.WriterFactory(self, language, output_dir, self.tag)
    text = writer.write(mode=mode, **opt)
    if combine:
        # append the routines for the lorentz structures combined with this one
        for grouped in self.combined:
            if isinstance(text, tuple):
                text = tuple([old.__add__(new) for old, new in zip(text,
                             writer.write_combined(grouped, mode=mode+'no_include', **opt))])
            else:
                text += writer.write_combined(grouped, mode=mode+'no_include', **opt)
    # also produce the multiple-precision version once (the 'MP' tag
    # guards against infinite recursion of this re-entrant call)
    if aloha.mp_precision and 'MP' not in self.tag:
        self.tag.append('MP')
        text += self.write(output_dir, language, mode, **opt)
    return text
109
111 """return some information on the routine
112 """
113 if info == "rank":
114 assert isinstance(self.expr, aloha_lib.SplitCoefficient)
115 rank= 1
116 for coeff in self.expr:
117 rank = max(sum(coeff), rank)
118 return rank -1
119 else:
120 raise ALOHAERROR('%s is not a valid information that can be computed' % info)
121
124 """ Launch the creation of the Helicity Routine"""
125
126 prop_lib = {}
127 counter = 0
128
130 """ An error class for ALOHA"""
131
def __init__(self, lorentz, model=None):
    """ initialize the run
    lorentz: the lorentz information analyzed (UFO format)
    model: optional model object (needed for custom propagators)
    """

    self.spins = [s for s in lorentz.spins]
    self.name = lorentz.name
    self.conjg = []              # fermion pairs on which conjugation is applied
    self.tag = []
    self.outgoing = None         # off-shell leg, set before computing a routine
    self.lorentz_expr = lorentz.structure
    self.routine_kernel = None   # cached parsed/evaluated expression
    self.spin2_massless = False
    self.spin32_massless = False
    self.contracted = {}
    self.fct = {}
    self.model = model
    self.denominator = None

    # timestamp throttle for progress logging
    self.lastprint = 0

    # inline the model form factors directly inside the lorentz expression
    if hasattr(lorentz, 'formfactors') and lorentz.formfactors:
        for formf in lorentz.formfactors:
            pat = re.compile(r'\b%s\b' % formf.name)
            self.lorentz_expr = pat.sub('(%s)' % formf.value, self.lorentz_expr)
161
171
173 """ return the full set of AbstractRoutineBuilder linked to fermion
174 clash"""
175
176 solution = []
177
178 for i, pair in enumerate(pair_list):
179 new_builder = self.define_conjugate_builder(pair)
180 solution.append(new_builder)
181 solution += new_builder.define_all_conjugate_builder(pair_list[i+1:])
182 return solution
183
185 """ return a AbstractRoutineBuilder for the conjugate operation.
186 If they are more than one pair of fermion. Then use pair to claim which
187 one is conjugated"""
188
189 new_builder = copy.copy(self)
190 new_builder.conjg = self.conjg[:]
191 try:
192 for index in pairs:
193 new_builder.apply_conjugation(index)
194 except TypeError:
195 new_builder.apply_conjugation(pairs)
196 return new_builder
197
199 """ apply conjugation on self object"""
200
201 nb_fermion = len([1 for s in self.spins if s % 2 == 0])
202 if isinstance(pair, tuple):
203 if len(pair) ==1 :
204 pair = pair[0]
205 else:
206 raise Exception
207
208
209 if (pair > 1 or nb_fermion >2) and not self.conjg:
210
211 data = aloha_fct.get_fermion_flow(self.lorentz_expr, nb_fermion)
212 target = dict([(2*i+1,2*i+2) for i in range(nb_fermion//2)])
213 if not data == target:
214 text = """Unable to deal with 4(or more) point interactions
215 in presence of majorana particle/flow violation"""
216 raise ALOHAERROR(text)
217
218 old_id = 2 * pair - 1
219 new_id = _conjugate_gap + old_id
220
221 self.kernel_tag = set()
222 aloha_lib.KERNEL.use_tag = set()
223 if not self.routine_kernel or isinstance(self.routine_kernel, str):
224 self.routine_kernel = eval(self.parse_expression(self.lorentz_expr))
225 self.kernel_tag = aloha_lib.KERNEL.use_tag
226
227
228 self.routine_kernel = \
229 C(new_id, old_id + 1) * self.routine_kernel * C(new_id + 1, old_id)
230
231 self.lorentz_expr = '('+self.lorentz_expr+') * C(%s,%s) * C(%s,%s)' % \
232 (new_id, old_id + 1, new_id + 1, old_id )
233
234 self.conjg.append(pair)
235
236
256
258 """change the sign of P for outcoming fermion in order to
259 correct the mismatch convention between HELAS and FR"""
260
261 if not expr:
262 expr = self.lorentz_expr
263
264 if need_P_sign:
265 expr = re.sub(r'\b(P|PSlash)\(', r'-\1(', expr)
266
267 calc = aloha_parsers.ALOHAExpressionParser()
268 lorentz_expr = calc.parse(expr)
269 return lorentz_expr
270
272 """compute the abstract routine associate to this mode """
273
274
275 aloha_lib.KERNEL.use_tag=set()
276
277 nb_spinor = 0
278 outgoing = self.outgoing
279 if (outgoing + 1) // 2 in self.conjg:
280
281 outgoing = outgoing + outgoing % 2 - (outgoing +1) % 2
282
283 if not self.routine_kernel:
284 AbstractRoutineBuilder.counter += 1
285 if self.tag == []:
286 logger.info('aloha creates %s routines' % self.name)
287 elif AbstractALOHAModel.lastprint < time.time() - 1:
288 AbstractALOHAModel.lastprint = time.time()
289 logger.info('aloha creates %s set of routines with options: %s' \
290 % (self.name, ','.join(self.tag)) )
291 try:
292 lorentz = self.parse_expression()
293 self.routine_kernel = lorentz
294 lorentz = eval(lorentz)
295 except NameError as error:
296 logger.error('unknow type in Lorentz Evaluation:%s'%str(error))
297 raise ALOHAERROR('unknow type in Lorentz Evaluation: %s ' % str(error))
298 else:
299 self.kernel_tag = set(aloha_lib.KERNEL.use_tag)
300 elif isinstance(self.routine_kernel,str):
301 lorentz = eval(self.routine_kernel)
302 aloha_lib.KERNEL.use_tag = set(self.kernel_tag)
303 else:
304 lorentz = copy.copy(self.routine_kernel)
305 aloha_lib.KERNEL.use_tag = set(self.kernel_tag)
306 for (i, spin ) in enumerate(self.spins):
307 id = i + 1
308
309 if id == outgoing:
310
311
312 propa = [t[1:] for t in self.tag if t.startswith('P')]
313 if propa == ['0']:
314 if spin == 3 and aloha.unitary_gauge == 2:
315 lorentz *= complex(0,1) * self.get_custom_propa('1PS', spin, id)
316 continue
317 else:
318 massless = True
319 self.denominator = None
320 elif propa == []:
321 massless = False
322 self.denominator = None
323 else:
324 lorentz *= complex(0,1) * self.get_custom_propa(propa[0], spin, id)
325 continue
326
327
328
329 if spin in [1,-1]:
330 lorentz *= complex(0,1)
331 elif spin == 2:
332
333 if (id + 1) // 2 in self.conjg:
334 id += _conjugate_gap + id % 2 - (id +1) % 2
335 if (id % 2):
336
337 lorentz *= complex(0,1) * SpinorPropagatorout(id, 'I2', outgoing)
338 else:
339
340 lorentz *= complex(0,1) * SpinorPropagatorin('I2', id, outgoing)
341 elif spin == 3 :
342 if massless or not aloha.unitary_gauge:
343 lorentz *= VectorPropagatorMassless(id, 'I2', id)
344 else:
345 lorentz *= VectorPropagator(id, 'I2', id)
346 elif spin == 4:
347
348 if (id + 1) // 2 in self.conjg:
349 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
350 else:
351 spin_id = id
352 nb_spinor += 1
353 if not massless and (spin_id % 2):
354 lorentz *= complex(0,1) * Spin3halfPropagatorout(id, 'I2', spin_id,'I3', outgoing)
355 elif not massless and not (spin_id % 2):
356 lorentz *= complex(0,1) * Spin3halfPropagatorin('I2', id , 'I3', spin_id, outgoing)
357 elif spin_id %2:
358 lorentz *= complex(0,1) * Spin3halfPropagatorMasslessOut(id, 'I2', spin_id,'I3', outgoing)
359 else :
360 lorentz *= complex(0,1) * Spin3halfPropagatorMasslessIn('I2', id, 'I3', spin_id, outgoing)
361
362 elif spin == 5 :
363
364 if massless:
365 lorentz *= complex(0,1) * Spin2masslessPropagator(_spin2_mult + id, \
366 2 * _spin2_mult + id,'I2','I3')
367 else:
368 lorentz *= complex(0,1) * Spin2Propagator(_spin2_mult + id, \
369 2 * _spin2_mult + id,'I2','I3', id)
370 else:
371 raise self.AbstractALOHAError(
372 'The spin value %s (2s+1) is not supported yet' % spin)
373 else:
374
375 if spin in [1,-1]:
376 lorentz *= Scalar(id)
377 elif spin == 2:
378
379 if (id+1) // 2 in self.conjg:
380 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
381 else:
382 spin_id = id
383 lorentz *= Spinor(spin_id, id)
384 elif spin == 3:
385 lorentz *= Vector(id, id)
386 elif spin == 4:
387
388 if (id+1) // 2 in self.conjg:
389 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
390 else:
391 spin_id = id
392 nb_spinor += 1
393 lorentz *= Spin3Half(id, spin_id, id)
394 elif spin == 5:
395 lorentz *= Spin2(1 * _spin2_mult + id, 2 * _spin2_mult + id, id)
396 else:
397 raise self.AbstractALOHAError(
398 'The spin value %s (2s+1) is not supported yet' % spin)
399
400
401 if not outgoing:
402 lorentz *= complex(0,-1)
403
404
405 lorentz = lorentz.simplify()
406
407
408 if any((tag.startswith('L') for tag in self.tag if len(tag)>1)):
409 return self.compute_loop_coefficient(lorentz, outgoing)
410
411 lorentz = lorentz.expand()
412 lorentz = lorentz.simplify()
413
414 if factorize:
415 lorentz = lorentz.factorize()
416
417 lorentz.tag = set(aloha_lib.KERNEL.use_tag)
418 return lorentz
419
420 @staticmethod
422 """Change the index of the propagator to match the current need"""
423
424 data = re.split(r'(\b[a-zA-Z]\w*?)\(([\'\w,\s\"\+\-]*?)\)',text)
425 to_change = {}
426 for old, new in tag.items():
427 if isinstance(new, str):
428 new='\'%s\'' % new
429 else:
430 new = str(new)
431 to_change[r'%s' % old] = new
432 pos=-2
433 while pos +3 < len(data):
434 pos = pos+3
435 ltype = data[pos]
436 if ltype != 'complex':
437 data[pos+1] = re.sub(r'\b(?<!-)(%s)\b' % '|'.join(to_change),
438 lambda x: to_change[x.group()], data[pos+1])
439 data[pos+1] = '(%s)' % data[pos+1]
440 text=''.join(data)
441 return text
442
444 """Return the ALOHA object associated to the user define propagator"""
445
446 if not propa.startswith('1'):
447 propagator = getattr(self.model.propagators, propa)
448 numerator = propagator.numerator
449 denominator = propagator.denominator
450 elif propa == "1L":
451 numerator = "EPSL(1,id) * EPSL(2,id)"
452 denominator = "-1*PVec(-2,id)*PVec(-2,id)*P(-3,id)*P(-3,id) * (P(-1,id)**2 - Mass(id) * Mass(id) + complex(0,1) * Mass(id) * Width(id))"
453 elif propa == "1T":
454 numerator = "-1*PVec(-2,id)*PVec(-2,id) * EPST2(1,id)*EPST2(2,id) + EPST1(1,id)*EPST1(2,id)"
455 denominator = "PVec(-2,id)*PVec(-2,id) * PT(-3,id)*PT(-3,id) * (P(-1,id)**2 - Mass(id) * Mass(id) + complex(0,1) * Mass(id) * Width(id))"
456 elif propa == "1A":
457 numerator = "(P(-2,id)**2 - Mass(id)**2) * P(1,id) * P(2,id)"
458 denominator = "P(-2,id)**2 * Mass(id)**2 * (P(-1,id)**2 - Mass(id) * Mass(id) + complex(0,1) * Mass(id) * Width(id))"
459 elif propa in ["1P"]:
460
461 spin_id = id
462 if (id + 1) // 2 in self.conjg:
463 spin_id += _conjugate_gap + id % 2 - (id +1) % 2
464 if (spin_id % 2):
465 numerator = "UFP(1,id)*UFPC(2,id)"
466 else:
467 numerator = "VFP(1,id)*VFPC(2,id)"
468
469 denominator = "(2*Tnorm(id)*TnormZ(id))*(P(-1,id)*P(-1,id) - Mass(id) * Mass(id) + complex(0,1) * Mass(id) * Width(id))"
470
471 elif propa == "1M":
472
473 spin_id = id
474 if (id + 1) // 2 in self.conjg:
475 spin_id += _conjugate_gap + id % 2 - (id +1) % 2
476 if (spin_id % 2):
477 numerator = "UFM(1,id)*UFMC(2,id)"
478 else:
479 numerator = "VFM(1,id)*VFMC(2,id)"
480 denominator = "(2*Tnorm(id)*TnormZ(id))*(P(-1,id)*P(-1,id) - Mass(id) * Mass(id) + complex(0,1) * Mass(id) * Width(id))"
481 elif propa == "1PS":
482 numerator = "(-1*(P(-1,id)*PBar(-1,id)) * Metric(1, 2) + P(1,id)*PBar(2,id) + PBar(1,id)*P(2,id))"
483 denominator = "(P(-3,id)*PBar(-3,id))*P(-2,id)**2"
484 elif propa == "1N":
485 if spin == 3:
486 numerator = '-1*IdentityL(1,2)'
487 else:
488 numerator = "-1"
489 denominator = "1"
490 else:
491 raise Exception
492
493
494 needPflipping = False
495 if spin in [1,-1]:
496 tag = {'id': id}
497 elif spin == 2:
498
499 if (id + 1) // 2 in self.conjg:
500 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
501 else:
502 spin_id = id
503 if (spin_id % 2):
504
505 needPflipping = True
506 tag ={'1': spin_id, '2': 'I2', 'id': id}
507 else:
508 tag ={'1': 'I2', '2': spin_id, 'id': id}
509 elif spin == 3 :
510 tag ={'1': id, '2': 'I2', 'id': id}
511 elif spin == 4:
512 delta = lambda i,j: aloha_object.Identity(i,j)
513 deltaL = lambda i,j: aloha_object.IdentityL(i,j)
514
515 if (id + 1) // 2 in self.conjg:
516 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
517 else:
518 spin_id = id
519 tag = {'1': 'pr1', '2': 'pr2', 'id':id}
520 if spin_id % 2:
521 needPflipping = True
522
523 propaR = deltaL('pr1',id) * deltaL('pr2', 'I2') * delta('pr1', spin_id) * delta('pr2', 'I3')
524 else:
525 propaR = deltaL('pr1',id) * deltaL('pr2', 'I2') * delta('pr2', spin_id) * delta('pr1', 'I3')
526
527 elif spin == 5 :
528 tag = {'1': _spin2_mult + id, '2': 'I2',
529 '51': 2 * _spin2_mult + id, '52': 'I3', 'id':id}
530
531 numerator = self.mod_propagator_expression(tag, numerator)
532 if denominator:
533 denominator = self.mod_propagator_expression(tag, denominator)
534
535 numerator = self.parse_expression(numerator, needPflipping)
536
537 if denominator:
538 self.denominator = self.parse_expression(denominator, needPflipping)
539 self.denominator = eval(self.denominator)
540 if not isinstance(self.denominator, numbers.Number):
541 self.denominator = self.denominator.simplify().expand().simplify().get((0,))
542 needPflipping = False
543 if spin ==4:
544 return eval(numerator) * propaR
545 else:
546 return eval(numerator)
547
548
549
550
551
553
554
# incoming loop leg, taken from the (single) 'L<i>' tag
l_in = [int(tag[1:]) for tag in self.tag if tag.startswith('L')][0]
if (l_in + 1) // 2 in self.conjg:
    # for a conjugated pair the role of the two spinors is swapped
    l_in = l_in + l_in % 2 - (l_in +1) % 2
assert l_in != outgoing, 'incoming Open Loop can not be the outcoming one'

# momenta on which the expression depends ('_P' variables of the kernel)
Pdep = [aloha_lib.KERNEL.get(P) for P in lorentz.get_all_var_names()
        if P.startswith('_P')]
# only the loop legs need the shift by the loop momentum
Pdep = set([P for P in Pdep if P.particle in [outgoing, l_in]])
for P in Pdep:
    if P.particle == l_in:
        sign = 1
    else:
        sign = -1
    id = P.id
    lorentz_ind = P.lorentz_ind[0]
    # replace P(part) by +/- (P(L) + P(part)): loop-momentum routing
    P_Lid = aloha_object.P(lorentz_ind, 'L')
    P_obj = aloha_object.P(lorentz_ind, P.particle)
    new_expr = sign*(P_Lid + P_obj)
    lorentz = lorentz.replace(id, new_expr)

# variables that must NOT be expanded: the loop momentum components and
# the components of the incoming loop wavefunction
var_veto = ['PL_0', 'PL_1', 'PL_2', 'PL_3']
spin = aloha_writers.WriteALOHA.type_to_variable[abs(self.spins[l_in-1])]
size = aloha_writers.WriteALOHA.type_to_size[spin]-1
var_veto += ['%s%s_%s' % (spin,l_in,i) for i in range(1,size)]

veto_ids = aloha_lib.KERNEL.get_ids(var_veto)

lorentz = lorentz.expand(veto = veto_ids)
lorentz = lorentz.simplify()
# split the expression per coefficient of the vetoed variables
coeff_expr = lorentz.split(veto_ids)

for key, expr in coeff_expr.items():
    expr = expr.simplify()
    coeff_expr[key] = expr.factorize()
coeff_expr.tag = set(aloha_lib.KERNEL.use_tag)

return coeff_expr
597
599 """Define the expression"""
600
601 self.expr = lorentz_expr
602
604 """Define the kernel at low level"""
605
606 if not lorentz:
607 logger.info('compute kernel %s' % self.counter)
608 AbstractRoutineBuilder.counter += 1
609 lorentz = eval(self.lorentz_expr)
610
611 if isinstance(lorentz, numbers.Number):
612 self.routine_kernel = lorentz
613 return lorentz
614 lorentz = lorentz.simplify()
615 lorentz = lorentz.expand()
616 lorentz = lorentz.simplify()
617
618 self.routine_kernel = lorentz
619 return lorentz
620
621
622 @staticmethod
624 """return the name of the """
625
626 name = '%s_%s' % (name, outgoing)
627 return name
628
629 @classmethod
636
639 """A special builder for combine routine if needed to write those
640 explicitely.
641 """
def __init__(self, l_lorentz, model=None):
    """ initialize the run
    l_lorentz: list of lorentz information analyzed (UFO format)
    model: optional model object
    """
    AbstractRoutineBuilder.__init__(self,l_lorentz[0], model)
    lorentz = l_lorentz[0]
    self.spins = lorentz.spins
    l_name = [l.name for l in l_lorentz]
    # combined name built from all the lorentz-structure names
    self.name = aloha_writers.combine_name(l_name[0], l_name[1:], None)
    self.conjg = []
    self.tag = []
    self.outgoing = None
    # sum of the structures, each weighted by its own coupling Coup(i)
    self.lorentz_expr = []
    for i, lor in enumerate(l_lorentz):
        self.lorentz_expr.append( 'Coup(%s) * (%s)' % (i+1, lor.structure))
    self.lorentz_expr = ' + '.join(self.lorentz_expr)
    self.routine_kernel = None
    self.contracted = {}
    self.fct = {}
664
666 """ A class to build and store the full set of Abstract ALOHA Routine"""
667
668 lastprint = 0
669
def __init__(self, model_name, write_dir=None, format='Fortran',
             explicit_combine=False):
    """ load the UFO model and init the dictionary """

    self.explicit_combine = explicit_combine

    # "MODEL-restriction" names: strip the suffix when it is the special
    # '-full' marker or when the matching restriction file exists.
    # NOTE: raw string fixes the invalid escape sequences \w / \d that the
    # original non-raw pattern relied on.
    model_name_pattern = re.compile(r"^(?P<name>.+)-(?P<rest>[\w\d_]+)$")
    model_name_re = model_name_pattern.match(model_name)
    if model_name_re:
        name = model_name_re.group('name')
        rest = model_name_re.group("rest")
        if rest == 'full' or \
           os.path.isfile(os.path.join(root_path, "models", name,
                                       "restrict_%s.dat" % rest)):
            model_name = model_name_re.group("name")

    self.model = models.load_model(model_name)

    self.model_pos = os.path.dirname(self.model.__file__)

    # routines provided by external files rather than generated
    self.external_routines = []

    dict.__init__(self)
    self.symmetries = {}      # lorentz name -> {leg: symmetric leg}
    self.multiple_lor = {}    # lorentz name -> combined structures

    if write_dir:
        self.main(write_dir,format=format)
709
def main(self, output_dir, format='Fortran'):
    """ Compute if not already compute.
        Write file in models/MY_MODEL/MY_FORMAT.
        copy the file to output_dir
    """
    # language -> file extension of the generated routines
    ext = {'Fortran':'f','Python':'py','CPP':'h'}

    # compute the routines (load() currently always returns False)
    if not self.load():
        self.compute_all()
    logger.info(' %s aloha routine' % len(self))

    # default output location: inside the model directory, per language
    if not output_dir:
        output_dir = os.path.join(self.model_pos, format.lower())
    logger.debug('aloha output dir is %s' % output_dir)
    if not os.path.exists(output_dir):
        os.mkdir(output_dir)

    # write every routine, skipping files that are already present
    for (name, outgoing), abstract in self.items():
        routine_name = AbstractRoutineBuilder.get_routine_name(name, outgoing)
        if not os.path.exists(os.path.join(output_dir, routine_name) + '.' + ext[format]):
            abstract.write(output_dir, format)
        else:
            logger.info('File for %s already present, skip the writing of this file' % routine_name)
737
738
def save(self, filepos=None):
    """Save the current set of abstract routines in a pickle file.

    filepos: target path (default: <model_pos>/aloha.pkl). A failure to
    save is logged but never fatal.
    """

    logger.info('save the aloha abstract routine in a pickle file')
    if not filepos:
        filepos = os.path.join(self.model_pos,'aloha.pkl')

    try:
        # pickle requires a binary stream; 'with' guarantees the handle
        # is closed even on failure (the original opened in text mode and
        # never closed the file)
        with open(filepos, 'wb') as fsock:
            six.moves.cPickle.dump(dict(self), fsock)
    except Exception:
        logger.info('aloha not saved')
752
def load(self, filepos=None):
    """Reload the routine dictionary from the pickle file.

    NOTE: loading is currently disabled (the routines are always
    recomputed); the code below is kept for when it is re-enabled.
    """
    return False
    if not filepos:
        filepos = os.path.join(self.model_pos,'aloha.pkl')
    if os.path.exists(filepos):
        # pickle data must be read in binary mode
        with open(filepos, 'rb') as fsock:
            self.update(six.moves.cPickle.load(fsock))
        return True
    else:
        return False
764
def get(self, lorentzname, outgoing):
    """ return the AbstractRoutine with a given lorentz name, and for a given
    outgoing particle (None, with a warning, when the key is unknown)"""

    try:
        return self[(lorentzname, outgoing)]
    except KeyError:
        # only a missing key is expected here; any other error propagates
        logger.warning('(%s, %s) is not a valid key' %
                       (lorentzname, outgoing) )
        return None
775
def get_info(self, info, lorentzname, outgoing, tag, cached=False):
    """return some information about the aloha routine
        - "rank": return the rank of the loop function
    If the cached option is set to true, then the result is stored and
    recycled if possible. 'info' may be a single key or a list of keys
    (a dict is returned in the latter case).
    """

    # a loop tag forces aloha into loop mode
    if not aloha.loop_mode and any(t.startswith('L') for t in tag):
        aloha.loop_mode = True

    returned_dict = {}

    if isinstance(info, str):
        infos = [info]
    else:
        infos = info

    if hasattr(self, 'cached_interaction_infos'):
        # recycle whatever was already computed for this routine
        for info_key in infos:
            try:
                # bugfix: index by info_key (the current item) rather than
                # by 'info', which is wrong whenever a list is requested
                returned_dict[info_key] = self.cached_interaction_infos[\
                                 (lorentzname,outgoing,tuple(tag),info_key)]
            except KeyError:
                # not computed yet
                pass
    elif cached:
        self.cached_interaction_infos = {}

    init = False
    for info_key in infos:
        if info_key in returned_dict:
            continue
        elif not init:
            # compute the routine lazily, once, for the first missing key
            lorentz = eval('self.model.lorentz.%s' % lorentzname)
            abstract = AbstractRoutineBuilder(lorentz)
            routine = abstract.compute_routine(outgoing, tag, factorize=False)
            init = True

        assert 'routine' in locals()
        returned_dict[info_key] = routine.get_info(info_key)
        if cached:
            self.cached_interaction_infos[\
                (lorentzname,outgoing,tuple(tag),info_key)]=returned_dict[info_key]

    if isinstance(info, str):
        return returned_dict[info]
    else:
        return returned_dict
830
def set(self, lorentzname, outgoing, abstract_routine):
    """Store *abstract_routine* under the (lorentz name, outgoing) key."""

    key = (lorentzname, outgoing)
    self[key] = abstract_routine
835
def compute_all(self, save=True, wanted_lorentz = [], custom_propa=False):
    """ define all the AbstractRoutine linked to a model """

    # study the model once to avoid recomputing symmetric/combined routines
    self.look_for_symmetries()
    conjugate_list = self.look_for_conjugate()
    self.look_for_multiple_lorentz_interactions()

    if not wanted_lorentz:
        wanted_lorentz = [l.name for l in self.model.all_lorentz]
    for lorentz in self.model.all_lorentz:
        if not lorentz.name in wanted_lorentz:
            # not requested
            continue

        if -1 in lorentz.spins:
            # ghost interaction: no helicity routine needed
            continue

        if lorentz.structure == 'external':
            # routine provided by an external file, not generated
            for i in range(len(lorentz.spins)):
                self.external_routines.append('%s_%s' % (lorentz.name, i))
            continue

        # one routine per leg plus the amplitude (outgoing == 0)
        routines = [(i,[]) for i in range(len(lorentz.spins)+1)]

        if custom_propa:
            # add the variants with user-defined propagators ('P' tags)
            for vertex in self.model.all_vertices:
                if lorentz in vertex.lorentz:
                    for i,part in enumerate(vertex.particles):
                        new_prop = False
                        if hasattr(part, 'propagator') and part.propagator:
                            new_prop = ['P%s' % part.propagator.name]
                        elif part.mass.name.lower() == 'zero':
                            new_prop = ['P0']
                        if new_prop and (i+1, new_prop) not in routines:
                            routines.append((i+1, new_prop))

        builder = AbstractRoutineBuilder(lorentz, self.model)
        self.compute_aloha(builder, routines=routines)

        if lorentz.name in self.multiple_lor:
            # register the combine rules on the freshly computed routines
            for m in self.multiple_lor[lorentz.name]:
                for outgoing in range(len(lorentz.spins)+1):
                    try:
                        self[(lorentz.name, outgoing)].add_combine(m)
                    except Exception:
                        # routine defined through a symmetry: no own entry
                        pass

        if lorentz.name in conjugate_list:
            # add the conjugated routines (fermion-flow treatment)
            conjg_builder_list= builder.define_all_conjugate_builder(\
                                               conjugate_list[lorentz.name])
            for conjg_builder in conjg_builder_list:
                # no duplication of conjugation expected
                assert conjg_builder_list.count(conjg_builder) == 1
                self.compute_aloha(conjg_builder, lorentz.name)
                if lorentz.name in self.multiple_lor:
                    for m in self.multiple_lor[lorentz.name]:
                        for outgoing in range(len(lorentz.spins)+1):
                            realname = conjg_builder.name + ''.join(['C%s' % pair for pair in conjg_builder.conjg])
                            try:
                                self[(realname, outgoing)].add_combine(m)
                            except Exception as error:
                                # fall back on the symmetric routine
                                self[(realname, self.symmetries[lorentz.name][outgoing])].add_combine(m)

    if save:
        self.save()
906
908 """add a series of Lorentz structure created dynamically"""
909
910 for lor in lorentzlist:
911 if not hasattr(self.model.lorentz, lor.name):
912 setattr(self.model.lorentz, lor.name, lor)
913
915 """ create the requested ALOHA routine.
916 data should be a list of tuple (lorentz, tag, outgoing)
917 tag should be the list of special tag (like conjugation on pair)
918 to apply on the object """
919
920
921
922 self.look_for_symmetries()
923
924
925 aloha.loop_mode = False
926
927 request = {}
928
929
930
931 for list_l_name, tag, outgoing in data[:]:
932 if outgoing == -1:
933 data.remove((list_l_name, tag, outgoing))
934 l_name = list_l_name[0]
935 lorentz = eval('self.model.lorentz.%s' % l_name)
936 for i in range(len(lorentz.spins)):
937 data.append((list_l_name, tag, i+1))
938
939
940
941 for list_l_name, tag, outgoing in data:
942
943
944 all_tag = tag[:]
945 conjugate = [i for i in tag if isinstance(i, int)]
946
947 tag = [i for i in tag if isinstance(i, str) and not i.startswith('P')]
948 tag = tag + ['C%s'%i for i in conjugate]
949 tag = tag + [i for i in all_tag if isinstance(i, str) and i.startswith('P')]
950
951 conjugate = tuple([int(float(c[1:])) for c in tag if c.startswith('C')])
952 loop = any((t.startswith('L') for t in tag))
953 if loop:
954 aloha.loop_mode = True
955 self.explicit_combine = True
956
957 for l_name in list_l_name:
958 try:
959 request[l_name][conjugate].append((outgoing,tag))
960 except Exception:
961 try:
962 request[l_name][conjugate] = [(outgoing,tag)]
963 except Exception:
964 request[l_name] = {conjugate: [(outgoing,tag)]}
965
966
967 for l_name in request:
968 lorentz = eval('self.model.lorentz.%s' % l_name)
969 if lorentz.structure == 'external':
970 for tmp in request[l_name]:
971 for outgoing, tag in request[l_name][tmp]:
972 name = aloha_writers.get_routine_name(lorentz.name,outgoing=outgoing,tag=tag)
973 if name not in self.external_routines:
974 self.external_routines.append(name)
975 continue
976
977 builder = AbstractRoutineBuilder(lorentz, self.model)
978
979
980 for conjg in request[l_name]:
981
982 def sorting(a,b):
983 if a[0] < b[0]: return -1
984 else: return 1
985 routines = request[l_name][conjg]
986 routines.sort(key=misc.cmp_to_key(sorting))
987 if not conjg:
988
989 self.compute_aloha(builder, routines=routines)
990 else:
991
992 conjg_builder = builder.define_conjugate_builder(conjg)
993
994 self.compute_aloha(conjg_builder, symmetry=lorentz.name,
995 routines=routines)
996
997
998
999 for list_l_name, tag, outgoing in data:
1000 if len(list_l_name) ==1:
1001 continue
1002
1003 conjugate = [i for i in tag if isinstance(i, int)]
1004 all_tag = tag[:]
1005 tag = [i for i in tag if isinstance(i, str) and not i.startswith('P')]
1006 tag = tag + ['C%s'%i for i in conjugate]
1007 tag = tag + [i for i in all_tag if isinstance(i, str) and i.startswith('P')]
1008
1009 if not self.explicit_combine:
1010 lorentzname = list_l_name[0]
1011 lorentzname += ''.join(tag)
1012 if (lorentzname, outgoing) in self:
1013 self[(lorentzname, outgoing)].add_combine(list_l_name[1:])
1014 else:
1015 lorentz = eval('self.model.lorentz.%s' % list_l_name[0])
1016 assert lorentz.structure == 'external'
1017 else:
1018 l_lorentz = []
1019 for l_name in list_l_name:
1020 l_lorentz.append(eval('self.model.lorentz.%s' % l_name))
1021 builder = CombineRoutineBuilder(l_lorentz)
1022
1023 for conjg in request[list_l_name[0]]:
1024
1025 def sorting(a,b):
1026 if a[0] < b[0]: return -1
1027 else: return 1
1028 routines = request[list_l_name[0]][conjg]
1029 routines.sort(key=operator.itemgetter(0))
1030 if not conjg:
1031
1032 self.compute_aloha(builder, routines=routines)
1033 else:
1034
1035 conjg_builder = builder.define_conjugate_builder(conjg)
1036
1037 self.compute_aloha(conjg_builder, symmetry=lorentz.name,
1038 routines=routines)
1039
1040
1041
def compute_aloha(self, builder, symmetry=None, routines=None, tag=None):
    """ define all the AbstractRoutine linked to a given lorentz structure
    symmetry authorizes to use the symmetry of another lorentz structure.
    routines to define only a subset of the routines."""

    # 'tag=None' replaces the former mutable default argument 'tag=[]'
    if tag is None:
        tag = []

    name = builder.name
    if not symmetry:
        symmetry = name
    if not routines:
        if not tag:
            tag = ['C%s' % i for i in builder.conjg]
        else:
            addon = ['C%s' % i for i in builder.conjg]
            tag = [(i, addon + onetag) for i, onetag in tag]
        routines = [(i, tag) for i in range(len(builder.spins) + 1)]

    # compute the routines
    for outgoing, tag in routines:
        symmetric = self.has_symmetries(symmetry, outgoing, valid_output=routines)
        realname = name + ''.join(tag)
        if (realname, outgoing) in self:
            continue  # already computed

        if symmetric:
            # point at the symmetric routine instead of recomputing
            self.get(realname, symmetric).add_symmetry(outgoing)
        else:
            wavefunction = builder.compute_routine(outgoing, tag)
            self.set(realname, outgoing, wavefunction)
1071
1072
1074 """define all the AbstractRoutine linked to a given lorentz structure
1075 symmetry authorizes to use the symmetry of anoter lorentz structure.
1076 routines to define only a subset of the routines.
1077 Compare to compute_aloha, each routines are computed independently.
1078 """
1079
1080 name = builder.name
1081 if not routines:
1082 routines = [ tuple([i,[]]) for i in range(len(builder.spins) + 1 )]
1083
1084 for outgoing, tag in routines:
1085 builder.routine_kernel = None
1086 wavefunction = builder.compute_routine(outgoing, tag)
1087 self.set(name, outgoing, wavefunction)
1088
1089
def write(self, output_dir, language):
    """ write the full set of Helicity Routine in output_dir"""

    # generated routines first ...
    for routine in self.values():
        routine.write(output_dir, language)

    # ... then copy the external (hand-written) ones next to them
    for ext_name in self.external_routines:
        self.locate_external(ext_name, language, output_dir)
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1118
1120 """search a valid external file and copy it to output_dir directory"""
1121
1122 language_to_ext = {'Python': 'py',
1123 'Fortran' : 'f',
1124 'CPP': 'C'}
1125 ext = language_to_ext[language]
1126 paths = [os.path.join(self.model_pos, language), self.model_pos,
1127 os.path.join(root_path, 'aloha', 'template_files', )]
1128
1129 ext_files = []
1130 for path in paths:
1131 base, amp = name.rsplit('_',1)
1132 ext_files = misc.glob('%s*_%s.%s' % (base,amp, ext), path)
1133 if ext_files:
1134 break
1135 else:
1136
1137 raise ALOHAERROR('No external routine \"%s.%s\" in directories\n %s' % \
1138 (name, ext, '\n'.join(paths)))
1139
1140 if output_dir:
1141 for filepath in ext_files:
1142
1143 files.cp(filepath, output_dir)
1144
1145 return ext_files
1146
1147
1148
1150 """Search some symmetries in the vertices.
1151 We search if some identical particles are in a vertices in order
1152 to avoid to compute symmetrical contributions"""
1153
1154 for vertex in self.model.all_vertices:
1155 for i, part1 in enumerate(vertex.particles):
1156 for j in range(i-1,-1,-1):
1157 part2 = vertex.particles[j]
1158 if part1.pdg_code == part2.pdg_code and part1.color == 1:
1159 if part1.spin == 2 and (i % 2 != j % 2 ):
1160 continue
1161 for lorentz in vertex.lorentz:
1162 if lorentz.name in self.symmetries:
1163 if i+1 in self.symmetries[lorentz.name]:
1164 self.symmetries[lorentz.name][i+1] = max(self.symmetries[lorentz.name][i+1], j+1)
1165 else:
1166 self.symmetries[lorentz.name][i+1] = j+1
1167 else:
1168 self.symmetries[lorentz.name] = {i+1:j+1}
1169 break
1170
1172 """Search the interaction associate with more than one lorentz structure.
1173 If those lorentz structure have the same order and the same color then
1174 associate a multiple lorentz routines to ALOHA """
1175
1176 orders = {}
1177 for coup in self.model.all_couplings:
1178 orders[coup.name] = str(coup.order)
1179
1180 for vertex in self.model.all_vertices:
1181 if len(vertex.lorentz) == 1:
1182 continue
1183
1184
1185
1186
1187
1188 combine = {}
1189 for (id_col, id_lor), coups in vertex.couplings.items():
1190 if not isinstance(coups, list):
1191 coups = [coups]
1192 for coup in coups:
1193 order = orders[coup.name]
1194 key = (id_col, order)
1195 if key in combine:
1196 combine[key].append(id_lor)
1197 else:
1198 combine[key] = [id_lor]
1199
1200
1201 for list_lor in combine.values():
1202 if len(list_lor) == 1:
1203 continue
1204 list_lor.sort()
1205 main = vertex.lorentz[list_lor[0]].name
1206 if main not in self.multiple_lor:
1207 self.multiple_lor[main] = []
1208
1209 info = tuple([vertex.lorentz[id].name for id in list_lor[1:]])
1210 if info not in self.multiple_lor[main]:
1211 self.multiple_lor[main].append(info)
1212
1213
1214 - def has_symmetries(self, l_name, outgoing, out=None, valid_output=None):
1215 """ This returns out if no symmetries are available, otherwise it finds
1216 the lowest equivalent outgoing by recursivally calling this function.
1217 auth is a list of authorize output, if define"""
1218
1219 try:
1220 equiv = self.symmetries[l_name][outgoing]
1221 except Exception:
1222 return out
1223 else:
1224 if not valid_output or equiv in valid_output:
1225 return self.has_symmetries(l_name, equiv, out=equiv,
1226 valid_output=valid_output)
1227 else:
1228 return self.has_symmetries(l_name, equiv, out=out,
1229 valid_output=valid_output)
1230
1232 """ create a list for the routine needing to be conjugate """
1233
1234
1235 need = False
1236 for particle in self.model.all_particles:
1237 if particle.spin == 2 and particle.selfconjugate:
1238 need = True
1239 break
1240
1241 if not need:
1242 for interaction in self.model.all_vertices:
1243 fermions = [p for p in interaction.particles if p.spin == 2]
1244 for i in range(0, len(fermions), 2):
1245 if fermions[i].pdg_code * fermions[i+1].pdg_code > 0:
1246
1247 need = True
1248 break
1249
1250
1251 if not need:
1252 return {}
1253
1254 conjugate_request = {}
1255
1256 for vertex in self.model.all_vertices:
1257 for i in range(0, len(vertex.particles), 2):
1258 part1 = vertex.particles[i]
1259 if part1.spin !=2:
1260
1261 break
1262
1263 if part1.selfconjugate:
1264 continue
1265 part2 = vertex.particles[i + 1]
1266 if part2.selfconjugate:
1267 continue
1268
1269
1270 for lorentz in vertex.lorentz:
1271 try:
1272 conjugate_request[lorentz.name].add(i//2+1)
1273 except Exception:
1274 conjugate_request[lorentz.name] = set([i//2+1])
1275
1276 for elem in conjugate_request:
1277 conjugate_request[elem] = list(conjugate_request[elem])
1278
1279 return conjugate_request
1280
def write_aloha_file_inc(aloha_dir, file_ext, comp_ext):
    """List the compiled helicity routines in 'aloha_file.inc'.

    Scan *aloha_dir* for generated routine sources (file names containing
    '_<digit><file_ext>'), translate their extension to *comp_ext* and
    write the resulting list as the ALOHARoutine makefile variable.
    """
    # NOTE(review): the original 'def' line was elided in this dump; the
    # parameter names follow the body's usage -- confirm against upstream.
    aloha_files = []

    # re.escape: a literal '.f' extension must not let '.' match any char
    alohafile_pattern = re.compile(r'_\d%s' % re.escape(file_ext))
    for filename in os.listdir(aloha_dir):
        if os.path.isfile(os.path.join(aloha_dir, filename)):
            if alohafile_pattern.search(filename):
                aloha_files.append(filename.replace(file_ext, comp_ext))

    # hand-written extra routines are compiled alongside the generated ones
    if os.path.exists(pjoin(aloha_dir, 'additional_aloha_function.f')):
        aloha_files.append('additional_aloha_function.o')

    text = "ALOHARoutine = "
    text += ' '.join(aloha_files)
    text += '\n'

    # context manager closes the handle deterministically
    with open(os.path.join(aloha_dir, 'aloha_file.inc'), 'w') as fsock:
        fsock.write(text)
1305
def create_prop_library(tag, lib=None):
    """Build (or extend) the library of spin-2 propagator expressions.

    *tag* is a ('Spin2Prop' | 'Spin2PropMassless', i) pair; the matching
    simplified and expanded propagator object is stored in *lib* under
    that key.  Returns the (possibly newly created) library dict.
    """
    # NOTE(review): the original 'def' line was elided in this dump; the
    # signature is reconstructed from the body -- confirm against upstream.
    if lib is None:
        lib = {}

    def create(obj):
        """Simplify, expand and re-simplify an aloha expression."""
        obj = obj.simplify()
        obj = obj.expand()
        obj = obj.simplify()
        return obj

    # remember the kernel tag state so building the propagator does not
    # leak tags into the global kernel
    old_tag = set(aloha_lib.KERNEL.use_tag)
    name, i = tag
    if name == "Spin2Prop":
        lib[('Spin2Prop', i)] = create( Spin2Propagator(_spin2_mult + i, \
                             2 * _spin2_mult + i, 'I2', 'I3', i) )
    elif name == "Spin2PropMassless":
        lib[('Spin2PropMassless', i)] = create( Spin2masslessPropagator(
                             _spin2_mult + i, 2 * _spin2_mult + i, 'I2', 'I3'))

    aloha_lib.KERNEL.use_tag = old_tag
    return lib
1329
1330
if '__main__' == __name__:
    logging.basicConfig(level=0)

    import profile  # kept for optional profiling of the generation

    start = time.time()

    def main():
        # NOTE(review): the body of this driver was elided in the dump;
        # reconstructed minimally (build all routines for the 'sm' model)
        # -- confirm against upstream before relying on it.
        alohagenerator = AbstractALOHAModel('sm')
        alohagenerator.compute_all(save=False)
        return alohagenerator

    def write(alohagenerator):
        # NOTE(review): body elided in the dump; reconstructed -- confirm.
        alohagenerator.write('/tmp/', 'Fortran')

    alohagenerator = main()
    logger.info('done in %s s' % (time.time() - start))
    write(alohagenerator)

    stop = time.time()
    logger.info('done in %s s' % (stop - start))
1351