1
2
3
4
5
6
7
8
9
10
11
12
13
14
15 from __future__ import division
16 from __future__ import absolute_import
17 import cmath
18 import copy
19 import operator
20 import six.moves.cPickle
21 import glob
22 import logging
23 import numbers
24 import os
25 import re
26 import shutil
27 import sys
28 import time
29 from madgraph.interface.tutorial_text import output
30
31 from six.moves import range
32 from six.moves import zip
33
34 root_path = os.path.split(os.path.dirname(os.path.realpath( __file__ )))[0]
35 sys.path.append(root_path)
36 from aloha.aloha_object import *
37 import aloha
38 import aloha.aloha_writers as aloha_writers
39 import aloha.aloha_lib as aloha_lib
40 import aloha.aloha_object as aloha_object
41 import aloha.aloha_parsers as aloha_parsers
42 import aloha.aloha_fct as aloha_fct
43 import models
44 try:
45 import madgraph.iolibs.files as files
46 import madgraph.various.misc as misc
47 except Exception:
48 import aloha.files as files
49 import aloha.misc as misc
50
51
52 aloha_path = os.path.dirname(os.path.realpath(__file__))
53 logger = logging.getLogger('ALOHA')
54
55 _conjugate_gap = 50
56 _spin2_mult = 1000
57
58 pjoin = os.path.join
59
60 ALOHAERROR = aloha.ALOHAERROR
63 """ store the result of the computation of Helicity Routine
64 this is use for storing and passing to writer """
65
def __init__(self, expr, outgoing, spins, name, infostr, denom=None):
    """ store the information

    expr: the (simplified) lorentz expression of the routine
    outgoing: index of the off-shell leg (0 for the full amplitude)
    spins: list of 2s+1 values, one entry per external particle
    name: name of the associated lorentz structure
    infostr: human-readable description of the computation
    denom: optional denominator (custom propagator); None otherwise
    """

    self.spins = spins
    self.expr = expr
    self.denominator = denom
    self.name = name
    self.outgoing = outgoing
    self.infostr = infostr
    self.symmetries = []   # outgoing indices reachable by symmetry
    self.combined = []     # lists of lorentz names combined with this one
    self.tag = []          # special tags (conjugation, loop, MP, ...)
    self.contracted = {}
80
81
83 """ add an outgoing """
84
85 if not outgoing in self.symmetries:
86 self.symmetries.append(outgoing)
87
89 """add a combine rule """
90
91 if lor_list not in self.combined:
92 self.combined.append(lor_list)
93
def write(self, output_dir, language='Fortran', mode='self', combine=True,**opt):
    """ write the content of the object

    Returns the generated text (a tuple when the writer splits its output
    into several pieces). When multiple-precision support is enabled, the
    method recurses once with the 'MP' tag appended to also emit the
    multiple-precision version of the routine.
    """
    writer = aloha_writers.WriterFactory(self, language, output_dir, self.tag)
    text = writer.write(mode=mode, **opt)
    if combine:
        # also emit the routines obtained by combining this structure
        # with the other lorentz structures of the same interaction
        for grouped in self.combined:
            if isinstance(text, tuple):
                # writer returned several pieces: extend each one
                text = tuple([old.__add__(new) for old, new in zip(text,
                             writer.write_combined(grouped, mode=mode+'no_include', **opt))])
            else:
                text += writer.write_combined(grouped, mode=mode+'no_include', **opt)
    # tagging with 'MP' before the recursive call makes the second pass
    # produce the multiple-precision flavour of the same routine
    if aloha.mp_precision and 'MP' not in self.tag:
        self.tag.append('MP')
        text += self.write(output_dir, language, mode, **opt)
    return text
109
111 """return some information on the routine
112 """
113 if info == "rank":
114 assert isinstance(self.expr, aloha_lib.SplitCoefficient)
115 rank= 1
116 for coeff in self.expr:
117 rank = max(sum(coeff), rank)
118 return rank -1
119 else:
120 raise ALOHAERROR('%s is not a valid information that can be computed' % info)
121
124 """ Launch the creation of the Helicity Routine"""
125
126 prop_lib = {}
127 counter = 0
128
130 """ An error class for ALOHA"""
131
def __init__(self, lorentz, model=None):
    """ initialize the run
    lorentz: the lorentz information analyzed (UFO format)
    language: define in which language we write the output
    modes: 0 for all incoming particles
          >0 defines the outgoing part (start to count at 1)
    """

    self.spins = [s for s in lorentz.spins]
    self.name = lorentz.name
    self.conjg = []               # fermion pairs on which conjugation acts
    self.tag = []                 # special tags (loop, propagator, ...)
    self.outgoing = None          # off-shell leg index, set at compute time
    self.lorentz_expr = lorentz.structure
    self.routine_kernel = None    # cached parsed/evaluated expression
    self.spin2_massless = False
    self.spin32_massless = False
    self.contracted = {}
    self.fct = {}
    self.model = model
    self.denominator = None

    # timestamp of the last progress message (used to rate-limit logging)
    self.lastprint = 0

    # inline the model form factors: substitute each form-factor name
    # by its defining expression inside the lorentz structure
    if hasattr(lorentz, 'formfactors') and lorentz.formfactors:
        for formf in lorentz.formfactors:
            pat = re.compile(r'\b%s\b' % formf.name)
            self.lorentz_expr = pat.sub('(%s)' % formf.value, self.lorentz_expr)
161
171
173 """ return the full set of AbstractRoutineBuilder linked to fermion
174 clash"""
175
176 solution = []
177
178 for i, pair in enumerate(pair_list):
179 new_builder = self.define_conjugate_builder(pair)
180 solution.append(new_builder)
181 solution += new_builder.define_all_conjugate_builder(pair_list[i+1:])
182 return solution
183
185 """ return a AbstractRoutineBuilder for the conjugate operation.
186 If they are more than one pair of fermion. Then use pair to claim which
187 one is conjugated"""
188
189 new_builder = copy.copy(self)
190 new_builder.conjg = self.conjg[:]
191 try:
192 for index in pairs:
193 new_builder.apply_conjugation(index)
194 except TypeError:
195 new_builder.apply_conjugation(pairs)
196 return new_builder
197
199 """ apply conjugation on self object"""
200
201 nb_fermion = len([1 for s in self.spins if s % 2 == 0])
202 if isinstance(pair, tuple):
203 if len(pair) ==1 :
204 pair = pair[0]
205 else:
206 raise Exception
207
208
209 if (pair > 1 or nb_fermion >2) and not self.conjg:
210
211 data = aloha_fct.get_fermion_flow(self.lorentz_expr, nb_fermion)
212 target = dict([(2*i+1,2*i+2) for i in range(nb_fermion//2)])
213 if not data == target:
214 text = """Unable to deal with 4(or more) point interactions
215 in presence of majorana particle/flow violation"""
216 raise ALOHAERROR(text)
217
218 old_id = 2 * pair - 1
219 new_id = _conjugate_gap + old_id
220
221 self.kernel_tag = set()
222 aloha_lib.KERNEL.use_tag = set()
223 if not self.routine_kernel or isinstance(self.routine_kernel, str):
224 self.routine_kernel = eval(self.parse_expression(self.lorentz_expr))
225 self.kernel_tag = aloha_lib.KERNEL.use_tag
226
227
228 self.routine_kernel = \
229 C(new_id, old_id + 1) * self.routine_kernel * C(new_id + 1, old_id)
230
231 self.lorentz_expr = '('+self.lorentz_expr+') * C(%s,%s) * C(%s,%s)' % \
232 (new_id, old_id + 1, new_id + 1, old_id )
233
234 self.conjg.append(pair)
235
236
256
258 """change the sign of P for outcoming fermion in order to
259 correct the mismatch convention between HELAS and FR"""
260
261 if not expr:
262 expr = self.lorentz_expr
263
264 if need_P_sign:
265 expr = re.sub(r'\b(P|PSlash)\(', r'-\1(', expr)
266
267 calc = aloha_parsers.ALOHAExpressionParser()
268 lorentz_expr = calc.parse(expr)
269 return lorentz_expr
270
272 """compute the abstract routine associate to this mode """
273
274
275 aloha_lib.KERNEL.use_tag=set()
276
277 nb_spinor = 0
278 outgoing = self.outgoing
279 if (outgoing + 1) // 2 in self.conjg:
280
281 outgoing = outgoing + outgoing % 2 - (outgoing +1) % 2
282
283 if not self.routine_kernel:
284 AbstractRoutineBuilder.counter += 1
285 if self.tag == []:
286 logger.info('aloha creates %s routines' % self.name)
287 elif AbstractALOHAModel.lastprint < time.time() - 1:
288 AbstractALOHAModel.lastprint = time.time()
289 logger.info('aloha creates %s set of routines with options: %s' \
290 % (self.name, ','.join(self.tag)) )
291 try:
292 lorentz = self.parse_expression()
293 self.routine_kernel = lorentz
294 lorentz = eval(lorentz)
295 except NameError as error:
296 logger.error('unknow type in Lorentz Evaluation:%s'%str(error))
297 raise ALOHAERROR('unknow type in Lorentz Evaluation: %s ' % str(error))
298 else:
299 self.kernel_tag = set(aloha_lib.KERNEL.use_tag)
300 elif isinstance(self.routine_kernel,str):
301 lorentz = eval(self.routine_kernel)
302 aloha_lib.KERNEL.use_tag = set(self.kernel_tag)
303 else:
304 lorentz = copy.copy(self.routine_kernel)
305 aloha_lib.KERNEL.use_tag = set(self.kernel_tag)
306 for (i, spin ) in enumerate(self.spins):
307 id = i + 1
308
309 if id == outgoing:
310
311
312 propa = [t[1:] for t in self.tag if t.startswith('P')]
313 if propa == ['0']:
314 if spin == 3 and aloha.unitary_gauge == 2:
315 misc.sprint(spin)
316 lorentz *= complex(0,1) * self.get_custom_propa('1PS', spin, id)
317 continue
318 else:
319 massless = True
320 self.denominator = None
321 elif propa == []:
322 massless = False
323 self.denominator = None
324 else:
325 lorentz *= complex(0,1) * self.get_custom_propa(propa[0], spin, id)
326 continue
327
328
329
330 if spin in [1,-1]:
331 lorentz *= complex(0,1)
332 elif spin == 2:
333
334 if (id + 1) // 2 in self.conjg:
335 id += _conjugate_gap + id % 2 - (id +1) % 2
336 if (id % 2):
337
338 lorentz *= complex(0,1) * SpinorPropagatorout(id, 'I2', outgoing)
339 else:
340
341 lorentz *= complex(0,1) * SpinorPropagatorin('I2', id, outgoing)
342 elif spin == 3 :
343 if massless or not aloha.unitary_gauge:
344 lorentz *= VectorPropagatorMassless(id, 'I2', id)
345 else:
346 lorentz *= VectorPropagator(id, 'I2', id)
347 elif spin == 4:
348
349 if (id + 1) // 2 in self.conjg:
350 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
351 else:
352 spin_id = id
353 nb_spinor += 1
354 if not massless and (spin_id % 2):
355 lorentz *= complex(0,1) * Spin3halfPropagatorout(id, 'I2', spin_id,'I3', outgoing)
356 elif not massless and not (spin_id % 2):
357 lorentz *= complex(0,1) * Spin3halfPropagatorin('I2', id , 'I3', spin_id, outgoing)
358 elif spin_id %2:
359 lorentz *= complex(0,1) * Spin3halfPropagatorMasslessOut(id, 'I2', spin_id,'I3', outgoing)
360 else :
361 lorentz *= complex(0,1) * Spin3halfPropagatorMasslessIn('I2', id, 'I3', spin_id, outgoing)
362
363 elif spin == 5 :
364
365 if massless:
366 lorentz *= complex(0,1) * Spin2masslessPropagator(_spin2_mult + id, \
367 2 * _spin2_mult + id,'I2','I3')
368 else:
369 lorentz *= complex(0,1) * Spin2Propagator(_spin2_mult + id, \
370 2 * _spin2_mult + id,'I2','I3', id)
371 else:
372 raise self.AbstractALOHAError(
373 'The spin value %s (2s+1) is not supported yet' % spin)
374 else:
375
376 if spin in [1,-1]:
377 lorentz *= Scalar(id)
378 elif spin == 2:
379
380 if (id+1) // 2 in self.conjg:
381 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
382 else:
383 spin_id = id
384 lorentz *= Spinor(spin_id, id)
385 elif spin == 3:
386 lorentz *= Vector(id, id)
387 elif spin == 4:
388
389 if (id+1) // 2 in self.conjg:
390 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
391 else:
392 spin_id = id
393 nb_spinor += 1
394 lorentz *= Spin3Half(id, spin_id, id)
395 elif spin == 5:
396 lorentz *= Spin2(1 * _spin2_mult + id, 2 * _spin2_mult + id, id)
397 else:
398 raise self.AbstractALOHAError(
399 'The spin value %s (2s+1) is not supported yet' % spin)
400
401
402 if not outgoing:
403 lorentz *= complex(0,-1)
404
405
406 lorentz = lorentz.simplify()
407
408
409 if any((tag.startswith('L') for tag in self.tag if len(tag)>1)):
410 return self.compute_loop_coefficient(lorentz, outgoing)
411
412 lorentz = lorentz.expand()
413 lorentz = lorentz.simplify()
414
415 if factorize:
416 lorentz = lorentz.factorize()
417
418 lorentz.tag = set(aloha_lib.KERNEL.use_tag)
419 return lorentz
420
421 @staticmethod
423 """Change the index of the propagator to match the current need"""
424
425 data = re.split(r'(\b[a-zA-Z]\w*?)\(([\'\w,\s\"\+\-]*?)\)',text)
426 to_change = {}
427 for old, new in tag.items():
428 if isinstance(new, str):
429 new='\'%s\'' % new
430 else:
431 new = str(new)
432 to_change[r'%s' % old] = new
433 pos=-2
434 while pos +3 < len(data):
435 pos = pos+3
436 ltype = data[pos]
437 if ltype != 'complex':
438 data[pos+1] = re.sub(r'\b(?<!-)(%s)\b' % '|'.join(to_change),
439 lambda x: to_change[x.group()], data[pos+1])
440 data[pos+1] = '(%s)' % data[pos+1]
441 text=''.join(data)
442 return text
443
445 """Return the ALOHA object associated to the user define propagator"""
446
447 if not propa.startswith('1'):
448 propagator = getattr(self.model.propagators, propa)
449 numerator = propagator.numerator
450 denominator = propagator.denominator
451 elif propa == "1L":
452 numerator = "EPSL(1,id) * EPSL(2,id)"
453 denominator = "-1*PVec(-2,id)*PVec(-2,id)*P(-3,id)*P(-3,id) * (P(-1,id)**2 - Mass(id) * Mass(id) + complex(0,1) * Mass(id) * Width(id))"
454 elif propa == "1T":
455 numerator = "-1*PVec(-2,id)*PVec(-2,id) * EPST2(1,id)*EPST2(2,id) + EPST1(1,id)*EPST1(2,id)"
456 denominator = "PVec(-2,id)*PVec(-2,id) * PT(-3,id)*PT(-3,id) * (P(-1,id)**2 - Mass(id) * Mass(id) + complex(0,1) * Mass(id) * Width(id))"
457 elif propa == "1A":
458 numerator = "(P(-2,id)**2 - Mass(id)**2) * P(1,id) * P(2,id)"
459 denominator = "P(-2,id)**2 * Mass(id)**2 * (P(-1,id)**2 - Mass(id) * Mass(id) + complex(0,1) * Mass(id) * Width(id))"
460 elif propa in ["1P"]:
461
462 spin_id = id
463 if (id + 1) // 2 in self.conjg:
464 spin_id += _conjugate_gap + id % 2 - (id +1) % 2
465 if (spin_id % 2):
466 numerator = "UFP(1,id)*UFPC(2,id)"
467 else:
468 numerator = "VFP(1,id)*VFPC(2,id)"
469
470 denominator = "(2*Tnorm(id)*TnormZ(id))*(P(-1,id)*P(-1,id) - Mass(id) * Mass(id) + complex(0,1) * Mass(id) * Width(id))"
471
472 elif propa == "1M":
473
474 spin_id = id
475 if (id + 1) // 2 in self.conjg:
476 spin_id += _conjugate_gap + id % 2 - (id +1) % 2
477 if (spin_id % 2):
478 numerator = "UFM(1,id)*UFMC(2,id)"
479 else:
480 numerator = "VFM(1,id)*VFMC(2,id)"
481 denominator = "(2*Tnorm(id)*TnormZ(id))*(P(-1,id)*P(-1,id) - Mass(id) * Mass(id) + complex(0,1) * Mass(id) * Width(id))"
482 elif propa == "1PS":
483 numerator = "(-1*(P(-1,id)*PBar(-1,id)) * Metric(1, 2) + P(1,id)*PBar(2,id) + PBar(1,id)*P(2,id))"
484 denominator = "(P(-3,id)*PBar(-3,id))*P(-2,id)**2"
485 else:
486 raise Exception
487
488
489 needPflipping = False
490 if spin in [1,-1]:
491 tag = {'id': id}
492 elif spin == 2:
493
494 if (id + 1) // 2 in self.conjg:
495 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
496 else:
497 spin_id = id
498 if (spin_id % 2):
499
500 needPflipping = True
501 tag ={'1': spin_id, '2': 'I2', 'id': id}
502 else:
503 tag ={'1': 'I2', '2': spin_id, 'id': id}
504 elif spin == 3 :
505 tag ={'1': id, '2': 'I2', 'id': id}
506 elif spin == 4:
507 delta = lambda i,j: aloha_object.Identity(i,j)
508 deltaL = lambda i,j: aloha_object.IdentityL(i,j)
509
510 if (id + 1) // 2 in self.conjg:
511 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
512 else:
513 spin_id = id
514 tag = {'1': 'pr1', '2': 'pr2', 'id':id}
515 if spin_id % 2:
516 needPflipping = True
517
518 propaR = deltaL('pr1',id) * deltaL('pr2', 'I2') * delta('pr1', spin_id) * delta('pr2', 'I3')
519 else:
520 propaR = deltaL('pr1',id) * deltaL('pr2', 'I2') * delta('pr2', spin_id) * delta('pr1', 'I3')
521
522 elif spin == 5 :
523 tag = {'1': _spin2_mult + id, '2': 'I2',
524 '51': 2 * _spin2_mult + id, '52': 'I3', 'id':id}
525
526 numerator = self.mod_propagator_expression(tag, numerator)
527 if denominator:
528 denominator = self.mod_propagator_expression(tag, denominator)
529
530 numerator = self.parse_expression(numerator, needPflipping)
531
532 if denominator:
533 self.denominator = self.parse_expression(denominator, needPflipping)
534 self.denominator = eval(self.denominator)
535 if not isinstance(self.denominator, numbers.Number):
536 self.denominator = self.denominator.simplify().expand().simplify().get((0,))
537 needPflipping = False
538 if spin ==4:
539 return eval(numerator) * propaR
540 else:
541 return eval(numerator)
542
543
544
545
547
548
549 l_in = [int(tag[1:]) for tag in self.tag if tag.startswith('L')][0]
550 if (l_in + 1) // 2 in self.conjg:
551
552 l_in = l_in + l_in % 2 - (l_in +1) % 2
553 assert l_in != outgoing, 'incoming Open Loop can not be the outcoming one'
554
555
556
557 Pdep = [aloha_lib.KERNEL.get(P) for P in lorentz.get_all_var_names()
558 if P.startswith('_P')]
559
560 Pdep = set([P for P in Pdep if P.particle in [outgoing, l_in]])
561 for P in Pdep:
562 if P.particle == l_in:
563 sign = 1
564 else:
565 sign = -1
566 id = P.id
567 lorentz_ind = P.lorentz_ind[0]
568 P_Lid = aloha_object.P(lorentz_ind, 'L')
569 P_obj = aloha_object.P(lorentz_ind, P.particle)
570 new_expr = sign*(P_Lid + P_obj)
571 lorentz = lorentz.replace(id, new_expr)
572
573
574 var_veto = ['PL_0', 'PL_1', 'PL_2', 'PL_3']
575 spin = aloha_writers.WriteALOHA.type_to_variable[abs(self.spins[l_in-1])]
576 size = aloha_writers.WriteALOHA.type_to_size[spin]-1
577 var_veto += ['%s%s_%s' % (spin,l_in,i) for i in range(1,size)]
578
579 veto_ids = aloha_lib.KERNEL.get_ids(var_veto)
580
581 lorentz = lorentz.expand(veto = veto_ids)
582 lorentz = lorentz.simplify()
583 coeff_expr = lorentz.split(veto_ids)
584
585 for key, expr in coeff_expr.items():
586 expr = expr.simplify()
587 coeff_expr[key] = expr.factorize()
588 coeff_expr.tag = set(aloha_lib.KERNEL.use_tag)
589
590 return coeff_expr
591
593 """Define the expression"""
594
595 self.expr = lorentz_expr
596
598 """Define the kernel at low level"""
599
600 if not lorentz:
601 logger.info('compute kernel %s' % self.counter)
602 AbstractRoutineBuilder.counter += 1
603 lorentz = eval(self.lorentz_expr)
604
605 if isinstance(lorentz, numbers.Number):
606 self.routine_kernel = lorentz
607 return lorentz
608 lorentz = lorentz.simplify()
609 lorentz = lorentz.expand()
610 lorentz = lorentz.simplify()
611
612 self.routine_kernel = lorentz
613 return lorentz
614
615
616 @staticmethod
618 """return the name of the """
619
620 name = '%s_%s' % (name, outgoing)
621 return name
622
623 @classmethod
630
633 """A special builder for combine routine if needed to write those
634 explicitely.
635 """
def __init__(self, l_lorentz, model=None):
    """ initialize the run
    l_lorentz: list of lorentz information analyzed (UFO format)
    language: define in which language we write the output
    modes: 0 for all incoming particles
          >0 defines the outgoing part (start to count at 1)
    """
    AbstractRoutineBuilder.__init__(self,l_lorentz[0], model)
    lorentz = l_lorentz[0]
    self.spins = lorentz.spins
    l_name = [l.name for l in l_lorentz]
    # combined routine name built from every contributing structure
    self.name = aloha_writers.combine_name(l_name[0], l_name[1:], None)
    self.conjg = []
    self.tag = []
    self.outgoing = None
    # the combined expression is the sum of all structures, each one
    # weighted by its own coupling placeholder Coup(i)
    self.lorentz_expr = []
    for i, lor in enumerate(l_lorentz):
        self.lorentz_expr.append( 'Coup(%s) * (%s)' % (i+1, lor.structure))
    self.lorentz_expr = ' + '.join(self.lorentz_expr)
    self.routine_kernel = None
    self.contracted = {}
    self.fct = {}
658
660 """ A class to build and store the full set of Abstract ALOHA Routine"""
661
662 lastprint = 0
663
def __init__(self, model_name, write_dir=None, format='Fortran',
             explicit_combine=False):
    """ load the UFO model and init the dictionary

    model_name: name of the model directory; it may carry a restriction
        suffix "MODEL-REST" which is stripped when the matching
        restrict_REST.dat file exists (or for the special suffix 'full').
    write_dir: if given, compute and write all routines there right away
    format: output language used when write_dir is given
    explicit_combine: whether combined routines are written explicitly
    """

    self.explicit_combine = explicit_combine

    # FIX: use a raw string -- '\w' is an invalid escape sequence in a
    # plain string literal (DeprecationWarning, SyntaxWarning on 3.12+)
    model_name_pattern = re.compile(r"^(?P<name>.+)-(?P<rest>[\w\d_]+)$")
    model_name_re = model_name_pattern.match(model_name)
    if model_name_re:
        name = model_name_re.group('name')
        rest = model_name_re.group("rest")
        if rest == 'full' or \
           os.path.isfile(os.path.join(root_path, "models", name,
                                       "restrict_%s.dat" % rest)):
            # valid restriction suffix: load the base model instead
            model_name = name

    self.model = models.load_model(model_name)

    # position of the model on disk (used for default output/pickle paths)
    self.model_pos = os.path.dirname(self.model.__file__)

    # routines shipped as external files rather than generated by ALOHA
    self.external_routines = []

    # self itself maps (lorentz_name, outgoing) -> AbstractRoutine
    dict.__init__(self)
    self.symmetries = {}
    self.multiple_lor = {}

    if write_dir:
        self.main(write_dir,format=format)
703
def main(self, output_dir, format='Fortran'):
    """ Compute if not already compute.
        Write file in models/MY_MODEL/MY_FORMAT.
        copy the file to output_dir
    """
    # file extension associated to each supported output language
    ext = {'Fortran':'f','Python':'py','CPP':'h'}

    # try to reload a previous computation; otherwise compute everything
    if not self.load():
        self.compute_all()
    logger.info(' %s aloha routine' % len(self))

    # default output directory: models/MY_MODEL/<format>
    if not output_dir:
        output_dir = os.path.join(self.model_pos, format.lower())
    logger.debug('aloha output dir is %s' % output_dir)
    if not os.path.exists(output_dir):
        os.mkdir(output_dir)

    # write each routine, skipping files already present on disk
    for (name, outgoing), abstract in self.items():
        routine_name = AbstractRoutineBuilder.get_routine_name(name, outgoing)
        if not os.path.exists(os.path.join(output_dir, routine_name) + '.' + ext[format]):
            abstract.write(output_dir, format)
        else:
            logger.info('File for %s already present, skip the writing of this file' % routine_name)
731
732
def save(self, filepos=None):
    """ save the current model in a pkl file

    filepos: destination path; defaults to <model_pos>/aloha.pkl.
    Saving is best effort: any failure is logged and ignored.
    """

    logger.info('save the aloha abstract routine in a pickle file')
    if not filepos:
        filepos = os.path.join(self.model_pos,'aloha.pkl')

    try:
        # FIX: pickle requires a binary stream ('wb', not 'w'); the
        # context manager also guarantees the file is closed (the old
        # code leaked the handle and kept an unused dict copy).
        with open(filepos, 'wb') as fsock:
            six.moves.cPickle.dump(dict(self), fsock)
    except Exception:
        # a failed save must never abort the computation
        logger.info('aloha not saved')
746
def load(self, filepos=None):
    """ reload the pickle file """
    return False
    # NOTE(review): loading is deliberately disabled by the unconditional
    # return above -- everything below is dead code. If it is ever
    # re-enabled, the pickle file should presumably be opened in binary
    # mode ('rb', not 'r') -- TODO confirm before re-activating.
    if not filepos:
        filepos = os.path.join(self.model_pos,'aloha.pkl')
    if os.path.exists(filepos):
        fsock = open(filepos, 'r')
        self.update(six.moves.cPickle.load(fsock))
        return True
    else:
        return False
758
def get(self, lorentzname, outgoing):
    """Fetch the AbstractRoutine stored for a given lorentz name and a
    given outgoing particle; log a warning and return None when the key
    is unknown."""

    key = (lorentzname, outgoing)
    try:
        routine = self[key]
    except Exception:
        logger.warning('(%s, %s) is not a valid key' % key)
        return None
    return routine
769
def get_info(self, info, lorentzname, outgoing, tag, cached=False):
    """return some information about the aloha routine
        - "rank": return the rank of the loop function
    info may be a single string or a list of strings (a dict keyed by
    info name is returned in the latter case).
    If the cached option is set to true, then the result is stored and
    recycled if possible.
    """

    # any 'L' tag means this is a loop routine: switch aloha to loop mode
    if not aloha.loop_mode and any(t.startswith('L') for t in tag):
        aloha.loop_mode = True

    returned_dict = {}

    if isinstance(info, str):
        infos = [info]
    else:
        infos = info

    if hasattr(self, 'cached_interaction_infos'):
        # recycle previously computed values when available
        for info_key in infos:
            try:
                # BUGFIX: both the result key and the cache key used
                # 'info' (possibly the whole list) instead of 'info_key',
                # so cached entries never matched the keys stored below.
                returned_dict[info_key] = self.cached_interaction_infos[\
                              (lorentzname,outgoing,tuple(tag),info_key)]
            except KeyError:
                # not computed yet: fall through to the computation below
                pass
    elif cached:
        self.cached_interaction_infos = {}

    init = False
    for info_key in infos:
        if info_key in returned_dict:
            continue
        elif not init:
            # build the routine only once, on the first missing info
            lorentz = eval('self.model.lorentz.%s' % lorentzname)
            abstract = AbstractRoutineBuilder(lorentz)
            routine = abstract.compute_routine(outgoing, tag, factorize=False)
            init = True

        assert 'routine' in locals()
        returned_dict[info_key] = routine.get_info(info_key)
        if cached:
            # store under the same key shape used for the lookup above
            self.cached_interaction_infos[\
                (lorentzname,outgoing,tuple(tag),info_key)]=returned_dict[info_key]

    if isinstance(info, str):
        return returned_dict[info]
    else:
        return returned_dict
824
def set(self, lorentzname, outgoing, abstract_routine):
    """Register an AbstractRoutine in the dictionary under the key
    (lorentzname, outgoing)."""

    key = (lorentzname, outgoing)
    self[key] = abstract_routine
829
def compute_all(self, save=True, wanted_lorentz=None, custom_propa=False):
    """ define all the AbstractRoutine linked to a model

    save: pickle the result via self.save() at the end
    wanted_lorentz: restrict the computation to these lorentz names
        (None or empty -> every structure of the model).
        FIX: default changed from a mutable [] to None (same behaviour,
        avoids the shared-mutable-default pitfall).
    custom_propa: also build routines for particles carrying a custom
        propagator (or massless particles needing the P0 propagator)
    """

    # use model information to minimise the number of routines to build
    self.look_for_symmetries()
    conjugate_list = self.look_for_conjugate()
    self.look_for_multiple_lorentz_interactions()

    if not wanted_lorentz:
        wanted_lorentz = [l.name for l in self.model.all_lorentz]
    for lorentz in self.model.all_lorentz:
        if not lorentz.name in wanted_lorentz:
            # not requested by the caller
            continue

        if -1 in lorentz.spins:
            # ghost interaction: no helicity routine needed
            continue

        if lorentz.structure == 'external':
            # routine shipped as an external file, not generated here
            for i in range(len(lorentz.spins)):
                self.external_routines.append('%s_%s' % (lorentz.name, i))
            continue

        # one routine per off-shell leg plus one for the amplitude (0)
        routines = [(i,[]) for i in range(len(lorentz.spins)+1)]

        if custom_propa:
            # add propagator-specific variants for each affected leg
            for vertex in self.model.all_vertices:
                if lorentz in vertex.lorentz:
                    for i,part in enumerate(vertex.particles):
                        new_prop = False
                        if hasattr(part, 'propagator') and part.propagator:
                            new_prop = ['P%s' % part.propagator.name]
                        elif part.mass.name.lower() == 'zero':
                            new_prop = ['P0']
                        if new_prop and (i+1, new_prop) not in routines:
                            routines.append((i+1, new_prop))

        builder = AbstractRoutineBuilder(lorentz, self.model)
        self.compute_aloha(builder, routines=routines)

        # link the routines combined with other lorentz structures
        if lorentz.name in self.multiple_lor:
            for m in self.multiple_lor[lorentz.name]:
                for outgoing in range(len(lorentz.spins)+1):
                    try:
                        self[(lorentz.name, outgoing)].add_combine(m)
                    except Exception:
                        # routine absent (e.g. replaced by a symmetry)
                        pass

        # conjugate routines needed for majorana/flow-violating fermions
        if lorentz.name in conjugate_list:
            conjg_builder_list= builder.define_all_conjugate_builder(\
                                               conjugate_list[lorentz.name])
            for conjg_builder in conjg_builder_list:
                # sanity check: conjugation pairs must not be duplicated
                assert conjg_builder_list.count(conjg_builder) == 1
                self.compute_aloha(conjg_builder, lorentz.name)
                if lorentz.name in self.multiple_lor:
                    for m in self.multiple_lor[lorentz.name]:
                        for outgoing in range(len(lorentz.spins)+1):
                            realname = conjg_builder.name + ''.join(['C%s' % pair for pair in conjg_builder.conjg])
                            try:
                                self[(realname, outgoing)].add_combine(m)
                            except Exception as error:
                                # fall back on the symmetric routine
                                self[(realname, self.symmetries[lorentz.name][outgoing])].add_combine(m)

    if save:
        self.save()
900
902 """add a series of Lorentz structure created dynamically"""
903
904 for lor in lorentzlist:
905 if not hasattr(self.model.lorentz, lor.name):
906 setattr(self.model.lorentz, lor.name, lor)
907
909 """ create the requested ALOHA routine.
910 data should be a list of tuple (lorentz, tag, outgoing)
911 tag should be the list of special tag (like conjugation on pair)
912 to apply on the object """
913
914
915
916 self.look_for_symmetries()
917
918
919 aloha.loop_mode = False
920
921 request = {}
922
923 for list_l_name, tag, outgoing in data:
924
925 all_tag = tag[:]
926 conjugate = [i for i in tag if isinstance(i, int)]
927
928 tag = [i for i in tag if isinstance(i, str) and not i.startswith('P')]
929 tag = tag + ['C%s'%i for i in conjugate]
930 tag = tag + [i for i in all_tag if isinstance(i, str) and i.startswith('P')]
931
932 conjugate = tuple([int(float(c[1:])) for c in tag if c.startswith('C')])
933 loop = any((t.startswith('L') for t in tag))
934 if loop:
935 aloha.loop_mode = True
936 self.explicit_combine = True
937
938 for l_name in list_l_name:
939 try:
940 request[l_name][conjugate].append((outgoing,tag))
941 except Exception:
942 try:
943 request[l_name][conjugate] = [(outgoing,tag)]
944 except Exception:
945 request[l_name] = {conjugate: [(outgoing,tag)]}
946
947
948 for l_name in request:
949 lorentz = eval('self.model.lorentz.%s' % l_name)
950 if lorentz.structure == 'external':
951 for tmp in request[l_name]:
952 for outgoing, tag in request[l_name][tmp]:
953 name = aloha_writers.get_routine_name(lorentz.name,outgoing=outgoing,tag=tag)
954 if name not in self.external_routines:
955 self.external_routines.append(name)
956 continue
957
958 builder = AbstractRoutineBuilder(lorentz, self.model)
959
960
961 for conjg in request[l_name]:
962
963 def sorting(a,b):
964 if a[0] < b[0]: return -1
965 else: return 1
966 routines = request[l_name][conjg]
967 routines.sort(key=misc.cmp_to_key(sorting))
968 if not conjg:
969
970 self.compute_aloha(builder, routines=routines)
971 else:
972
973 conjg_builder = builder.define_conjugate_builder(conjg)
974
975 self.compute_aloha(conjg_builder, symmetry=lorentz.name,
976 routines=routines)
977
978
979
980 for list_l_name, tag, outgoing in data:
981 if len(list_l_name) ==1:
982 continue
983
984 conjugate = [i for i in tag if isinstance(i, int)]
985 all_tag = tag[:]
986 tag = [i for i in tag if isinstance(i, str) and not i.startswith('P')]
987 tag = tag + ['C%s'%i for i in conjugate]
988 tag = tag + [i for i in all_tag if isinstance(i, str) and i.startswith('P')]
989
990 if not self.explicit_combine:
991 lorentzname = list_l_name[0]
992 lorentzname += ''.join(tag)
993 if (lorentzname, outgoing) in self:
994 self[(lorentzname, outgoing)].add_combine(list_l_name[1:])
995 else:
996 lorentz = eval('self.model.lorentz.%s' % list_l_name[0])
997 assert lorentz.structure == 'external'
998 else:
999 l_lorentz = []
1000 for l_name in list_l_name:
1001 l_lorentz.append(eval('self.model.lorentz.%s' % l_name))
1002 builder = CombineRoutineBuilder(l_lorentz)
1003
1004 for conjg in request[list_l_name[0]]:
1005
1006 def sorting(a,b):
1007 if a[0] < b[0]: return -1
1008 else: return 1
1009 routines = request[list_l_name[0]][conjg]
1010 routines.sort(key=operator.itemgetter(0))
1011 if not conjg:
1012
1013 self.compute_aloha(builder, routines=routines)
1014 else:
1015
1016 conjg_builder = builder.define_conjugate_builder(conjg)
1017
1018 self.compute_aloha(conjg_builder, symmetry=lorentz.name,
1019 routines=routines)
1020
1021
1022
def compute_aloha(self, builder, symmetry=None, routines=None, tag=None):
    """ define all the AbstractRoutine linked to a given lorentz structure
    symmetry authorizes to use the symmetry of anoter lorentz structure.
    routines to define only a subset of the routines.

    FIX: 'tag' default changed from a mutable [] to None (identical
    behaviour, avoids the shared-mutable-default pitfall).
    """
    tag = [] if tag is None else tag

    name = builder.name
    if not symmetry:
        symmetry = name
    if not routines:
        # build the default list of routines: every leg plus amplitude
        if not tag:
            tag = ['C%s' % i for i in builder.conjg]
        else:
            # prepend the conjugation tags to each user-supplied tag set
            addon = ['C%s' % i for i in builder.conjg]
            tag = [(i,addon +onetag) for i,onetag in tag]
        routines = [ tuple([i,tag]) for i in range(len(builder.spins) + 1 )]

    for outgoing, tag in routines:
        # reuse an already-computed routine when a symmetry allows it
        symmetric = self.has_symmetries(symmetry, outgoing, valid_output=routines)
        realname = name + ''.join(tag)
        if (realname, outgoing) in self:
            continue  # already computed

        if symmetric:
            self.get(realname, symmetric).add_symmetry(outgoing)
        else:
            wavefunction = builder.compute_routine(outgoing, tag)
            self.set(realname, outgoing, wavefunction)
1052
1053
1055 """define all the AbstractRoutine linked to a given lorentz structure
1056 symmetry authorizes to use the symmetry of anoter lorentz structure.
1057 routines to define only a subset of the routines.
1058 Compare to compute_aloha, each routines are computed independently.
1059 """
1060
1061 name = builder.name
1062 if not routines:
1063 routines = [ tuple([i,[]]) for i in range(len(builder.spins) + 1 )]
1064
1065 for outgoing, tag in routines:
1066 builder.routine_kernel = None
1067 wavefunction = builder.compute_routine(outgoing, tag)
1068 self.set(name, outgoing, wavefunction)
1069
1070
def write(self, output_dir, language):
    """ write the full set of Helicity Routine in output_dir"""
    for abstract_routine in self.values():
        abstract_routine.write(output_dir, language)

    # external routines are not generated: copy them from disk instead
    for routine in self.external_routines:
        self.locate_external(routine, language, output_dir)
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1101 """search a valid external file and copy it to output_dir directory"""
1102
1103 language_to_ext = {'Python': 'py',
1104 'Fortran' : 'f',
1105 'CPP': 'C'}
1106 ext = language_to_ext[language]
1107 paths = [os.path.join(self.model_pos, language), self.model_pos,
1108 os.path.join(root_path, 'aloha', 'template_files', )]
1109
1110 ext_files = []
1111 for path in paths:
1112 ext_files = misc.glob('%s.%s' % (name, ext), path)
1113 if ext_files:
1114 break
1115 else:
1116
1117 raise ALOHAERROR('No external routine \"%s.%s\" in directories\n %s' % \
1118 (name, ext, '\n'.join(paths)))
1119
1120 if output_dir:
1121 for filepath in ext_files:
1122
1123 files.cp(filepath, output_dir)
1124 return ext_files
1125
1126
1127
1129 """Search some symmetries in the vertices.
1130 We search if some identical particles are in a vertices in order
1131 to avoid to compute symmetrical contributions"""
1132
1133 for vertex in self.model.all_vertices:
1134 for i, part1 in enumerate(vertex.particles):
1135 for j in range(i-1,-1,-1):
1136 part2 = vertex.particles[j]
1137 if part1.pdg_code == part2.pdg_code and part1.color == 1:
1138 if part1.spin == 2 and (i % 2 != j % 2 ):
1139 continue
1140 for lorentz in vertex.lorentz:
1141 if lorentz.name in self.symmetries:
1142 if i+1 in self.symmetries[lorentz.name]:
1143 self.symmetries[lorentz.name][i+1] = max(self.symmetries[lorentz.name][i+1], j+1)
1144 else:
1145 self.symmetries[lorentz.name][i+1] = j+1
1146 else:
1147 self.symmetries[lorentz.name] = {i+1:j+1}
1148 break
1149
        """Search the interaction associate with more than one lorentz structure.
        If those lorentz structure have the same order and the same color then
        associate a multiple lorentz routines to ALOHA """

        # coupling name -> string form of its coupling-order dict; used as
        # part of the grouping key below (str() makes it hashable)
        orders = {}
        for coup in self.model.all_couplings:
            orders[coup.name] = str(coup.order)

        for vertex in self.model.all_vertices:
            # a single lorentz structure can never be combined
            if len(vertex.lorentz) == 1:
                continue

            # group lorentz-structure ids by (color index, coupling order)
            combine = {}
            for (id_col, id_lor), coups in vertex.couplings.items():
                # couplings may be stored as a single object or a list
                if not isinstance(coups, list):
                    coups = [coups]
                for coup in coups:
                    order = orders[coup.name]
                    key = (id_col, order)
                    if key in combine:
                        combine[key].append(id_lor)
                    else:
                        combine[key] = [id_lor]

            # every group holding several structures is registered in
            # self.multiple_lor under the name of its lowest-id structure
            for list_lor in combine.values():
                if len(list_lor) == 1:
                    continue
                list_lor.sort()
                main = vertex.lorentz[list_lor[0]].name
                if main not in self.multiple_lor:
                    self.multiple_lor[main] = []

                # the remaining structures, as a tuple of names
                info = tuple([vertex.lorentz[id].name for id in list_lor[1:]])
                if info not in self.multiple_lor[main]:
                    self.multiple_lor[main].append(info)
1191
1192
1193 - def has_symmetries(self, l_name, outgoing, out=None, valid_output=None):
1194 """ This returns out if no symmetries are available, otherwise it finds
1195 the lowest equivalent outgoing by recursivally calling this function.
1196 auth is a list of authorize output, if define"""
1197
1198 try:
1199 equiv = self.symmetries[l_name][outgoing]
1200 except Exception:
1201 return out
1202 else:
1203 if not valid_output or equiv in valid_output:
1204 return self.has_symmetries(l_name, equiv, out=equiv,
1205 valid_output=valid_output)
1206 else:
1207 return self.has_symmetries(l_name, equiv, out=out,
1208 valid_output=valid_output)
1209
        """ create a list for the routine needing to be conjugate """

        # Step 1: decide whether conjugate routines are needed at all.
        # Either some spin==2 particle is self-conjugate (Majorana) ...
        need = False
        for particle in self.model.all_particles:
            if particle.spin == 2 and particle.selfconjugate:
                need = True
                break

        # ... or some vertex pairs two spin==2 particles with same-sign
        # pdg codes (presumably a fermion-flow violation -- confirm)
        if not need:
            for interaction in self.model.all_vertices:
                fermions = [p for p in interaction.particles if p.spin == 2]
                for i in range(0, len(fermions), 2):
                    if fermions[i].pdg_code * fermions[i+1].pdg_code > 0:
                        need = True
                        break

        if not need:
            return {}

        # lorentz name -> set of pair positions (1-based) to conjugate
        conjugate_request = {}

        for vertex in self.model.all_vertices:
            # particles are scanned in consecutive pairs
            for i in range(0, len(vertex.particles), 2):
                part1 = vertex.particles[i]
                if part1.spin !=2:
                    # assumes spin==2 particles come first in the vertex:
                    # stop at the first non-spin-2 slot
                    break

                if part1.selfconjugate:
                    continue
                part2 = vertex.particles[i + 1]
                if part2.selfconjugate:
                    continue

                # both members of the pair are non-self-conjugate:
                # request conjugation of pair i//2+1 for every structure
                for lorentz in vertex.lorentz:
                    try:
                        conjugate_request[lorentz.name].add(i//2+1)
                    except Exception:
                        # first request for this lorentz structure
                        conjugate_request[lorentz.name] = set([i//2+1])

        # callers expect lists, not sets
        for elem in conjugate_request:
            conjugate_request[elem] = list(conjugate_request[elem])

        return conjugate_request
1259
def write_aloha_file_inc(aloha_dir, file_ext, comp_ext):
    """find the list of Helicity routine in the directory and create a list
    of those files (but with compile extension)

    Scans *aloha_dir* for generated ALOHA routine files (any file whose name
    contains ``_<digit>`` followed by *file_ext*), swaps their extension for
    *comp_ext*, and writes the list into ``aloha_file.inc`` as the makefile
    variable ``ALOHARoutine``.
    """
    aloha_files = []

    # file_ext typically starts with '.'; escape it so the dot is matched
    # literally instead of acting as a regex wildcard.
    alohafile_pattern = re.compile(r'_\d%s' % re.escape(file_ext))
    for filename in os.listdir(aloha_dir):
        if os.path.isfile(os.path.join(aloha_dir, filename)):
            if alohafile_pattern.search(filename):
                aloha_files.append(filename.replace(file_ext, comp_ext))

    # optional hand-written extension shipped alongside some models
    if os.path.exists(os.path.join(aloha_dir, 'additional_aloha_function.f')):
        aloha_files.append('additional_aloha_function.o')

    text = "ALOHARoutine = "
    text += ' '.join(aloha_files)
    text += '\n'

    # context manager guarantees the include file is flushed and closed
    # (the original relied on refcounting via open(...).write(...))
    with open(os.path.join(aloha_dir, 'aloha_file.inc'), 'w') as fsock:
        fsock.write(text)
1284
1288
    def create(obj):
        """simplify / expand / simplify pipeline applied to every
        propagator expression before it is stored"""
        obj= obj.simplify()
        obj = obj.expand()
        obj = obj.simplify()
        return obj

    # snapshot the kernel's tag set so building the propagator expression
    # does not pollute the caller's state; restored before returning
    old_tag = set(aloha_lib.KERNEL.use_tag)
    name, i = tag
    if name == "Spin2Prop":
        # massive spin-2 propagator; the _spin2_mult offsets presumably
        # encode the two internal lorentz indices of leg i -- confirm
        lib[('Spin2Prop',i)] = create( Spin2Propagator(_spin2_mult + i, \
                                            2 * _spin2_mult + i,'I2','I3', i) )
    elif name == "Spin2PropMassless":
        lib[('Spin2PropMassless',i)] = create( Spin2masslessPropagator(
                                 _spin2_mult + i, 2 * _spin2_mult + i,'I2','I3'))

    aloha_lib.KERNEL.use_tag = old_tag
    return lib
1308
1309
if '__main__' == __name__:
    # stand-alone benchmark / debug entry point
    logging.basicConfig(level=0)

    import profile

    # time the full generation run
    start = time.time()
    # NOTE(review): the body of write() is missing from this extract, and
    # main() is not defined in the visible code -- presumably both lived in
    # lines stripped by the extraction; confirm against the full file.
    def write(alohagenerator):
    alohagenerator = main()
    logger.info('done in %s s' % (time.time()-start))
    write(alohagenerator)

    stop = time.time()
    logger.info('done in %s s' % (stop-start))
1330