1
2
3
4
5
6
7
8
9
10
11
12
13
14
15 from __future__ import division
16 import cmath
17 import copy
18 import cPickle
19 import glob
20 import logging
21 import numbers
22 import os
23 import re
24 import shutil
25 import sys
26 import time
27 from madgraph.interface.tutorial_text import output
28
29 root_path = os.path.split(os.path.dirname(os.path.realpath( __file__ )))[0]
30 sys.path.append(root_path)
31 from aloha.aloha_object import *
32 import aloha
33 import aloha.aloha_writers as aloha_writers
34 import aloha.aloha_lib as aloha_lib
35 import aloha.aloha_object as aloha_object
36 import aloha.aloha_parsers as aloha_parsers
37 import aloha.aloha_fct as aloha_fct
38 try:
39 import madgraph.iolibs.files as files
40 import madgraph.various.misc as misc
41 except Exception:
42 import aloha.files as files
43 import aloha.misc as misc
44
45 aloha_path = os.path.dirname(os.path.realpath(__file__))
46 logger = logging.getLogger('ALOHA')
47
48 _conjugate_gap = 50
49 _spin2_mult = 1000
50
51 pjoin = os.path.join
52
53 ALOHAERROR = aloha.ALOHAERROR
56 """ store the result of the computation of Helicity Routine
57 this is use for storing and passing to writer """
58
def __init__(self, expr, outgoing, spins, name, infostr, denom=None):
    """Store the result of the computation of a helicity routine.

    This object is what gets handed over to the writers.
    """

    # routine identity
    self.name = name
    self.outgoing = outgoing
    self.spins = spins
    self.infostr = infostr
    # analytic content
    self.expr = expr
    self.denominator = denom
    # bookkeeping filled later on
    self.symmetries = []
    self.combined = []
    self.tag = []
    self.contracted = {}
72
73
74
76 """ add an outgoing """
77
78 if not outgoing in self.symmetries:
79 self.symmetries.append(outgoing)
80
82 """add a combine rule """
83
84 if lor_list not in self.combined:
85 self.combined.append(lor_list)
86
def write(self, output_dir, language='Fortran', mode='self', combine=True, **opt):
    """Write the content of the object via the writer factory.

    When *combine* is true, the combined (multi-lorentz) variants are
    appended to the output; a multiple-precision copy is emitted as well
    when the global aloha.mp_precision flag is set.
    """
    writer = aloha_writers.WriterFactory(self, language, output_dir, self.tag)
    text = writer.write(mode=mode, **opt)

    if combine:
        for grouped in self.combined:
            extra = writer.write_combined(grouped, mode=mode + 'no_include', **opt)
            if isinstance(text, tuple):
                # writers may return a tuple of texts: merge element-wise
                text = tuple(old + new for old, new in zip(text, extra))
            else:
                text += extra

    if aloha.mp_precision and 'MP' not in self.tag:
        # emit the multiple-precision version once (tag prevents recursion)
        self.tag.append('MP')
        text += self.write(output_dir, language, mode, **opt)
    return text
102
104 """return some information on the routine
105 """
106 if info == "rank":
107 assert isinstance(self.expr, aloha_lib.SplitCoefficient)
108 rank= 1
109 for coeff in self.expr:
110 rank = max(sum(coeff), rank)
111 return rank -1
112 else:
113 raise ALOHAERROR, '%s is not a valid information that can be computed' % info
114
117 """ Launch the creation of the Helicity Routine"""
118
119 prop_lib = {}
120 counter = 0
121
123 """ An error class for ALOHA"""
124
def __init__(self, lorentz, model=None):
    """Initialize the builder.

    lorentz: the lorentz information analyzed (UFO format)
    model: optional model object (used for custom propagators)
    """

    self.name = lorentz.name
    self.spins = list(lorentz.spins)
    self.lorentz_expr = lorentz.structure
    self.model = model
    # state filled/updated during the computation
    self.conjg = []
    self.tag = []
    self.outgoing = None
    self.routine_kernel = None
    self.spin2_massless = False
    self.spin32_massless = False
    self.contracted = {}
    self.fct = {}
    self.denominator = None
    self.lastprint = 0

    # Substitute any form factor by its explicit expression.
    if hasattr(lorentz, 'formfactors') and lorentz.formfactors:
        for formf in lorentz.formfactors:
            pat = re.compile(r'\b%s\b' % formf.name)
            self.lorentz_expr = pat.sub('(%s)' % formf.value, self.lorentz_expr)
154
164
166 """ return the full set of AbstractRoutineBuilder linked to fermion
167 clash"""
168
169 solution = []
170
171 for i, pair in enumerate(pair_list):
172 new_builder = self.define_conjugate_builder(pair)
173 solution.append(new_builder)
174 solution += new_builder.define_all_conjugate_builder(pair_list[i+1:])
175 return solution
176
178 """ return a AbstractRoutineBuilder for the conjugate operation.
179 If they are more than one pair of fermion. Then use pair to claim which
180 one is conjugated"""
181
182 new_builder = copy.copy(self)
183 new_builder.conjg = self.conjg[:]
184 try:
185 for index in pairs:
186 new_builder.apply_conjugation(index)
187 except TypeError:
188 new_builder.apply_conjugation(pairs)
189 return new_builder
190
192 """ apply conjugation on self object"""
193
194 nb_fermion = len([1 for s in self.spins if s % 2 == 0])
195 if isinstance(pair, tuple):
196 if len(pair) ==1 :
197 pair = pair[0]
198 else:
199 raise Exception
200
201
202 if (pair > 1 or nb_fermion >2) and not self.conjg:
203
204 data = aloha_fct.get_fermion_flow(self.lorentz_expr, nb_fermion)
205 target = dict([(2*i+1,2*i+2) for i in range(nb_fermion//2)])
206 if not data == target:
207 text = """Unable to deal with 4(or more) point interactions
208 in presence of majorana particle/flow violation"""
209 raise ALOHAERROR, text
210
211 old_id = 2 * pair - 1
212 new_id = _conjugate_gap + old_id
213
214 self.kernel_tag = set()
215 aloha_lib.KERNEL.use_tag = set()
216 if not self.routine_kernel or isinstance(self.routine_kernel, str):
217 self.routine_kernel = eval(self.parse_expression(self.lorentz_expr))
218 self.kernel_tag = aloha_lib.KERNEL.use_tag
219
220
221 self.routine_kernel = \
222 C(new_id, old_id + 1) * self.routine_kernel * C(new_id + 1, old_id)
223
224 self.lorentz_expr = '('+self.lorentz_expr+') * C(%s,%s) * C(%s,%s)' % \
225 (new_id, old_id + 1, new_id + 1, old_id )
226
227 self.conjg.append(pair)
228
229
249
251 """change the sign of P for outcoming fermion in order to
252 correct the mismatch convention between HELAS and FR"""
253
254 if not expr:
255 expr = self.lorentz_expr
256
257 if need_P_sign:
258 expr = re.sub(r'\b(P|PSlash)\(', r'-\1(', expr)
259
260 calc = aloha_parsers.ALOHAExpressionParser()
261 lorentz_expr = calc.parse(expr)
262 return lorentz_expr
263
265 """compute the abstract routine associate to this mode """
266
267
268 aloha_lib.KERNEL.use_tag=set()
269
270 nb_spinor = 0
271 outgoing = self.outgoing
272 if (outgoing + 1) // 2 in self.conjg:
273
274 outgoing = outgoing + outgoing % 2 - (outgoing +1) % 2
275
276 if not self.routine_kernel:
277 AbstractRoutineBuilder.counter += 1
278 if self.tag == []:
279 logger.info('aloha creates %s routines' % self.name)
280 elif AbstractALOHAModel.lastprint < time.time() - 1:
281 AbstractALOHAModel.lastprint = time.time()
282 logger.info('aloha creates %s set of routines with options: %s' \
283 % (self.name, ','.join(self.tag)) )
284 try:
285 lorentz = self.parse_expression()
286 self.routine_kernel = lorentz
287 lorentz = eval(lorentz)
288 except NameError as error:
289 logger.error('unknow type in Lorentz Evaluation:%s'%str(error))
290 raise ALOHAERROR, 'unknow type in Lorentz Evaluation: %s ' % str(error)
291 else:
292 self.kernel_tag = set(aloha_lib.KERNEL.use_tag)
293 elif isinstance(self.routine_kernel,str):
294 lorentz = eval(self.routine_kernel)
295 aloha_lib.KERNEL.use_tag = set(self.kernel_tag)
296 else:
297 lorentz = copy.copy(self.routine_kernel)
298 aloha_lib.KERNEL.use_tag = set(self.kernel_tag)
299 for (i, spin ) in enumerate(self.spins):
300 id = i + 1
301
302 if id == outgoing:
303
304
305 propa = [t[1:] for t in self.tag if t.startswith('P')]
306 if propa == ['0']:
307 if spin == 3 and aloha.unitary_gauge == 2:
308 misc.sprint(spin)
309 lorentz *= complex(0,1) * self.get_custom_propa('1PS', spin, id)
310 continue
311 else:
312 massless = True
313 self.denominator = None
314 elif propa == []:
315 massless = False
316 self.denominator = None
317 else:
318 lorentz *= complex(0,1) * self.get_custom_propa(propa[0], spin, id)
319 continue
320
321
322
323 if spin in [1,-1]:
324 lorentz *= complex(0,1)
325 elif spin == 2:
326
327 if (id + 1) // 2 in self.conjg:
328 id += _conjugate_gap + id % 2 - (id +1) % 2
329 if (id % 2):
330
331 lorentz *= complex(0,1) * SpinorPropagatorout(id, 'I2', outgoing)
332 else:
333
334 lorentz *= complex(0,1) * SpinorPropagatorin('I2', id, outgoing)
335 elif spin == 3 :
336 if massless or not aloha.unitary_gauge:
337 lorentz *= VectorPropagatorMassless(id, 'I2', id)
338 else:
339 lorentz *= VectorPropagator(id, 'I2', id)
340 elif spin == 4:
341
342 if (id + 1) // 2 in self.conjg:
343 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
344 else:
345 spin_id = id
346 nb_spinor += 1
347 if not massless and (spin_id % 2):
348 lorentz *= complex(0,1) * Spin3halfPropagatorout(id, 'I2', spin_id,'I3', outgoing)
349 elif not massless and not (spin_id % 2):
350 lorentz *= complex(0,1) * Spin3halfPropagatorin('I2', id , 'I3', spin_id, outgoing)
351 elif spin_id %2:
352 lorentz *= complex(0,1) * Spin3halfPropagatorMasslessOut(id, 'I2', spin_id,'I3', outgoing)
353 else :
354 lorentz *= complex(0,1) * Spin3halfPropagatorMasslessIn('I2', id, 'I3', spin_id, outgoing)
355
356 elif spin == 5 :
357
358 if massless:
359 lorentz *= complex(0,1) * Spin2masslessPropagator(_spin2_mult + id, \
360 2 * _spin2_mult + id,'I2','I3')
361 else:
362 lorentz *= complex(0,1) * Spin2Propagator(_spin2_mult + id, \
363 2 * _spin2_mult + id,'I2','I3', id)
364 else:
365 raise self.AbstractALOHAError(
366 'The spin value %s (2s+1) is not supported yet' % spin)
367 else:
368
369 if spin in [1,-1]:
370 lorentz *= Scalar(id)
371 elif spin == 2:
372
373 if (id+1) // 2 in self.conjg:
374 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
375 else:
376 spin_id = id
377 lorentz *= Spinor(spin_id, id)
378 elif spin == 3:
379 lorentz *= Vector(id, id)
380 elif spin == 4:
381
382 if (id+1) // 2 in self.conjg:
383 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
384 else:
385 spin_id = id
386 nb_spinor += 1
387 lorentz *= Spin3Half(id, spin_id, id)
388 elif spin == 5:
389 lorentz *= Spin2(1 * _spin2_mult + id, 2 * _spin2_mult + id, id)
390 else:
391 raise self.AbstractALOHAError(
392 'The spin value %s (2s+1) is not supported yet' % spin)
393
394
395 if not outgoing:
396 lorentz *= complex(0,-1)
397
398
399 lorentz = lorentz.simplify()
400
401
402 if any((tag.startswith('L') for tag in self.tag if len(tag)>1)):
403 return self.compute_loop_coefficient(lorentz, outgoing)
404
405 lorentz = lorentz.expand()
406 lorentz = lorentz.simplify()
407
408 if factorize:
409 lorentz = lorentz.factorize()
410
411 lorentz.tag = set(aloha_lib.KERNEL.use_tag)
412 return lorentz
413
414 @staticmethod
416 """Change the index of the propagator to match the current need"""
417
418 data = re.split(r'(\b[a-zA-Z]\w*?)\(([\'\w,\s\"\+\-]*?)\)',text)
419 to_change = {}
420 for old, new in tag.items():
421 if isinstance(new, str):
422 new='\'%s\'' % new
423 else:
424 new = str(new)
425 to_change[r'%s' % old] = new
426 pos=-2
427 while pos +3 < len(data):
428 pos = pos+3
429 ltype = data[pos]
430 if ltype != 'complex':
431 data[pos+1] = re.sub(r'\b(?<!-)(%s)\b' % '|'.join(to_change),
432 lambda x: to_change[x.group()], data[pos+1])
433 data[pos+1] = '(%s)' % data[pos+1]
434 text=''.join(data)
435 return text
436
438 """Return the ALOHA object associated to the user define propagator"""
439
440 if not propa.startswith('1'):
441 propagator = getattr(self.model.propagators, propa)
442 numerator = propagator.numerator
443 denominator = propagator.denominator
444 elif propa == "1L":
445 numerator = "EPSL(1,id) * EPSL(2,id)"
446 denominator = "-1*PVec(-2,id)*PVec(-2,id)*P(-3,id)*P(-3,id) * (P(-1,id)**2 - Mass(id) * Mass(id) + complex(0,1) * Mass(id) * Width(id))"
447 elif propa == "1T":
448 numerator = "-1*PVec(-2,id)*PVec(-2,id) * EPST2(1,id)*EPST2(2,id) + EPST1(1,id)*EPST1(2,id)"
449 denominator = "PVec(-2,id)*PVec(-2,id) * PT(-3,id)*PT(-3,id) * (P(-1,id)**2 - Mass(id) * Mass(id) + complex(0,1) * Mass(id) * Width(id))"
450 elif propa == "1A":
451 numerator = "(P(-2,id)**2 - Mass(id)**2) * P(1,id) * P(2,id)"
452 denominator = "P(-2,id)**2 * Mass(id)**2 * (P(-1,id)**2 - Mass(id) * Mass(id) + complex(0,1) * Mass(id) * Width(id))"
453 elif propa in ["1P"]:
454
455 spin_id = id
456 if (id + 1) // 2 in self.conjg:
457 spin_id += _conjugate_gap + id % 2 - (id +1) % 2
458 if (spin_id % 2):
459 numerator = "UFP(1,id)*UFPC(2,id)"
460 else:
461 numerator = "VFP(1,id)*VFPC(2,id)"
462
463 denominator = "(2*Tnorm(id)*TnormZ(id))*(P(-1,id)*P(-1,id) - Mass(id) * Mass(id) + complex(0,1) * Mass(id) * Width(id))"
464
465 elif propa == "1M":
466
467 spin_id = id
468 if (id + 1) // 2 in self.conjg:
469 spin_id += _conjugate_gap + id % 2 - (id +1) % 2
470 if (spin_id % 2):
471 numerator = "UFM(1,id)*UFMC(2,id)"
472 else:
473 numerator = "VFM(1,id)*VFMC(2,id)"
474 denominator = "(2*Tnorm(id)*TnormZ(id))*(P(-1,id)*P(-1,id) - Mass(id) * Mass(id) + complex(0,1) * Mass(id) * Width(id))"
475 elif propa == "1PS":
476 numerator = "(-1*(P(-1,id)*PBar(-1,id)) * Metric(1, 2) + P(1,id)*PBar(2,id) + PBar(1,id)*P(2,id))"
477 denominator = "(P(-3,id)*PBar(-3,id))*P(-2,id)**2"
478 else:
479 raise Exception
480
481
482 needPflipping = False
483 if spin in [1,-1]:
484 tag = {'id': id}
485 elif spin == 2:
486
487 if (id + 1) // 2 in self.conjg:
488 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
489 else:
490 spin_id = id
491 if (spin_id % 2):
492
493 needPflipping = True
494 tag ={'1': spin_id, '2': 'I2', 'id': id}
495 else:
496 tag ={'1': 'I2', '2': spin_id, 'id': id}
497 elif spin == 3 :
498 tag ={'1': id, '2': 'I2', 'id': id}
499 elif spin == 4:
500 delta = lambda i,j: aloha_object.Identity(i,j)
501 deltaL = lambda i,j: aloha_object.IdentityL(i,j)
502
503 if (id + 1) // 2 in self.conjg:
504 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
505 else:
506 spin_id = id
507 tag = {'1': 'pr1', '2': 'pr2', 'id':id}
508 if spin_id % 2:
509 needPflipping = True
510
511 propaR = deltaL('pr1',id) * deltaL('pr2', 'I2') * delta('pr1', spin_id) * delta('pr2', 'I3')
512 else:
513 propaR = deltaL('pr1',id) * deltaL('pr2', 'I2') * delta('pr2', spin_id) * delta('pr1', 'I3')
514
515 elif spin == 5 :
516 tag = {'1': _spin2_mult + id, '2': 'I2',
517 '51': 2 * _spin2_mult + id, '52': 'I3', 'id':id}
518
519 numerator = self.mod_propagator_expression(tag, numerator)
520 if denominator:
521 denominator = self.mod_propagator_expression(tag, denominator)
522
523 numerator = self.parse_expression(numerator, needPflipping)
524
525 if denominator:
526 self.denominator = self.parse_expression(denominator, needPflipping)
527 self.denominator = eval(self.denominator)
528 if not isinstance(self.denominator, numbers.Number):
529 self.denominator = self.denominator.simplify().expand().simplify().get((0,))
530 needPflipping = False
531 if spin ==4:
532 return eval(numerator) * propaR
533 else:
534 return eval(numerator)
535
536
537
538
540
541
542 l_in = [int(tag[1:]) for tag in self.tag if tag.startswith('L')][0]
543 if (l_in + 1) // 2 in self.conjg:
544
545 l_in = l_in + l_in % 2 - (l_in +1) % 2
546 assert l_in != outgoing, 'incoming Open Loop can not be the outcoming one'
547
548
549
550 Pdep = [aloha_lib.KERNEL.get(P) for P in lorentz.get_all_var_names()
551 if P.startswith('_P')]
552
553 Pdep = set([P for P in Pdep if P.particle in [outgoing, l_in]])
554 for P in Pdep:
555 if P.particle == l_in:
556 sign = 1
557 else:
558 sign = -1
559 id = P.id
560 lorentz_ind = P.lorentz_ind[0]
561 P_Lid = aloha_object.P(lorentz_ind, 'L')
562 P_obj = aloha_object.P(lorentz_ind, P.particle)
563 new_expr = sign*(P_Lid + P_obj)
564 lorentz = lorentz.replace(id, new_expr)
565
566
567 var_veto = ['PL_0', 'PL_1', 'PL_2', 'PL_3']
568 spin = aloha_writers.WriteALOHA.type_to_variable[abs(self.spins[l_in-1])]
569 size = aloha_writers.WriteALOHA.type_to_size[spin]-1
570 var_veto += ['%s%s_%s' % (spin,l_in,i) for i in range(1,size)]
571
572 veto_ids = aloha_lib.KERNEL.get_ids(var_veto)
573
574 lorentz = lorentz.expand(veto = veto_ids)
575 lorentz = lorentz.simplify()
576 coeff_expr = lorentz.split(veto_ids)
577
578 for key, expr in coeff_expr.items():
579 expr = expr.simplify()
580 coeff_expr[key] = expr.factorize()
581 coeff_expr.tag = set(aloha_lib.KERNEL.use_tag)
582
583 return coeff_expr
584
586 """Define the expression"""
587
588 self.expr = lorentz_expr
589
591 """Define the kernel at low level"""
592
593 if not lorentz:
594 logger.info('compute kernel %s' % self.counter)
595 AbstractRoutineBuilder.counter += 1
596 lorentz = eval(self.lorentz_expr)
597
598 if isinstance(lorentz, numbers.Number):
599 self.routine_kernel = lorentz
600 return lorentz
601 lorentz = lorentz.simplify()
602 lorentz = lorentz.expand()
603 lorentz = lorentz.simplify()
604
605 self.routine_kernel = lorentz
606 return lorentz
607
608
609 @staticmethod
611 """return the name of the """
612
613 name = '%s_%s' % (name, outgoing)
614 return name
615
616 @classmethod
623
626 """A special builder for combine routine if needed to write those
627 explicitely.
628 """
def __init__(self, l_lorentz, model=None):
    """Initialize the combined-routine builder.

    l_lorentz: list of lorentz information analyzed (UFO format); the
    resulting expression is the coupling-weighted sum of the structures.
    """
    AbstractRoutineBuilder.__init__(self, l_lorentz[0], model)
    first = l_lorentz[0]
    self.spins = first.spins
    names = [lor.name for lor in l_lorentz]
    self.name = aloha_writers.combine_name(names[0], names[1:], None)
    self.conjg = []
    self.tag = []
    self.outgoing = None
    # Coup(i) * (structure_i) summed over all lorentz structures
    terms = ['Coup(%s) * (%s)' % (i + 1, lor.structure)
             for i, lor in enumerate(l_lorentz)]
    self.lorentz_expr = ' + '.join(terms)
    self.routine_kernel = None
    self.contracted = {}
    self.fct = {}
651
653 """ A class to build and store the full set of Abstract ALOHA Routine"""
654
655 lastprint = 0
656
def __init__(self, model_name, write_dir=None, format='Fortran',
             explicit_combine=False):
    """Load the UFO model and init the dictionary.

    model_name: name of the UFO model directory (a '-restriction'
        suffix is stripped when a matching restrict_*.dat file exists).
    write_dir: if given, routines are computed and written immediately.
    """

    self.explicit_combine = explicit_combine

    # Strip a restriction suffix "name-rest" when 'rest' is the special
    # keyword 'full' or corresponds to an actual restriction file.
    # BUGFIX: raw string for the regex — '\w' in a plain string is an
    # invalid escape sequence (SyntaxWarning/SyntaxError on modern Python).
    model_name_pattern = re.compile(r"^(?P<name>.+)-(?P<rest>[\w\d_]+)$")
    model_name_re = model_name_pattern.match(model_name)
    if model_name_re:
        name = model_name_re.group('name')
        rest = model_name_re.group("rest")
        if rest == 'full' or \
           os.path.isfile(os.path.join(root_path, "models", name,
                                       "restrict_%s.dat" % rest)):
            model_name = model_name_re.group("name")

    # Import the UFO model, either as a top-level module or from 'models'.
    try:
        python_pos = model_name
        __import__(python_pos)
    except Exception:
        python_pos = 'models.%s' % model_name
        __import__(python_pos)
    self.model = sys.modules[python_pos]

    self.model_pos = os.path.dirname(self.model.__file__)

    # Routines provided as external files rather than computed by ALOHA.
    self.external_routines = []

    dict.__init__(self)
    self.symmetries = {}
    self.multiple_lor = {}

    if write_dir:
        self.main(write_dir, format=format)
696
def main(self, output_dir, format='Fortran'):
    """ Compute if not already compute.
    Write file in models/MY_MODEL/MY_FORMAT.
    copy the file to output_dir
    """
    ext = {'Fortran': 'f', 'Python': 'py', 'CPP': 'h'}

    # compute the routines unless a saved set could be reloaded
    if not self.load():
        self.compute_all()
    logger.info(' %s aloha routine' % len(self))

    # default output location lives next to the model
    if not output_dir:
        output_dir = os.path.join(self.model_pos, format.lower())
    logger.debug('aloha output dir is %s' % output_dir)
    if not os.path.exists(output_dir):
        os.mkdir(output_dir)

    # write each routine, skipping files that already exist
    for (name, outgoing), abstract in self.items():
        routine_name = AbstractRoutineBuilder.get_routine_name(name, outgoing)
        target = os.path.join(output_dir, routine_name) + '.' + ext[format]
        if os.path.exists(target):
            logger.info('File for %s already present, skip the writing of this file' % routine_name)
        else:
            abstract.write(output_dir, format)
724
725
def save(self, filepos=None):
    """ save the current model in a pkl file

    filepos: target path; defaults to <model_pos>/aloha.pkl
    """

    logger.info('save the aloha abstract routine in a pickle file')
    if not filepos:
        filepos = os.path.join(self.model_pos, 'aloha.pkl')

    # BUGFIX: open in binary mode (required for pickle data) and make sure
    # the handle is closed — it was previously opened with 'w' and leaked.
    fsock = open(filepos, 'wb')
    try:
        cPickle.dump(dict(self), fsock)
    finally:
        fsock.close()
735
def load(self, filepos=None):
    """ reload the pickle file

    NOTE: loading is deliberately disabled (always returns False) so the
    routines are recomputed from scratch; the code below is kept for
    reference and is currently unreachable.
    """
    return False
    if not filepos:
        filepos = os.path.join(self.model_pos, 'aloha.pkl')
    if os.path.exists(filepos):
        # BUGFIX (dead path): close the file handle, previously leaked
        fsock = open(filepos, 'r')
        try:
            self.update(cPickle.load(fsock))
        finally:
            fsock.close()
        return True
    else:
        return False
747
def get(self, lorentzname, outgoing):
    """ return the AbstractRoutine with a given lorentz name, and for a given
    outgoing particle; None (with a warning) when the key is unknown. """

    try:
        return self[(lorentzname, outgoing)]
    # BUGFIX: only a missing key is expected here — catching bare
    # Exception also hid unrelated programming errors.
    except KeyError:
        logger.warning('(%s, %s) is not a valid key' %
                       (lorentzname, outgoing))
        return None
758
def get_info(self, info, lorentzname, outgoing, tag, cached=False):
    """return some information about the aloha routine
    - "rank": return the rank of the loop function
    *info* may be a single string or a list of strings.
    If the cached option is set to true, then the result is stored and
    recycled if possible.
    """

    # a loop tag forces aloha into loop mode globally
    if not aloha.loop_mode and any(t.startswith('L') for t in tag):
        aloha.loop_mode = True

    returned_dict = {}

    if isinstance(info, str):
        infos = [info]
    else:
        infos = info

    # Try to recycle cached values first.
    if hasattr(self, 'cached_interaction_infos'):
        for info_key in infos:
            try:
                # BUGFIX: both the stored key and the cache key used `info`
                # instead of `info_key` (wrong key, and unhashable when
                # `info` is a list).
                returned_dict[info_key] = self.cached_interaction_infos[
                    (lorentzname, outgoing, tuple(tag), info_key)]
            except KeyError:
                # not cached yet: computed below
                pass
    elif cached:
        self.cached_interaction_infos = {}

    # Compute the routine lazily, only if at least one info is missing.
    init = False
    for info_key in infos:
        if info_key in returned_dict:
            continue
        elif not init:
            # safer than eval on a formatted string
            lorentz = getattr(self.model.lorentz, lorentzname)
            abstract = AbstractRoutineBuilder(lorentz)
            routine = abstract.compute_routine(outgoing, tag, factorize=False)
            init = True

        assert 'routine' in locals()
        returned_dict[info_key] = routine.get_info(info_key)
        if cached:
            self.cached_interaction_infos[
                (lorentzname, outgoing, tuple(tag), info_key)] = returned_dict[info_key]

    if isinstance(info, str):
        return returned_dict[info]
    else:
        return returned_dict
813
def set(self, lorentzname, outgoing, abstract_routine):
    """Register *abstract_routine* under the (lorentzname, outgoing) key."""

    key = (lorentzname, outgoing)
    self[key] = abstract_routine
818
def compute_all(self, save=True, wanted_lorentz=None, custom_propa=False):
    """ define all the AbstractRoutine linked to a model

    wanted_lorentz: optional list of lorentz names to restrict the
        computation to (BUGFIX: default changed from the mutable [] to
        None — backward compatible since only emptiness was tested).
    """

    # reinitialize the symmetry/combination bookkeeping
    self.look_for_symmetries()
    conjugate_list = self.look_for_conjugate()
    self.look_for_multiple_lorentz_interactions()

    if not wanted_lorentz:
        wanted_lorentz = [l.name for l in self.model.all_lorentz]
    for lorentz in self.model.all_lorentz:
        if lorentz.name not in wanted_lorentz:
            # routine not requested
            continue

        if -1 in lorentz.spins:
            # No helicity routine for ghost-like spins
            continue

        if lorentz.structure == 'external':
            # routine provided by an external file, one per leg
            for i in range(len(lorentz.spins)):
                self.external_routines.append('%s_%s' % (lorentz.name, i))
            continue

        routines = [(i, []) for i in range(len(lorentz.spins) + 1)]

        if custom_propa:
            # add custom-propagator variants requested by the particles
            for vertex in self.model.all_vertices:
                if lorentz in vertex.lorentz:
                    for i, part in enumerate(vertex.particles):
                        new_prop = False
                        if hasattr(part, 'propagator') and part.propagator:
                            new_prop = ['P%s' % part.propagator.name]
                        elif part.mass.name.lower() == 'zero':
                            new_prop = ['P0']
                        if new_prop and (i + 1, new_prop) not in routines:
                            routines.append((i + 1, new_prop))

        builder = AbstractRoutineBuilder(lorentz, self.model)
        self.compute_aloha(builder, routines=routines)

        if lorentz.name in self.multiple_lor:
            for m in self.multiple_lor[lorentz.name]:
                for outgoing in range(len(lorentz.spins) + 1):
                    try:
                        self[(lorentz.name, outgoing)].add_combine(m)
                    # routine replaced by a symmetry: nothing to combine
                    except KeyError:
                        pass

        if lorentz.name in conjugate_list:
            conjg_builder_list = builder.define_all_conjugate_builder(
                conjugate_list[lorentz.name])
            for conjg_builder in conjg_builder_list:
                # No duplication of conjugation:
                assert conjg_builder_list.count(conjg_builder) == 1
                self.compute_aloha(conjg_builder, lorentz.name)
                if lorentz.name in self.multiple_lor:
                    for m in self.multiple_lor[lorentz.name]:
                        for outgoing in range(len(lorentz.spins) + 1):
                            realname = conjg_builder.name + ''.join(
                                ['C%s' % pair for pair in conjg_builder.conjg])
                            try:
                                self[(realname, outgoing)].add_combine(m)
                            # BUGFIX: 'except Exception,error' was
                            # Python-2-only syntax; only a missing key is
                            # expected — fall back to the symmetric leg.
                            except KeyError:
                                self[(realname, self.symmetries[lorentz.name][outgoing])].add_combine(m)

    if save:
        self.save()
889
891 """add a series of Lorentz structure created dynamically"""
892
893 for lor in lorentzlist:
894 if not hasattr(self.model.lorentz, lor.name):
895 setattr(self.model.lorentz, lor.name, lor)
896
898 """ create the requested ALOHA routine.
899 data should be a list of tuple (lorentz, tag, outgoing)
900 tag should be the list of special tag (like conjugation on pair)
901 to apply on the object """
902
903
904
905 self.look_for_symmetries()
906
907
908 aloha.loop_mode = False
909
910 request = {}
911
912 for list_l_name, tag, outgoing in data:
913
914 all_tag = tag[:]
915 conjugate = [i for i in tag if isinstance(i, int)]
916
917 tag = [i for i in tag if isinstance(i, str) and not i.startswith('P')]
918 tag = tag + ['C%s'%i for i in conjugate]
919 tag = tag + [i for i in all_tag if isinstance(i, str) and i.startswith('P')]
920
921 conjugate = tuple([int(c[1:]) for c in tag if c.startswith('C')])
922 loop = any((t.startswith('L') for t in tag))
923 if loop:
924 aloha.loop_mode = True
925 self.explicit_combine = True
926
927 for l_name in list_l_name:
928 try:
929 request[l_name][conjugate].append((outgoing,tag))
930 except Exception:
931 try:
932 request[l_name][conjugate] = [(outgoing,tag)]
933 except Exception:
934 request[l_name] = {conjugate: [(outgoing,tag)]}
935
936
937 for l_name in request:
938 lorentz = eval('self.model.lorentz.%s' % l_name)
939 if lorentz.structure == 'external':
940 for tmp in request[l_name]:
941 for outgoing, tag in request[l_name][tmp]:
942 name = aloha_writers.get_routine_name(lorentz.name,outgoing=outgoing,tag=tag)
943 if name not in self.external_routines:
944 self.external_routines.append(name)
945 continue
946
947 builder = AbstractRoutineBuilder(lorentz, self.model)
948
949
950 for conjg in request[l_name]:
951
952 def sorting(a,b):
953 if a[0] < b[0]: return -1
954 else: return 1
955 routines = request[l_name][conjg]
956 routines.sort(sorting)
957 if not conjg:
958
959 self.compute_aloha(builder, routines=routines)
960 else:
961
962 conjg_builder = builder.define_conjugate_builder(conjg)
963
964 self.compute_aloha(conjg_builder, symmetry=lorentz.name,
965 routines=routines)
966
967
968
969 for list_l_name, tag, outgoing in data:
970 if len(list_l_name) ==1:
971 continue
972
973 conjugate = [i for i in tag if isinstance(i, int)]
974 all_tag = tag[:]
975 tag = [i for i in tag if isinstance(i, str) and not i.startswith('P')]
976 tag = tag + ['C%s'%i for i in conjugate]
977 tag = tag + [i for i in all_tag if isinstance(i, str) and i.startswith('P')]
978
979 if not self.explicit_combine:
980 lorentzname = list_l_name[0]
981 lorentzname += ''.join(tag)
982 if self.has_key((lorentzname, outgoing)):
983 self[(lorentzname, outgoing)].add_combine(list_l_name[1:])
984 else:
985 lorentz = eval('self.model.lorentz.%s' % list_l_name[0])
986 assert lorentz.structure == 'external'
987 else:
988 l_lorentz = []
989 for l_name in list_l_name:
990 l_lorentz.append(eval('self.model.lorentz.%s' % l_name))
991 builder = CombineRoutineBuilder(l_lorentz)
992
993 for conjg in request[list_l_name[0]]:
994
995 def sorting(a,b):
996 if a[0] < b[0]: return -1
997 else: return 1
998 routines = request[list_l_name[0]][conjg]
999 routines.sort(sorting)
1000 if not conjg:
1001
1002 self.compute_aloha(builder, routines=routines)
1003 else:
1004
1005 conjg_builder = builder.define_conjugate_builder(conjg)
1006
1007 self.compute_aloha(conjg_builder, symmetry=lorentz.name,
1008 routines=routines)
1009
1010
1011
def compute_aloha(self, builder, symmetry=None, routines=None, tag=None):
    """ define all the AbstractRoutine linked to a given lorentz structure
    symmetry authorizes to use the symmetry of anoter lorentz structure.
    routines to define only a subset of the routines.

    BUGFIX: 'tag' default changed from the mutable [] to None (backward
    compatible: only emptiness was tested).
    """

    if tag is None:
        tag = []

    name = builder.name
    if not symmetry:
        symmetry = name
    if not routines:
        if not tag:
            tag = ['C%s' % i for i in builder.conjg]
        else:
            # prepend the conjugation tags to every requested tag set
            addon = ['C%s' % i for i in builder.conjg]
            tag = [(i, addon + onetag) for i, onetag in tag]
        routines = [tuple([i, tag]) for i in range(len(builder.spins) + 1)]

    # Build the routines (reusing a symmetric leg whenever possible)
    for outgoing, tag in routines:
        symmetric = self.has_symmetries(symmetry, outgoing, valid_output=routines)
        realname = name + ''.join(tag)
        if (realname, outgoing) in self:
            continue  # already computed

        if symmetric:
            self.get(realname, symmetric).add_symmetry(outgoing)
        else:
            wavefunction = builder.compute_routine(outgoing, tag)
            self.set(realname, outgoing, wavefunction)
1041
1042
1044 """define all the AbstractRoutine linked to a given lorentz structure
1045 symmetry authorizes to use the symmetry of anoter lorentz structure.
1046 routines to define only a subset of the routines.
1047 Compare to compute_aloha, each routines are computed independently.
1048 """
1049
1050 name = builder.name
1051 if not routines:
1052 routines = [ tuple([i,[]]) for i in range(len(builder.spins) + 1 )]
1053
1054 for outgoing, tag in routines:
1055 builder.routine_kernel = None
1056 wavefunction = builder.compute_routine(outgoing, tag)
1057 self.set(name, outgoing, wavefunction)
1058
1059
def write(self, output_dir, language):
    """ write the full set of Helicity Routine in output_dir"""

    # computed routines first ...
    for routine in self.values():
        routine.write(output_dir, language)

    # ... then copy over the externally provided ones
    for ext_name in self.external_routines:
        self.locate_external(ext_name, language, output_dir)
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1090 """search a valid external file and copy it to output_dir directory"""
1091
1092 language_to_ext = {'Python': 'py',
1093 'Fortran' : 'f',
1094 'CPP': 'C'}
1095 ext = language_to_ext[language]
1096 paths = [os.path.join(self.model_pos, language), self.model_pos,
1097 os.path.join(root_path, 'aloha', 'template_files', )]
1098
1099 ext_files = []
1100 for path in paths:
1101 ext_files = misc.glob('%s.%s' % (name, ext), path)
1102 if ext_files:
1103 break
1104 else:
1105
1106 raise ALOHAERROR, 'No external routine \"%s.%s\" in directories\n %s' % \
1107 (name, ext, '\n'.join(paths))
1108
1109 if output_dir:
1110 for filepath in ext_files:
1111
1112 files.cp(filepath, output_dir)
1113 return ext_files
1114
1115
1116
1118 """Search some symmetries in the vertices.
1119 We search if some identical particles are in a vertices in order
1120 to avoid to compute symmetrical contributions"""
1121
1122 for vertex in self.model.all_vertices:
1123 for i, part1 in enumerate(vertex.particles):
1124 for j in range(i-1,-1,-1):
1125 part2 = vertex.particles[j]
1126 if part1.pdg_code == part2.pdg_code and part1.color == 1:
1127 if part1.spin == 2 and (i % 2 != j % 2 ):
1128 continue
1129 for lorentz in vertex.lorentz:
1130 if self.symmetries.has_key(lorentz.name):
1131 if self.symmetries[lorentz.name].has_key(i+1):
1132 self.symmetries[lorentz.name][i+1] = max(self.symmetries[lorentz.name][i+1], j+1)
1133 else:
1134 self.symmetries[lorentz.name][i+1] = j+1
1135 else:
1136 self.symmetries[lorentz.name] = {i+1:j+1}
1137 break
1138
def look_for_multiple_lorentz_interactions(self):
    """Search the interactions associated with more than one lorentz
    structure.  If those lorentz structures have the same coupling order
    and the same color structure, associate a multiple-lorentz routine
    to ALOHA (stored in self.multiple_lor[main_name] = [(other names)]).
    """

    # coupling name -> string form of its order dictionary
    orders = {}
    for coup in self.model.all_couplings:
        orders[coup.name] = str(coup.order)

    for vertex in self.model.all_vertices:
        if len(vertex.lorentz) == 1:
            continue  # single structure: nothing to combine

        # group lorentz ids sharing (color id, coupling order)
        combine = {}
        for (id_col, id_lor), coups in vertex.couplings.items():
            if not isinstance(coups, list):
                coups = [coups]
            for coup in coups:
                key = (id_col, orders[coup.name])
                combine.setdefault(key, []).append(id_lor)

        # register each group: lowest (sorted) lorentz is the main routine
        for list_lor in combine.values():
            if len(list_lor) == 1:
                continue
            list_lor.sort()
            main = vertex.lorentz[list_lor[0]].name
            if main not in self.multiple_lor:
                self.multiple_lor[main] = []

            # `idx` avoids shadowing the builtin `id` (original used `id`)
            info = tuple(vertex.lorentz[idx].name for idx in list_lor[1:])
            if info not in self.multiple_lor[main]:
                self.multiple_lor[main].append(info)
1180
1181
def has_symmetries(self, l_name, outgoing, out=None, valid_output=None):
    """Return `out` if no symmetry is available; otherwise follow the
    symmetry chain recursively and return the lowest equivalent outgoing.

    l_name:       lorentz routine name (key of self.symmetries)
    outgoing:     current outgoing leg index
    out:          best equivalent found so far (returned at the chain end)
    valid_output: if given, only these outgoings may be returned
    """

    try:
        equiv = self.symmetries[l_name][outgoing]
    except KeyError:
        # narrowed from `except Exception`: only a missing entry means
        # "no further symmetry"; other errors should propagate
        return out
    if not valid_output or equiv in valid_output:
        return self.has_symmetries(l_name, equiv, out=equiv,
                                   valid_output=valid_output)
    return self.has_symmetries(l_name, equiv, out=out,
                               valid_output=valid_output)
1198
def look_for_conjugate(self):
    """Create the map of routines needing a conjugate version.

    Returns {lorentz_name: [fermion-pair indices]} (1-based pair index).
    Conjugate routines are only required when the model contains
    self-conjugate fermions (Majorana-like, spin == 2 here) or violates
    fermion-flow ordering in at least one interaction; otherwise {}.
    """

    # check whether conjugation can be needed at all
    need = any(p.spin == 2 and p.selfconjugate
               for p in self.model.all_particles)

    if not need:
        for interaction in self.model.all_vertices:
            fermions = [p for p in interaction.particles if p.spin == 2]
            for i in range(0, len(fermions), 2):
                if fermions[i].pdg_code * fermions[i + 1].pdg_code > 0:
                    # same-sign pdg pair: fermion flow is violated
                    need = True
                    break

    if not need:
        return {}

    conjugate_request = {}
    # request conjugation for every pair of non-self-conjugate fermions
    for vertex in self.model.all_vertices:
        for i in range(0, len(vertex.particles), 2):
            part1 = vertex.particles[i]
            if part1.spin != 2:
                # fermions always come first in an interaction
                break
            if part1.selfconjugate:
                continue
            part2 = vertex.particles[i + 1]
            if part2.selfconjugate:
                continue
            for lorentz in vertex.lorentz:
                # setdefault replaces the original broad try/except
                conjugate_request.setdefault(lorentz.name, set()).add(i // 2 + 1)

    return dict((name, list(pairs))
                for name, pairs in conjugate_request.items())
1248
def write_aloha_file_inc(aloha_dir, file_ext, comp_ext):
    """Find the helicity-routine files in aloha_dir and write
    'aloha_file.inc' listing them with the compiled extension.

    aloha_dir: directory containing the ALOHA output files
    file_ext:  source extension of the routines (e.g. '.f')
    comp_ext:  compiled extension to list instead (e.g. '.o')
    """

    aloha_files = []

    # routine files look like NAME_<n><ext>; escape the extension so the
    # '.' is matched literally (the original pattern matched any char)
    alohafile_pattern = re.compile(r'_\d%s' % re.escape(file_ext))
    for filename in os.listdir(aloha_dir):
        if os.path.isfile(os.path.join(aloha_dir, filename)) and \
                alohafile_pattern.search(filename):
            aloha_files.append(filename.replace(file_ext, comp_ext))

    if os.path.exists(os.path.join(aloha_dir, 'additional_aloha_function.f')):
        aloha_files.append('additional_aloha_function.o')

    text = "ALOHARoutine = "
    text += ' '.join(aloha_files)
    text += '\n'

    # open() with a context manager (the original used the removed file()
    # builtin and never closed the handle)
    with open(os.path.join(aloha_dir, 'aloha_file.inc'), 'w') as fsock:
        fsock.write(text)
1273
def create_prop_library(tag, lib=None):
    """Build (or extend) the library of spin-2 propagator expressions.

    tag: tuple (name, i) with name in ('Spin2Prop', 'Spin2PropMassless')
         and i the outgoing index.
    lib: dictionary to update in place; a new one is created if not given.
    Returns the updated library.
    """

    def create(obj):
        """simplify / expand / simplify an aloha expression"""
        obj = obj.simplify()
        obj = obj.expand()
        obj = obj.simplify()
        return obj

    if lib is None:
        lib = {}

    # computing a propagator must not pollute the tags collected for the
    # current routine: save them and restore afterwards
    old_tag = set(aloha_lib.KERNEL.use_tag)
    name, i = tag
    if name == "Spin2Prop":
        lib[('Spin2Prop', i)] = create(Spin2Propagator(_spin2_mult + i,
                                       2 * _spin2_mult + i, 'I2', 'I3', i))
    elif name == "Spin2PropMassless":
        lib[('Spin2PropMassless', i)] = create(Spin2masslessPropagator(
                                       _spin2_mult + i, 2 * _spin2_mult + i,
                                       'I2', 'I3'))

    aloha_lib.KERNEL.use_tag = old_tag
    return lib
1297
1298
# NOTE(review): this chunk is a scraped listing and source lines 1305-1309
# and 1311 are missing here, so the bodies of main() and write() are not
# visible; the fragment below cannot run as shown.  Kept byte-identical —
# confirm against the original file before editing.
1299 if '__main__' == __name__:
1300 logging.basicConfig(level=0)
1301
1302 import profile
1303
1304
1305 start = time.time()
# presumably main() builds the ALOHA model and write() outputs the
# routines; their definitions are in the missing lines — TODO confirm
1310 - def write(alohagenerator):
1312 alohagenerator = main()
1313 logger.info('done in %s s' % (time.time()-start))
1314 write(alohagenerator)
1315
1316
1317 stop = time.time()
1318 logger.info('done in %s s' % (stop-start))
1319