1
2
3
4
5
6
7
8
9
10
11
12
13
14
15 from __future__ import division
16 import cmath
17 import copy
18 import cPickle
19 import glob
20 import logging
21 import numbers
22 import os
23 import re
24 import shutil
25 import sys
26 import time
27
28 root_path = os.path.split(os.path.dirname(os.path.realpath( __file__ )))[0]
29 sys.path.append(root_path)
30 from aloha.aloha_object import *
31 import aloha
32 import aloha.aloha_writers as aloha_writers
33 import aloha.aloha_lib as aloha_lib
34 import aloha.aloha_object as aloha_object
35 import aloha.aloha_parsers as aloha_parsers
36 import aloha.aloha_fct as aloha_fct
37 try:
38 import madgraph.iolibs.files as files
39 import madgraph.various.misc as misc
40 except Exception:
41 import aloha.files as files
42 import aloha.misc as misc
43
44 aloha_path = os.path.dirname(os.path.realpath(__file__))
45 logger = logging.getLogger('ALOHA')
46
47 _conjugate_gap = 50
48 _spin2_mult = 1000
49
50 pjoin = os.path.join
51
52 ALOHAERROR = aloha.ALOHAERROR
55 """ store the result of the computation of Helicity Routine
56 this is use for storing and passing to writer """
57
def __init__(self, expr, outgoing, spins, name, infostr, denom=None):
    """ store the information """

    self.spins = spins            # (2s+1) spin values of the external particles
    self.expr = expr              # computed analytical expression (aloha_lib object)
    self.denominator = denom      # custom propagator denominator (None = standard one)
    self.name = name              # name of the lorentz structure
    self.outgoing = outgoing      # index of the off-shell particle (0 = amplitude)
    self.infostr = infostr        # information string passed to the writers
    self.symmetries = []          # outgoing indices equivalent to this routine
    self.combined = []            # lists of lorentz names combined with this one
    self.tag = []                 # special tags ('C%', 'L%', 'P%', 'MP', ...)
    self.contracted = {}          # cache of contracted sub-expressions
71
72
73
75 """ add an outgoing """
76
77 if not outgoing in self.symmetries:
78 self.symmetries.append(outgoing)
79
81 """add a combine rule """
82
83 if lor_list not in self.combined:
84 self.combined.append(lor_list)
85
def write(self, output_dir, language='Fortran', mode='self', combine=True, **opt):
    """ write the content of the object """
    writer = aloha_writers.WriterFactory(self, language, output_dir, self.tag)
    text = writer.write(mode=mode, **opt)
    if combine:
        # append the routines combining this lorentz structure with the others
        for grouped in self.combined:
            if isinstance(text, tuple):
                # some writers return one text per output file: combine pairwise
                text = tuple([old.__add__(new) for old, new in zip(text,
                             writer.write_combined(grouped, mode=mode+'no_include', **opt))])
            else:
                text += writer.write_combined(grouped, mode=mode+'no_include', **opt)
    # when multiple precision is requested, recurse exactly once with 'MP' added
    # to the tag (the 'MP' in self.tag check prevents infinite recursion)
    if aloha.mp_precision and 'MP' not in self.tag:
        self.tag.append('MP')
        text += self.write(output_dir, language, mode, **opt)
    return text
101
103 """return some information on the routine
104 """
105 if info == "rank":
106 assert isinstance(self.expr, aloha_lib.SplitCoefficient)
107 rank= 1
108 for coeff in self.expr:
109 rank = max(sum(coeff), rank)
110 return rank -1
111 else:
112 raise ALOHAERROR, '%s is not a valid information that can be computed' % info
113
116 """ Launch the creation of the Helicity Routine"""
117
118 prop_lib = {}
119 counter = 0
120
122 """ An error class for ALOHA"""
123
def __init__(self, lorentz, model=None):
    """ initialize the run
    lorentz: the lorentz information analyzed (UFO format)
    language: define in which language we write the output
    modes: 0 for all incoming particles
           >0 defines the outgoing part (start to count at 1)
    """

    self.spins = [s for s in lorentz.spins]   # copy of the (2s+1) spin list
    self.name = lorentz.name
    self.conjg = []                    # fermion pairs on which conjugation applies
    self.tag = []                      # special tags requested for the routine
    self.outgoing = None               # off-shell particle index (set later)
    self.lorentz_expr = lorentz.structure   # analytical expression (UFO string)
    self.routine_kernel = None         # cached parsed kernel expression
    self.spin2_massless = False
    self.spin32_massless = False
    self.contracted = {}
    self.fct = {}
    self.model = model                 # UFO model (needed for custom propagators)
    self.denominator = None            # custom propagator denominator, if any

    self.lastprint = 0                 # throttle for progress printing

    # substitute the model form-factors directly inside the lorentz expression
    # assumes formf.value is a valid expression string -- TODO confirm (UFO convention)
    if hasattr(lorentz, 'formfactors') and lorentz.formfactors:
        for formf in lorentz.formfactors:
            pat = re.compile(r'\b%s\b' % formf.name)
            self.lorentz_expr = pat.sub('(%s)' % formf.value, self.lorentz_expr)
153
163
165 """ return the full set of AbstractRoutineBuilder linked to fermion
166 clash"""
167
168 solution = []
169
170 for i, pair in enumerate(pair_list):
171 new_builder = self.define_conjugate_builder(pair)
172 solution.append(new_builder)
173 solution += new_builder.define_all_conjugate_builder(pair_list[i+1:])
174 return solution
175
177 """ return a AbstractRoutineBuilder for the conjugate operation.
178 If they are more than one pair of fermion. Then use pair to claim which
179 one is conjugated"""
180
181 new_builder = copy.copy(self)
182 new_builder.conjg = self.conjg[:]
183 try:
184 for index in pairs:
185 new_builder.apply_conjugation(index)
186 except TypeError:
187 new_builder.apply_conjugation(pairs)
188 return new_builder
189
191 """ apply conjugation on self object"""
192
193 nb_fermion = len([1 for s in self.spins if s % 2 == 0])
194 if isinstance(pair, tuple):
195 if len(pair) ==1 :
196 pair = pair[0]
197 else:
198 raise Exception
199
200
201 if (pair > 1 or nb_fermion >2) and not self.conjg:
202
203 data = aloha_fct.get_fermion_flow(self.lorentz_expr, nb_fermion)
204 target = dict([(2*i+1,2*i+2) for i in range(nb_fermion//2)])
205 if not data == target:
206 text = """Unable to deal with 4(or more) point interactions
207 in presence of majorana particle/flow violation"""
208 raise ALOHAERROR, text
209
210 old_id = 2 * pair - 1
211 new_id = _conjugate_gap + old_id
212
213 self.kernel_tag = set()
214 aloha_lib.KERNEL.use_tag = set()
215 if not self.routine_kernel or isinstance(self.routine_kernel, str):
216 self.routine_kernel = eval(self.parse_expression(self.lorentz_expr))
217 self.kernel_tag = aloha_lib.KERNEL.use_tag
218
219
220 self.routine_kernel = \
221 C(new_id, old_id + 1) * self.routine_kernel * C(new_id + 1, old_id)
222
223 self.lorentz_expr = '('+self.lorentz_expr+') * C(%s,%s) * C(%s,%s)' % \
224 (new_id, old_id + 1, new_id + 1, old_id )
225
226 self.conjg.append(pair)
227
228
247
249 """change the sign of P for outcoming fermion in order to
250 correct the mismatch convention between HELAS and FR"""
251
252 if not expr:
253 expr = self.lorentz_expr
254
255 if need_P_sign:
256 expr = re.sub(r'\b(P|PSlash)\(', r'-\1(', expr)
257
258 calc = aloha_parsers.ALOHAExpressionParser()
259 lorentz_expr = calc.parse(expr)
260 return lorentz_expr
261
263 """compute the abstract routine associate to this mode """
264
265
266 aloha_lib.KERNEL.use_tag=set()
267
268 nb_spinor = 0
269 outgoing = self.outgoing
270 if (outgoing + 1) // 2 in self.conjg:
271
272 outgoing = outgoing + outgoing % 2 - (outgoing +1) % 2
273
274 if not self.routine_kernel:
275 AbstractRoutineBuilder.counter += 1
276 if self.tag == []:
277 logger.info('aloha creates %s routines' % self.name)
278 elif AbstractALOHAModel.lastprint < time.time() - 1:
279 AbstractALOHAModel.lastprint = time.time()
280 logger.info('aloha creates %s set of routines with options: %s' \
281 % (self.name, ','.join(self.tag)) )
282 try:
283 lorentz = self.parse_expression()
284 self.routine_kernel = lorentz
285 lorentz = eval(lorentz)
286 except NameError as error:
287 logger.error('unknow type in Lorentz Evaluation:%s'%str(error))
288 raise ALOHAERROR, 'unknow type in Lorentz Evaluation: %s ' % str(error)
289 else:
290 self.kernel_tag = set(aloha_lib.KERNEL.use_tag)
291 elif isinstance(self.routine_kernel,str):
292 lorentz = eval(self.routine_kernel)
293 aloha_lib.KERNEL.use_tag = set(self.kernel_tag)
294 else:
295 lorentz = copy.copy(self.routine_kernel)
296 aloha_lib.KERNEL.use_tag = set(self.kernel_tag)
297 for (i, spin ) in enumerate(self.spins):
298 id = i + 1
299
300 if id == outgoing:
301
302
303 propa = [t[1:] for t in self.tag if t.startswith('P')]
304 if propa == ['0']:
305 massless = True
306 self.denominator = None
307 elif propa == []:
308 massless = False
309 self.denominator = None
310 else:
311 lorentz *= complex(0,1) * self.get_custom_propa(propa[0], spin, id)
312 continue
313
314
315
316 if spin in [1,-1]:
317 lorentz *= complex(0,1)
318 elif spin == 2:
319
320 if (id + 1) // 2 in self.conjg:
321 id += _conjugate_gap + id % 2 - (id +1) % 2
322 if (id % 2):
323
324 lorentz *= complex(0,1) * SpinorPropagatorout(id, 'I2', outgoing)
325 else:
326
327 lorentz *= complex(0,1) * SpinorPropagatorin('I2', id, outgoing)
328 elif spin == 3 :
329 if massless or not aloha.unitary_gauge:
330 lorentz *= VectorPropagatorMassless(id, 'I2', id)
331 else:
332 lorentz *= VectorPropagator(id, 'I2', id)
333 elif spin == 4:
334
335 if (id + 1) // 2 in self.conjg:
336 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
337 else:
338 spin_id = id
339 nb_spinor += 1
340 if not massless and (spin_id % 2):
341 lorentz *= complex(0,1) * Spin3halfPropagatorout(id, 'I2', spin_id,'I3', outgoing)
342 elif not massless and not (spin_id % 2):
343 lorentz *= complex(0,1) * Spin3halfPropagatorin('I2', id , 'I3', spin_id, outgoing)
344 elif spin_id %2:
345 lorentz *= complex(0,1) * Spin3halfPropagatorMasslessOut(id, 'I2', spin_id,'I3', outgoing)
346 else :
347 lorentz *= complex(0,1) * Spin3halfPropagatorMasslessIn('I2', id, 'I3', spin_id, outgoing)
348
349 elif spin == 5 :
350
351 if massless:
352 lorentz *= complex(0,1) * Spin2masslessPropagator(_spin2_mult + id, \
353 2 * _spin2_mult + id,'I2','I3')
354 else:
355 lorentz *= complex(0,1) * Spin2Propagator(_spin2_mult + id, \
356 2 * _spin2_mult + id,'I2','I3', id)
357 else:
358 raise self.AbstractALOHAError(
359 'The spin value %s (2s+1) is not supported yet' % spin)
360 else:
361
362 if spin in [1,-1]:
363 lorentz *= Scalar(id)
364 elif spin == 2:
365
366 if (id+1) // 2 in self.conjg:
367 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
368 else:
369 spin_id = id
370 lorentz *= Spinor(spin_id, id)
371 elif spin == 3:
372 lorentz *= Vector(id, id)
373 elif spin == 4:
374
375 if (id+1) // 2 in self.conjg:
376 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
377 else:
378 spin_id = id
379 nb_spinor += 1
380 lorentz *= Spin3Half(id, spin_id, id)
381 elif spin == 5:
382 lorentz *= Spin2(1 * _spin2_mult + id, 2 * _spin2_mult + id, id)
383 else:
384 raise self.AbstractALOHAError(
385 'The spin value %s (2s+1) is not supported yet' % spin)
386
387
388 if not outgoing:
389 lorentz *= complex(0,-1)
390
391
392 lorentz = lorentz.simplify()
393
394
395 if any((tag.startswith('L') for tag in self.tag if len(tag)>1)):
396 return self.compute_loop_coefficient(lorentz, outgoing)
397
398 lorentz = lorentz.expand()
399 lorentz = lorentz.simplify()
400
401 if factorize:
402 lorentz = lorentz.factorize()
403
404 lorentz.tag = set(aloha_lib.KERNEL.use_tag)
405 return lorentz
406
407 @staticmethod
409 """Change the index of the propagator to match the current need"""
410 data = re.split(r'(\b[a-zA-Z]\w*?)\(([\'\w,\s]*?)\)',text)
411
412 pos=-2
413 while pos +3 < len(data):
414 pos = pos+3
415 ltype = data[pos]
416 if ltype != 'complex':
417 for old, new in tag.items():
418 if isinstance(new, str):
419 new='\'%s\'' % new
420 else:
421 new = str(new)
422 data[pos+1] = re.sub(r'\b%s\b' % old, new, data[pos+1])
423 data[pos+1] = '(%s)' % data[pos+1]
424 text=''.join(data)
425 return text
426
428 """Return the ALOHA object associated to the user define propagator"""
429
430 propagator = getattr(self.model.propagators, propa)
431 numerator = propagator.numerator
432 denominator = propagator.denominator
433
434
435 needPflipping = False
436 if spin in [1,-1]:
437 tag = {'id': id}
438 elif spin == 2:
439
440 if (id + 1) // 2 in self.conjg:
441 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
442 else:
443 spin_id = id
444 if (spin_id % 2):
445
446 needPflipping = True
447 tag ={'1': spin_id, '2': 'I2', 'id': id}
448 else:
449 tag ={'1': 'I2', '2': spin_id, 'id': id}
450 elif spin == 3 :
451 tag ={'1': id, '2': 'I2', 'id': id}
452 elif spin == 4:
453 delta = lambda i,j: aloha_object.Identity(i,j)
454 deltaL = lambda i,j: aloha_object.IdentityL(i,j)
455
456 if (id + 1) // 2 in self.conjg:
457 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
458 else:
459 spin_id = id
460 if spin_id % 2:
461 needPflipping = True
462 tag = {'1': 'pr_1', '2': 'pr_2', 'id':id}
463 else:
464 tag = {'1': 'pr_2', '2': 'pr_1'}
465 numerator *= deltaL('pr_1',id) * deltaL('pr_2', 'I2') * \
466 delta('pr_1', spin_id) * delta('pr_2', 'I3')
467 elif spin == 5 :
468 tag = {'1': _spin2_mult + id, '2': 2 * _spin2_mult + id,
469 '51': 'I2', '52': 'I3', 'id':id}
470
471 numerator = self.mod_propagator_expression(tag, numerator)
472 if denominator:
473 denominator = self.mod_propagator_expression(tag, denominator)
474
475 numerator = self.parse_expression(numerator, needPflipping)
476 if denominator:
477 self.denominator = self.parse_expression(denominator, needPflipping)
478 self.denominator = eval(self.denominator)
479 if not isinstance(self.denominator, numbers.Number):
480 self.denominator = self.denominator.simplify().expand().simplify().get((0,))
481
482 return eval(numerator)
483
484
485
486
488
489
490 l_in = [int(tag[1:]) for tag in self.tag if tag.startswith('L')][0]
491 if (l_in + 1) // 2 in self.conjg:
492
493 l_in = l_in + l_in % 2 - (l_in +1) % 2
494 assert l_in != outgoing, 'incoming Open Loop can not be the outcoming one'
495
496
497
498 Pdep = [aloha_lib.KERNEL.get(P) for P in lorentz.get_all_var_names()
499 if P.startswith('_P')]
500
501 Pdep = set([P for P in Pdep if P.particle in [outgoing, l_in]])
502 for P in Pdep:
503 if P.particle == l_in:
504 sign = 1
505 else:
506 sign = -1
507 id = P.id
508 lorentz_ind = P.lorentz_ind[0]
509 P_Lid = aloha_object.P(lorentz_ind, 'L')
510 P_obj = aloha_object.P(lorentz_ind, P.particle)
511 new_expr = sign*(P_Lid + P_obj)
512 lorentz = lorentz.replace(id, new_expr)
513
514
515 var_veto = ['PL_0', 'PL_1', 'PL_2', 'PL_3']
516 spin = aloha_writers.WriteALOHA.type_to_variable[abs(self.spins[l_in-1])]
517 size = aloha_writers.WriteALOHA.type_to_size[spin]-1
518 var_veto += ['%s%s_%s' % (spin,l_in,i) for i in range(1,size)]
519
520 veto_ids = aloha_lib.KERNEL.get_ids(var_veto)
521
522 lorentz = lorentz.expand(veto = veto_ids)
523 lorentz = lorentz.simplify()
524 coeff_expr = lorentz.split(veto_ids)
525
526 for key, expr in coeff_expr.items():
527 expr = expr.simplify()
528 coeff_expr[key] = expr.factorize()
529 coeff_expr.tag = set(aloha_lib.KERNEL.use_tag)
530
531 return coeff_expr
532
534 """Define the expression"""
535
536 self.expr = lorentz_expr
537
539 """Define the kernel at low level"""
540
541 if not lorentz:
542 logger.info('compute kernel %s' % self.counter)
543 AbstractRoutineBuilder.counter += 1
544 lorentz = eval(self.lorentz_expr)
545
546 if isinstance(lorentz, numbers.Number):
547 self.routine_kernel = lorentz
548 return lorentz
549 lorentz = lorentz.simplify()
550 lorentz = lorentz.expand()
551 lorentz = lorentz.simplify()
552
553 self.routine_kernel = lorentz
554 return lorentz
555
556
557 @staticmethod
559 """return the name of the """
560
561 name = '%s_%s' % (name, outgoing)
562 return name
563
564 @classmethod
571
574 """A special builder for combine routine if needed to write those
575 explicitely.
576 """
def __init__(self, l_lorentz, model=None):
    """ initialize the run
    l_lorentz: list of lorentz information analyzed (UFO format)
    language: define in which language we write the output
    modes: 0 for all incoming particles
           >0 defines the outgoing part (start to count at 1)
    """
    # initialize from the first structure, then override the combined fields
    AbstractRoutineBuilder.__init__(self, l_lorentz[0], model)
    lorentz = l_lorentz[0]
    self.spins = lorentz.spins
    # combined name built from all contributing lorentz names
    l_name = [l.name for l in l_lorentz]
    self.name = aloha_writers.combine_name(l_name[0], l_name[1:], None)
    self.conjg = []
    self.tag = []
    self.outgoing = None
    # total expression: sum of the structures, each weighted by its coupling Coup(i)
    self.lorentz_expr = []
    for i, lor in enumerate(l_lorentz):
        self.lorentz_expr.append('Coup(%s) * (%s)' % (i+1, lor.structure))
    self.lorentz_expr = ' + '.join(self.lorentz_expr)
    self.routine_kernel = None
    self.contracted = {}
    self.fct = {}
599
601 """ A class to build and store the full set of Abstract ALOHA Routine"""
602
603 lastprint = 0
604
def __init__(self, model_name, write_dir=None, format='Fortran',
             explicit_combine=False):
    """ load the UFO model and init the dictionary """

    self.explicit_combine = explicit_combine

    # strip a restriction suffix "MODEL-rest" when 'rest' is the special
    # keyword 'full' or corresponds to an existing restrict_<rest>.dat file
    model_name_pattern = re.compile("^(?P<name>.+)-(?P<rest>[\w\d_]+)$")
    model_name_re = model_name_pattern.match(model_name)
    if model_name_re:
        name = model_name_re.group('name')
        rest = model_name_re.group("rest")
        if rest == 'full' or \
           os.path.isfile(os.path.join(root_path, "models", name,
                                       "restrict_%s.dat" % rest)):
            model_name = model_name_re.group("name")

    # import the model: first as-is (assumed already importable),
    # otherwise fall back to the madgraph 'models' package
    try:
        python_pos = model_name
        __import__(python_pos)
    except Exception:
        python_pos = 'models.%s' % model_name
        __import__(python_pos)
    self.model = sys.modules[python_pos]

    # position of the model on disk (used for output/pickle paths)
    self.model_pos = os.path.dirname(self.model.__file__)

    # routines provided by external files rather than computed
    self.external_routines = []

    # self maps (lorentz_name, outgoing) -> AbstractRoutine
    dict.__init__(self)
    self.symmetries = {}      # lorentz_name -> {outgoing: equivalent lower outgoing}
    self.multiple_lor = {}    # lorentz_name -> list of combined-structure tuples

    if write_dir:
        self.main(write_dir, format=format)
644
def main(self, output_dir, format='Fortran'):
    """ Compute if not already compute.
        Write file in models/MY_MODEL/MY_FORMAT.
        copy the file to output_dir
    """
    # language -> file extension of the generated routines
    ext = {'Fortran': 'f', 'Python': 'py', 'CPP': 'h'}

    # compute the routines (note: load() currently always returns False,
    # so everything is recomputed)
    if not self.load():
        self.compute_all()
    logger.info(' %s aloha routine' % len(self))

    # write the routines
    if not output_dir:
        output_dir = os.path.join(self.model_pos, format.lower())
    logger.debug('aloha output dir is %s' % output_dir)
    if not os.path.exists(output_dir):
        os.mkdir(output_dir)

    for (name, outgoing), abstract in self.items():
        routine_name = AbstractRoutineBuilder.get_routine_name(name, outgoing)
        # only write files which are not already present
        if not os.path.exists(os.path.join(output_dir, routine_name) + '.' + ext[format]):
            abstract.write(output_dir, format)
        else:
            logger.info('File for %s already present, skip the writing of this file' % routine_name)
672
673
def save(self, filepos=None):
    """ save the current model in a pkl file

    filepos: path of the pickle file to write; defaults to
             <model_pos>/aloha.pkl
    """
    logger.info('save the aloha abstract routine in a pickle file')
    if not filepos:
        filepos = os.path.join(self.model_pos, 'aloha.pkl')

    # fix: close the file handle explicitly -- the previous version left it
    # open, so the pickle could remain unflushed/truncated until exit
    fsock = open(filepos, 'w')
    try:
        cPickle.dump(dict(self), fsock)
    finally:
        fsock.close()
683
def load(self, filepos=None):
    """ reload the pickle file """
    # NOTE: loading is deliberately disabled -- this early return forces the
    # routines to always be recomputed; everything below is dead code kept
    # for reference.
    return False
    if not filepos:
        filepos = os.path.join(self.model_pos, 'aloha.pkl')
    if os.path.exists(filepos):
        fsock = open(filepos, 'r')
        self.update(cPickle.load(fsock))
        return True
    else:
        return False
695
def get(self, lorentzname, outgoing):
    """ return the AbstractRoutine with a given lorentz name, and for a given
    outgoing particle """

    key = (lorentzname, outgoing)
    try:
        routine = self[key]
    except Exception:
        # unknown (name, outgoing) combination: warn and signal with None
        logger.warning('(%s, %s) is not a valid key' % key)
        return None
    return routine
706
def get_info(self, info, lorentzname, outgoing, tag, cached=False):
    """return some information about the aloha routine
    - "rank": return the rank of the loop function
    If the cached option is set to true, then the result is stored and
    recycled if possible.

    info may be a single string or a list of strings; a single string
    returns the bare value, a list returns a dict keyed by info name.
    """

    if not aloha.loop_mode and any(t.startswith('L') for t in tag):
        aloha.loop_mode = True

    # info_name -> value collected for this request
    returned_dict = {}

    if isinstance(info, str):
        infos = [info]
    else:
        infos = info

    # recycle previously cached values when available
    if hasattr(self, 'cached_interaction_infos'):
        for info_key in infos:
            try:
                # bugfix: index with the loop variable info_key, not info
                # (info may be the whole list, which is unhashable and was
                # never a valid cache key)
                returned_dict[info_key] = self.cached_interaction_infos[
                                (lorentzname, outgoing, tuple(tag), info_key)]
            except KeyError:
                # not computed yet; fall through to the computation below
                pass
    elif cached:
        self.cached_interaction_infos = {}

    init = False
    for info_key in infos:
        if info_key in returned_dict:
            continue
        elif not init:
            # build the routine only once, on the first missing info
            lorentz = eval('self.model.lorentz.%s' % lorentzname)
            abstract = AbstractRoutineBuilder(lorentz)
            routine = abstract.compute_routine(outgoing, tag, factorize=False)
            init = True

        assert 'routine' in locals()
        returned_dict[info_key] = routine.get_info(info_key)
        if cached:
            self.cached_interaction_infos[
                (lorentzname, outgoing, tuple(tag), info_key)] = returned_dict[info_key]

    if isinstance(info, str):
        return returned_dict[info]
    else:
        return returned_dict
761
def set(self, lorentzname, outgoing, abstract_routine):
    """ add in the dictionary """

    key = (lorentzname, outgoing)
    self[key] = abstract_routine
766
def compute_all(self, save=True, wanted_lorentz=[], custom_propa=False):
    """ define all the AbstractRoutine linked to a model """

    # reorganize the data in order to exploit model-level optimizations
    self.look_for_symmetries()
    conjugate_list = self.look_for_conjugate()
    self.look_for_multiple_lorentz_interactions()

    if not wanted_lorentz:
        wanted_lorentz = [l.name for l in self.model.all_lorentz]
    for lorentz in self.model.all_lorentz:
        if not lorentz.name in wanted_lorentz:
            # not requested: skip to speed up the computation
            continue

        if -1 in lorentz.spins:
            # ghosts are not handled by ALOHA
            continue

        if lorentz.structure == 'external':
            # routine provided as an external file, one per outgoing index
            for i in range(len(lorentz.spins)):
                self.external_routines.append('%s_%s' % (lorentz.name, i))
            continue

        # default request: every outgoing index (0 = amplitude), no extra tag
        routines = [(i, []) for i in range(len(lorentz.spins)+1)]

        if custom_propa:
            # add the variants needed for particles with a custom propagator
            # or a vanishing mass (tag 'P<name>' / 'P0')
            for vertex in self.model.all_vertices:
                if lorentz in vertex.lorentz:
                    for i, part in enumerate(vertex.particles):
                        new_prop = False
                        if hasattr(part, 'propagator') and part.propagator:
                            new_prop = ['P%s' % part.propagator.name]
                        elif part.mass.name.lower() == 'zero':
                            new_prop = ['P0']
                        if new_prop and (i+1, new_prop) not in routines:
                            routines.append((i+1, new_prop))

        builder = AbstractRoutineBuilder(lorentz, self.model)
        self.compute_aloha(builder, routines=routines)

        # register the combined (multi-lorentz) variants
        if lorentz.name in self.multiple_lor:
            for m in self.multiple_lor[lorentz.name]:
                for outgoing in range(len(lorentz.spins)+1):
                    try:
                        self[(lorentz.name, outgoing)].add_combine(m)
                    except Exception:
                        # routine absent (covered by a symmetry): nothing to do
                        pass

        # handle the conjugated variants needed for fermion-flow violation
        if lorentz.name in conjugate_list:
            conjg_builder_list = builder.define_all_conjugate_builder(
                                           conjugate_list[lorentz.name])
            for conjg_builder in conjg_builder_list:
                # no duplication of conjugation expected
                assert conjg_builder_list.count(conjg_builder) == 1
                self.compute_aloha(conjg_builder, lorentz.name)
                if lorentz.name in self.multiple_lor:
                    for m in self.multiple_lor[lorentz.name]:
                        for outgoing in range(len(lorentz.spins)+1):
                            realname = conjg_builder.name + ''.join(['C%s' % pair for pair in conjg_builder.conjg])
                            try:
                                self[(realname, outgoing)].add_combine(m)
                            # Python 2 syntax, kept as-is (file is Python 2)
                            except Exception, error:
                                # key absent: fall back to the symmetric routine
                                self[(realname, self.symmetries[lorentz.name][outgoing])].add_combine(m)

    if save:
        self.save()
837
839 """add a series of Lorentz structure created dynamically"""
840
841 for lor in lorentzlist:
842 if not hasattr(self.model.lorentz, lor.name):
843 setattr(self.model.lorentz, lor.name, lor)
844
846 """ create the requested ALOHA routine.
847 data should be a list of tuple (lorentz, tag, outgoing)
848 tag should be the list of special tag (like conjugation on pair)
849 to apply on the object """
850
851
852
853 self.look_for_symmetries()
854
855
856 aloha.loop_mode = False
857
858 request = {}
859
860 for list_l_name, tag, outgoing in data:
861
862 all_tag = tag[:]
863 conjugate = [i for i in tag if isinstance(i, int)]
864
865 tag = [i for i in tag if isinstance(i, str) and not i.startswith('P')]
866 tag = tag + ['C%s'%i for i in conjugate]
867 tag = tag + [i for i in all_tag if isinstance(i, str) and i.startswith('P')]
868
869 conjugate = tuple([int(c[1:]) for c in tag if c.startswith('C')])
870 loop = any((t.startswith('L') for t in tag))
871 if loop:
872 aloha.loop_mode = True
873 self.explicit_combine = True
874
875 for l_name in list_l_name:
876 try:
877 request[l_name][conjugate].append((outgoing,tag))
878 except Exception:
879 try:
880 request[l_name][conjugate] = [(outgoing,tag)]
881 except Exception:
882 request[l_name] = {conjugate: [(outgoing,tag)]}
883
884
885 for l_name in request:
886 lorentz = eval('self.model.lorentz.%s' % l_name)
887 if lorentz.structure == 'external':
888 for tmp in request[l_name]:
889 for outgoing, tag in request[l_name][tmp]:
890 name = aloha_writers.get_routine_name(lorentz.name,outgoing=outgoing,tag=tag)
891 if name not in self.external_routines:
892 self.external_routines.append(name)
893 continue
894
895 builder = AbstractRoutineBuilder(lorentz, self.model)
896
897
898 for conjg in request[l_name]:
899
900 def sorting(a,b):
901 if a[0] < b[0]: return -1
902 else: return 1
903 routines = request[l_name][conjg]
904 routines.sort(sorting)
905 if not conjg:
906
907 self.compute_aloha(builder, routines=routines)
908 else:
909
910 conjg_builder = builder.define_conjugate_builder(conjg)
911
912 self.compute_aloha(conjg_builder, symmetry=lorentz.name,
913 routines=routines)
914
915
916
917 for list_l_name, tag, outgoing in data:
918 if len(list_l_name) ==1:
919 continue
920
921 conjugate = [i for i in tag if isinstance(i, int)]
922 all_tag = tag[:]
923 tag = [i for i in tag if isinstance(i, str) and not i.startswith('P')]
924 tag = tag + ['C%s'%i for i in conjugate]
925 tag = tag + [i for i in all_tag if isinstance(i, str) and i.startswith('P')]
926
927 if not self.explicit_combine:
928 lorentzname = list_l_name[0]
929 lorentzname += ''.join(tag)
930 if self.has_key((lorentzname, outgoing)):
931 self[(lorentzname, outgoing)].add_combine(list_l_name[1:])
932 else:
933 lorentz = eval('self.model.lorentz.%s' % lorentzname)
934 assert lorentz.structure == 'external'
935 else:
936 l_lorentz = []
937 for l_name in list_l_name:
938 l_lorentz.append(eval('self.model.lorentz.%s' % l_name))
939 builder = CombineRoutineBuilder(l_lorentz)
940
941 for conjg in request[list_l_name[0]]:
942
943 def sorting(a,b):
944 if a[0] < b[0]: return -1
945 else: return 1
946 routines = request[list_l_name[0]][conjg]
947 routines.sort(sorting)
948 if not conjg:
949
950 self.compute_aloha(builder, routines=routines)
951 else:
952
953 conjg_builder = builder.define_conjugate_builder(conjg)
954
955 self.compute_aloha(conjg_builder, symmetry=lorentz.name,
956 routines=routines)
957
958
959
def compute_aloha(self, builder, symmetry=None, routines=None, tag=[]):
    """ define all the AbstractRoutine linked to a given lorentz structure
    symmetry authorizes to use the symmetry of anoter lorentz structure.
    routines to define only a subset of the routines."""

    # note: the mutable default tag=[] is harmless here -- tag is only
    # rebound below, never mutated in place
    name = builder.name
    if not symmetry:
        symmetry = name
    if not routines:
        # build the default request: every outgoing index with the
        # conjugation tags of the builder prepended
        if not tag:
            tag = ['C%s' % i for i in builder.conjg]
        else:
            addon = ['C%s' % i for i in builder.conjg]
            tag = [(i, addon + onetag) for i, onetag in tag]
        routines = [tuple([i, tag]) for i in range(len(builder.spins) + 1)]

    # create the routines
    for outgoing, tag in routines:
        # NOTE(review): valid_output receives (outgoing, tag) tuples while
        # has_symmetries compares plain outgoing indices against it --
        # looks like the membership test can never succeed; confirm intent
        symmetric = self.has_symmetries(symmetry, outgoing, valid_output=routines)
        realname = name + ''.join(tag)
        if (realname, outgoing) in self:
            continue   # already computed

        if symmetric:
            # reuse the symmetric routine instead of recomputing
            self.get(realname, symmetric).add_symmetry(outgoing)
        else:
            wavefunction = builder.compute_routine(outgoing, tag)
            # store the result
            self.set(realname, outgoing, wavefunction)
989
990
992 """define all the AbstractRoutine linked to a given lorentz structure
993 symmetry authorizes to use the symmetry of anoter lorentz structure.
994 routines to define only a subset of the routines.
995 Compare to compute_aloha, each routines are computed independently.
996 """
997
998 name = builder.name
999 if not routines:
1000 routines = [ tuple([i,[]]) for i in range(len(builder.spins) + 1 )]
1001
1002 for outgoing, tag in routines:
1003 builder.routine_kernel = None
1004 wavefunction = builder.compute_routine(outgoing, tag)
1005 self.set(name, outgoing, wavefunction)
1006
1007
def write(self, output_dir, language):
    """ write the full set of Helicity Routine in output_dir"""
    # dump every computed abstract routine
    for routine in self.values():
        routine.write(output_dir, language)

    # then copy the externally-provided ones
    for ext_name in self.external_routines:
        self.locate_external(ext_name, language, output_dir)
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1038 """search a valid external file and copy it to output_dir directory"""
1039
1040 language_to_ext = {'Python': 'py',
1041 'Fortran' : 'f',
1042 'CPP': 'C'}
1043 ext = language_to_ext[language]
1044 paths = [os.path.join(self.model_pos, language), self.model_pos,
1045 os.path.join(root_path, 'aloha', 'template_files', )]
1046
1047 ext_files = []
1048 for path in paths:
1049 ext_files = misc.glob('%s.%s' % (name, ext), path)
1050 if ext_files:
1051 break
1052 else:
1053
1054 raise ALOHAERROR, 'No external routine \"%s.%s\" in directories\n %s' % \
1055 (name, ext, '\n'.join(paths))
1056
1057 if output_dir:
1058 for filepath in ext_files:
1059
1060 files.cp(filepath, output_dir)
1061 return ext_files
1062
1063
1064
1066 """Search some symmetries in the vertices.
1067 We search if some identical particles are in a vertices in order
1068 to avoid to compute symmetrical contributions"""
1069
1070 for vertex in self.model.all_vertices:
1071 for i, part1 in enumerate(vertex.particles):
1072 for j in range(i-1,-1,-1):
1073 part2 = vertex.particles[j]
1074 if part1.pdg_code == part2.pdg_code and part1.color == 1:
1075 if part1.spin == 2 and (i % 2 != j % 2 ):
1076 continue
1077 for lorentz in vertex.lorentz:
1078 if self.symmetries.has_key(lorentz.name):
1079 if self.symmetries[lorentz.name].has_key(i+1):
1080 self.symmetries[lorentz.name][i+1] = max(self.symmetries[lorentz.name][i+1], j+1)
1081 else:
1082 self.symmetries[lorentz.name][i+1] = j+1
1083 else:
1084 self.symmetries[lorentz.name] = {i+1:j+1}
1085 break
1086
1088 """Search the interaction associate with more than one lorentz structure.
1089 If those lorentz structure have the same order and the same color then
1090 associate a multiple lorentz routines to ALOHA """
1091
1092 orders = {}
1093 for coup in self.model.all_couplings:
1094 orders[coup.name] = str(coup.order)
1095
1096 for vertex in self.model.all_vertices:
1097 if len(vertex.lorentz) == 1:
1098 continue
1099
1100
1101
1102
1103
1104 combine = {}
1105 for (id_col, id_lor), coups in vertex.couplings.items():
1106 if not isinstance(coups, list):
1107 coups = [coups]
1108 for coup in coups:
1109 order = orders[coup.name]
1110 key = (id_col, order)
1111 if key in combine:
1112 combine[key].append(id_lor)
1113 else:
1114 combine[key] = [id_lor]
1115
1116
1117 for list_lor in combine.values():
1118 if len(list_lor) == 1:
1119 continue
1120 list_lor.sort()
1121 main = vertex.lorentz[list_lor[0]].name
1122 if main not in self.multiple_lor:
1123 self.multiple_lor[main] = []
1124
1125 info = tuple([vertex.lorentz[id].name for id in list_lor[1:]])
1126 if info not in self.multiple_lor[main]:
1127 self.multiple_lor[main].append(info)
1128
1129
def has_symmetries(self, l_name, outgoing, out=None, valid_output=None):
    """ This returns out if no symmetries are available, otherwise it finds
    the lowest equivalent outgoing by recursivally calling this function.
    auth is a list of authorize output, if define"""

    try:
        equiv = self.symmetries[l_name][outgoing]
    except Exception:
        # no (further) symmetry: return the best candidate found so far
        return out

    if valid_output and equiv not in valid_output:
        # equivalent index is not an authorized output: keep the current best
        return self.has_symmetries(l_name, equiv, out=out,
                                   valid_output=valid_output)
    # equivalent index is authorized: it becomes the new best candidate
    return self.has_symmetries(l_name, equiv, out=equiv,
                               valid_output=valid_output)
1146
1148 """ create a list for the routine needing to be conjugate """
1149
1150
1151 need = False
1152 for particle in self.model.all_particles:
1153 if particle.spin == 2 and particle.selfconjugate:
1154 need = True
1155 break
1156
1157 if not need:
1158 for interaction in self.model.all_vertices:
1159 fermions = [p for p in interaction.particles if p.spin == 2]
1160 for i in range(0, len(fermions), 2):
1161 if fermions[i].pdg_code * fermions[i+1].pdg_code > 0:
1162
1163 need = True
1164 break
1165
1166
1167 if not need:
1168 return {}
1169
1170 conjugate_request = {}
1171
1172 for vertex in self.model.all_vertices:
1173 for i in range(0, len(vertex.particles), 2):
1174 part1 = vertex.particles[i]
1175 if part1.spin !=2:
1176
1177 break
1178
1179 if part1.selfconjugate:
1180 continue
1181 part2 = vertex.particles[i + 1]
1182 if part2.selfconjugate:
1183 continue
1184
1185
1186 for lorentz in vertex.lorentz:
1187 try:
1188 conjugate_request[lorentz.name].add(i//2+1)
1189 except Exception:
1190 conjugate_request[lorentz.name] = set([i//2+1])
1191
1192 for elem in conjugate_request:
1193 conjugate_request[elem] = list(conjugate_request[elem])
1194
1195 return conjugate_request
1196
1200 """find the list of Helicity routine in the directory and create a list
1201 of those files (but with compile extension)"""
1202
1203 aloha_files = []
1204
1205
1206 alohafile_pattern = re.compile(r'''_\d%s''' % file_ext)
1207 for filename in os.listdir(aloha_dir):
1208 if os.path.isfile(os.path.join(aloha_dir, filename)):
1209 if alohafile_pattern.search(filename):
1210 aloha_files.append(filename.replace(file_ext, comp_ext))
1211
1212 if os.path.exists(pjoin(aloha_dir, 'additional_aloha_function.f')):
1213 aloha_files.append('additional_aloha_function.o')
1214
1215 text="ALOHARoutine = "
1216 text += ' '.join(aloha_files)
1217 text +='\n'
1218
1219
1220 file(os.path.join(aloha_dir, 'aloha_file.inc'), 'w').write(text)
1221
1225
1226 def create(obj):
1227 """ """
1228 obj= obj.simplify()
1229 obj = obj.expand()
1230 obj = obj.simplify()
1231 return obj
1232
1233
1234 old_tag = set(aloha_lib.KERNEL.use_tag)
1235 print 'create lib',tag
1236 name, i = tag
1237 if name == "Spin2Prop":
1238 lib[('Spin2Prop',i)] = create( Spin2Propagator(_spin2_mult + i, \
1239 2 * _spin2_mult + i,'I2','I3', i) )
1240 elif name == "Spin2PropMassless":
1241 lib[('Spin2PropMassless',i)] = create( Spin2masslessPropagator(
1242 _spin2_mult + i, 2 * _spin2_mult + i,'I2','I3'))
1243
1244 aloha_lib.KERNEL.use_tag = old_tag
1245 return lib
1246
1247
1248 if '__main__' == __name__:
1249 logging.basicConfig(level=0)
1250
1251 import profile
1252
1253
1254 start = time.time()
1259 - def write(alohagenerator):
1261 alohagenerator = main()
1262 logger.info('done in %s s' % (time.time()-start))
1263 write(alohagenerator)
1264
1265
1266 stop = time.time()
1267 logger.info('done in %s s' % (stop-start))
1268