from __future__ import division
import cmath
import copy
import cPickle
import glob
import logging
import numbers
import os
import re
import shutil
import sys
import time

root_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0]
sys.path.append(root_path)
from aloha.aloha_object import *
import aloha
import aloha.aloha_writers as aloha_writers
import aloha.aloha_lib as aloha_lib
import aloha.aloha_object as aloha_object
import aloha.aloha_parsers as aloha_parsers
import aloha.aloha_fct as aloha_fct
try:
    import madgraph.iolibs.files as files
    import madgraph.various.misc as misc
except Exception:
    import aloha.files as files
    import aloha.misc as misc

aloha_path = os.path.dirname(os.path.realpath(__file__))
logger = logging.getLogger('ALOHA')

_conjugate_gap = 50
_spin2_mult = 1000

pjoin = os.path.join

ALOHAERROR = aloha.ALOHAERROR
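

# A minimal usage sketch (illustrative only, never called on import): the usual
# entry point of this module is the AbstractALOHAModel class defined below.
# The model name 'sm' and the output directory are assumptions; any UFO model
# importable from the MadGraph 'models' path would do, and the directory is
# assumed to exist.
def _example_generate_sm_routines():
    generator = AbstractALOHAModel('sm')          # load the UFO model
    generator.compute_all(save=False)             # build every AbstractRoutine
    generator.write('/tmp/aloha_out', 'Fortran')  # write the helicity routines
    return generator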
55 """ store the result of the computation of Helicity Routine
56 this is use for storing and passing to writer """
57
58 - def __init__(self, expr, outgoing, spins, name, infostr, denom=None):
59 """ store the information """
60
61 self.spins = spins
62 self.expr = expr
63 self.denominator = denom
64 self.name = name
65 self.outgoing = outgoing
66 self.infostr = infostr
67 self.symmetries = []
68 self.combined = []
69 self.tag = []
70 self.contracted = {}
71
72
73
75 """ add an outgoing """
76
77 if not outgoing in self.symmetries:
78 self.symmetries.append(outgoing)
79
81 """add a combine rule """
82
83 if lor_list not in self.combined:
84 self.combined.append(lor_list)
85
86 - def write(self, output_dir, language='Fortran', mode='self', combine=True,**opt):
87 """ write the content of the object """
88 writer = aloha_writers.WriterFactory(self, language, output_dir, self.tag)
89 text = writer.write(mode=mode, **opt)
90 if combine:
91 for grouped in self.combined:
92 if isinstance(text, tuple):
93 text = tuple([old.__add__(new) for old, new in zip(text,
94 writer.write_combined(grouped, mode=mode+'no_include', **opt))])
95 else:
96 text += writer.write_combined(grouped, mode=mode+'no_include', **opt)
97 if aloha.mp_precision and 'MP' not in self.tag:
98 self.tag.append('MP')
99 text += self.write(output_dir, language, mode, **opt)
100 return text
101
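    # Illustrative sketch (not part of ALOHA): writing a single routine that
    # was produced elsewhere (typically by AbstractRoutineBuilder) to Fortran
    # sources.  The output directory is an assumption and must already exist.
    def _example_write_one(self, output_dir='/tmp/aloha_out'):
        return self.write(output_dir, language='Fortran')
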
103 """return some information on the routine
104 """
105 if info == "rank":
106 assert isinstance(self.expr, aloha_lib.SplitCoefficient)
107 rank= 1
108 for coeff in self.expr:
109 rank = max(sum(coeff), rank)
110 return rank -1
111 else:
112 raise ALOHAERROR, '%s is not a valid information that can be computed' % info
113
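    # Illustrative sketch (not part of ALOHA): mirrors the arithmetic of
    # get_info('rank') above on made-up SplitCoefficient keys.
    def _example_rank(self):
        coeffs = [(0, 0, 0, 0), (1, 0, 1, 0), (2, 0, 0, 0)]
        return max([sum(c) for c in coeffs] + [1]) - 1   # -> 1
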
116 """ Launch the creation of the Helicity Routine"""
117
118 prop_lib = {}
119 counter = 0
120
122 """ An error class for ALOHA"""
123
124 - def __init__(self, lorentz, model=None):
125 """ initialize the run
126 lorentz: the lorentz information analyzed (UFO format)
127 language: define in which language we write the output
128 modes: 0 for all incoming particles
129 >0 defines the outgoing part (start to count at 1)
130 """
131
132 self.spins = [s for s in lorentz.spins]
133 self.name = lorentz.name
134 self.conjg = []
135 self.tag = []
136 self.outgoing = None
137 self.lorentz_expr = lorentz.structure
138 self.routine_kernel = None
139 self.spin2_massless = False
140 self.spin32_massless = False
141 self.contracted = {}
142 self.fct = {}
143 self.model = model
144 self.denominator = None
145
146
147 self.lastprint = 0
148
149 if hasattr(lorentz, 'formfactors') and lorentz.formfactors:
150 for formf in lorentz.formfactors:
151 pat = re.compile(r'\b%s\b' % formf.name)
152 self.lorentz_expr = pat.sub('(%s)' % formf.value, self.lorentz_expr)
153

    def define_all_conjugate_builder(self, pair_list):
        """ return the full set of AbstractRoutineBuilder linked to fermion
        clash"""

        solution = []

        for i, pair in enumerate(pair_list):
            new_builder = self.define_conjugate_builder(pair)
            solution.append(new_builder)
            solution += new_builder.define_all_conjugate_builder(pair_list[i+1:])
        return solution

    def define_conjugate_builder(self, pairs=1):
        """ return an AbstractRoutineBuilder for the conjugate operation.
        If there is more than one pair of fermions, use pairs to specify which
        one is conjugated."""

        new_builder = copy.copy(self)
        new_builder.conjg = self.conjg[:]
        try:
            for index in pairs:
                new_builder.apply_conjugation(index)
        except TypeError:
            new_builder.apply_conjugation(pairs)
        return new_builder

191 """ apply conjugation on self object"""
192
193 nb_fermion = len([1 for s in self.spins if s % 2 == 0])
194 if isinstance(pair, tuple):
195 if len(pair) ==1 :
196 pair = pair[0]
197 else:
198 raise Exception
199
200
201 if (pair > 1 or nb_fermion >2) and not self.conjg:
202
203 data = aloha_fct.get_fermion_flow(self.lorentz_expr, nb_fermion)
204 target = dict([(2*i+1,2*i+2) for i in range(nb_fermion//2)])
205 if not data == target:
206 text = """Unable to deal with 4(or more) point interactions
207 in presence of majorana particle/flow violation"""
208 raise ALOHAERROR, text
209
210 old_id = 2 * pair - 1
211 new_id = _conjugate_gap + old_id
212
213 self.kernel_tag = set()
214 if not self.routine_kernel or isinstance(self.routine_kernel, str):
215 self.routine_kernel = eval(self.parse_expression(self.lorentz_expr))
216
217
218
219 self.routine_kernel = \
220 C(new_id, old_id + 1) * self.routine_kernel * C(new_id + 1, old_id)
221
222 self.lorentz_expr = '('+self.lorentz_expr+') * C(%s,%s) * C(%s,%s)' % \
223 (new_id, old_id + 1, new_id + 1, old_id )
224
225 self.conjg.append(pair)
226
227
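    # Illustrative sketch (not part of ALOHA): the index bookkeeping used by
    # apply_conjugation above.  For the first fermion pair (pair=1) the kernel
    # is wrapped as C(51, 2) * kernel * C(52, 1), since _conjugate_gap is 50.
    def _example_conjugation_indices(self, pair=1):
        old_id = 2 * pair - 1
        new_id = _conjugate_gap + old_id
        return (new_id, old_id + 1), (new_id + 1, old_id)   # ((51, 2), (52, 1)) for pair=1
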

    def parse_expression(self, expr=None, need_P_sign=False):
        """change the sign of P for outgoing fermions in order to
        correct the convention mismatch between HELAS and FR"""

        if not expr:
            expr = self.lorentz_expr

        if need_P_sign:
            expr = re.sub(r'\b(P|PSlash)\(', r'-\1(', expr)

        calc = aloha_parsers.ALOHAExpressionParser()
        lorentz_expr = calc.parse(expr)
        return lorentz_expr

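    # Illustrative note (not part of ALOHA): when need_P_sign is True the
    # substitution above flips the sign of every momentum object, e.g.
    #   'P(-1,2)*Gamma(-1,1,2) + PSlash(1,2,3)'
    # becomes
    #   '-P(-1,2)*Gamma(-1,1,2) + -PSlash(1,2,3)'
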
262 """compute the abstract routine associate to this mode """
263
264
265 aloha_lib.KERNEL.use_tag=set()
266
267 nb_spinor = 0
268 outgoing = self.outgoing
269 if (outgoing + 1) // 2 in self.conjg:
270
271 outgoing = outgoing + outgoing % 2 - (outgoing +1) % 2
272
273 if not self.routine_kernel:
274 AbstractRoutineBuilder.counter += 1
275 if self.tag == []:
276 logger.info('aloha creates %s routines' % self.name)
277 elif AbstractALOHAModel.lastprint < time.time() - 1:
278 AbstractALOHAModel.lastprint = time.time()
279 logger.info('aloha creates %s set of routines with options: %s' \
280 % (self.name, ','.join(self.tag)) )
281 try:
282 lorentz = self.parse_expression()
283 self.routine_kernel = lorentz
284 lorentz = eval(lorentz)
285 except NameError as error:
286 logger.error('unknow type in Lorentz Evaluation:%s'%str(error))
287 raise ALOHAERROR, 'unknow type in Lorentz Evaluation: %s ' % str(error)
288 else:
289 self.kernel_tag = set(aloha_lib.KERNEL.use_tag)
290 elif isinstance(self.routine_kernel,str):
291 lorentz = eval(self.routine_kernel)
292 aloha_lib.KERNEL.use_tag = set(self.kernel_tag)
293 else:
294 lorentz = copy.copy(self.routine_kernel)
295 aloha_lib.KERNEL.use_tag = set(self.kernel_tag)
296 for (i, spin ) in enumerate(self.spins):
297 id = i + 1
298
299 if id == outgoing:
300
301
302 propa = [t[1:] for t in self.tag if t.startswith('P')]
303 if propa == ['0']:
304 massless = True
305 self.denominator = None
306 elif propa == []:
307 massless = False
308 self.denominator = None
309 else:
310 lorentz *= complex(0,1) * self.get_custom_propa(propa[0], spin, id)
311 continue
312
313
314
315 if spin in [1,-1]:
316 lorentz *= complex(0,1)
317 elif spin == 2:
318
319 if (id + 1) // 2 in self.conjg:
320 id += _conjugate_gap + id % 2 - (id +1) % 2
321 if (id % 2):
322
323 lorentz *= complex(0,1) * SpinorPropagatorout(id, 'I2', outgoing)
324 else:
325
326 lorentz *= complex(0,1) * SpinorPropagatorin('I2', id, outgoing)
327 elif spin == 3 :
328 if massless or not aloha.unitary_gauge:
329 lorentz *= VectorPropagatorMassless(id, 'I2', id)
330 else:
331 lorentz *= VectorPropagator(id, 'I2', id)
332 elif spin == 4:
333
334 if (id + 1) // 2 in self.conjg:
335 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
336 else:
337 spin_id = id
338 nb_spinor += 1
339 if not massless and (spin_id % 2):
340 lorentz *= complex(0,1) * Spin3halfPropagatorout(id, 'I2', spin_id,'I3', outgoing)
341 elif not massless and not (spin_id % 2):
342 lorentz *= complex(0,1) * Spin3halfPropagatorin('I2', id , 'I3', spin_id, outgoing)
343 elif spin_id %2:
344 lorentz *= complex(0,1) * Spin3halfPropagatorMasslessOut(id, 'I2', spin_id,'I3', outgoing)
345 else :
346 lorentz *= complex(0,1) * Spin3halfPropagatorMasslessIn('I2', id, 'I3', spin_id, outgoing)
347
348 elif spin == 5 :
349
350 if massless:
351 lorentz *= complex(0,1) * Spin2masslessPropagator(_spin2_mult + id, \
352 2 * _spin2_mult + id,'I2','I3')
353 else:
354 lorentz *= complex(0,1) * Spin2Propagator(_spin2_mult + id, \
355 2 * _spin2_mult + id,'I2','I3', id)
356 else:
357 raise self.AbstractALOHAError(
358 'The spin value %s (2s+1) is not supported yet' % spin)
359 else:
360
361 if spin in [1,-1]:
362 lorentz *= Scalar(id)
363 elif spin == 2:
364
365 if (id+1) // 2 in self.conjg:
366 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
367 else:
368 spin_id = id
369 lorentz *= Spinor(spin_id, id)
370 elif spin == 3:
371 lorentz *= Vector(id, id)
372 elif spin == 4:
373
374 if (id+1) // 2 in self.conjg:
375 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
376 else:
377 spin_id = id
378 nb_spinor += 1
379 lorentz *= Spin3Half(id, spin_id, id)
380 elif spin == 5:
381 lorentz *= Spin2(1 * _spin2_mult + id, 2 * _spin2_mult + id, id)
382 else:
383 raise self.AbstractALOHAError(
384 'The spin value %s (2s+1) is not supported yet' % spin)
385
386
387 if not outgoing:
388 lorentz *= complex(0,-1)
389
390
391 lorentz = lorentz.simplify()
392
393
394 if any((tag.startswith('L') for tag in self.tag if len(tag)>1)):
395 return self.compute_loop_coefficient(lorentz, outgoing)
396
397 lorentz = lorentz.expand()
398 lorentz = lorentz.simplify()
399
400 if factorize:
401 lorentz = lorentz.factorize()
402
403 lorentz.tag = set(aloha_lib.KERNEL.use_tag)
404 return lorentz
405
    @staticmethod
    def mod_propagator_expression(tag, text):
        """Change the indices of the propagator to match the current need"""

        data = re.split(r'(\b[a-zA-Z]\w*?)\(([\'\w,\s]*?)\)', text)

        pos = -2
        while pos + 3 < len(data):
            pos = pos + 3
            ltype = data[pos]
            if ltype != 'complex':
                for old, new in tag.items():
                    if isinstance(new, str):
                        new = '\'%s\'' % new
                    else:
                        new = str(new)
                    data[pos+1] = re.sub(r'\b%s\b' % old, new, data[pos+1])
            data[pos+1] = '(%s)' % data[pos+1]
        text = ''.join(data)
        return text

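    # Illustrative sketch (not part of ALOHA): how the substitution performed
    # by mod_propagator_expression rewrites a made-up UFO propagator string
    # when the generic indices are mapped onto leg number 3.
    def _example_mod_propagator(self):
        numerator = "P('mu', id) * Metric(1, 2)"
        tag = {'1': 51, '2': 'I2', 'id': 3}
        return self.mod_propagator_expression(tag, numerator)
        # -> "P('mu', 3) * Metric(51, 'I2')"
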
427 """Return the ALOHA object associated to the user define propagator"""
428
429 propagator = getattr(self.model.propagators, propa)
430 numerator = propagator.numerator
431 denominator = propagator.denominator
432
433
434 needPflipping = False
435 if spin in [1,-1]:
436 tag = {'id': id}
437 elif spin == 2:
438
439 if (id + 1) // 2 in self.conjg:
440 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
441 else:
442 spin_id = id
443 if (spin_id % 2):
444
445 needPflipping = True
446 tag ={'1': spin_id, '2': 'I2', 'id': id}
447 else:
448 tag ={'1': 'I2', '2': spin_id, 'id': id}
449 elif spin == 3 :
450 tag ={'1': id, '2': 'I2', 'id': id}
451 elif spin == 4:
452 delta = lambda i,j: aloha_object.Identity(i,j)
453 deltaL = lambda i,j: aloha_object.IdentityL(i,j)
454
455 if (id + 1) // 2 in self.conjg:
456 spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
457 else:
458 spin_id = id
459 if spin_id % 2:
460 needPflipping = True
461 tag = {'1': 'pr_1', '2': 'pr_2', 'id':id}
462 else:
463 tag = {'1': 'pr_2', '2': 'pr_1'}
464 numerator *= deltaL('pr_1',id) * deltaL('pr_2', 'I2') * \
465 delta('pr_1', spin_id) * delta('pr_2', 'I3')
466 elif spin == 5 :
467 tag = {'1': _spin2_mult + id, '2': 2 * _spin2_mult + id,
468 '51': 'I2', '52': 'I3', 'id':id}
469
470 numerator = self.mod_propagator_expression(tag, numerator)
471 if denominator:
472 denominator = self.mod_propagator_expression(tag, denominator)
473
474 numerator = self.parse_expression(numerator, needPflipping)
475 if denominator:
476 self.denominator = self.parse_expression(denominator, needPflipping)
477 self.denominator = eval(self.denominator)
478 if not isinstance(self.denominator, numbers.Number):
479 self.denominator = self.denominator.simplify().expand().simplify().get((0,))
480
481 return eval(numerator)
482
483
484
485

    def compute_loop_coefficient(self, lorentz, outgoing):
        """split the expression into coefficients of the loop momentum
        (routines carrying an 'L' tag)"""

        l_in = [int(tag[1:]) for tag in self.tag if tag.startswith('L')][0]
        if (l_in + 1) // 2 in self.conjg:
            # pick the other member of the conjugated fermion pair
            l_in = l_in + l_in % 2 - (l_in + 1) % 2
        assert l_in != outgoing, 'incoming open-loop leg cannot be the outgoing one'

        # rewrite the momenta of the loop legs in terms of the loop momentum P_L
        Pdep = [aloha_lib.KERNEL.get(P) for P in lorentz.get_all_var_names()
                if P.startswith('_P')]
        Pdep = set([P for P in Pdep if P.particle in [outgoing, l_in]])
        for P in Pdep:
            if P.particle == l_in:
                sign = 1
            else:
                sign = -1
            id = P.id
            lorentz_ind = P.lorentz_ind[0]
            P_Lid = aloha_object.P(lorentz_ind, 'L')
            P_obj = aloha_object.P(lorentz_ind, P.particle)
            new_expr = sign * (P_Lid + P_obj)
            lorentz = lorentz.replace(id, new_expr)

        # the variables kept unexpanded: loop momentum and incoming loop wavefunction
        var_veto = ['PL_0', 'PL_1', 'PL_2', 'PL_3']
        spin = aloha_writers.WriteALOHA.type_to_variable[abs(self.spins[l_in-1])]
        size = aloha_writers.WriteALOHA.type_to_size[spin] - 1
        var_veto += ['%s%s_%s' % (spin, l_in, i) for i in range(1, size)]

        veto_ids = aloha_lib.KERNEL.get_ids(var_veto)

        lorentz = lorentz.expand(veto=veto_ids)
        lorentz = lorentz.simplify()
        coeff_expr = lorentz.split(veto_ids)

        for key, expr in coeff_expr.items():
            expr = expr.simplify()
            coeff_expr[key] = expr.factorize()
        coeff_expr.tag = set(aloha_lib.KERNEL.use_tag)

        return coeff_expr

533 """Define the expression"""
534
535 self.expr = lorentz_expr
536
538 """Define the kernel at low level"""
539
540 if not lorentz:
541 logger.info('compute kernel %s' % self.counter)
542 AbstractRoutineBuilder.counter += 1
543 lorentz = eval(self.lorentz_expr)
544
545 if isinstance(lorentz, numbers.Number):
546 self.routine_kernel = lorentz
547 return lorentz
548 lorentz = lorentz.simplify()
549 lorentz = lorentz.expand()
550 lorentz = lorentz.simplify()
551
552 self.routine_kernel = lorentz
553 return lorentz
554
555
556 @staticmethod
558 """return the name of the """
559
560 name = '%s_%s' % (name, outgoing)
561 return name
562
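    # Illustrative sketch (not part of ALOHA): the naming convention used for
    # the generated routines, e.g. the routine putting leg 3 off-shell for a
    # hypothetical 'FFV1' structure.
    def _example_routine_name(self):
        return AbstractRoutineBuilder.get_routine_name('FFV1', 3)   # -> 'FFV1_3'
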
    @classmethod


class CombineRoutineBuilder(AbstractRoutineBuilder):
    """A special builder for combined routines, used when those need to be
    written out explicitly.
    """

    def __init__(self, l_lorentz, model=None):
        """ initialize the run
        l_lorentz: list of lorentz information analyzed (UFO format)
        language: defines in which language we write the output
        modes: 0 for all incoming particles
               >0 defines the outgoing leg (counting starts at 1)
        """
        AbstractRoutineBuilder.__init__(self, l_lorentz[0], model)
        lorentz = l_lorentz[0]
        self.spins = lorentz.spins
        l_name = [l.name for l in l_lorentz]
        self.name = aloha_writers.combine_name(l_name[0], l_name[1:], None)
        self.conjg = []
        self.tag = []
        self.outgoing = None
        self.lorentz_expr = []
        for i, lor in enumerate(l_lorentz):
            self.lorentz_expr.append('Coup(%s) * (%s)' % (i+1, lor.structure))
        self.lorentz_expr = ' + '.join(self.lorentz_expr)
        self.routine_kernel = None
        self.contracted = {}
        self.fct = {}

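    # Illustrative sketch (not part of ALOHA): the combined expression built in
    # __init__ above, for two made-up UFO structures.  Each term is weighted by
    # its own coupling through the Coup(i) placeholder.
    def _example_combined_expr(self):
        structures = ['Gamma(3,2,1)', 'Identity(2,1)']
        return ' + '.join('Coup(%s) * (%s)' % (i + 1, s)
                          for i, s in enumerate(structures))
        # -> 'Coup(1) * (Gamma(3,2,1)) + Coup(2) * (Identity(2,1))'
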
600 """ A class to build and store the full set of Abstract ALOHA Routine"""
601
602 lastprint = 0
603
604 - def __init__(self, model_name, write_dir=None, format='Fortran',
605 explicit_combine=False):
606 """ load the UFO model and init the dictionary """
607
608
609 self.explicit_combine = explicit_combine
610
611
612 model_name_pattern = re.compile("^(?P<name>.+)-(?P<rest>[\w\d_]+)$")
613 model_name_re = model_name_pattern.match(model_name)
614 if model_name_re:
615 name = model_name_re.group('name')
616 rest = model_name_re.group("rest")
617 if rest == 'full' or \
618 os.path.isfile(os.path.join(root_path, "models", name,
619 "restrict_%s.dat" % rest)):
620 model_name = model_name_re.group("name")
621
622
623 try:
624 python_pos = model_name
625 __import__(python_pos)
626 except Exception:
627 python_pos = 'models.%s' % model_name
628 __import__(python_pos)
629 self.model = sys.modules[python_pos]
630
631 self.model_pos = os.path.dirname(self.model.__file__)
632
633
634 self.external_routines = []
635
636
637 dict.__init__(self)
638 self.symmetries = {}
639 self.multiple_lor = {}
640
641 if write_dir:
642 self.main(write_dir,format=format)
643
    def main(self, output_dir, format='Fortran'):
        """ Compute the routines if not already computed.
        Write the files in models/MY_MODEL/MY_FORMAT
        and copy them to output_dir.
        """
        ext = {'Fortran': 'f', 'Python': 'py', 'CPP': 'h'}

        # compute the routines
        if not self.load():
            self.compute_all()
        logger.info(' %s aloha routines' % len(self))

        # write the routines
        if not output_dir:
            output_dir = os.path.join(self.model_pos, format.lower())
        logger.debug('aloha output dir is %s' % output_dir)
        if not os.path.exists(output_dir):
            os.mkdir(output_dir)

        for (name, outgoing), abstract in self.items():
            routine_name = AbstractRoutineBuilder.get_routine_name(name, outgoing)
            if not os.path.exists(os.path.join(output_dir, routine_name) + '.' + ext[format]):
                abstract.write(output_dir, format)
            else:
                logger.info('File for %s already present, skip the writing of this file' % routine_name)

    def save(self, filepos=None):
        """ save the current model in a pkl file """

        logger.info('save the aloha abstract routine in a pickle file')
        if not filepos:
            filepos = os.path.join(self.model_pos, 'aloha.pkl')

        fsock = open(filepos, 'w')
        cPickle.dump(dict(self), fsock)

    def load(self, filepos=None):
        """ reload the pickle file """
        # note: the early return below disables the pickle reload, so the
        # routines are always recomputed
        return False
        if not filepos:
            filepos = os.path.join(self.model_pos, 'aloha.pkl')
        if os.path.exists(filepos):
            fsock = open(filepos, 'r')
            self.update(cPickle.load(fsock))
            return True
        else:
            return False

    def get(self, lorentzname, outgoing):
        """ return the AbstractRoutine with a given lorentz name, and for a given
        outgoing particle """

        try:
            return self[(lorentzname, outgoing)]
        except Exception:
            logger.warning('(%s, %s) is not a valid key' %
                           (lorentzname, outgoing))
            return None

    def get_info(self, info, lorentzname, outgoing, tag, cached=False):
        """return some information about the aloha routine
        - "rank": return the rank of the loop function
        If the cached option is set to True, then the result is stored and
        recycled if possible.
        """

        if not aloha.loop_mode and any(t.startswith('L') for t in tag):
            aloha.loop_mode = True

        returned_dict = {}

        if isinstance(info, str):
            infos = [info]
        else:
            infos = info

        # try to recycle a cached answer
        if hasattr(self, 'cached_interaction_infos'):
            for info_key in infos:
                try:
                    returned_dict[info_key] = self.cached_interaction_infos[\
                                 (lorentzname, outgoing, tuple(tag), info_key)]
                except KeyError:
                    # not cached yet, it will be computed below
                    pass
        elif cached:
            self.cached_interaction_infos = {}

        init = False
        for info_key in infos:
            if info_key in returned_dict:
                continue
            elif not init:
                # compute the routine once for all requested information
                lorentz = eval('self.model.lorentz.%s' % lorentzname)
                abstract = AbstractRoutineBuilder(lorentz)
                routine = abstract.compute_routine(outgoing, tag, factorize=False)
                init = True

            assert 'routine' in locals()
            returned_dict[info_key] = routine.get_info(info_key)
            if cached:
                self.cached_interaction_infos[\
                    (lorentzname, outgoing, tuple(tag), info_key)] = returned_dict[info_key]

        if isinstance(info, str):
            return returned_dict[info]
        else:
            return returned_dict

    def set(self, lorentzname, outgoing, abstract_routine):
        """ add in the dictionary """

        self[(lorentzname, outgoing)] = abstract_routine

    def compute_all(self, save=True, wanted_lorentz=[], custom_propa=False):
        """ define all the AbstractRoutine linked to a model """

        # reorganize the data in order to reuse results for a given lorentz
        # structure
        self.look_for_symmetries()
        conjugate_list = self.look_for_conjugate()
        self.look_for_multiple_lorentz_interactions()

        if not wanted_lorentz:
            wanted_lorentz = [l.name for l in self.model.all_lorentz]
        for lorentz in self.model.all_lorentz:
            if not lorentz.name in wanted_lorentz:
                # not requested, skip it
                continue

            if -1 in lorentz.spins:
                # ghost interaction, skip it
                continue

            if lorentz.structure == 'external':
                for i in range(len(lorentz.spins)):
                    self.external_routines.append('%s_%s' % (lorentz.name, i))
                continue

            # routines to create for this lorentz structure
            routines = [(i, []) for i in range(len(lorentz.spins) + 1)]

            if custom_propa:
                for vertex in self.model.all_vertices:
                    if lorentz in vertex.lorentz:
                        for i, part in enumerate(vertex.particles):
                            new_prop = False
                            if hasattr(part, 'propagator') and part.propagator:
                                new_prop = ['P%s' % part.propagator.name]
                            elif part.mass.name.lower() == 'zero':
                                new_prop = ['P0']
                            if new_prop and (i+1, new_prop) not in routines:
                                routines.append((i+1, new_prop))

            builder = AbstractRoutineBuilder(lorentz, self.model)
            self.compute_aloha(builder, routines=routines)

            if lorentz.name in self.multiple_lor:
                for m in self.multiple_lor[lorentz.name]:
                    for outgoing in range(len(lorentz.spins) + 1):
                        try:
                            self[(lorentz.name, outgoing)].add_combine(m)
                        except Exception:
                            pass

            if lorentz.name in conjugate_list:
                conjg_builder_list = builder.define_all_conjugate_builder(\
                                                  conjugate_list[lorentz.name])
                for conjg_builder in conjg_builder_list:
                    # no duplication of conjugation
                    assert conjg_builder_list.count(conjg_builder) == 1
                    self.compute_aloha(conjg_builder, lorentz.name)
                    if lorentz.name in self.multiple_lor:
                        for m in self.multiple_lor[lorentz.name]:
                            for outgoing in range(len(lorentz.spins) + 1):
                                realname = conjg_builder.name + ''.join(['C%s' % pair for pair in conjg_builder.conjg])
                                try:
                                    self[(realname, outgoing)].add_combine(m)
                                except Exception, error:
                                    self[(realname, self.symmetries[lorentz.name][outgoing])].add_combine(m)

        if save:
            self.save()

838 """add a series of Lorentz structure created dynamically"""
839
840 for lor in lorentzlist:
841 if not hasattr(self.model.lorentz, lor.name):
842 setattr(self.model.lorentz, lor.name, lor)
843
845 """ create the requested ALOHA routine.
846 data should be a list of tuple (lorentz, tag, outgoing)
847 tag should be the list of special tag (like conjugation on pair)
848 to apply on the object """
849
850
851
852 self.look_for_symmetries()
853
854
855 aloha.loop_mode = False
856
857 request = {}
858
859 for list_l_name, tag, outgoing in data:
860
861 all_tag = tag[:]
862 conjugate = [i for i in tag if isinstance(i, int)]
863
864 tag = [i for i in tag if isinstance(i, str) and not i.startswith('P')]
865 tag = tag + ['C%s'%i for i in conjugate]
866 tag = tag + [i for i in all_tag if isinstance(i, str) and i.startswith('P')]
867
868 conjugate = tuple([int(c[1:]) for c in tag if c.startswith('C')])
869 loop = any((t.startswith('L') for t in tag))
870 if loop:
871 aloha.loop_mode = True
872 self.explicit_combine = True
873
874 for l_name in list_l_name:
875 try:
876 request[l_name][conjugate].append((outgoing,tag))
877 except Exception:
878 try:
879 request[l_name][conjugate] = [(outgoing,tag)]
880 except Exception:
881 request[l_name] = {conjugate: [(outgoing,tag)]}
882
883
884 for l_name in request:
885 lorentz = eval('self.model.lorentz.%s' % l_name)
886 if lorentz.structure == 'external':
887 for tmp in request[l_name]:
888 for outgoing, tag in request[l_name][tmp]:
889 name = aloha_writers.get_routine_name(lorentz.name,outgoing=outgoing,tag=tag)
890 if name not in self.external_routines:
891 self.external_routines.append(name)
892 continue
893
894 builder = AbstractRoutineBuilder(lorentz, self.model)
895
896
897 for conjg in request[l_name]:
898
899 def sorting(a,b):
900 if a[0] < b[0]: return -1
901 else: return 1
902 routines = request[l_name][conjg]
903 routines.sort(sorting)
904 if not conjg:
905
906 self.compute_aloha(builder, routines=routines)
907 else:
908
909 conjg_builder = builder.define_conjugate_builder(conjg)
910
911 self.compute_aloha(conjg_builder, symmetry=lorentz.name,
912 routines=routines)
913
914
915
916 for list_l_name, tag, outgoing in data:
917 if len(list_l_name) ==1:
918 continue
919
920 conjugate = [i for i in tag if isinstance(i, int)]
921 all_tag = tag[:]
922 tag = [i for i in tag if isinstance(i, str) and not i.startswith('P')]
923 tag = tag + ['C%s'%i for i in conjugate]
924 tag = tag + [i for i in all_tag if isinstance(i, str) and i.startswith('P')]
925
926 if not self.explicit_combine:
927 lorentzname = list_l_name[0]
928 lorentzname += ''.join(tag)
929 if self.has_key((lorentzname, outgoing)):
930 self[(lorentzname, outgoing)].add_combine(list_l_name[1:])
931 else:
932 lorentz = eval('self.model.lorentz.%s' % lorentzname)
933 assert lorentz.structure == 'external'
934 else:
935 l_lorentz = []
936 for l_name in list_l_name:
937 l_lorentz.append(eval('self.model.lorentz.%s' % l_name))
938 builder = CombineRoutineBuilder(l_lorentz)
939
940 for conjg in request[list_l_name[0]]:
941
942 def sorting(a,b):
943 if a[0] < b[0]: return -1
944 else: return 1
945 routines = request[list_l_name[0]][conjg]
946 routines.sort(sorting)
947 if not conjg:
948
949 self.compute_aloha(builder, routines=routines)
950 else:
951
952 conjg_builder = builder.define_conjugate_builder(conjg)
953
954 self.compute_aloha(conjg_builder, symmetry=lorentz.name,
955 routines=routines)
956
957
958
    def compute_aloha(self, builder, symmetry=None, routines=None, tag=[]):
        """ define all the AbstractRoutine linked to a given lorentz structure
        symmetry allows one to reuse the symmetry of another lorentz structure.
        routines restricts the computation to a subset of the routines."""

        name = builder.name
        if not symmetry:
            symmetry = name
        if not routines:
            if not tag:
                tag = ['C%s' % i for i in builder.conjg]
            else:
                addon = ['C%s' % i for i in builder.conjg]
                tag = [(i, addon + onetag) for i, onetag in tag]
            routines = [tuple([i, tag]) for i in range(len(builder.spins) + 1)]

        # create the routines
        for outgoing, tag in routines:
            symmetric = self.has_symmetries(symmetry, outgoing, valid_output=routines)
            realname = name + ''.join(tag)
            if (realname, outgoing) in self:
                continue  # already computed

            if symmetric:
                self.get(realname, symmetric).add_symmetry(outgoing)
            else:
                wavefunction = builder.compute_routine(outgoing, tag)
                # store the result
                self.set(realname, outgoing, wavefunction)

991 """define all the AbstractRoutine linked to a given lorentz structure
992 symmetry authorizes to use the symmetry of anoter lorentz structure.
993 routines to define only a subset of the routines.
994 Compare to compute_aloha, each routines are computed independently.
995 """
996
997 name = builder.name
998 if not routines:
999 routines = [ tuple([i,[]]) for i in range(len(builder.spins) + 1 )]
1000
1001 for outgoing, tag in routines:
1002 builder.routine_kernel = None
1003 wavefunction = builder.compute_routine(outgoing, tag)
1004 self.set(name, outgoing, wavefunction)
1005
1006
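    # Illustrative sketch (not part of ALOHA): the 'routines' argument expected
    # by the compute_aloha methods above is a list of (outgoing, tag) pairs,
    # where outgoing=0 denotes the amplitude routine and the tags are strings
    # such as 'C1' (conjugation of the first fermion pair) or 'P0' (massless
    # propagator).
    def _example_routines_request(self):
        return [(0, []), (1, ['C1']), (3, ['P0'])]
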
    def write(self, output_dir, language):
        """ write the full set of Helicity Routines in output_dir"""

        for abstract_routine in self.values():
            abstract_routine.write(output_dir, language)

        for routine in self.external_routines:
            self.locate_external(routine, language, output_dir)

1037 """search a valid external file and copy it to output_dir directory"""
1038
1039 language_to_ext = {'Python': 'py',
1040 'Fortran' : 'f',
1041 'CPP': 'C'}
1042 ext = language_to_ext[language]
1043 paths = [os.path.join(self.model_pos, language), self.model_pos,
1044 os.path.join(root_path, 'aloha', 'template_files', )]
1045
1046 ext_files = []
1047 for path in paths:
1048 ext_files = misc.glob('%s.%s' % (name, ext), path)
1049 if ext_files:
1050 break
1051 else:
1052
1053 raise ALOHAERROR, 'No external routine \"%s.%s\" in directories\n %s' % \
1054 (name, ext, '\n'.join(paths))
1055
1056 if output_dir:
1057 for filepath in ext_files:
1058
1059 files.cp(filepath, output_dir)
1060 return ext_files
1061
1062
1063
1065 """Search some symmetries in the vertices.
1066 We search if some identical particles are in a vertices in order
1067 to avoid to compute symmetrical contributions"""
1068
1069 for vertex in self.model.all_vertices:
1070 for i, part1 in enumerate(vertex.particles):
1071 for j in range(i-1,-1,-1):
1072 part2 = vertex.particles[j]
1073 if part1.pdg_code == part2.pdg_code and part1.color == 1:
1074 if part1.spin == 2 and (i % 2 != j % 2 ):
1075 continue
1076 for lorentz in vertex.lorentz:
1077 if self.symmetries.has_key(lorentz.name):
1078 if self.symmetries[lorentz.name].has_key(i+1):
1079 self.symmetries[lorentz.name][i+1] = max(self.symmetries[lorentz.name][i+1], j+1)
1080 else:
1081 self.symmetries[lorentz.name][i+1] = j+1
1082 else:
1083 self.symmetries[lorentz.name] = {i+1:j+1}
1084 break
1085
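    # Illustrative sketch (not part of ALOHA): the shape of the dictionary
    # filled by look_for_symmetries above.  For a made-up 'SSS1' structure with
    # three identical scalars it would read {'SSS1': {2: 1, 3: 2}}, meaning the
    # routines for legs 2 and 3 can be recycled from those for legs 1 and 2.
    def _example_symmetries_shape(self):
        return {'SSS1': {2: 1, 3: 2}}
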
1087 """Search the interaction associate with more than one lorentz structure.
1088 If those lorentz structure have the same order and the same color then
1089 associate a multiple lorentz routines to ALOHA """
1090
1091 orders = {}
1092 for coup in self.model.all_couplings:
1093 orders[coup.name] = str(coup.order)
1094
1095 for vertex in self.model.all_vertices:
1096 if len(vertex.lorentz) == 1:
1097 continue
1098
1099
1100
1101
1102
1103 combine = {}
1104 for (id_col, id_lor), coups in vertex.couplings.items():
1105 if not isinstance(coups, list):
1106 coups = [coups]
1107 for coup in coups:
1108 order = orders[coup.name]
1109 key = (id_col, order)
1110 if key in combine:
1111 combine[key].append(id_lor)
1112 else:
1113 combine[key] = [id_lor]
1114
1115
1116 for list_lor in combine.values():
1117 if len(list_lor) == 1:
1118 continue
1119 list_lor.sort()
1120 main = vertex.lorentz[list_lor[0]].name
1121 if main not in self.multiple_lor:
1122 self.multiple_lor[main] = []
1123
1124 info = tuple([vertex.lorentz[id].name for id in list_lor[1:]])
1125 if info not in self.multiple_lor[main]:
1126 self.multiple_lor[main].append(info)
1127
1128
    def has_symmetries(self, l_name, outgoing, out=None, valid_output=None):
        """ This returns out if no symmetry is available, otherwise it finds
        the lowest equivalent outgoing by recursively calling this function.
        valid_output is a list of authorized outputs, if defined."""

        try:
            equiv = self.symmetries[l_name][outgoing]
        except Exception:
            return out
        else:
            if not valid_output or equiv in valid_output:
                return self.has_symmetries(l_name, equiv, out=equiv,
                                           valid_output=valid_output)
            else:
                return self.has_symmetries(l_name, equiv, out=out,
                                           valid_output=valid_output)

1147 """ create a list for the routine needing to be conjugate """
1148
1149
1150 need = False
1151 for particle in self.model.all_particles:
1152 if particle.spin == 2 and particle.selfconjugate:
1153 need = True
1154 break
1155
1156 if not need:
1157 for interaction in self.model.all_vertices:
1158 fermions = [p for p in interaction.particles if p.spin == 2]
1159 for i in range(0, len(fermions), 2):
1160 if fermions[i].pdg_code * fermions[i+1].pdg_code > 0:
1161
1162 need = True
1163 break
1164
1165
1166 if not need:
1167 return {}
1168
1169 conjugate_request = {}
1170
1171 for vertex in self.model.all_vertices:
1172 for i in range(0, len(vertex.particles), 2):
1173 part1 = vertex.particles[i]
1174 if part1.spin !=2:
1175
1176 break
1177
1178 if part1.selfconjugate:
1179 continue
1180 part2 = vertex.particles[i + 1]
1181 if part2.selfconjugate:
1182 continue
1183
1184
1185 for lorentz in vertex.lorentz:
1186 try:
1187 conjugate_request[lorentz.name].add(i//2+1)
1188 except Exception:
1189 conjugate_request[lorentz.name] = set([i//2+1])
1190
1191 for elem in conjugate_request:
1192 conjugate_request[elem] = list(conjugate_request[elem])
1193
1194 return conjugate_request
1195
1199 """find the list of Helicity routine in the directory and create a list
1200 of those files (but with compile extension)"""
1201
1202 aloha_files = []
1203
1204
1205 alohafile_pattern = re.compile(r'''_\d%s''' % file_ext)
1206 for filename in os.listdir(aloha_dir):
1207 if os.path.isfile(os.path.join(aloha_dir, filename)):
1208 if alohafile_pattern.search(filename):
1209 aloha_files.append(filename.replace(file_ext, comp_ext))
1210
1211 if os.path.exists(pjoin(aloha_dir, 'additional_aloha_function.f')):
1212 aloha_files.append('additional_aloha_function.o')
1213
1214 text="ALOHARoutine = "
1215 text += ' '.join(aloha_files)
1216 text +='\n'
1217
1218
1219 file(os.path.join(aloha_dir, 'aloha_file.inc'), 'w').write(text)
1220
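# Illustrative note (not part of ALOHA): the aloha_file.inc written just above
# is a single make-style assignment listing the compiled routines.  Assuming
# file_ext='.f', comp_ext='.o' and two routine files FFV1_0.f and FFV1_3.f in
# aloha_dir, the generated file would contain:
#
#   ALOHARoutine = FFV1_0.o FFV1_3.o
#
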

    def create(obj):
        """ simplify and expand the propagator expression """
        obj = obj.simplify()
        obj = obj.expand()
        obj = obj.simplify()
        return obj

    old_tag = set(aloha_lib.KERNEL.use_tag)
    print 'create lib', tag
    name, i = tag
    if name == "Spin2Prop":
        lib[('Spin2Prop', i)] = create(Spin2Propagator(_spin2_mult + i, \
                                            2 * _spin2_mult + i, 'I2', 'I3', i))
    elif name == "Spin2PropMassless":
        lib[('Spin2PropMassless', i)] = create(Spin2masslessPropagator(
                                            _spin2_mult + i, 2 * _spin2_mult + i, 'I2', 'I3'))

    aloha_lib.KERNEL.use_tag = old_tag
    return lib

if '__main__' == __name__:
    logging.basicConfig(level=0)

    import profile

    start = time.time()
    def write(alohagenerator):
    alohagenerator = main()
    logger.info('done in %s s' % (time.time() - start))
    write(alohagenerator)

    stop = time.time()
    logger.info('done in %s s' % (stop - start))
1267