Package aloha :: Module create_aloha
[hide private]
[frames] | [no frames]

Source Code for Module aloha.create_aloha

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2010 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  from __future__ import division 
  16  import cmath 
  17  import copy 
  18  import cPickle 
  19  import glob 
  20  import logging 
  21  import numbers 
  22  import os 
  23  import re 
  24  import shutil 
  25  import sys 
  26  import time 
  27   
  28  root_path = os.path.split(os.path.dirname(os.path.realpath( __file__ )))[0] 
  29  sys.path.append(root_path) 
  30  from aloha.aloha_object import * 
  31  import aloha 
  32  import aloha.aloha_writers as aloha_writers 
  33  import aloha.aloha_lib as aloha_lib 
  34  import aloha.aloha_object as aloha_object 
  35  import aloha.aloha_parsers as aloha_parsers 
  36  import aloha.aloha_fct as aloha_fct 
  37  try: 
  38      import madgraph.iolibs.files as files 
  39      import madgraph.various.misc as misc 
  40  except Exception: 
  41      import aloha.files as files 
  42      import aloha.misc as misc 
  43       
# Directory containing this module (used to locate template/data files).
aloha_path = os.path.dirname(os.path.realpath(__file__))
logger = logging.getLogger('ALOHA')

# Offset added to a particle id to mark it as belonging to a charge-conjugated
# fermion pair (see AbstractRoutineBuilder.apply_conjugation).
_conjugate_gap = 50
# Multiplier used to build the two distinct Lorentz indices carried by a
# spin-2 particle (ids _spin2_mult+id and 2*_spin2_mult+id).
_spin2_mult = 1000

# Shorthand used throughout the module.
pjoin = os.path.join

# Re-export of the package-level ALOHA error class.
ALOHAERROR = aloha.ALOHAERROR
class AbstractRoutine(object):
    """Store the result of the computation of a Helicity Routine.

    Instances are inert data holders that are later handed to the writers
    (aloha_writers) to produce Fortran/C++/Python source code.
    """

    def __init__(self, expr, outgoing, spins, name, infostr, denom=None):
        """Store the information defining one helicity routine.

        expr: the (already computed) abstract expression of the routine
        outgoing: index of the off-shell particle (0 means all on-shell)
        spins: list of 2s+1 spin values of the interacting particles
        name: name of the associated Lorentz structure
        infostr: human readable string describing the source expression
        denom: optional custom propagator denominator (default None)
        """
        self.spins = spins
        self.expr = expr
        self.denominator = denom
        self.name = name
        self.outgoing = outgoing
        self.infostr = infostr
        self.symmetries = []   # list of outgoing ids sharing this routine
        self.combined = []     # lists of lorentz names combined with this one
        self.tag = []          # special options (conjugation, loop, ...)
        self.contracted = {}   # contracted sub-expressions (TMP variables)

    def add_symmetry(self, outgoing):
        """Register an additional outgoing id equivalent by symmetry
        (duplicates are silently ignored)."""
        if outgoing not in self.symmetries:
            self.symmetries.append(outgoing)

    def add_combine(self, lor_list):
        """Register a combine rule (a list of lorentz names whose couplings
        are summed with this routine); duplicates are ignored."""
        if lor_list not in self.combined:
            self.combined.append(lor_list)

    def write(self, output_dir, language='Fortran', mode='self', combine=True, **opt):
        """Write the routine (and its combined/multi-precision variants)
        through the appropriate language writer and return the text."""
        writer = aloha_writers.WriterFactory(self, language, output_dir, self.tag)
        text = writer.write(mode=mode, **opt)
        if combine:
            for grouped in self.combined:
                if isinstance(text, tuple):
                    text = tuple([old.__add__(new) for old, new in zip(text,
                                 writer.write_combined(grouped, mode=mode + 'no_include', **opt))])
                else:
                    text += writer.write_combined(grouped, mode=mode + 'no_include', **opt)
        # When quadruple precision is requested, recurse once with the 'MP'
        # tag appended so the multi-precision version is emitted as well.
        if aloha.mp_precision and 'MP' not in self.tag:
            self.tag.append('MP')
            text += self.write(output_dir, language, mode, **opt)
        return text

    def get_info(self, info):
        """Return some information on the routine.

        Currently only 'rank' is supported (rank of the loop wavefunction);
        any other key raises ALOHAERROR.
        """
        if info == "rank":
            assert isinstance(self.expr, aloha_lib.SplitCoefficient)
            rank = 1
            for coeff in self.expr:
                # BUGFIX: was `if max(sum(coeff), rank):`, which is (almost)
                # always truthy and therefore kept the LAST coefficient sum
                # instead of the maximal one.
                if sum(coeff) > rank:
                    rank = sum(coeff)
            return rank - 1  # remove the coefficient associated to the wavefunction
        else:
            raise ALOHAERROR('%s is not a valid information that can be computed' % info)
class AbstractRoutineBuilder(object):
    """ Launch the creation of the Helicity Routine"""

    prop_lib = {} # Store computation for the propagator
    counter = 0   # counter for statistic only

    class AbstractALOHAError(Exception):
        """ An error class for ALOHA"""

    def __init__(self, lorentz, model=None):
        """ initialize the run
        lorentz: the lorentz information analyzed (UFO format)
        language: define in which language we write the output
        modes: 0 for all incoming particles
               >0 defines the outgoing part (start to count at 1)
        """

        self.spins = [s for s in lorentz.spins]
        self.name = lorentz.name
        self.conjg = []          # list of fermion pairs that are conjugated
        self.tag = []            # special options (P..., L..., C...)
        self.outgoing = None     # set by compute_routine
        self.lorentz_expr = lorentz.structure
        self.routine_kernel = None   # cached evaluated kernel (or its source string)
        self.spin2_massless = False
        self.spin32_massless = False
        self.contracted = {}
        self.fct = {}
        self.model = model
        self.denominator = None
        # assert model

        self.lastprint = 0 # to avoid that ALOHA makes too many printout

        # Inline any UFO form factors directly into the lorentz expression.
        if hasattr(lorentz, 'formfactors') and lorentz.formfactors:
            for formf in lorentz.formfactors:
                pat = re.compile(r'\b%s\b' % formf.name)
                self.lorentz_expr = pat.sub('(%s)' % formf.value, self.lorentz_expr)

    def compute_routine(self, mode, tag=[], factorize=True):
        """compute the expression and return it"""
        # NOTE(review): mutable default `tag=[]` — never mutated here, but
        # callers should not rely on the shared default object.
        self.outgoing = mode
        self.tag = tag
        if __debug__:
            if mode == 0:
                # a fully on-shell routine cannot carry a loop ('L') tag
                assert not any(t.startswith('L') for t in tag)
        self.expr = self.compute_aloha_high_kernel(mode, factorize)
        return self.define_simple_output()

    def define_all_conjugate_builder(self, pair_list):
        """ return the full set of AbstractRoutineBuilder linked to fermion
        clash"""

        solution = []

        # recursively build every non-empty subset of conjugated pairs
        for i, pair in enumerate(pair_list):
            new_builder = self.define_conjugate_builder(pair)
            solution.append(new_builder)
            solution += new_builder.define_all_conjugate_builder(pair_list[i+1:])
        return solution

    def define_conjugate_builder(self, pairs=1):
        """ return a AbstractRoutineBuilder for the conjugate operation.
        If they are more than one pair of fermion. Then use pair to claim which
        one is conjugated"""

        new_builder = copy.copy(self)
        new_builder.conjg = self.conjg[:]
        # `pairs` may be a single pair index or an iterable of them
        try:
            for index in pairs:
                new_builder.apply_conjugation(index)
        except TypeError:
            new_builder.apply_conjugation(pairs)
        return new_builder

    def apply_conjugation(self, pair=1):
        """ apply conjugation on self object"""

        nb_fermion = len([1 for s in self.spins if s % 2 == 0])
        if (pair > 1 or nb_fermion > 2) and not self.conjg:
            # self.conjg avoif multiple check
            data = aloha_fct.get_fermion_flow(self.lorentz_expr, nb_fermion)
            target = dict([(2*i+1, 2*i+2) for i in range(nb_fermion//2)])
            if not data == target:
                text = """Unable to deal with 4(or more) point interactions
in presence of majorana particle/flow violation"""
                raise ALOHAERROR, text

        # shift the conjugated spinor index by _conjugate_gap to mark it
        old_id = 2 * pair - 1
        new_id = _conjugate_gap + old_id

        self.kernel_tag = set()
        if not self.routine_kernel or isinstance(self.routine_kernel, str):
            self.routine_kernel = eval(self.lorentz_expr)

        # We need to compute C Gamma^T C^-1 = C_ab G_cb (-1) C_cd
        #                                   = C_ac G_bc (-1) C_bd = C_ac G_bc C_db
        self.routine_kernel = \
             C(new_id, old_id + 1) * self.routine_kernel * C(new_id + 1, old_id)

        self.lorentz_expr = '('+self.lorentz_expr+') * C(%s,%s) * C(%s,%s)' % \
                               (new_id, old_id + 1, new_id + 1, old_id)

        self.conjg.append(pair)

    def define_simple_output(self):
        """ define a simple output for this AbstractRoutine """

        infostr = str(self.lorentz_expr)

        output = AbstractRoutine(self.expr, self.outgoing, self.spins, self.name, \
                                 infostr, self.denominator)
        # collect contracted sub-expressions and special functions used
        output.contracted = dict([(name, aloha_lib.KERNEL.reduced_expr2[name])
                                  for name in aloha_lib.KERNEL.use_tag
                                  if name.startswith('TMP')])

        output.fct = dict([(name, aloha_lib.KERNEL.reduced_expr2[name])
                           for name in aloha_lib.KERNEL.use_tag
                           if name.startswith('FCT')])

        # conjugation tags are rebuilt from self.conjg (C tags stripped first)
        output.tag = [t for t in self.tag if not t.startswith('C')]
        output.tag += ['C%s' % pair for pair in self.conjg]
        return output

    def parse_expression(self, expr=None, need_P_sign=False):
        """change the sign of P for outcoming fermion in order to
        correct the mismatch convention between HELAS and FR"""

        if not expr:
            expr = self.lorentz_expr

        if need_P_sign:
            expr = re.sub(r'\b(P|PSlash)\(', r'-\1\(', expr)

        calc = aloha_parsers.ALOHAExpressionParser()
        lorentz_expr = calc.parse(expr)
        return lorentz_expr

    def compute_aloha_high_kernel(self, mode, factorize=True):
        """compute the abstract routine associate to this mode """

        # reset tag for particles
        aloha_lib.KERNEL.use_tag=set()
        #multiply by the wave functions
        nb_spinor = 0
        outgoing = self.outgoing
        if (outgoing + 1) // 2 in self.conjg:
            #flip the outgoing tag if in conjugate
            outgoing = outgoing + outgoing % 2 - (outgoing +1) % 2

        if not self.routine_kernel:
            # First evaluation: parse the UFO expression and eval it into
            # aloha_object instances (the names come from aloha_object import *).
            AbstractRoutineBuilder.counter += 1
            if self.tag == []:
                logger.info('aloha creates %s routines' % self.name)
            elif AbstractALOHAModel.lastprint < time.time() - 1:
                # rate-limit the printout to at most ~1 per second
                AbstractALOHAModel.lastprint = time.time()
                logger.info('aloha creates %s set of routines with options: %s' \
                            % (self.name, ','.join(self.tag)) )
            try:
                lorentz = self.parse_expression()
                self.routine_kernel = lorentz
                lorentz = eval(lorentz)
            except NameError as error:
                logger.error('unknow type in Lorentz Evaluation:%s'%str(error))
                raise ALOHAERROR, 'unknow type in Lorentz Evaluation: %s ' % str(error)
            else:
                self.kernel_tag = set(aloha_lib.KERNEL.use_tag)
        elif isinstance(self.routine_kernel,str):
            lorentz = eval(self.routine_kernel)
            aloha_lib.KERNEL.use_tag = set(self.kernel_tag)
        else:
            lorentz = copy.copy(self.routine_kernel)
            aloha_lib.KERNEL.use_tag = set(self.kernel_tag)

        # Multiply the kernel by one wavefunction/propagator per particle.
        for (i, spin ) in enumerate(self.spins):
            id = i + 1
            #Check if this is the outgoing particle
            if id == outgoing:

                # check if we need a special propagator
                propa = [t[1:] for t in self.tag if t.startswith('P')]
                if propa == ['0']:
                    massless = True
                    self.denominator = None
                elif propa == []:
                    massless = False
                    self.denominator = None
                else:
                    # user-defined propagator: handled entirely by get_custom_propa
                    lorentz *= complex(0,1) * self.get_custom_propa(propa[0], spin, id)
                    continue

                if spin in [1,-1]:
                    lorentz *= complex(0,1)
                elif spin == 2:
                    # shift and flip the tag if we multiply by C matrices
                    if (id + 1) // 2 in self.conjg:
                        id += _conjugate_gap + id % 2 - (id +1) % 2
                    if (id % 2):
                        #propagator outcoming
                        lorentz *= complex(0,1) * SpinorPropagatorout(id, 'I2', outgoing)
                    else:
                        # #propagator incoming
                        lorentz *= complex(0,1) * SpinorPropagatorin('I2', id, outgoing)
                elif spin == 3 :
                    if massless or not aloha.unitary_gauge:
                        lorentz *= VectorPropagatorMassless(id, 'I2', id)
                    else:
                        lorentz *= VectorPropagator(id, 'I2', id)
                elif spin == 4:
                    # shift and flip the tag if we multiply by C matrices
                    if (id + 1) // 2 in self.conjg:
                        spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
                    else:
                        spin_id = id
                    nb_spinor += 1
                    if not massless and (spin_id % 2):
                        lorentz *= complex(0,1) * Spin3halfPropagatorout(id, 'I2', spin_id,'I3', outgoing)
                    elif not massless and not (spin_id % 2):
                        lorentz *= complex(0,1) * Spin3halfPropagatorin('I2', id , 'I3', spin_id, outgoing)
                    elif spin_id %2:
                        lorentz *= complex(0,1) * Spin3halfPropagatorMasslessOut(id, 'I2', spin_id,'I3', outgoing)
                    else :
                        lorentz *= complex(0,1) * Spin3halfPropagatorMasslessIn('I2', id, 'I3', spin_id, outgoing)

                elif spin == 5 :
                    #lorentz *= 1 # delayed evaluation (fastenize the code)
                    if massless:
                        lorentz *= complex(0,1) * Spin2masslessPropagator(_spin2_mult + id, \
                                             2 * _spin2_mult + id,'I2','I3')
                    else:
                        lorentz *= complex(0,1) * Spin2Propagator(_spin2_mult + id, \
                                             2 * _spin2_mult + id,'I2','I3', id)
                else:
                    raise self.AbstractALOHAError(
                                'The spin value %s (2s+1) is not supported yet' % spin)
            else:
                # This is an incoming particle
                if spin in [1,-1]:
                    lorentz *= Scalar(id)
                elif spin == 2:
                    # shift the tag if we multiply by C matrices
                    if (id+1) // 2 in self.conjg:
                        spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
                    else:
                        spin_id = id
                    lorentz *= Spinor(spin_id, id)
                elif spin == 3:
                    lorentz *= Vector(id, id)
                elif spin == 4:
                    # shift the tag if we multiply by C matrices
                    if (id+1) // 2 in self.conjg:
                        spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
                    else:
                        spin_id = id
                    nb_spinor += 1
                    lorentz *= Spin3Half(id, spin_id, id)
                elif spin == 5:
                    lorentz *= Spin2(1 * _spin2_mult + id, 2 * _spin2_mult + id, id)
                else:
                    raise self.AbstractALOHAError(
                                'The spin value %s (2s+1) is not supported yet' % spin)

        # If no particle OffShell
        if not outgoing:
            lorentz *= complex(0,-1)
            # Propagator are taken care separately

        lorentz = lorentz.simplify()

        # Modify the expression in case of loop-pozzorini
        if any((tag.startswith('L') for tag in self.tag if len(tag)>1)):
            return self.compute_loop_coefficient(lorentz, outgoing)

        lorentz = lorentz.expand()
        lorentz = lorentz.simplify()

        if factorize:
            lorentz = lorentz.factorize()

        lorentz.tag = set(aloha_lib.KERNEL.use_tag)
        return lorentz

    @staticmethod
    def mod_propagator_expression(tag, text):
        """Change the index of the propagator to match the current need"""
        # split the text into (function-name, argument-list) groups so the
        # replacement only touches function arguments
        data = re.split(r'(\b[a-zA-Z]\w*?)\(([\'\w,\s]*?)\)',text)

        pos=-2
        while pos +3 < len(data):
            pos = pos+3
            ltype = data[pos]
            if ltype != 'complex':
                for old, new in tag.items():
                    if isinstance(new, str):
                        new='\'%s\'' % new
                    else:
                        new = str(new)
                    data[pos+1] = re.sub(r'\b%s\b' % old, new, data[pos+1])
            data[pos+1] = '(%s)' % data[pos+1]
        text=''.join(data)
        return text

    def get_custom_propa(self, propa, spin, id):
        """Return the ALOHA object associated to the user define propagator"""

        propagator = getattr(self.model.propagators, propa)
        numerator = propagator.numerator
        denominator = propagator.denominator

        # Find how to make the replacement for the various tag in the propagator expression
        needPflipping = False
        if spin in [1,-1]:
            tag = {'id': id}
        elif spin == 2:
            # shift and flip the tag if we multiply by C matrices
            if (id + 1) // 2 in self.conjg:
                spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
            else:
                spin_id = id
            if (spin_id % 2):
                #propagator outcoming
                needPflipping = True
                tag ={'1': spin_id, '2': 'I2', 'id': id}
            else:
                tag ={'1': 'I2', '2': spin_id, 'id': id}
        elif spin == 3 :
            tag ={'1': id, '2': 'I2', 'id': id}
        elif spin == 4:
            delta = lambda i,j: aloha_object.Identity(i,j)
            deltaL = lambda i,j: aloha_object.IdentityL(i,j)
            # shift and flip the tag if we multiply by C matrices
            if (id + 1) // 2 in self.conjg:
                spin_id = id + _conjugate_gap + id % 2 - (id +1) % 2
            else:
                spin_id = id
            if spin_id % 2:
                needPflipping = True
                tag = {'1': 'pr_1', '2': 'pr_2', 'id':id}
            else:
                tag = {'1': 'pr_2', '2': 'pr_1'}
            numerator *= deltaL('pr_1',id) * deltaL('pr_2', 'I2') * \
                         delta('pr_1', spin_id) * delta('pr_2', 'I3')
        elif spin == 5 :
            tag = {'1': _spin2_mult + id, '2': 2 * _spin2_mult + id,
                   '51': 'I2', '52': 'I3', 'id':id}

        numerator = self.mod_propagator_expression(tag, numerator)
        if denominator:
            denominator = self.mod_propagator_expression(tag, denominator)

        numerator = self.parse_expression(numerator, needPflipping)
        if denominator:
            self.denominator = self.parse_expression(denominator, needPflipping)
            self.denominator = eval(self.denominator)
            if not isinstance(self.denominator, numbers.Number):
                self.denominator = self.denominator.simplify().expand().simplify().get((0,))

        return eval(numerator)

    def compute_loop_coefficient(self, lorentz, outgoing):
        # Split the expression into coefficients of the loop momentum PL and
        # of the incoming open-loop wavefunction components.

        l_in = [int(tag[1:]) for tag in self.tag if tag.startswith('L')][0]
        if (l_in + 1) // 2 in self.conjg:
            #flip the outgoing tag if in conjugate
            l_in = l_in + l_in % 2 - (l_in +1) % 2
        assert l_in != outgoing, 'incoming Open Loop can not be the outcoming one'

        # modify the expression for the momenta
        # P_i -> P_i + P_L and P_o -> -P_o - P_L
        Pdep = [aloha_lib.KERNEL.get(P) for P in lorentz.get_all_var_names()
                if P.startswith('_P')]

        Pdep = set([P for P in Pdep if P.particle in [outgoing, l_in]])
        for P in Pdep:
            if P.particle == l_in:
                sign = 1
            else:
                sign = -1
            id = P.id
            lorentz_ind = P.lorentz_ind[0]
            P_Lid = aloha_object.P(lorentz_ind, 'L')
            P_obj = aloha_object.P(lorentz_ind, P.particle)
            new_expr = sign*(P_Lid + P_obj)
            lorentz = lorentz.replace(id, new_expr)

        # Compute the variable from which we need to split the expression
        var_veto = ['PL_0', 'PL_1', 'PL_2', 'PL_3']
        spin = aloha_writers.WriteALOHA.type_to_variable[abs(self.spins[l_in-1])]
        size = aloha_writers.WriteALOHA.type_to_size[spin]-1
        var_veto += ['%s%s_%s' % (spin,l_in,i) for i in range(1,size)]
        # compute their unique identifiant
        veto_ids = aloha_lib.KERNEL.get_ids(var_veto)

        lorentz = lorentz.expand(veto = veto_ids)
        lorentz = lorentz.simplify()
        coeff_expr = lorentz.split(veto_ids)

        for key, expr in coeff_expr.items():
            expr = expr.simplify()
            coeff_expr[key] = expr.factorize()
        coeff_expr.tag = set(aloha_lib.KERNEL.use_tag)

        return coeff_expr

    def define_lorentz_expr(self, lorentz_expr):
        """Define the expression"""

        self.expr = lorentz_expr

    def define_routine_kernel(self, lorentz=None):
        """Define the kernel at low level"""

        if not lorentz:
            logger.info('compute kernel %s' % self.counter)
            AbstractRoutineBuilder.counter += 1
            lorentz = eval(self.lorentz_expr)

            if isinstance(lorentz, numbers.Number):
                self.routine_kernel = lorentz
                return lorentz
            lorentz = lorentz.simplify()
            lorentz = lorentz.expand()
            lorentz = lorentz.simplify()

        self.routine_kernel = lorentz
        return lorentz

    @staticmethod
    def get_routine_name(name, outgoing):
        """return the name of the routine: <lorentz name>_<outgoing id>"""

        name = '%s_%s' % (name, outgoing)
        return name

    @classmethod
    def load_library(cls, tag):
        # load the library (cached in the class attribute prop_lib)
        if tag in cls.prop_lib:
            return
        else:
            cls.prop_lib = create_prop_library(tag, cls.aloha_lib)
class CombineRoutineBuilder(AbstractRoutineBuilder):
    """A special builder for combine routine if needed to write those
    explicitely.
    """

    def __init__(self, l_lorentz, model=None):
        """Initialize the run for a sum of lorentz structures.

        l_lorentz: list of lorentz information analyzed (UFO format);
        the combined expression is sum_i Coup(i) * structure_i.
        """
        first = l_lorentz[0]
        AbstractRoutineBuilder.__init__(self, first, model)
        self.spins = first.spins
        names = [lor.name for lor in l_lorentz]
        self.name = aloha_writers.combine_name(names[0], names[1:], None)
        self.conjg = []
        self.tag = []
        self.outgoing = None
        # each structure is weighted by its own coupling placeholder Coup(i)
        self.lorentz_expr = ' + '.join(
            'Coup(%s) * (%s)' % (pos + 1, lor.structure)
            for pos, lor in enumerate(l_lorentz))
        self.routine_kernel = None
        self.contracted = {}
        self.fct = {}
593 -class AbstractALOHAModel(dict):
594 """ A class to build and store the full set of Abstract ALOHA Routine""" 595 596 lastprint = 0 597
    def __init__(self, model_name, write_dir=None, format='Fortran',
                 explicit_combine=False):
        """ load the UFO model and init the dictionary

        model_name: name of the UFO model (possibly 'name-restriction')
        write_dir: if given, immediately compute and write all routines there
        format: output language ('Fortran', 'Python', 'CPP')
        explicit_combine: write combined routines explicitly instead of
                          relying on combine rules
        """

        # Option
        self.explicit_combine = explicit_combine

        # Extract the model name if combined with restriction
        model_name_pattern = re.compile("^(?P<name>.+)-(?P<rest>[\w\d_]+)$")
        model_name_re = model_name_pattern.match(model_name)
        if model_name_re:
            name = model_name_re.group('name')
            rest = model_name_re.group("rest")
            # only strip the '-rest' suffix when it is a genuine restriction
            # (either the special 'full' keyword or a restrict_<rest>.dat file)
            if rest == 'full' or \
               os.path.isfile(os.path.join(root_path, "models", name,
                                           "restrict_%s.dat" % rest)):
                model_name = model_name_re.group("name")

        # load the UFO model: first as a plain module, then under 'models.'
        try:
            python_pos = model_name
            __import__(python_pos)
        except Exception:
            python_pos = 'models.%s' % model_name
            __import__(python_pos)
        self.model = sys.modules[python_pos]
        # find the position on the disk
        self.model_pos = os.path.dirname(self.model.__file__)

        # list the external routine
        self.external_routines = []

        # init the dictionary (self maps (lorentz name, outgoing) -> routine)
        dict.__init__(self)
        self.symmetries = {}
        self.multiple_lor = {}

        if write_dir:
            self.main(write_dir,format=format)
637
    def main(self, output_dir, format='Fortran'):
        """ Compute if not already compute.
            Write file in models/MY_MODEL/MY_FORMAT.
            copy the file to output_dir
        """
        # file extension associated to each supported output language
        ext = {'Fortran':'f','Python':'py','CPP':'h'}

        # Check if a pickle file exists
        if not self.load():
            self.compute_all()
        logger.info(' %s aloha routine' % len(self))

        # Check that output directory exists
        if not output_dir:
            output_dir = os.path.join(self.model_pos, format.lower())
            logger.debug('aloha output dir is %s' % output_dir)
        if not os.path.exists(output_dir):
            os.mkdir(output_dir)

        # Check that all routine are generated at default places:
        for (name, outgoing), abstract in self.items():
            routine_name = AbstractRoutineBuilder.get_routine_name(name, outgoing)
            # only (re)write the file when it is not already present
            if not glob.glob(os.path.join(output_dir, routine_name) + '.' + ext[format]):
                abstract.write(output_dir, format)
            else:
                logger.info('File for %s already present, skip the writing of this file' % routine_name)
665 666
667 - def save(self, filepos=None):
668 """ save the current model in a pkl file """ 669 670 logger.info('save the aloha abstract routine in a pickle file') 671 if not filepos: 672 filepos = os.path.join(self.model_pos,'aloha.pkl') 673 674 fsock = open(filepos, 'w') 675 cPickle.dump(dict(self), fsock)
676
    def load(self, filepos=None):
        """ reload the pickle file """
        # NOTE: the unconditional return below deliberately disables the
        # pickle cache -- everything after it is intentionally dead code,
        # kept for the day the cache is re-enabled.
        return False
        if not filepos:
            filepos = os.path.join(self.model_pos,'aloha.pkl')
        if os.path.exists(filepos):
            fsock = open(filepos, 'r')
            self.update(cPickle.load(fsock))
            return True
        else:
            return False
688
689 - def get(self, lorentzname, outgoing):
690 """ return the AbstractRoutine with a given lorentz name, and for a given 691 outgoing particle """ 692 693 try: 694 return self[(lorentzname, outgoing)] 695 except Exception: 696 logger.warning('(%s, %s) is not a valid key' % 697 (lorentzname, outgoing) ) 698 return None
699
700 - def get_info(self, info, lorentzname, outgoing, tag, cached=False):
701 """return some information about the aloha routine 702 - "rank": return the rank of the loop function 703 If the cached option is set to true, then the result is stored and 704 recycled if possible. 705 """ 706 707 if not aloha.loop_mode and any(t.startswith('L') for t in tag): 708 aloha.loop_mode = True 709 710 711 returned_dict = {} 712 # Make sure the input argument is a list 713 if isinstance(info, str): 714 infos = [info] 715 else: 716 infos = info 717 718 # First deal with the caching of infos 719 if hasattr(self, 'cached_interaction_infos'): 720 # Now try to recover it 721 for info_key in infos: 722 try: 723 returned_dict[info] = self.cached_interaction_infos[\ 724 (lorentzname,outgoing,tuple(tag),info)] 725 except KeyError: 726 # Some information has never been computed before, so they 727 # will be computed later. 728 pass 729 elif cached: 730 self.cached_interaction_infos = {} 731 732 init = False 733 for info_key in infos: 734 if info_key in returned_dict: 735 continue 736 elif not init: 737 # need to create the aloha object 738 lorentz = eval('self.model.lorentz.%s' % lorentzname) 739 abstract = AbstractRoutineBuilder(lorentz) 740 routine = abstract.compute_routine(outgoing, tag, factorize=False) 741 init = True 742 743 assert 'routine' in locals() 744 returned_dict[info_key] = routine.get_info(info_key) 745 if cached: 746 # Cache the information computed 747 self.cached_interaction_infos[\ 748 (lorentzname,outgoing,tuple(tag),info_key)]=returned_dict[info_key] 749 750 if isinstance(info, str): 751 return returned_dict[info] 752 else: 753 return returned_dict
754
755 - def set(self, lorentzname, outgoing, abstract_routine):
756 """ add in the dictionary """ 757 758 self[(lorentzname, outgoing)] = abstract_routine
759
    def compute_all(self, save=True, wanted_lorentz = [], custom_propa=False):
        """ define all the AbstractRoutine linked to a model

        save: pickle the result at the end (see self.save)
        wanted_lorentz: restrict computation to these lorentz names
        custom_propa: also generate routines for user-defined propagators
        """

        # Search identical particles in the vertices in order to avoid
        #to compute identical contribution
        self.look_for_symmetries()
        conjugate_list = self.look_for_conjugate()
        self.look_for_multiple_lorentz_interactions()

        if not wanted_lorentz:
            wanted_lorentz = [l.name for l in self.model.all_lorentz]
        for lorentz in self.model.all_lorentz:
            if not lorentz.name in wanted_lorentz:
                # Only include the routines we ask for
                continue

            if -1 in lorentz.spins:
                # No Ghost in ALOHA
                continue

            if lorentz.structure == 'external':
                # user-supplied routine: just record its name per outgoing id
                for i in range(len(lorentz.spins)):
                    self.external_routines.append('%s_%s' % (lorentz.name, i))
                continue

            #standard routines
            routines = [(i,[]) for i in range(len(lorentz.spins)+1)]
            # search for special propagators
            if custom_propa:
                for vertex in self.model.all_vertices:
                    if lorentz in vertex.lorentz:
                        for i,part in enumerate(vertex.particles):
                            new_prop = False
                            if hasattr(part, 'propagator') and part.propagator:
                                new_prop = ['P%s' % part.propagator.name]
                            elif part.mass.name.lower() == 'zero':
                                new_prop = ['P0']
                            if new_prop and (i+1, new_prop) not in routines:
                                routines.append((i+1, new_prop))

            builder = AbstractRoutineBuilder(lorentz, self.model)
            self.compute_aloha(builder, routines=routines)

            if lorentz.name in self.multiple_lor:
                for m in self.multiple_lor[lorentz.name]:
                    for outgoing in range(len(lorentz.spins)+1):
                        try:
                            self[(lorentz.name, outgoing)].add_combine(m)
                        except Exception:
                            pass # this routine is a symmetric one, so it
                                 # already has the combination.

            if lorentz.name in conjugate_list:
                conjg_builder_list= builder.define_all_conjugate_builder(\
                                                   conjugate_list[lorentz.name])
                for conjg_builder in conjg_builder_list:
                    # No duplication of conjugation:
                    assert conjg_builder_list.count(conjg_builder) == 1
                    self.compute_aloha(conjg_builder, lorentz.name)
                    if lorentz.name in self.multiple_lor:
                        for m in self.multiple_lor[lorentz.name]:
                            for outgoing in range(len(lorentz.spins)+1):
                                realname = conjg_builder.name + ''.join(['C%s' % pair for pair in conjg_builder.conjg])
                                try:
                                    self[(realname, outgoing)].add_combine(m)
                                except Exception,error:
                                    # fall back on the symmetric partner routine
                                    self[(realname, self.symmetries[lorentz.name][outgoing])].add_combine(m)

        if save:
            self.save()
830
831 - def add_Lorentz_object(self, lorentzlist):
832 """add a series of Lorentz structure created dynamically""" 833 834 for lor in lorentzlist: 835 if not hasattr(self.model.lorentz, lor.name): 836 setattr(self.model.lorentz, lor.name, lor)
837
    def compute_subset(self, data):
        """ create the requested ALOHA routine.
        data should be a list of tuple (lorentz, tag, outgoing)
        tag should be the list of special tag (like conjugation on pair)
        to apply on the object """

        # Search identical particles in the vertices in order to avoid
        #to compute identical contribution
        self.look_for_symmetries()
        # reorganize the data (in order to use optimization for a given lorentz
        #structure
        aloha.loop_mode = False
        # self.explicit_combine = False
        request = {}

        for list_l_name, tag, outgoing in data:
            #allow tag to have integer for retro-compatibility
            all_tag = tag[:]
            conjugate = [i for i in tag if isinstance(i, int)]

            # normalized order: plain tags, then C tags, then P tags
            tag =  [i for i in tag if isinstance(i, str) and not i.startswith('P')]
            tag = tag + ['C%s'%i for i in conjugate]
            tag = tag + [i for i in all_tag if isinstance(i, str) and i.startswith('P')]

            conjugate = tuple([int(c[1:]) for c in tag if c.startswith('C')])
            loop = any((t.startswith('L') for t in tag))
            if loop:
                aloha.loop_mode = True
                self.explicit_combine = True

            # request[l_name][conjugate] -> list of (outgoing, tag)
            for l_name in list_l_name:
                try:
                    request[l_name][conjugate].append((outgoing,tag))
                except Exception:
                    try:
                        request[l_name][conjugate] = [(outgoing,tag)]
                    except Exception:
                        request[l_name] = {conjugate: [(outgoing,tag)]}

        # Loop on the structure to build exactly what is request
        for l_name in request:
            lorentz = eval('self.model.lorentz.%s' % l_name)
            if lorentz.structure == 'external':
                for tmp in request[l_name]:
                    for outgoing, tag in request[l_name][tmp]:
                        name = aloha_writers.get_routine_name(lorentz.name,outgoing=outgoing,tag=tag)
                        if name not in self.external_routines:
                            self.external_routines.append(name)
                continue

            builder = AbstractRoutineBuilder(lorentz, self.model)

            for conjg in request[l_name]:
                #ensure that routines are in rising order (for symetries)
                # NOTE: Python-2 style cmp function passed to list.sort
                def sorting(a,b):
                    if a[0] < b[0]: return -1
                    else: return 1
                routines = request[l_name][conjg]
                routines.sort(sorting)
                if not conjg:
                    # No need to conjugate -> compute directly
                    self.compute_aloha(builder, routines=routines)
                else:
                    # Define the high level conjugate routine
                    conjg_builder = builder.define_conjugate_builder(conjg)
                    # Compute routines
                    self.compute_aloha(conjg_builder, symmetry=lorentz.name,
                                        routines=routines)

        # Build mutiple lorentz call
        for list_l_name, tag, outgoing in data:
            if len(list_l_name) ==1:
                continue
            #allow tag to have integer for retrocompatibility
            conjugate = [i for i in tag if isinstance(i, int)]
            all_tag = tag[:]
            tag =  [i for i in tag if isinstance(i, str) and not i.startswith('P')]
            tag = tag + ['C%s'%i for i in conjugate]
            tag = tag + [i for i in all_tag if isinstance(i, str) and i.startswith('P')]

            if not self.explicit_combine:
                # rely on the combine rule mechanism of the base routine
                lorentzname = list_l_name[0]
                lorentzname += ''.join(tag)
                if self.has_key((lorentzname, outgoing)):
                    self[(lorentzname, outgoing)].add_combine(list_l_name[1:])
                else:
                    lorentz = eval('self.model.lorentz.%s' % lorentzname)
                    assert lorentz.structure == 'external'
            else:
                # write an explicit combined routine (loop mode)
                l_lorentz = []
                for l_name in list_l_name:
                    l_lorentz.append(eval('self.model.lorentz.%s' % l_name))
                builder = CombineRoutineBuilder(l_lorentz)

                for conjg in request[list_l_name[0]]:
                    #ensure that routines are in rising order (for symetries)
                    def sorting(a,b):
                        if a[0] < b[0]: return -1
                        else: return 1
                    routines = request[list_l_name[0]][conjg]
                    routines.sort(sorting)
                    if not conjg:
                        # No need to conjugate -> compute directly
                        self.compute_aloha(builder, routines=routines)
                    else:
                        # Define the high level conjugate routine
                        conjg_builder = builder.define_conjugate_builder(conjg)
                        # Compute routines
                        self.compute_aloha(conjg_builder, symmetry=lorentz.name,
                                           routines=routines)
953 - def compute_aloha(self, builder, symmetry=None, routines=None, tag=[]):
954 """ define all the AbstractRoutine linked to a given lorentz structure 955 symmetry authorizes to use the symmetry of anoter lorentz structure. 956 routines to define only a subset of the routines.""" 957 958 name = builder.name 959 if not symmetry: 960 symmetry = name 961 if not routines: 962 if not tag: 963 tag = ['C%s' % i for i in builder.conjg] 964 else: 965 addon = ['C%s' % i for i in builder.conjg] 966 tag = [(i,addon +onetag) for i,onetag in tag] 967 routines = [ tuple([i,tag]) for i in range(len(builder.spins) + 1 )] 968 969 # Create the routines 970 for outgoing, tag in routines: 971 symmetric = self.has_symmetries(symmetry, outgoing, valid_output=routines) 972 realname = name + ''.join(tag) 973 if (realname, outgoing) in self: 974 continue # already computed 975 976 if symmetric: 977 self.get(realname, symmetric).add_symmetry(outgoing) 978 else: 979 wavefunction = builder.compute_routine(outgoing, tag) 980 #Store the information 981 self.set(realname, outgoing, wavefunction)
982 983
984 - def compute_aloha_without_kernel(self, builder, symmetry=None, routines=None):
985 """define all the AbstractRoutine linked to a given lorentz structure 986 symmetry authorizes to use the symmetry of anoter lorentz structure. 987 routines to define only a subset of the routines. 988 Compare to compute_aloha, each routines are computed independently. 989 """ 990 991 name = builder.name 992 if not routines: 993 routines = [ tuple([i,[]]) for i in range(len(builder.spins) + 1 )] 994 995 for outgoing, tag in routines: 996 builder.routine_kernel = None 997 wavefunction = builder.compute_routine(outgoing, tag) 998 self.set(name, outgoing, wavefunction)
999 1000
1001 - def write(self, output_dir, language):
1002 """ write the full set of Helicity Routine in output_dir""" 1003 for abstract_routine in self.values(): 1004 abstract_routine.write(output_dir, language) 1005 1006 for routine in self.external_routines: 1007 self.locate_external(routine, language, output_dir)
1008 1009 # if aloha_lib.KERNEL.unknow_fct: 1010 # if language == 'Fortran': 1011 # logger.warning('''Some function present in the lorentz structure are not 1012 # recognized. A Template file has been created: 1013 # %s 1014 # Please edit this file to include the associated definition.''' % \ 1015 # pjoin(output_dir, 'additional_aloha_function.f') ) 1016 # else: 1017 # logger.warning('''Some function present in the lorentz structure are 1018 # not recognized. Please edit the code to add the defnition of such function.''') 1019 # logger.info('list of missing fct: %s .' % \ 1020 # ','.join([a[0] for a in aloha_lib.KERNEL.unknow_fct])) 1021 # 1022 # for fct_name, nb_arg in aloha_lib.KERNEL.unknow_fct: 1023 # if language == 'Fortran': 1024 # aloha_writers.write_template_fct(fct_name, nb_arg, output_dir) 1025 1026 1027 1028 #self.write_aloha_file_inc(output_dir) 1029
1030 - def locate_external(self, name, language, output_dir=None):
1031 """search a valid external file and copy it to output_dir directory""" 1032 1033 language_to_ext = {'Python': 'py', 1034 'Fortran' : 'f', 1035 'CPP': 'C'} 1036 ext = language_to_ext[language] 1037 paths = [os.path.join(self.model_pos, language), self.model_pos, 1038 os.path.join(root_path, 'aloha', 'template_files', )] 1039 1040 ext_files = [] 1041 for path in paths: 1042 ext_files = glob.glob(os.path.join(path, '%s.%s' % (name, ext))) 1043 if ext_files: 1044 break 1045 else: 1046 1047 raise ALOHAERROR, 'No external routine \"%s.%s\" in directories\n %s' % \ 1048 (name, ext, '\n'.join(paths)) 1049 1050 if output_dir: 1051 for filepath in ext_files: 1052 1053 files.cp(filepath, output_dir) 1054 return ext_files
1055 1056 1057
1058 - def look_for_symmetries(self):
1059 """Search some symmetries in the vertices. 1060 We search if some identical particles are in a vertices in order 1061 to avoid to compute symmetrical contributions""" 1062 1063 for vertex in self.model.all_vertices: 1064 for i, part1 in enumerate(vertex.particles): 1065 for j in range(i-1,-1,-1): 1066 part2 = vertex.particles[j] 1067 if part1.pdg_code == part2.pdg_code and part1.color == 1: 1068 if part1.spin == 2 and (i % 2 != j % 2 ): 1069 continue 1070 for lorentz in vertex.lorentz: 1071 if self.symmetries.has_key(lorentz.name): 1072 if self.symmetries[lorentz.name].has_key(i+1): 1073 self.symmetries[lorentz.name][i+1] = max(self.symmetries[lorentz.name][i+1], j+1) 1074 else: 1075 self.symmetries[lorentz.name][i+1] = j+1 1076 else: 1077 self.symmetries[lorentz.name] = {i+1:j+1} 1078 break
1079
1080 - def look_for_multiple_lorentz_interactions(self):
1081 """Search the interaction associate with more than one lorentz structure. 1082 If those lorentz structure have the same order and the same color then 1083 associate a multiple lorentz routines to ALOHA """ 1084 1085 orders = {} 1086 for coup in self.model.all_couplings: 1087 orders[coup.name] = str(coup.order) 1088 1089 for vertex in self.model.all_vertices: 1090 if len(vertex.lorentz) == 1: 1091 continue 1092 #remove ghost 1093 #if -1 in vertex.lorentz[0].spins: 1094 # continue 1095 1096 # assign each order/color to a set of lorentz routine 1097 combine = {} 1098 for (id_col, id_lor), coups in vertex.couplings.items(): 1099 if not isinstance(coups, list): 1100 coups = [coups] 1101 for coup in coups: 1102 order = orders[coup.name] 1103 key = (id_col, order) 1104 if key in combine: 1105 combine[key].append(id_lor) 1106 else: 1107 combine[key] = [id_lor] 1108 1109 # Check if more than one routine are associated 1110 for list_lor in combine.values(): 1111 if len(list_lor) == 1: 1112 continue 1113 list_lor.sort() 1114 main = vertex.lorentz[list_lor[0]].name 1115 if main not in self.multiple_lor: 1116 self.multiple_lor[main] = [] 1117 1118 info = tuple([vertex.lorentz[id].name for id in list_lor[1:]]) 1119 if info not in self.multiple_lor[main]: 1120 self.multiple_lor[main].append(info)
1121 1122
1123 - def has_symmetries(self, l_name, outgoing, out=None, valid_output=None):
1124 """ This returns out if no symmetries are available, otherwise it finds 1125 the lowest equivalent outgoing by recursivally calling this function. 1126 auth is a list of authorize output, if define""" 1127 1128 try: 1129 equiv = self.symmetries[l_name][outgoing] 1130 except Exception: 1131 return out 1132 else: 1133 if not valid_output or equiv in valid_output: 1134 return self.has_symmetries(l_name, equiv, out=equiv, 1135 valid_output=valid_output) 1136 else: 1137 return self.has_symmetries(l_name, equiv, out=out, 1138 valid_output=valid_output)
1139
1140 - def look_for_conjugate(self):
1141 """ create a list for the routine needing to be conjugate """ 1142 1143 # Check if they are majorana in the model. 1144 need = False 1145 for particle in self.model.all_particles: 1146 if particle.spin == 2 and particle.selfconjugate: 1147 need = True 1148 break 1149 1150 if not need: 1151 for interaction in self.model.all_vertices: 1152 fermions = [p for p in interaction.particles if p.spin == 2] 1153 for i in range(0, len(fermions), 2): 1154 if fermions[i].pdg_code * fermions[i+1].pdg_code > 0: 1155 # This is a fermion flow violating interaction 1156 need = True 1157 break 1158 1159 # No majorana particles 1160 if not need: 1161 return {} 1162 1163 conjugate_request = {} 1164 # Check each vertex if they are fermion and/or majorana 1165 for vertex in self.model.all_vertices: 1166 for i in range(0, len(vertex.particles), 2): 1167 part1 = vertex.particles[i] 1168 if part1.spin !=2: 1169 # deal only with fermion 1170 break 1171 # check if this pair contains a majorana 1172 if part1.selfconjugate: 1173 continue 1174 part2 = vertex.particles[i + 1] 1175 if part2.selfconjugate: 1176 continue 1177 1178 # No majorana => add the associate lorentz structure 1179 for lorentz in vertex.lorentz: 1180 try: 1181 conjugate_request[lorentz.name].add(i//2+1) 1182 except Exception: 1183 conjugate_request[lorentz.name] = set([i//2+1]) 1184 1185 for elem in conjugate_request: 1186 conjugate_request[elem] = list(conjugate_request[elem]) 1187 1188 return conjugate_request
1189
def write_aloha_file_inc(aloha_dir, file_ext, comp_ext):
    """Find the helicity routine files in aloha_dir and write an
    'aloha_file.inc' file listing them with the compiled extension.

    aloha_dir: directory containing the generated ALOHA routine files.
    file_ext:  source-file extension of the routines (e.g. '.f').
    comp_ext:  extension substituted in the emitted list (e.g. '.o').
    """

    aloha_files = []

    # Identify the valid files: generated routines end in _<n><ext>
    alohafile_pattern = re.compile(r'''_\d%s''' % file_ext)
    for filename in os.listdir(aloha_dir):
        if os.path.isfile(os.path.join(aloha_dir, filename)):
            if alohafile_pattern.search(filename):
                aloha_files.append(filename.replace(file_ext, comp_ext))

    # the optional user template must be compiled/linked as well
    if os.path.exists(os.path.join(aloha_dir, 'additional_aloha_function.f')):
        aloha_files.append('additional_aloha_function.o')

    text = "ALOHARoutine = "
    text += ' '.join(aloha_files)
    text += '\n'

    # open() replaces the 'file' builtin (removed in Python 3) and the
    # context manager guarantees the handle is closed
    with open(os.path.join(aloha_dir, 'aloha_file.inc'), 'w') as fsock:
        fsock.write(text)
1214
def create_prop_library(tag, lib={}):
    """Build (and cache) the propagator expression selected by tag.

    tag: tuple (name, i) with name in ('Spin2Prop', 'Spin2PropMassless')
         and i the particle index.
    lib: dictionary receiving the computed expression.
         NOTE(review): the mutable default acts as a cross-call cache;
         confirm callers rely on this before changing it.
    Returns lib.
    """

    def create(obj):
        """Simplify, expand, then re-simplify an aloha expression."""
        obj = obj.simplify()
        obj = obj.expand()
        obj = obj.simplify()
        return obj

    # avoid to add tag in global
    old_tag = set(aloha_lib.KERNEL.use_tag)
    # print-function form: same output text on Python 2 and 3
    # (the bare 'print x, y' statement is a SyntaxError on Python 3)
    print('create lib %s' % (tag,))
    name, i = tag
    if name == "Spin2Prop":
        lib[('Spin2Prop', i)] = create(Spin2Propagator(_spin2_mult + i,
                                       2 * _spin2_mult + i, 'I2', 'I3', i))
    elif name == "Spin2PropMassless":
        lib[('Spin2PropMassless', i)] = create(Spin2masslessPropagator(
                                        _spin2_mult + i, 2 * _spin2_mult + i, 'I2', 'I3'))

    # restore the kernel tag set saved above
    aloha_lib.KERNEL.use_tag = old_tag
    return lib


if '__main__' == __name__:
    logging.basicConfig(level=0)
    #create_library()
    import profile
    #model

    start = time.time()
    # Demo/benchmark driver: generate every ALOHA routine for the 'sm'
    # model, report timings, and dump the result as Python code.
    def main():
        # build all abstract routines for the Standard Model (no pickle save)
        alohagenerator = AbstractALOHAModel('sm')
        alohagenerator.compute_all(save=False)
        return alohagenerator
    def write(alohagenerator):
        # write the generated routines in Python form under /tmp/
        alohagenerator.write('/tmp/', 'Python')
    alohagenerator = main()
    logger.info('done in %s s' % (time.time()-start))
    write(alohagenerator)
    #profile.run('main()')
    #profile.run('write(alohagenerator)')
    stop = time.time()
    logger.info('done in %s s' % (stop-start))