15 """Several different checks for processes (and hence models):
16 permutation tests, gauge invariance tests, lorentz invariance
17 tests. Also class for evaluation of Python matrix elements,
18 MatrixElementEvaluator."""
19
from __future__ import division
from __future__ import absolute_import

import array
import copy
import fractions
import itertools
import logging
import math
import os
import sys
import re
import shutil
import random
import glob
import subprocess
import time
import datetime
import errno
import pickle


import aloha
import aloha.aloha_writers as aloha_writers
import aloha.create_aloha as create_aloha

import madgraph.iolibs.export_python as export_python
import madgraph.iolibs.helas_call_writers as helas_call_writers
import models.import_ufo as import_ufo
import madgraph.iolibs.save_load_object as save_load_object
import madgraph.iolibs.file_writers as writers

import madgraph.core.base_objects as base_objects
import madgraph.core.color_algebra as color
import madgraph.core.color_amp as color_amp
import madgraph.core.helas_objects as helas_objects
import madgraph.core.diagram_generation as diagram_generation

import madgraph.various.rambo as rambo
import madgraph.various.misc as misc
import madgraph.various.progressbar as pbar
import madgraph.various.banner as bannermod

import madgraph.loop.loop_diagram_generation as loop_diagram_generation
import madgraph.loop.loop_helas_objects as loop_helas_objects
import madgraph.loop.loop_base_objects as loop_base_objects
import models.check_param_card as check_param_card

from madgraph.interface.madevent_interface import MadLoopInitializer
from madgraph.interface.common_run_interface import AskforEditCard
from madgraph import MG5DIR, InvalidCmd, MadGraph5Error

from madgraph.iolibs.files import cp

import models.model_reader as model_reader
import aloha.template_files.wavefunctions as wavefunctions
from aloha.template_files.wavefunctions import \
     ixxxxx, oxxxxx, vxxxxx, sxxxxx, txxxxx, irxxxx, orxxxx
import six
StringIO = six
from six.moves import range
from six.moves import zip
import io
if six.PY3:
    file = io.FileIO


ADDED_GLOBAL = []

temp_dir_prefix = "TMP_CHECK"

pjoin = os.path.join
def clean_added_globals(to_clean):
    for value in list(to_clean):
        del globals()[value]
        to_clean.remove(value)

105 """ Just an 'option container' to mimick the interface which is passed to the
106 tests. We put in only what is now used from interface by the test:
107 cmd.options['fortran_compiler']
108 cmd.options['complex_mass_scheme']
109 cmd._mgme_dir"""
110 - def __init__(self, mgme_dir = "", complex_mass_scheme = False,
111 fortran_compiler = 'gfortran' ):
112 self._mgme_dir = mgme_dir
113 self.options = {}
114 self.options['complex_mass_scheme']=complex_mass_scheme
115 self.options['fortran_compiler']=fortran_compiler
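
# Illustrative usage (a sketch, not executed here): the evaluator classes below
# accept such a fake interface through their 'cmd' argument when no real
# madgraph interface object is available, e.g. in unit tests. 'model' is
# assumed to be an already loaded base_objects.Model.
#
#   cmd = FakeInterface(mgme_dir=MG5DIR, complex_mass_scheme=False)
#   evaluator = MatrixElementEvaluator(model, param_card=None, cmd=cmd)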

logger = logging.getLogger('madgraph.various.process_checks')


def boost_momenta(p, boost_direction=1, beta=0.5):
    """Boost the set of momenta 'p' along the 'boost_direction' (1=x, 2=y, 3=z)
    by the given 'beta' factor."""
    boost_p = []
    gamma = 1 / math.sqrt(1 - beta**2)
    for imp in p:
        boost_component = imp[boost_direction]
        E, px, py, pz = imp
        boost_imp = []
        # Energy:
        boost_imp.append(gamma * E - gamma * beta * boost_component)
        # px:
        if boost_direction == 1:
            boost_imp.append(-gamma * beta * E + gamma * px)
        else:
            boost_imp.append(px)
        # py:
        if boost_direction == 2:
            boost_imp.append(-gamma * beta * E + gamma * py)
        else:
            boost_imp.append(py)
        # pz:
        if boost_direction == 3:
            boost_imp.append(-gamma * beta * E + gamma * pz)
        else:
            boost_imp.append(pz)

        boost_p.append(boost_imp)

    return boost_p
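
# Illustrative check (a sketch, not executed): boosting a single massless
# momentum along z with beta=0.5 gives E' = gamma*(E - beta*pz), so
#
#   boost_momenta([[500., 0., 0., 500.]], boost_direction=3, beta=0.5)
#
# returns approximately [[288.7, 0., 0., 288.7]], since gamma = 1/sqrt(0.75) ~ 1.155.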


class MatrixElementEvaluator(object):
    """Class taking care of matrix element evaluation, storing
    relevant quantities for speedup."""

    def __init__(self, model, param_card=None,
                 auth_skipping=False, reuse=True, cmd=FakeInterface()):
166 """Initialize object with stored_quantities, helas_writer,
167 model, etc.
168 auth_skipping = True means that any identical matrix element will be
169 evaluated only once
170 reuse = True means that the matrix element corresponding to a
171 given process can be reused (turn off if you are using
172 different models for the same process)"""
173
174 self.cmd = cmd
175
176
177 self.helas_writer = helas_call_writers.PythonUFOHelasCallWriter(model)
178
179
180 self.full_model = model_reader.ModelReader(model)
181 try:
182 self.full_model.set_parameters_and_couplings(param_card)
183 except MadGraph5Error:
184 if isinstance(param_card, (str,file)):
185 raise
186 logger.warning('param_card present in the event file not compatible.'+
187 ' We will use the default one.')
188 self.full_model.set_parameters_and_couplings()
189
190 self.auth_skipping = auth_skipping
191 self.reuse = reuse
192 self.cmass_scheme = cmd.options['complex_mass_scheme']
193 self.store_aloha = []
194 self.stored_quantities = {}
195
196
197
198
    def evaluate_matrix_element(self, matrix_element, p=None, full_model=None,
                                gauge_check=False, auth_skipping=None, output='m2',
                                options=None):
        """Calculate the matrix element and evaluate it for a phase-space point.
        'output' is either 'm2', 'amp' or 'jamp'."""
205
206 if full_model:
207 self.full_model = full_model
208 process = matrix_element.get('processes')[0]
209 model = process.get('model')
210
211
212 if "matrix_elements" not in self.stored_quantities:
213 self.stored_quantities['matrix_elements'] = []
214 matrix_methods = {}
215
216 if self.reuse and "Matrix_%s" % process.shell_string() in globals() and p:
217 if matrix_element not in self.stored_quantities['matrix_elements']:
218 self.stored_quantities['matrix_elements'].append(matrix_element)
219
220 matrix = eval("Matrix_%s()" % process.shell_string(), globals())
221 me_value = matrix.smatrix(p, self.full_model)
222 if output == "m2":
223 return matrix.smatrix(p, self.full_model), matrix.amp2
224 else:
225 m2 = matrix.smatrix(p, self.full_model)
226 return {'m2': m2, output:getattr(matrix, output)}
227
228 if (auth_skipping or self.auth_skipping) and matrix_element in \
229 self.stored_quantities['matrix_elements']:
230
231 logger.info("Skipping %s, " % process.nice_string() + \
232 "identical matrix element already tested" \
233 )
234 return None
235
236
237 self.stored_quantities['matrix_elements'].append(matrix_element)
238
239
240
241 if "list_colorize" not in self.stored_quantities:
242 self.stored_quantities["list_colorize"] = []
243 if "list_color_basis" not in self.stored_quantities:
244 self.stored_quantities["list_color_basis"] = []
245 if "list_color_matrices" not in self.stored_quantities:
246 self.stored_quantities["list_color_matrices"] = []
247
248 col_basis = color_amp.ColorBasis()
249 new_amp = matrix_element.get_base_amplitude()
250 matrix_element.set('base_amplitude', new_amp)
251 colorize_obj = col_basis.create_color_dict_list(new_amp)
252
253 try:
254
255
256
257 col_index = self.stored_quantities["list_colorize"].index(colorize_obj)
258 except ValueError:
259
260
261 self.stored_quantities['list_colorize'].append(colorize_obj)
262 col_basis.build()
263 self.stored_quantities['list_color_basis'].append(col_basis)
264 col_matrix = color_amp.ColorMatrix(col_basis)
265 self.stored_quantities['list_color_matrices'].append(col_matrix)
266 col_index = -1
267
268
269 matrix_element.set('color_basis',
270 self.stored_quantities['list_color_basis'][col_index])
271 matrix_element.set('color_matrix',
272 self.stored_quantities['list_color_matrices'][col_index])
273
274
275 if "used_lorentz" not in self.stored_quantities:
276 self.stored_quantities["used_lorentz"] = []
277
278 me_used_lorentz = set(matrix_element.get_used_lorentz())
279 me_used_lorentz = [lorentz for lorentz in me_used_lorentz \
280 if lorentz not in self.store_aloha]
281
282 aloha_model = create_aloha.AbstractALOHAModel(model.get('modelpath'))
283 aloha_model.add_Lorentz_object(model.get('lorentz'))
284 aloha_model.compute_subset(me_used_lorentz)
285
286
287 aloha_routines = []
288 for routine in aloha_model.values():
289 aloha_routines.append(routine.write(output_dir = None,
290 mode='mg5',
291 language = 'Python'))
292 for routine in aloha_model.external_routines:
293 aloha_routines.append(
294 open(aloha_model.locate_external(routine, 'Python')).read())
295
296
297 previous_globals = list(globals().keys())
298 for routine in aloha_routines:
299 exec(routine, globals())
300 for key in globals().keys():
301 if key not in previous_globals:
302 ADDED_GLOBAL.append(key)
303
304
305 self.store_aloha.extend(me_used_lorentz)
306
307 exporter = export_python.ProcessExporterPython(matrix_element,
308 self.helas_writer)
309 try:
310 matrix_methods = exporter.get_python_matrix_methods(\
311 gauge_check=gauge_check)
312
313 except helas_call_writers.HelasWriterError as error:
314 logger.info(error)
315 return None
316
317
318
319 if self.reuse:
320
321 exec(matrix_methods[process.shell_string()], globals())
322 ADDED_GLOBAL.append('Matrix_%s' % process.shell_string())
323 else:
324
325 exec(matrix_methods[process.shell_string()], globals())
326
327 if not p:
328 p, w_rambo = self.get_momenta(process, options)
329
330 exec("data = Matrix_%s()" % process.shell_string(), globals())
331 if output == "m2":
332 return data.smatrix(p, self.full_model), data.amp2
333 else:
334 m2 = data.smatrix(p,self.full_model)
335 return {'m2': m2, output:getattr(data, output)}
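
    # Illustrative usage (a sketch; 'model' is assumed to be a loaded
    # base_objects.Model and 'me' a helas_objects.HelasMatrixElement built from
    # one of its processes):
    #
    #   evaluator = MatrixElementEvaluator(model, cmd=FakeInterface())
    #   m2, amp2 = evaluator.evaluate_matrix_element(me, output='m2')
    #
    # With e.g. output='jamp', a dictionary {'m2': ..., 'jamp': ...} is
    # returned instead.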

    @staticmethod
    def pass_isolation_cuts(pmoms, ptcut=50.0, drcut=0.5):
        """ Check whether the specified kinematic point passes isolation cuts """

342 def Pt(pmom):
343 """ Computes the pt of a 4-momentum"""
344 return math.sqrt(pmom[1]**2+pmom[2]**2)
345
346 def DeltaR(p1,p2):
347 """ Computes the DeltaR between two 4-momenta"""
348
349 p1_vec=math.sqrt(p1[1]**2+p1[2]**2+p1[3]**2)
350 p2_vec=math.sqrt(p2[1]**2+p2[2]**2+p2[3]**2)
351 eta1=0.5*math.log((p1_vec+p1[3])/(p1_vec-p1[3]))
352 eta2=0.5*math.log((p2_vec+p2[3])/(p2_vec-p2[3]))
353
354 phi1=math.atan2(p1[2],p1[1])
355 phi2=math.atan2(p2[2],p2[1])
356 dphi=abs(phi2-phi1)
357
358 dphi=abs(abs(dphi-math.pi)-math.pi)
359
360 return math.sqrt(dphi**2+(eta2-eta1)**2)
361
362 for i, pmom in enumerate(pmoms[2:]):
363
364 if Pt(pmom)<ptcut:
365 return False
366
367 for pmom2 in pmoms[3+i:]:
368 if DeltaR(pmom,pmom2)<drcut:
369 return False
370 return True
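
    # Illustrative usage (a sketch, assuming the default cuts restored above):
    #
    #   p, wgt = evaluator.get_momenta(process)
    #   if MatrixElementEvaluator.pass_isolation_cuts(p):
    #       pass  # keep this phase-space point
    #
    # Only momenta from index 2 onwards (the final-state legs) are tested, both
    # for their pT and for their pairwise DeltaR separation.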


    def get_momenta(self, process, options=None, special_mass=None):
        """Get a point in phase space for the external states in the given
        process, with the CM energy given. The incoming particles are
        assumed to be oriented along the z axis, with particle 1 along the
        positive z axis.
        For the CMS check, one must be able to choose the mass of the special
        resonance particle with id = -1, and the special_mass option allows
        one to specify it."""
383
384 if not options:
385 energy=1000
386 events=None
387 else:
388 energy = options['energy']
389 events = options['events']
390 to_skip = options['skip_evt']
391
392 if not (isinstance(process, base_objects.Process) and \
393 isinstance(energy, (float,int))):
394 raise rambo.RAMBOError("Not correct type for arguments to get_momenta")
395
396
397 sorted_legs = sorted(process.get('legs'), key=lambda l: l.get('number'))
398
399
400 if events:
401 ids = [l.get('id') for l in sorted_legs]
402 import MadSpin.decay as madspin
403 if not hasattr(self, 'event_file'):
404 fsock = open(events)
405 self.event_file = madspin.Event(fsock)
406
407 skip = 0
408 while self.event_file.get_next_event() != 'no_event':
409 event = self.event_file.particle
410
411 event_ids = [p['pid'] for p in event.values()]
412 if event_ids == ids:
413 skip += 1
414 if skip > to_skip:
415 break
416 else:
417 raise MadGraph5Error('No compatible events for %s' % ids)
418 p = []
419 for part in event.values():
420 m = part['momentum']
421 p.append([m.E, m.px, m.py, m.pz])
422 return p, 1
423
424 nincoming = len([leg for leg in sorted_legs if leg.get('state') == False])
425 nfinal = len(sorted_legs) - nincoming
426
427
428 mass = []
429 for l in sorted_legs:
430 if l.get('id') != 0:
431 mass_string = self.full_model.get_particle(l.get('id')).get('mass')
432 mass.append(self.full_model.get('parameter_dict')[mass_string].real)
433 else:
434 if isinstance(special_mass, float):
435 mass.append(special_mass)
436 else:
437 raise Exception("A 'special_mass' option must be specified"+\
438 " in get_momenta when a leg with id=-10 is present (for CMS check)")
439
440
441
442
443
444
445
446
447
448 energy = max(energy, sum(mass[:nincoming])*1.2,sum(mass[nincoming:])*1.2)
449
450
451
452
453
454
455
456 if nfinal == 1:
457 p = []
458 energy = mass[-1]
459 p.append([energy/2,0,0,energy/2])
460 p.append([energy/2,0,0,-energy/2])
461 p.append([mass[-1],0,0,0])
462 return p, 1.0
463
464 e2 = energy**2
465 m1 = mass[0]
466 p = []
467
468 masses = rambo.FortranList(nfinal)
469 for i in range(nfinal):
470 masses[i+1] = mass[nincoming + i]
471
472 if nincoming == 1:
473
474 p.append([abs(m1), 0., 0., 0.])
475 p_rambo, w_rambo = rambo.RAMBO(nfinal, abs(m1), masses)
476
477 for i in range(1, nfinal+1):
478 momi = [p_rambo[(4,i)], p_rambo[(1,i)],
479 p_rambo[(2,i)], p_rambo[(3,i)]]
480 p.append(momi)
481
482 return p, w_rambo
483
484 if nincoming != 2:
485 raise rambo.RAMBOError('Need 1 or 2 incoming particles')
486
487 if nfinal == 1:
488 energy = masses[1]
489 if masses[1] == 0.0:
490 raise rambo.RAMBOError('The kinematic 2 > 1 with the final'+\
491 ' state particle massless is invalid')
492
493 e2 = energy**2
494 m2 = mass[1]
495
496 mom = math.sqrt((e2**2 - 2*e2*m1**2 + m1**4 - 2*e2*m2**2 - \
497 2*m1**2*m2**2 + m2**4) / (4*e2))
498 e1 = math.sqrt(mom**2+m1**2)
499 e2 = math.sqrt(mom**2+m2**2)
500
501 p.append([e1, 0., 0., mom])
502 p.append([e2, 0., 0., -mom])
503
504 if nfinal == 1:
505 p.append([energy, 0., 0., 0.])
506 return p, 1.
507
508 p_rambo, w_rambo = rambo.RAMBO(nfinal, energy, masses)
509
510
511 for i in range(1, nfinal+1):
512 momi = [p_rambo[(4,i)], p_rambo[(1,i)],
513 p_rambo[(2,i)], p_rambo[(3,i)]]
514 p.append(momi)
515
516 return p, w_rambo
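
    # Illustrative usage (a sketch): for a 2 -> 2 process the returned 'p' is a
    # list of four [E, px, py, pz] lists (incoming beams along +z/-z, final
    # states distributed by RAMBO), together with the RAMBO phase-space weight:
    #
    #   p, wgt = evaluator.get_momenta(process, options={'energy': 500.0,
    #                                                    'events': None,
    #                                                    'skip_evt': 0})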


class LoopMatrixElementEvaluator(MatrixElementEvaluator):
    """Class taking care of matrix element evaluation for loop processes."""

    def __init__(self, cuttools_dir=None, output_path=None, tir_dir={},
                 cmd=FakeInterface(), *args, **kwargs):
527 """Allow for initializing the MG5 root where the temporary fortran
528 output for checks is placed."""
529
530 super(LoopMatrixElementEvaluator,self).__init__(*args,cmd=cmd,**kwargs)
531
532 self.mg_root=self.cmd._mgme_dir
533
534 if output_path is None:
535 self.output_path = self.cmd._mgme_dir
536 else:
537 self.output_path = output_path
538
539 self.cuttools_dir=cuttools_dir
540 self.tir_dir=tir_dir
541 self.loop_optimized_output = cmd.options['loop_optimized_output']
542
543
544 self.proliferate=True
545
546
547
548
    def evaluate_matrix_element(self, matrix_element, p=None, options=None,
                                gauge_check=False, auth_skipping=None, output='m2',
                                PS_name=None, MLOptions={}):
        """Calculate the matrix element and evaluate it for a phase-space point.
        'output' can only be 'm2'. The 'jamp' and 'amp' returned values are just
        empty lists at this point.
        If PS_name is not None, the written-out PS.input will be saved in
        the file PS.input_<PS_name> as well."""
557
558 process = matrix_element.get('processes')[0]
559 model = process.get('model')
560
561 if options and 'split_orders' in list(options.keys()):
562 split_orders = options['split_orders']
563 else:
564 split_orders = -1
565
566 if "loop_matrix_elements" not in self.stored_quantities:
567 self.stored_quantities['loop_matrix_elements'] = []
568
569 if (auth_skipping or self.auth_skipping) and matrix_element in \
570 [el[0] for el in self.stored_quantities['loop_matrix_elements']]:
571
572 logger.info("Skipping %s, " % process.nice_string() + \
573 "identical matrix element already tested" )
574 return None
575
576
577 if not p:
578 p, w_rambo = self.get_momenta(process, options=options)
579
580 if matrix_element in [el[0] for el in \
581 self.stored_quantities['loop_matrix_elements']]:
582 export_dir=self.stored_quantities['loop_matrix_elements'][\
583 [el[0] for el in self.stored_quantities['loop_matrix_elements']\
584 ].index(matrix_element)][1]
585 logger.debug("Reusing generated output %s"%str(export_dir))
586 else:
587 export_dir=pjoin(self.output_path,temp_dir_prefix)
588 if os.path.isdir(export_dir):
589 if not self.proliferate:
raise InvalidCmd("The directory %s already exists. Please remove it."%str(export_dir))
591 else:
592 id=1
593 while os.path.isdir(pjoin(self.output_path,\
594 '%s_%i'%(temp_dir_prefix,id))):
595 id+=1
596 export_dir=pjoin(self.output_path,'%s_%i'%(temp_dir_prefix,id))
597
598 if self.proliferate:
599 self.stored_quantities['loop_matrix_elements'].append(\
600 (matrix_element,export_dir))
601
602
603
604 import madgraph.loop.loop_exporters as loop_exporters
605 if self.loop_optimized_output:
606 exporter_class=loop_exporters.LoopProcessOptimizedExporterFortranSA
607 else:
608 exporter_class=loop_exporters.LoopProcessExporterFortranSA
609
610 MLoptions = {'clean': True,
611 'complex_mass': self.cmass_scheme,
612 'export_format':'madloop',
613 'mp':True,
614 'SubProc_prefix':'P',
615 'compute_color_flows': not process.get('has_born'),
616 'loop_dir': pjoin(self.mg_root,'Template','loop_material'),
617 'cuttools_dir': self.cuttools_dir,
618 'fortran_compiler': self.cmd.options['fortran_compiler'],
619 'output_dependencies': self.cmd.options['output_dependencies']}
620
621 MLoptions.update(self.tir_dir)
622
623 FortranExporter = exporter_class(export_dir, MLoptions)
624 FortranModel = helas_call_writers.FortranUFOHelasCallWriter(model)
625 FortranExporter.copy_template(model)
626 FortranExporter.generate_subprocess_directory(matrix_element, FortranModel)
627 wanted_lorentz = list(set(matrix_element.get_used_lorentz()))
628 wanted_couplings = list(set([c for l in matrix_element.get_used_couplings() \
629 for c in l]))
630 FortranExporter.convert_model(model,wanted_lorentz,wanted_couplings)
631 FortranExporter.finalize(matrix_element,"",self.cmd.options, ['nojpeg'])
632
633 MadLoopInitializer.fix_PSPoint_in_check(pjoin(export_dir,'SubProcesses'),
634 split_orders=split_orders)
635
636 self.fix_MadLoopParamCard(pjoin(export_dir,'Cards'),
637 mp = gauge_check and self.loop_optimized_output, MLOptions=MLOptions)
638
639 if gauge_check:
640 file_path, orig_file_content, new_file_content = \
641 self.setup_ward_check(pjoin(export_dir,'SubProcesses'),
642 ['helas_calls_ampb_1.f','loop_matrix.f'])
643 file = open(file_path,'w')
644 file.write(new_file_content)
645 file.close()
646 if self.loop_optimized_output:
647 mp_file_path, mp_orig_file_content, mp_new_file_content = \
648 self.setup_ward_check(pjoin(export_dir,'SubProcesses'),
649 ['mp_helas_calls_ampb_1.f','mp_compute_loop_coefs.f'],mp=True)
650 mp_file = open(mp_file_path,'w')
651 mp_file.write(mp_new_file_content)
652 mp_file.close()
653
654
655 finite_m2 = self.get_me_value(process.shell_string_v4(), 0,\
656 export_dir, p, PS_name = PS_name, verbose=False)[0][0]
657
658
659 if gauge_check:
660 file = open(file_path,'w')
661 file.write(orig_file_content)
662 file.close()
663 if self.loop_optimized_output:
664 mp_file = open(mp_file_path,'w')
665 mp_file.write(mp_orig_file_content)
666 mp_file.close()
667
668
669 if not self.proliferate:
670 shutil.rmtree(export_dir)
671
672 if output == "m2":
673
674
675 return finite_m2, []
676 else:
677 return {'m2': finite_m2, output:[]}
678
    def fix_MadLoopParamCard(self, dir_name, mp=False, loop_filter=False,
                             DoubleCheckHelicityFilter=False, MLOptions={}):
        """ Set parameters in MadLoopParams.dat suited for these checks. MP
        stands for multiple precision and can either be a bool or an integer
        to specify the mode."""

        # Read the default MadLoop parameters
        file = open(pjoin(dir_name, 'MadLoopParams.dat'), 'r')
        MLCard = bannermod.MadLoopParam(file)

        if isinstance(mp, bool):
            mode = 4 if mp else 1
        else:
            mode = mp

        for key, value in MLOptions.items():
            if key == "MLReductionLib":
                if isinstance(value, int):
                    ml_reds = str(value)
                elif isinstance(value, list):
                    if len(value) == 0:
                        ml_reds = '1'
                    else:
                        ml_reds = "|".join([str(vl) for vl in value])
                elif isinstance(value, str):
                    ml_reds = value
                else:
                    raise MadGraph5Error('The argument %s' % str(value) +
                          ' in fix_MadLoopParamCard must be a string, an integer'
                          ' or a list.')
                MLCard.set("MLReductionLib", ml_reds)
            elif key == 'ImprovePS':
                MLCard.set('ImprovePSPoint', 2 if value else -1)
            elif key == 'ForceMP':
                mode = 4
            elif key in MLCard:
                MLCard.set(key, value)
            else:
                raise Exception('The MadLoop option %s specified in function' % key +
                      ' fix_MadLoopParamCard does not correspond to an option defined in'
                      ' MadLoop, nor is it specially handled in this function.')
        if mode is not None:
            MLCard.set('CTModeRun', mode)
            MLCard.set('CTModeInit', mode)
        MLCard.set('UseLoopFilter', loop_filter)
        MLCard.set('DoubleCheckHelicityFilter', DoubleCheckHelicityFilter)

        MLCard.write(pjoin(dir_name, os.pardir, 'SubProcesses', 'MadLoopParams.dat'))
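
    # Illustrative usage (a sketch): MLOptions keys map onto MadLoopParams.dat
    # entries, e.g. forcing quadruple precision and an ordered preference of
    # reduction tools (6 is Ninja in the tool numbering used later in this file):
    #
    #   self.fix_MadLoopParamCard(pjoin(export_dir, 'Cards'),
    #                             MLOptions={'MLReductionLib': [6, 1],
    #                                        'ForceMP': True})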
729
    @classmethod
    def get_me_value(cls, proc, proc_id, working_dir, PSpoint=[], PS_name=None,
                     verbose=True, format='tuple', skip_compilation=False):
        """Compile and run ./check, then parse the output and return the result
        for the process with id = proc_id and the PSpoint if specified.
        If PS_name is not None, the written-out PS.input will be saved in
        the file PS.input_<PS_name> as well."""
737 if verbose:
738 sys.stdout.write('.')
739 sys.stdout.flush()
740
741 shell_name = None
742 directories = misc.glob('P%i_*' % proc_id, pjoin(working_dir, 'SubProcesses'))
743 if directories and os.path.isdir(directories[0]):
744 shell_name = os.path.basename(directories[0])
745
746
747 if not shell_name:
748 logging.info("Directory hasn't been created for process %s: %s", proc, directories)
749 return ((0.0, 0.0, 0.0, 0.0, 0), [])
750
751 if verbose: logging.debug("Working on process %s in dir %s" % (proc, shell_name))
752
753 dir_name = pjoin(working_dir, 'SubProcesses', shell_name)
754 if not skip_compilation:
755
756 if os.path.isfile(pjoin(dir_name,'check')):
757 os.remove(pjoin(dir_name,'check'))
758 try:
759 os.remove(pjoin(dir_name,'check_sa.o'))
760 os.remove(pjoin(dir_name,'loop_matrix.o'))
761 except OSError:
762 pass
763
764 devnull = open(os.devnull, 'w')
765 retcode = subprocess.call(['make','check'],
766 cwd=dir_name, stdout=devnull, stderr=devnull)
767 devnull.close()
768
769 if retcode != 0:
770 logging.info("Error while executing make in %s" % shell_name)
771 return ((0.0, 0.0, 0.0, 0.0, 0), [])
772
773
774 if PSpoint:
775 misc.write_PS_input(pjoin(dir_name, 'PS.input'),PSpoint)
776
777
778 if not PS_name is None:
779 misc.write_PS_input(pjoin(dir_name, \
780 'PS.input_%s'%PS_name),PSpoint)
781
782 try:
783 output = subprocess.Popen('./check',
784 cwd=dir_name,
785 stdout=subprocess.PIPE, stderr=subprocess.STDOUT).stdout
786 output.read()
787 output.close()
788 if os.path.exists(pjoin(dir_name,'result.dat')):
789 return cls.parse_check_output(open(pjoin(dir_name,\
790 'result.dat')),format=format)
791 else:
792 logging.warning("Error while looking for file %s"%str(os.path\
793 .join(dir_name,'result.dat')))
794 return ((0.0, 0.0, 0.0, 0.0, 0), [])
795 except IOError:
796 logging.warning("Error while executing ./check in %s" % shell_name)
797 return ((0.0, 0.0, 0.0, 0.0, 0), [])
798
    @classmethod
    def parse_check_output(cls, output, format='tuple'):
        """Parse the output and return a pair whose first element gathers the
        finite, born, single and double pole of the ME together with the GeV
        exponent, and whose second element is the list of 4-momenta of all
        particles involved. Return the answer in two possible formats, 'tuple'
        or 'dict'."""
805
806 res_dict = {'res_p':[],
807 'born':0.0,
808 'finite':0.0,
809 '1eps':0.0,
810 '2eps':0.0,
811 'gev_pow':0,
812 'export_format':'Default',
813 'accuracy':0.0,
814 'return_code':0,
815 'Split_Orders_Names':[],
816 'Loop_SO_Results':[],
817 'Born_SO_Results':[],
818 'Born_kept':[],
819 'Loop_kept':[]
820 }
821 res_p = []
822
823
824
825 if isinstance(output,(file,io.TextIOWrapper)) or isinstance(output,list):
826 text=output
827 elif isinstance(output,(str)) or (six.PY2 and isinstance(output, six.text_type)):
828 text=output.split('\n')
829 elif isinstance(output, bytes):
830 text=output.decode().split('\n')
831 else:
832 raise MadGraph5Error('Type for argument output not supported in'+\
833 ' parse_check_output: %s' % type(output))
834 for line in text:
835 splitline=line.split()
836 if len(splitline)==0:
837 continue
838 elif splitline[0]=='PS':
839 res_p.append([float(s) for s in splitline[1:]])
840 elif splitline[0]=='ASO2PI':
841 res_dict['alphaS_over_2pi']=float(splitline[1])
842 elif splitline[0]=='BORN':
843 res_dict['born']=float(splitline[1])
844 elif splitline[0]=='FIN':
845 res_dict['finite']=float(splitline[1])
846 elif splitline[0]=='1EPS':
847 res_dict['1eps']=float(splitline[1])
848 elif splitline[0]=='2EPS':
849 res_dict['2eps']=float(splitline[1])
850 elif splitline[0]=='EXP':
851 res_dict['gev_pow']=int(splitline[1])
852 elif splitline[0]=='Export_Format':
853 res_dict['export_format']=splitline[1]
854 elif splitline[0]=='ACC':
855 res_dict['accuracy']=float(splitline[1])
856 elif splitline[0]=='RETCODE':
857 res_dict['return_code']=int(splitline[1])
858 elif splitline[0]=='Split_Orders_Names':
859 res_dict['Split_Orders_Names']=splitline[1:]
860 elif splitline[0] in ['Born_kept', 'Loop_kept']:
861 res_dict[splitline[0]] = [kept=='T' for kept in splitline[1:]]
862 elif splitline[0] in ['Loop_SO_Results', 'Born_SO_Results']:
863
864
865
866
867 res_dict[splitline[0]].append(\
868 ([int(el) for el in splitline[1:]],{}))
869 elif splitline[0]=='SO_Loop':
870 res_dict['Loop_SO_Results'][-1][1][splitline[1]]=\
871 float(splitline[2])
872 elif splitline[0]=='SO_Born':
873 res_dict['Born_SO_Results'][-1][1][splitline[1]]=\
874 float(splitline[2])
875
876 res_dict['res_p'] = res_p
877
878 if format=='tuple':
879 return ((res_dict['finite'],res_dict['born'],res_dict['1eps'],
880 res_dict['2eps'],res_dict['gev_pow']), res_dict['res_p'])
881 else:
882 return res_dict
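
    # Illustrative input (a sketch): a minimal result.dat understood by this
    # parser could read
    #
    #   BORN 0.40E-01
    #   FIN  0.60E-02
    #   1EPS 0.00E+00
    #   2EPS 0.00E+00
    #   EXP  -4
    #   PS   0.5E+03 0.0E+00 0.0E+00 0.5E+03
    #
    # which, with format='tuple', is returned as
    # ((6.0e-3, 4.0e-2, 0.0, 0.0, -4), [[500.0, 0.0, 0.0, 500.0]]).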
883
    @staticmethod
    def apply_log_tweak(proc_path, mode):
        """ Changes the file model_functions.f in the SOURCE of the process output
        so as to change how logarithms are analytically continued and see how
        it impacts the CMS check."""
        valid_modes = ['default', 'recompile']
        if not (mode in valid_modes or (isinstance(mode, list) and
                len(mode) == 2 and all(m in ['logp', 'logm', 'log'] for m in mode))):
            raise MadGraph5Error("Mode '%s' not recognized" % mode +
                                 " in function apply_log_tweak.")
894
895 model_path = pjoin(proc_path,'Source','MODEL')
896 directories = misc.glob('P0_*', pjoin(proc_path,'SubProcesses'))
897 if directories and os.path.isdir(directories[0]):
898 exe_path = directories[0]
899 else:
900 raise MadGraph5Error('Could not find a process executable '+\
'directory in %s'%proc_path)
902 bu_path = pjoin(model_path, 'model_functions.f__backUp__')
903
904 if mode=='default':
905
906 if not os.path.isfile(bu_path):
907 raise MadGraph5Error('Back up file %s could not be found.'%bu_path)
908 shutil.move(bu_path, pjoin(model_path, 'model_functions.f'))
909 return
910
911 if mode=='recompile':
912 try:
913 os.remove(pjoin(model_path,'model_functions.o'))
914 os.remove(pjoin(proc_path,'lib','libmodel.a'))
915 except:
916 pass
917 misc.compile(cwd=model_path)
918
919 try:
920 os.remove(pjoin(exe_path,'check'))
921 except:
922 pass
923 misc.compile(arg=['check'], cwd=exe_path)
924 return
925
926 if mode[0]==mode[1]:
927 return
928
929
930 mp_prefix = 'MP_'
931 target_line = 'FUNCTION %%sREG%s(ARG)'%mode[0].lower()
932
933
934 if not os.path.isfile(bu_path):
935 shutil.copy(pjoin(model_path, 'model_functions.f'), bu_path)
936 model_functions = open(pjoin(model_path,'model_functions.f'),'r')
937
938 new_model_functions = []
939 has_replaced = False
940 just_replaced = False
941 find_one_replacement= False
942 mp_mode = None
943 suffix = {'log':'','logp':r'\s*\+\s*TWOPII','logm':r'\s*\-\s*TWOPII'}
944 replace_regex=r'^\s*%%sREG%s\s*=\s*LOG\(ARG\)%s'%(mode[0],suffix[mode[0]])
945 for line in model_functions:
946
947 if just_replaced:
948 if not re.match(r'\s{6}', line):
949 continue
950 else:
951 just_replaced = False
952 if mp_mode is None:
953
954 new_model_functions.append(line)
955 if (target_line%mp_prefix).lower() in line.lower():
956 mp_mode = mp_prefix
957 elif (target_line%'').lower() in line.lower():
958 mp_mode = ''
959 else:
960
961 if not has_replaced and re.match(replace_regex%mp_mode,line,
962 re.IGNORECASE):
963
964 if mode[0]=='log':
965 if mp_mode=='':
966 new_line =\
967 """ if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then
968 reg%s=log(arg) %s TWOPII
969 else
970 reg%s=log(arg)
971 endif\n"""%(mode[0],'+' if mode[1]=='logp' else '-',mode[0])
972 else:
973 new_line =\
974 """ if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then
975 mp_reg%s=log(arg) %s TWOPII
976 else
977 mp_reg%s=log(arg)
978 endif\n"""%(mode[0],'+' if mode[1]=='logp' else '-',mode[0])
979 else:
980 new_line = ' '*6+"%sreg%s=log(arg) %s\n"%(mp_mode,mode[0],
981 ('' if mode[1]=='log' else ('+TWOPII' if mode[1]=='logp' else '-TWOPII')))
982 new_model_functions.append(new_line)
983 just_replaced = True
984 has_replaced = True
985 find_one_replacement = True
986 else:
987 new_model_functions.append(line)
988 if re.match(r'^\s*END\s*$',line,re.IGNORECASE):
989 mp_mode = None
990 has_replaced = False
991
992 if not find_one_replacement:
993 logger.warning('No replacement was found/performed for token '+
994 "'%s->%s'."%(mode[0],mode[1]))
995 else:
996 open(pjoin(model_path,'model_functions.f'),'w').\
997 write(''.join(new_model_functions))
998 return
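
    # Illustrative usage (a sketch; 'proc_output_path' is a placeholder for the
    # MadLoop standalone output directory): flip the analytic continuation of
    # the logarithm from +2*pi*i to -2*pi*i, recompile, and later restore the
    # backed-up default:
    #
    #   LoopMatrixElementEvaluator.apply_log_tweak(proc_output_path, ['logp', 'logm'])
    #   LoopMatrixElementEvaluator.apply_log_tweak(proc_output_path, 'recompile')
    #   ...
    #   LoopMatrixElementEvaluator.apply_log_tweak(proc_output_path, 'default')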

    def setup_ward_check(self, working_dir, file_names, mp=False):
        """ Modify loop_matrix.f so as to have one external massless gauge boson
        polarization vector turned into its momentum. It is not a pretty nor a
        flexible solution, but it works for this particular case."""

1005 shell_name = None
1006 directories = misc.glob('P0_*', working_dir)
1007 if directories and os.path.isdir(directories[0]):
1008 shell_name = os.path.basename(directories[0])
1009
1010 dir_name = pjoin(working_dir, shell_name)
1011
1012
1013 ind=0
1014 while ind<len(file_names) and not os.path.isfile(pjoin(dir_name,
1015 file_names[ind])):
1016 ind += 1
1017 if ind==len(file_names):
1018 raise Exception("No helas calls output file found.")
1019
1020 helas_file_name=pjoin(dir_name,file_names[ind])
1021 file = open(pjoin(dir_name,helas_file_name), 'r')
1022
1023 helas_calls_out=""
1024 original_file=""
1025 gaugeVectorRegExp=re.compile(\
1026 r"CALL (MP\_)?VXXXXX\(P\(0,(?P<p_id>\d+)\),((D)?CMPLX\()?ZERO((,KIND\=16)?\))?,"+
1027 r"NHEL\(\d+\),[\+\-]1\*IC\(\d+\),W\(1,(?P<wf_id>\d+(,H)?)\)\)")
1028 foundGauge=False
1029
1030 for line in file:
1031 helas_calls_out+=line
1032 original_file+=line
1033 if line.find("INCLUDE 'coupl.inc'") != -1 or \
1034 line.find("INCLUDE 'mp_coupl_same_name.inc'") !=-1:
1035 helas_calls_out+=" INTEGER WARDINT\n"
1036 if not foundGauge:
1037 res=gaugeVectorRegExp.search(line)
1038 if res!=None:
1039 foundGauge=True
1040 helas_calls_out+=" DO WARDINT=1,4\n"
1041 helas_calls_out+=" W(WARDINT+4,"+res.group('wf_id')+")="
1042 if not mp:
1043 helas_calls_out+=\
1044 "DCMPLX(P(WARDINT-1,"+res.group('p_id')+"),0.0D0)\n"
1045 else:
1046 helas_calls_out+="CMPLX(P(WARDINT-1,"+\
1047 res.group('p_id')+"),0.0E0_16,KIND=16)\n"
1048 helas_calls_out+=" ENDDO\n"
1049 file.close()
1050
1051 return pjoin(dir_name,helas_file_name), original_file, helas_calls_out


class LoopMatrixElementTimer(LoopMatrixElementEvaluator):
    """Class taking care of matrix element evaluation and of running timing
    checks for loop processes."""

    @classmethod
    def get_MadLoop_Params(cls, MLCardPath):
        """ Return a dictionary of the parameters of the MadLoopParamCard at the
        given path. The key is the name of the parameter and the value is the
        corresponding string read from the card."""

        return bannermod.MadLoopParam(MLCardPath)
1071
1072
    @classmethod
    def set_MadLoop_Params(cls, MLCardPath, params):
        """ Set the parameters in the MadLoopParamCard at the given path to the
        values specified in the dictionary params.
        The key is the name of the parameter and the value is the corresponding
        string to write in the card."""

        MLcard = bannermod.MadLoopParam(MLCardPath)
        for key, value in params.items():
            MLcard.set(key, value, changeifuserset=False)
        MLcard.write(MLCardPath, commentdefault=True)
1084
1086 """ Edit loop_matrix.f in order to skip the loop evaluation phase.
1087 Notice this only affects the double precision evaluation which is
1088 normally fine as we do not make the timing check on mp."""
1089
1090 file = open(pjoin(dir_name,'loop_matrix.f'), 'r')
1091 loop_matrix = file.read()
1092 file.close()
1093
1094 file = open(pjoin(dir_name,'loop_matrix.f'), 'w')
1095 loop_matrix = re.sub(r"SKIPLOOPEVAL=\S+\)","SKIPLOOPEVAL=%s)"%('.TRUE.'
1096 if skip else '.FALSE.'), loop_matrix)
1097 file.write(loop_matrix)
1098 file.close()

    def boot_time_setup(self, dir_name, bootandstop=True):
        """ Edit loop_matrix.f in order to set the flag which stops the
        execution after booting the program (i.e. reading the color data)."""
1103
1104 file = open(pjoin(dir_name,'loop_matrix.f'), 'r')
1105 loop_matrix = file.read()
1106 file.close()
1107
1108 file = open(pjoin(dir_name,'loop_matrix.f'), 'w')
1109 loop_matrix = re.sub(r"BOOTANDSTOP=\S+\)","BOOTANDSTOP=%s)"%('.TRUE.'
1110 if bootandstop else '.FALSE.'), loop_matrix)
1111 file.write(loop_matrix)
1112 file.close()
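
    # Illustrative effect (a sketch): the substitution above rewrites the
    # hard-coded flag in loop_matrix.f, e.g.
    #
    #   BOOTANDSTOP=.FALSE.)   -->   BOOTANDSTOP=.TRUE.)
    #
    # and skip_loop_evaluation_setup does the same for the SKIPLOOPEVAL flag.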

    def setup_process(self, matrix_element, export_dir, reusing=False,
                      param_card=None, MLOptions={}, clean=True):
        """ Output the matrix_element given in argument and perform the
        initialization, while providing some details about the output in the
        dictionary returned. Returns None if anything fails."""
1119
1120 infos={'Process_output': None,
1121 'HELAS_MODEL_compilation' : None,
1122 'dir_path' : None,
1123 'Initialization' : None,
1124 'Process_compilation' : None}
1125
1126 if not reusing and clean:
1127 if os.path.isdir(export_dir):
1128 clean_up(self.output_path)
1129 if os.path.isdir(export_dir):
1130 raise InvalidCmd(\
1131 "The directory %s already exist. Please remove it."\
1132 %str(export_dir))
1133 else:
1134 if not os.path.isdir(export_dir):
1135 raise InvalidCmd(\
1136 "Could not find the directory %s to reuse."%str(export_dir))
1137
1138
1139 if not reusing and clean:
1140 model = matrix_element['processes'][0].get('model')
1141
1142
1143 import madgraph.loop.loop_exporters as loop_exporters
1144 if self.loop_optimized_output:
1145 exporter_class=loop_exporters.LoopProcessOptimizedExporterFortranSA
1146 else:
1147 exporter_class=loop_exporters.LoopProcessExporterFortranSA
1148
1149 MLoptions = {'clean': True,
1150 'complex_mass': self.cmass_scheme,
1151 'export_format':'madloop',
1152 'mp':True,
1153 'SubProc_prefix':'P',
1154 'compute_color_flows':not matrix_element['processes'][0].get('has_born'),
1155 'loop_dir': pjoin(self.mg_root,'Template','loop_material'),
1156 'cuttools_dir': self.cuttools_dir,
1157 'fortran_compiler':self.cmd.options['fortran_compiler'],
1158 'output_dependencies':self.cmd.options['output_dependencies']}
1159
1160 MLoptions.update(self.tir_dir)
1161
1162 start=time.time()
1163 FortranExporter = exporter_class(export_dir, MLoptions)
1164 FortranModel = helas_call_writers.FortranUFOHelasCallWriter(model)
1165 FortranExporter.copy_template(model)
1166 FortranExporter.generate_subprocess_directory(matrix_element, FortranModel)
1167 wanted_lorentz = list(set(matrix_element.get_used_lorentz()))
1168 wanted_couplings = list(set([c for l in matrix_element.get_used_couplings() \
1169 for c in l]))
1170 FortranExporter.convert_model(self.full_model,wanted_lorentz,wanted_couplings)
1171 infos['Process_output'] = time.time()-start
1172 start=time.time()
1173 FortranExporter.finalize(matrix_element,"",self.cmd.options, ['nojpeg'])
1174 infos['HELAS_MODEL_compilation'] = time.time()-start
1175
1176
1177 if param_card != None:
1178 if isinstance(param_card, str):
1179 cp(pjoin(param_card),\
1180 pjoin(export_dir,'Cards','param_card.dat'))
1181 else:
1182 param_card.write(pjoin(export_dir,'Cards','param_card.dat'))
1183
1184
1185
1186 MadLoopInitializer.fix_PSPoint_in_check(
1187 pjoin(export_dir,'SubProcesses'), read_ps = False, npoints = 4)
1188
1189 self.fix_MadLoopParamCard(pjoin(export_dir,'Cards'),
1190 mp = False, loop_filter = True,MLOptions=MLOptions)
1191
1192 shell_name = None
1193 directories = misc.glob('P0_*', pjoin(export_dir, 'SubProcesses'))
1194 if directories and os.path.isdir(directories[0]):
1195 shell_name = os.path.basename(directories[0])
1196 dir_name = pjoin(export_dir, 'SubProcesses', shell_name)
1197 infos['dir_path']=dir_name
1198
1199
1200
1201 if not MadLoopInitializer.need_MadLoopInit(
1202 export_dir, subproc_prefix='P'):
1203 return infos
1204
1205 attempts = [3,15]
1206
1207 try:
1208 os.remove(pjoin(dir_name,'check'))
1209 os.remove(pjoin(dir_name,'check_sa.o'))
1210 except OSError:
1211 pass
1212
1213 nPS_necessary = MadLoopInitializer.run_initialization(dir_name,
1214 pjoin(export_dir,'SubProcesses'),infos,\
1215 req_files = ['HelFilter.dat','LoopFilter.dat'],
1216 attempts = attempts)
1217 if attempts is None:
1218 logger.error("Could not compile the process %s,"%shell_name+\
1219 " try to generate it via the 'generate' command.")
1220 return None
1221 if nPS_necessary is None:
1222 logger.error("Could not initialize the process %s"%shell_name+\
1223 " with %s PS points."%max(attempts))
1224 return None
1225 elif nPS_necessary > min(attempts):
1226 logger.warning("Could not initialize the process %s"%shell_name+\
1227 " with %d PS points. It needed %d."%(min(attempts),nPS_necessary))
1228
1229 return infos
1230
    def time_matrix_element(self, matrix_element, reusing=False,
                            param_card=None, keep_folder=False, options=None,
                            MLOptions={}):
        """ Output the matrix_element given in argument and give detailed
        information about the timing of its output and running."""
1236
1237
1238
1239 make_it_quick=False
1240
1241 if options and 'split_orders' in list(options.keys()):
1242 split_orders = options['split_orders']
1243 else:
1244 split_orders = -1
1245
1246 assert ((not reusing and isinstance(matrix_element, \
1247 helas_objects.HelasMatrixElement)) or (reusing and
1248 isinstance(matrix_element, base_objects.Process)))
1249 if not reusing:
1250 proc_name = matrix_element['processes'][0].shell_string()[2:]
1251 else:
1252 proc_name = matrix_element.shell_string()[2:]
1253
1254 export_dir=pjoin(self.output_path,('SAVED' if keep_folder else '')+\
1255 temp_dir_prefix+"_%s"%proc_name)
1256
1257 res_timings = self.setup_process(matrix_element,export_dir, \
1258 reusing, param_card,MLOptions = MLOptions,clean=True)
1259
1260 if res_timings == None:
1261 return None
1262 dir_name=res_timings['dir_path']
1263
1264 def check_disk_usage(path):
1265 return subprocess.Popen("du -shc -L "+str(path), \
1266 stdout=subprocess.PIPE, shell=True).communicate()[0].decode().split()[-2]
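
        # Illustrative output (a sketch): "du -shc -L <path>" ends with a line
        # like "1.2M   total", so the next-to-last whitespace-separated token
        # (split()[-2]) is the human-readable total size that gets stored.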
1267
1268
1269
1270
1271
1272 res_timings['du_source']=check_disk_usage(pjoin(\
1273 export_dir,'Source','*','*.f'))
1274 res_timings['du_process']=check_disk_usage(pjoin(dir_name,'*.f'))
1275 res_timings['du_color']=check_disk_usage(pjoin(dir_name,
1276 'MadLoop5_resources','*.dat'))
1277 res_timings['du_exe']=check_disk_usage(pjoin(dir_name,'check'))
1278
1279 if not res_timings['Initialization']==None:
1280 time_per_ps_estimate = (res_timings['Initialization']/4.0)/2.0
1281 elif make_it_quick:
1282 time_per_ps_estimate = -1.0
1283 else:
1284
1285
1286 MadLoopInitializer.fix_PSPoint_in_check(pjoin(export_dir,'SubProcesses'),
1287 read_ps = False, npoints = 3, hel_config = -1,
1288 split_orders=split_orders)
1289 compile_time, run_time, ram_usage = MadLoopInitializer.make_and_run(dir_name)
1290 time_per_ps_estimate = run_time/3.0
1291
1292 self.boot_time_setup(dir_name,bootandstop=True)
1293 compile_time, run_time, ram_usage = MadLoopInitializer.make_and_run(dir_name)
1294 res_timings['Booting_time'] = run_time
1295 self.boot_time_setup(dir_name,bootandstop=False)
1296
1297
1298 contributing_hel=0
1299 n_contrib_hel=0
1300 proc_prefix_file = open(pjoin(dir_name,'proc_prefix.txt'),'r')
1301 proc_prefix = proc_prefix_file.read()
1302 proc_prefix_file.close()
1303 helicities = open(pjoin(dir_name,'MadLoop5_resources',
1304 '%sHelFilter.dat'%proc_prefix)).read().split()
1305 for i, hel in enumerate(helicities):
1306 if (self.loop_optimized_output and int(hel)>-10000) or hel=='T':
1307 if contributing_hel==0:
1308 contributing_hel=i+1
1309 n_contrib_hel += 1
1310
1311 if contributing_hel==0:
1312 logger.error("Could not find a contributing helicity "+\
1313 "configuration for process %s."%proc_name)
1314 return None
1315
1316 res_timings['n_contrib_hel']=n_contrib_hel
1317 res_timings['n_tot_hel']=len(helicities)
1318
1319
1320 if not make_it_quick:
1321 target_pspoints_number = max(int(30.0/time_per_ps_estimate)+1,50)
1322 else:
1323 target_pspoints_number = 10
1324
1325 logger.info("Checking timing for process %s "%proc_name+\
1326 "with %d PS points."%target_pspoints_number)
1327
1328 MadLoopInitializer.fix_PSPoint_in_check(pjoin(export_dir,'SubProcesses'),
1329 read_ps = False, npoints = target_pspoints_number*2, \
1330 hel_config = contributing_hel, split_orders=split_orders)
1331 compile_time, run_time, ram_usage = MadLoopInitializer.make_and_run(dir_name)
1332
1333 if compile_time == None: return None
1334
1335 res_timings['run_polarized_total']=\
1336 (run_time-res_timings['Booting_time'])/(target_pspoints_number*2)
1337
1338 if make_it_quick:
1339 res_timings['run_unpolarized_total'] = 1.0
1340 res_timings['ram_usage'] = 0.0
1341 else:
1342 MadLoopInitializer.fix_PSPoint_in_check(pjoin(export_dir,'SubProcesses'),
1343 read_ps = False, npoints = target_pspoints_number, hel_config = -1,
1344 split_orders=split_orders)
1345 compile_time, run_time, ram_usage = MadLoopInitializer.make_and_run(dir_name,
1346 checkRam=True)
1347
1348 if compile_time == None: return None
1349 res_timings['run_unpolarized_total']=\
1350 (run_time-res_timings['Booting_time'])/target_pspoints_number
1351 res_timings['ram_usage'] = ram_usage
1352
1353 if not self.loop_optimized_output:
1354 return res_timings
1355
1356
1357
1358
1359
1360 self.skip_loop_evaluation_setup(dir_name,skip=True)
1361
1362 if make_it_quick:
1363 res_timings['run_unpolarized_coefs'] = 1.0
1364 else:
1365 MadLoopInitializer.fix_PSPoint_in_check(pjoin(export_dir,'SubProcesses'),
1366 read_ps = False, npoints = target_pspoints_number, hel_config = -1,
1367 split_orders=split_orders)
1368 compile_time, run_time, ram_usage = MadLoopInitializer.make_and_run(dir_name)
1369 if compile_time == None: return None
1370 res_timings['run_unpolarized_coefs']=\
1371 (run_time-res_timings['Booting_time'])/target_pspoints_number
1372
1373 MadLoopInitializer.fix_PSPoint_in_check(pjoin(export_dir,'SubProcesses'),
1374 read_ps = False, npoints = target_pspoints_number*2, \
1375 hel_config = contributing_hel, split_orders=split_orders)
1376 compile_time, run_time, ram_usage = MadLoopInitializer.make_and_run(dir_name)
1377 if compile_time == None: return None
1378 res_timings['run_polarized_coefs']=\
1379 (run_time-res_timings['Booting_time'])/(target_pspoints_number*2)
1380
1381
1382 self.skip_loop_evaluation_setup(dir_name,skip=False)
1383
1384 return res_timings
1385
1386
1387
1388
1389
    def check_matrix_element_stability(self, matrix_element, options=None,
                                       infos_IN=None, param_card=None, keep_folder=False,
                                       MLOptions={}):
        """ Output the matrix_element given in argument, run it for nPoints
        phase-space points and return a dictionary containing the stability
        information for each of these points.
        If infos_IN is provided, then the matrix element output is skipped and
        reused from a previous run, together with the content of infos_IN."""
1398
1399 if not options:
1400 reusing = False
1401 nPoints = 100
1402 split_orders = -1
1403 else:
1404 reusing = options['reuse']
1405 nPoints = options['npoints']
1406 split_orders = options['split_orders']
1407
1408 assert ((not reusing and isinstance(matrix_element, \
1409 helas_objects.HelasMatrixElement)) or (reusing and
1410 isinstance(matrix_element, base_objects.Process)))
1411
1412
        def format_PS_point(ps, rotation=0):
            """ Write out the specified PS point to the file dir_path/PS.input
            while rotating it if rotation!=0. We consider only rotations by 90
            degrees, but one could think of having rotations by arbitrary angles
            too. The first possibilities (1, 2 and 3) are rotations and a boost
            along the z-axis, so that improve_ps can still work.
            rotation=0 => No rotation
            rotation=1 => Z-axis pi/2 rotation
            rotation=2 => Z-axis pi/4 rotation
            rotation=3 => Z-axis boost
            rotation=4 => (x'=z,y'=-x,z'=-y)
            rotation=5 => (x'=-z,y'=y,z'=x)"""
1425 if rotation==0:
1426 p_out=copy.copy(ps)
1427 elif rotation==1:
1428 p_out = [[pm[0],-pm[2],pm[1],pm[3]] for pm in ps]
1429 elif rotation==2:
1430 sq2 = math.sqrt(2.0)
1431 p_out = [[pm[0],(pm[1]-pm[2])/sq2,(pm[1]+pm[2])/sq2,pm[3]] for pm in ps]
1432 elif rotation==3:
1433 p_out = boost_momenta(ps, 3)
1434
1435
1436 elif rotation==4:
1437 p_out=[[pm[0],pm[3],-pm[1],-pm[2]] for pm in ps]
1438 elif rotation==5:
1439 p_out=[[pm[0],-pm[3],pm[2],pm[1]] for pm in ps]
1440 else:
1441 raise MadGraph5Error("Rotation id %i not implemented"%rotation)
1442
1443 return '\n'.join([' '.join(['%.16E'%pi for pi in p]) for p in p_out])
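
            # Illustrative effect (a sketch): with rotation=4 a momentum
            # [E, px, py, pz] is written out as [E, pz, -px, -py], i.e. the
            # permutation (x'=z, y'=-x, z'=-y) quoted above.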
1444
1445 def pick_PS_point(proc, options):
1446 """ Randomly generate a PS point and make sure it is eligible. Then
1447 return it. Users can edit the cuts here if they want."""
1448
1449 p, w_rambo = self.get_momenta(proc, options)
1450 if options['events']:
1451 return p
1452
1453 while (not MatrixElementEvaluator.pass_isolation_cuts(p) and len(p)>3):
1454 p, w_rambo = self.get_momenta(proc, options)
1455
1456
1457
1458
1459 if len(p)==3:
1460 p = boost_momenta(p,3,random.uniform(0.0,0.99))
1461 return p
1462
1463
1464
1465
1466 accuracy_threshold=1.0e-1
1467
1468
1469
1470 num_rotations = 1
1471
1472 if "MLReductionLib" not in MLOptions:
1473 tools=[1]
1474 else:
1475 tools=MLOptions["MLReductionLib"]
1476 tools=list(set(tools))
1477
1478
1479 tool_var={'pjfry':2,'golem':4,'samurai':5,'ninja':6,'collier':7}
1480 for tool in ['pjfry','golem','samurai','ninja','collier']:
1481 tool_dir='%s_dir'%tool
1482 if not tool_dir in self.tir_dir:
1483 continue
1484 tool_libpath=self.tir_dir[tool_dir]
1485 tool_libname="lib%s.a"%tool
1486 if (not isinstance(tool_libpath,str)) or (not os.path.exists(tool_libpath)) \
1487 or (not os.path.isfile(pjoin(tool_libpath,tool_libname))):
1488 if tool_var[tool] in tools:
1489 tools.remove(tool_var[tool])
1490 if not tools:
1491 return None
1492
1493
1494 if not reusing:
1495 process = matrix_element['processes'][0]
1496 else:
1497 process = matrix_element
1498 proc_name = process.shell_string()[2:]
1499 export_dir=pjoin(self.mg_root,("SAVED" if keep_folder else "")+\
1500 temp_dir_prefix+"_%s"%proc_name)
1501
1502 tools_name=bannermod.MadLoopParam._ID_reduction_tool_map
1503
1504 return_dict={}
1505 return_dict['Stability']={}
1506 infos_save={'Process_output': None,
1507 'HELAS_MODEL_compilation' : None,
1508 'dir_path' : None,
1509 'Initialization' : None,
1510 'Process_compilation' : None}
1511
1512 for tool in tools:
1513 tool_name=tools_name[tool]
1514
1515
1516
1517
1518
1519 DP_stability = []
1520 QP_stability = []
1521
1522 Unstable_PS_points = []
1523
1524 Exceptional_PS_points = []
1525
1526 MLoptions=MLOptions
1527 MLoptions["MLReductionLib"]=tool
1528 clean = (tool==tools[0]) and not nPoints==0
1529 if infos_IN==None or (tool_name not in infos_IN):
1530 infos=infos_IN
1531 else:
1532 infos=infos_IN[tool_name]
1533
1534 if not infos:
1535 infos = self.setup_process(matrix_element,export_dir, \
1536 reusing, param_card,MLoptions,clean)
1537 if not infos:
1538 return None
1539
1540 if clean:
1541 infos_save['Process_output']=infos['Process_output']
1542 infos_save['HELAS_MODEL_compilation']=infos['HELAS_MODEL_compilation']
1543 infos_save['dir_path']=infos['dir_path']
1544 infos_save['Process_compilation']=infos['Process_compilation']
1545 else:
1546 if not infos['Process_output']:
1547 infos['Process_output']=infos_save['Process_output']
1548 if not infos['HELAS_MODEL_compilation']:
1549 infos['HELAS_MODEL_compilation']=infos_save['HELAS_MODEL_compilation']
1550 if not infos['dir_path']:
1551 infos['dir_path']=infos_save['dir_path']
1552 if not infos['Process_compilation']:
1553 infos['Process_compilation']=infos_save['Process_compilation']
1554
1555 dir_path=infos['dir_path']
1556
1557
1558 savefile='SavedStabilityRun_%s%%s.pkl'%tools_name[tool]
1559 data_i = 0
1560
1561 if reusing:
1562
1563 data_i=0
1564 while os.path.isfile(pjoin(dir_path,savefile%('_%d'%data_i))):
1565 pickle_path = pjoin(dir_path,savefile%('_%d'%data_i))
1566 saved_run = save_load_object.load_from_file(pickle_path)
1567 if data_i>0:
1568 logger.info("Loading additional data stored in %s."%
1569 str(pickle_path))
1570 logger.info("Loaded data moved to %s."%str(pjoin(
1571 dir_path,'LOADED_'+savefile%('_%d'%data_i))))
shutil.move(pickle_path,
pjoin(dir_path,'LOADED_'+savefile%('_%d'%data_i)))
1574 DP_stability.extend(saved_run['DP_stability'])
1575 QP_stability.extend(saved_run['QP_stability'])
1576 Unstable_PS_points.extend(saved_run['Unstable_PS_points'])
1577 Exceptional_PS_points.extend(saved_run['Exceptional_PS_points'])
1578 data_i += 1
1579
1580 return_dict['Stability'][tool_name] = {'DP_stability':DP_stability,
1581 'QP_stability':QP_stability,
1582 'Unstable_PS_points':Unstable_PS_points,
1583 'Exceptional_PS_points':Exceptional_PS_points}
1584
1585 if nPoints==0:
1586 if len(return_dict['Stability'][tool_name]['DP_stability'])!=0:
1587
1588 if data_i>1:
1589 save_load_object.save_to_file(pjoin(dir_path,
1590 savefile%'_0'),return_dict['Stability'][tool_name])
1591 continue
1592 else:
logger.info("ERROR: Not reusing a directory or any pickled"+
" result for tool %s and the number"%tool_name+\
" of points for the check is zero.")
1596 return None
1597
1598 logger.info("Checking stability of process %s "%proc_name+\
1599 "with %d PS points by %s."%(nPoints,tool_name))
1600 if infos['Initialization'] != None:
1601 time_per_ps_estimate = (infos['Initialization']/4.0)/2.0
1602 sec_needed = int(time_per_ps_estimate*nPoints*4)
1603 else:
1604 sec_needed = 0
1605
1606 progress_bar = None
1607 time_info = False
1608 if sec_needed>5:
1609 time_info = True
1610 logger.info("This check should take about "+\
1611 "%s to run. Started on %s."%(\
1612 str(datetime.timedelta(seconds=sec_needed)),\
1613 datetime.datetime.now().strftime("%d-%m-%Y %H:%M")))
1614 if logger.getEffectiveLevel()<logging.WARNING and \
1615 (sec_needed>5 or infos['Initialization'] == None):
1616 widgets = ['Stability check:', pbar.Percentage(), ' ',
1617 pbar.Bar(),' ', pbar.ETA(), ' ']
1618 progress_bar = pbar.ProgressBar(widgets=widgets, maxval=nPoints,
1619 fd=sys.stdout)
1620 MadLoopInitializer.fix_PSPoint_in_check(pjoin(export_dir,'SubProcesses'),
1621 read_ps = True, npoints = 1, hel_config = -1, split_orders=split_orders)
1622
1623
1624
1625 try:
1626 os.remove(pjoin(dir_path,'check'))
1627 os.remove(pjoin(dir_path,'check_sa.o'))
1628 except OSError:
1629 pass
1630
1631 devnull = open(os.devnull, 'w')
1632 retcode = subprocess.call(['make','check'],
1633 cwd=dir_path, stdout=devnull, stderr=devnull)
1634 devnull.close()
1635 if retcode != 0:
1636 logging.info("Error while executing make in %s" % dir_path)
1637 return None
1638
1639
1640
1641
1642 if not os.path.isfile(pjoin(dir_path,'StabilityCheckDriver.f')):
1643
1644
1645 if os.path.isfile(pjoin(dir_path,'born_matrix.f')):
1646 checkerName = 'StabilityCheckDriver.f'
1647 else:
1648 checkerName = 'StabilityCheckDriver_loop_induced.f'
1649
1650 with open(pjoin(self.mg_root,'Template','loop_material','Checks',
1651 checkerName),'r') as checkerFile:
1652 with open(pjoin(dir_path,'proc_prefix.txt')) as proc_prefix:
1653 checkerToWrite = checkerFile.read()%{'proc_prefix':
1654 proc_prefix.read()}
1655 checkerFile = open(pjoin(dir_path,'StabilityCheckDriver.f'),'w')
1656 checkerFile.write(checkerToWrite)
1657 checkerFile.close()
1658
1659
1660
1661
1662
1663 if os.path.isfile(pjoin(dir_path,'StabilityCheckDriver')):
1664 os.remove(pjoin(dir_path,'StabilityCheckDriver'))
1665 if os.path.isfile(pjoin(dir_path,'loop_matrix.o')):
1666 os.remove(pjoin(dir_path,'loop_matrix.o'))
1667 misc.compile(arg=['StabilityCheckDriver'], cwd=dir_path, \
1668 mode='fortran', job_specs = False)
1669
1670
1671
1672
1673 if len(process['legs'])==3:
1674 self.fix_MadLoopParamCard(dir_path, mp=False,
1675 loop_filter=False, DoubleCheckHelicityFilter=True)
1676
1677 StabChecker = subprocess.Popen([pjoin(dir_path,'StabilityCheckDriver')],
1678 stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
1679 cwd=dir_path, bufsize=0)
1680 start_index = len(DP_stability)
1681 if progress_bar!=None:
1682 progress_bar.start()
1683
1684
1685 interrupted = False
1686
1687
1688 retry = 0
1689
1690 i=start_index
1691 if options and 'events' in options and options['events']:
1692
1693 import MadSpin.decay as madspin
1694 fsock = open(options['events'])
1695 self.event_file = madspin.Event(fsock)
1696 while i<(start_index+nPoints):
1697
1698 qp_dict={}
1699 dp_dict={}
1700 UPS = None
1701 EPS = None
1702
1703 if retry==0:
1704 p = pick_PS_point(process, options)
1705
1706 try:
1707 if progress_bar!=None:
1708 progress_bar.update(i+1-start_index)
1709
1710 PSPoint = format_PS_point(p,0)
1711 dp_res=[]
1712 dp_res.append(self.get_me_value(StabChecker,PSPoint,1,
1713 split_orders=split_orders))
1714 dp_dict['CTModeA']=dp_res[-1]
1715 dp_res.append(self.get_me_value(StabChecker,PSPoint,2,
1716 split_orders=split_orders))
1717 dp_dict['CTModeB']=dp_res[-1]
1718 for rotation in range(1,num_rotations+1):
1719 PSPoint = format_PS_point(p,rotation)
1720 dp_res.append(self.get_me_value(StabChecker,PSPoint,1,
1721 split_orders=split_orders))
1722 dp_dict['Rotation%i'%rotation]=dp_res[-1]
1723
1724 if any([not res for res in dp_res]):
1725 return None
1726 dp_accuracy =((max(dp_res)-min(dp_res))/
1727 abs(sum(dp_res)/len(dp_res)))
1728 dp_dict['Accuracy'] = dp_accuracy
1729 if dp_accuracy>accuracy_threshold:
1730 if tool in [1,6]:
1731
1732 UPS = [i,p]
1733 qp_res=[]
1734 PSPoint = format_PS_point(p,0)
1735 qp_res.append(self.get_me_value(StabChecker,PSPoint,4,
1736 split_orders=split_orders))
1737 qp_dict['CTModeA']=qp_res[-1]
1738 qp_res.append(self.get_me_value(StabChecker,PSPoint,5,
1739 split_orders=split_orders))
1740 qp_dict['CTModeB']=qp_res[-1]
1741 for rotation in range(1,num_rotations+1):
1742 PSPoint = format_PS_point(p,rotation)
1743 qp_res.append(self.get_me_value(StabChecker,PSPoint,4,
1744 split_orders=split_orders))
1745 qp_dict['Rotation%i'%rotation]=qp_res[-1]
1746
1747 if any([not res for res in qp_res]):
1748 return None
1749
1750 qp_accuracy = ((max(qp_res)-min(qp_res))/
1751 abs(sum(qp_res)/len(qp_res)))
1752 qp_dict['Accuracy']=qp_accuracy
1753 if qp_accuracy>accuracy_threshold:
1754 EPS = [i,p]
1755 else:
1756
1757
1758 UPS = [i,p]
1759
1760 except KeyboardInterrupt:
1761 interrupted = True
1762 break
1763 except IOError as e:
1764 if e.errno == errno.EINTR:
1765 if retry==100:
1766 logger.error("Failed hundred times consecutively because"+
1767 " of system call interruptions.")
1768 raise
1769 else:
1770 logger.debug("Recovered from a system call interruption."+\
1771 "PSpoint #%i, Attempt #%i."%(i,retry+1))
1772
1773 time.sleep(0.5)
1774
1775 retry = retry+1
1776
1777 try:
1778 StabChecker.kill()
1779 except Exception:
1780 pass
1781 StabChecker = subprocess.Popen(\
1782 [pjoin(dir_path,'StabilityCheckDriver')],
1783 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
1784 stderr=subprocess.PIPE, cwd=dir_path, bufsize=0)
1785 continue
1786 else:
1787 raise
1788
1789
1790
1791 retry = 0
1792
1793 i=i+1
1794
1795
1796 DP_stability.append(dp_dict)
1797 QP_stability.append(qp_dict)
1798 if not EPS is None:
1799 Exceptional_PS_points.append(EPS)
1800 if not UPS is None:
1801 Unstable_PS_points.append(UPS)
1802
1803 if progress_bar!=None:
1804 progress_bar.finish()
1805 if time_info:
1806 logger.info('Finished check on %s.'%datetime.datetime.now().strftime(\
1807 "%d-%m-%Y %H:%M"))
1808
1809
1810 if not interrupted:
1811 StabChecker.stdin.write('y\n'.encode())
1812 else:
1813 StabChecker.kill()
1814
1815
1816
1817
1818
1819
1820
1821 save_load_object.save_to_file(pjoin(dir_path,savefile%'_0'),\
1822 return_dict['Stability'][tool_name])
1823
1824 if interrupted:
1825 break
1826
1827 return_dict['Process'] = matrix_element.get('processes')[0] if not \
1828 reusing else matrix_element
1829 return return_dict
1830
1831 @classmethod
1832 - def get_me_value(cls, StabChecker, PSpoint, mode, hel=-1, mu_r=-1.0,
1833 split_orders=-1):
1834 """ This version of get_me_value is simplified for the purpose of this
1835 class. No compilation is necessary. The CT mode can be specified."""
1836
1837
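        # Send one evaluation request to the StabilityCheckDriver on its stdin:
        # a 0x1A control byte, then '1' (presumably the evaluation command code),
        # followed by the CT mode, the formatted PS point, mu_r, the helicity and
        # the split_orders selection, each on its own line.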
1838 StabChecker.stdin.write('\x1a'.encode())
1839 StabChecker.stdin.write('1\n'.encode())
1840 StabChecker.stdin.write(('%d\n'%mode).encode())
1841 StabChecker.stdin.write(('%s\n'%PSpoint).encode())
1842 StabChecker.stdin.write(('%.16E\n'%mu_r).encode())
1843 StabChecker.stdin.write(('%d\n'%hel).encode())
1844 StabChecker.stdin.write(('%d\n'%split_orders).encode())
1845
1846
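        # Read the driver's stdout until the result block delimited by the
        # ##TAG#RESULT_START/STOP## markers is found, polling the process so that
        # a crashed driver is reported instead of blocking forever on readline.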
1847 try:
1848
1849 while True:
1850 output = StabChecker.stdout.readline().decode()
1851
1852 if output != '':
1853 last_non_empty = output
1854 if output==' ##TAG#RESULT_START#TAG##\n':
1855 break
1856
1857 ret_code = StabChecker.poll()
1858 if not ret_code is None:
1859 output = StabChecker.stdout.readline().decode()
1860 if output != '':
1861 last_non_empty = output
1862 error = StabChecker.stderr.readline().decode()
1863 raise MadGraph5Error("The MadLoop stability checker crashed with return code = %d, and last output:\n\nstdout: %s\nstderr: %s\n"%\
1864 (ret_code, last_non_empty, error))
1865
1866 res = ""
1867 while True:
1868 output = StabChecker.stdout.readline().decode()
1869 if output != '':
1870 last_non_empty = output
1871 if str(output)==' ##TAG#RESULT_STOP#TAG##\n':
1872 break
1873 else:
1874 res += output
1875 ret_code = StabChecker.poll()
1876 if not ret_code is None:
1877 output = StabChecker.stdout.readline().decode()
1878 if output != '':
1879 last_non_empty = output
1880 error = StabChecker.stderr.readline().decode()
1881 raise MadGraph5Error("The MadLoop stability checker crashed with return code = %d, and last output:\n\nstdout: %s\nstderr: %s\n"%\
1882 (ret_code, last_non_empty, error))
1883
1884 return cls.parse_check_output(res,format='tuple')[0][0]
1885 except IOError as e:
1886 logging.warning("Error while running MadLoop. Exception = %s"%str(e))
1887 raise e
1888
1891     """ Perform a python evaluation of the matrix element independently for
1892     all possible helicity configurations for a fixed number of points N and
1893     return the average for each in the format [[hel_config, eval],...].
1894     This is used to determine which helicity configurations vanish or are
1895     dependent at generation time, so that the output can be set up accordingly.
1896     This is not yet implemented at NLO."""
1897
1898
1899 assert isinstance(process,base_objects.Process)
1900 assert process.get('perturbation_couplings')==[]
1901
1902 N_eval=50
1903
1904 evaluator = MatrixElementEvaluator(process.get('model'), param_card,
1905 auth_skipping = False, reuse = True)
1906
1907 amplitude = diagram_generation.Amplitude(process)
1908 matrix_element = helas_objects.HelasMatrixElement(amplitude,gen_color=False)
1909
1910 cumulative_helEvals = []
1911
1912 for i in range(N_eval):
1913 p, w_rambo = evaluator.get_momenta(process)
1914 helEvals = evaluator.evaluate_matrix_element(\
1915 matrix_element, p = p, output = 'helEvals')['helEvals']
1916 if cumulative_helEvals==[]:
1917 cumulative_helEvals=copy.copy(helEvals)
1918 else:
1919 cumulative_helEvals = [[h[0],h[1]+helEvals[i][1]] for i, h in \
1920 enumerate(cumulative_helEvals)]
1921
1922
1923 cumulative_helEvals = [[h[0],h[1]/N_eval] for h in cumulative_helEvals]
1924
1925
1926
1927 clean_added_globals(ADDED_GLOBAL)
1928
1929 return cumulative_helEvals
1930
1933 """A wrapper function for running an iteration of a function over
1934 a multiprocess, without having to first create a process list
1935     (which makes a big difference for very large multiprocesses).
1936 stored_quantities is a dictionary for any quantities that we want
1937 to reuse between runs."""
1938
1939 model = multiprocess.get('model')
1940 isids = [leg.get('ids') for leg in multiprocess.get('legs') \
1941 if not leg.get('state')]
1942 fsids = [leg.get('ids') for leg in multiprocess.get('legs') \
1943 if leg.get('state')]
1944
1945 id_anti_id_dict = {}
1946 for id in set(tuple(sum(isids+fsids, []))):
1947 id_anti_id_dict[id] = model.get_particle(id).get_anti_pdg_code()
1948 id_anti_id_dict[model.get_particle(id).get_anti_pdg_code()] = id
1949 sorted_ids = []
1950 results = []
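    # Loop over all flavour combinations of the multiprocess; combinations that
    # are mere crossings of an already-checked process are skipped through
    # check_already_checked and the sorted_ids cache.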
1951 for is_prod in itertools.product(*isids):
1952 for fs_prod in itertools.product(*fsids):
1953
1954
1955 if check_already_checked(is_prod, fs_prod, sorted_ids,
1956 multiprocess, model, id_anti_id_dict):
1957 continue
1958
1959 process = multiprocess.get_process_with_legs(base_objects.LegList(\
1960 [base_objects.Leg({'id': id, 'state':False}) for \
1961 id in is_prod] + \
1962 [base_objects.Leg({'id': id, 'state':True}) for \
1963 id in fs_prod]))
1964
1965 if opt is not None:
1966 if isinstance(opt, dict):
1967 try:
1968 value = opt[process.base_string()]
1969 except Exception:
1970 continue
1971 result = function(process, stored_quantities, value, options=options)
1972 else:
1973 result = function(process, stored_quantities, opt, options=options)
1974 else:
1975 result = function(process, stored_quantities, options=options)
1976
1977 if result:
1978 results.append(result)
1979
1980 return results
1981
1982
1983
1984
1985
1986 -def check_already_checked(is_ids, fs_ids, sorted_ids, process, model,
1987 id_anti_id_dict = {}):
1988 """Check if process already checked, if so return True, otherwise add
1989 process and antiprocess to sorted_ids."""
1990
1991
1992 if id_anti_id_dict:
1993 is_ids = [id_anti_id_dict[id] for id in \
1994 is_ids]
1995 else:
1996 is_ids = [model.get_particle(id).get_anti_pdg_code() for id in \
1997 is_ids]
1998
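    # Canonical key for this process: the sorted PDG codes of the (conjugated)
    # initial state and of the final state, together with the process id.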
1999 ids = array.array('i', sorted(is_ids + list(fs_ids)) + \
2000 [process.get('id')])
2001
2002 if ids in sorted_ids:
2003
2004 return True
2005
2006
2007 sorted_ids.append(ids)
2008
2009
2010 return False
2011
2017     """ Generate a loop matrix element from the process definition and return
2018     it along with the timing information dictionary.
2019     If reuse is True, an already existing output directory is reused when found.
2020     A proc_name can optionally be specified."""
2021
2022 assert isinstance(process_definition,
2023 (base_objects.ProcessDefinition,base_objects.Process))
2024 assert process_definition.get('perturbation_couplings')!=[]
2025
2026 if isinstance(process_definition,base_objects.ProcessDefinition):
2027 if any(len(l.get('ids'))>1 for l in process_definition.get('legs')):
2028             raise InvalidCmd("This check can only be performed on single "+
2029                              "processes (i.e. without multiparticle labels).")
2030
2031 isids = [leg.get('ids')[0] for leg in process_definition.get('legs') \
2032 if not leg.get('state')]
2033 fsids = [leg.get('ids')[0] for leg in process_definition.get('legs') \
2034 if leg.get('state')]
2035
2036
2037 process = process_definition.get_process(isids,fsids)
2038 else:
2039 process = process_definition
2040
2041 if not output_path is None:
2042 root_path = output_path
2043 else:
2044 root_path = cmd._mgme_dir
2045
2046 timing = {'Diagrams_generation': None,
2047 'n_loops': None,
2048 'HelasDiagrams_generation': None,
2049 'n_loop_groups': None,
2050 'n_loop_wfs': None,
2051 'loop_wfs_ranks': None}
2052
2053 if proc_name:
2054 proc_dir = pjoin(root_path,proc_name)
2055 else:
2056 proc_dir = pjoin(root_path,"SAVED"+temp_dir_prefix+"_%s"%(
2057 '_'.join(process.shell_string().split('_')[1:])))
2058 if reuse and os.path.isdir(proc_dir):
2059 logger.info("Reusing directory %s"%str(proc_dir))
2060
2061 return timing, process
2062
2063 logger.info("Generating p%s"%process_definition.nice_string()[1:])
2064
2065 start=time.time()
2066 try:
2067 amplitude = loop_diagram_generation.LoopAmplitude(process,
2068 loop_filter=loop_filter)
2069 except InvalidCmd:
2070
2071
2072 return time.time()-start, None
2073 if not amplitude.get('diagrams'):
2074
2075 return time.time()-start, None
2076
2077
2078
2079 loop_optimized_output = cmd.options['loop_optimized_output']
2080 timing['Diagrams_generation']=time.time()-start
2081 timing['n_loops']=len(amplitude.get('loop_diagrams'))
2082 start=time.time()
2083
2084 matrix_element = loop_helas_objects.LoopHelasMatrixElement(amplitude,
2085 optimized_output = loop_optimized_output,gen_color=True)
2086
2087
2088
2089 matrix_element.compute_all_analytic_information()
2090 timing['HelasDiagrams_generation']=time.time()-start
2091
2092 if loop_optimized_output:
2093 timing['n_loop_groups']=len(matrix_element.get('loop_groups'))
2094 lwfs=[l for ldiag in matrix_element.get_loop_diagrams() for l in \
2095 ldiag.get('loop_wavefunctions')]
2096 timing['n_loop_wfs']=len(lwfs)
2097 timing['loop_wfs_ranks']=[]
2098 for rank in range(0,max([l.get_analytic_info('wavefunction_rank') \
2099 for l in lwfs])+1):
2100 timing['loop_wfs_ranks'].append(\
2101 len([1 for l in lwfs if \
2102 l.get_analytic_info('wavefunction_rank')==rank]))
2103
2104 return timing, matrix_element
2105
2106
2107
2108
2109 -def check_profile(process_definition, param_card = None,cuttools="",tir={},
2110 options = {}, cmd = FakeInterface(),output_path=None,MLOptions={}):
2111 """For a single loop process, check both its timings and then its stability
2112 in one go without regenerating it."""
2113
2114 if 'reuse' not in options:
2115 keep_folder=False
2116 else:
2117 keep_folder = options['reuse']
2118
2119 model=process_definition.get('model')
2120
2121 timing1, matrix_element = generate_loop_matrix_element(process_definition,
2122 keep_folder,output_path=output_path,cmd=cmd)
2123 reusing = isinstance(matrix_element, base_objects.Process)
2124 options['reuse'] = reusing
2125 myProfiler = LoopMatrixElementTimer(cuttools_dir=cuttools,tir_dir=tir,
2126 model=model, output_path=output_path, cmd=cmd)
2127
2128 if not myProfiler.loop_optimized_output:
2129 MLoptions={}
2130 else:
2131 MLoptions=MLOptions
2132
2133 timing2 = myProfiler.time_matrix_element(matrix_element, reusing,
2134 param_card, keep_folder=keep_folder,options=options,
2135 MLOptions = MLoptions)
2136
2137 timing2['reduction_tool'] = MLoptions['MLReductionLib'][0]
2138
2139 if timing2 == None:
2140 return None, None
2141
2142
2143 timing = dict(list(timing1.items())+list(timing2.items()))
2144 stability = myProfiler.check_matrix_element_stability(matrix_element,
2145 options=options, infos_IN=timing,param_card=param_card,
2146 keep_folder = keep_folder,
2147 MLOptions = MLoptions)
2148 if stability == None:
2149 return None, None
2150 else:
2151 timing['loop_optimized_output']=myProfiler.loop_optimized_output
2152 stability['loop_optimized_output']=myProfiler.loop_optimized_output
2153 return timing, stability
2154
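# A minimal usage sketch for the profile check (assuming a fully configured
# MadGraph interface 'cmd' and a single loop ProcessDefinition 'procdef'; the
# CutTools path below is a placeholder):
#
#   timing, stability = check_profile(procdef, cuttools='/path/to/cuttools',
#                                     options={'reuse': False}, cmd=cmd,
#                                     output_path='.')
#   print(output_profile(procdef, stability, timing, output_path='.'))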
2155
2156
2157
2158 -def check_stability(process_definition, param_card = None,cuttools="",tir={},
2159 options=None,nPoints=100, output_path=None,
2160 cmd = FakeInterface(), MLOptions = {}):
2161     """For a single loop process, check the numerical stability of its loop
2162     matrix element evaluation over a sample of phase-space points."""
2163
2164     if "reuse" in options:
2165         reuse=options['reuse']
2166     else:
2167         reuse=False
2168 
2170     keep_folder = reuse
2171 model=process_definition.get('model')
2172
2173 timing, matrix_element = generate_loop_matrix_element(process_definition,
2174 reuse, output_path=output_path, cmd=cmd)
2175 reusing = isinstance(matrix_element, base_objects.Process)
2176 options['reuse'] = reusing
2177 myStabilityChecker = LoopMatrixElementTimer(cuttools_dir=cuttools,tir_dir=tir,
2178 output_path=output_path,model=model,cmd=cmd)
2179
2180 if not myStabilityChecker.loop_optimized_output:
2181 MLoptions = {}
2182 else:
2183 MLoptions = MLOptions
2184
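    # COLLIER-specific defaults for the stability check: skip the UV/IR pole
    # computations and COLLIER's internal stability test, require a tight target
    # accuracy, and force the global cache off.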
2185 if 'COLLIERComputeUVpoles' not in MLoptions:
2186 MLoptions['COLLIERComputeUVpoles']=False
2187 if 'COLLIERComputeIRpoles' not in MLoptions:
2188 MLoptions['COLLIERComputeIRpoles']=False
2189
2190 if 'COLLIERRequiredAccuracy' not in MLoptions:
2191 MLoptions['COLLIERRequiredAccuracy']=1e-13
2192
2193 if 'COLLIERUseInternalStabilityTest' not in MLoptions:
2194 MLoptions['COLLIERUseInternalStabilityTest']=False
2195
2196
2197
2198 MLoptions['COLLIERGlobalCache'] = 0
2199
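    # Build the list of reduction tools to test from what is available; the
    # integer codes follow the MLReductionLib convention (1=CutTools, 2=PJFry++,
    # 3=IREGI, 4=Golem95, 5=Samurai, 6=Ninja, 7=COLLIER).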
2200 if "MLReductionLib" not in MLOptions:
2201 MLoptions["MLReductionLib"] = []
2202 if cuttools:
2203 MLoptions["MLReductionLib"].extend([1])
2204 if "iregi_dir" in tir:
2205 MLoptions["MLReductionLib"].extend([3])
2206 if "pjfry_dir" in tir:
2207 MLoptions["MLReductionLib"].extend([2])
2208 if "golem_dir" in tir:
2209 MLoptions["MLReductionLib"].extend([4])
2210 if "samurai_dir" in tir:
2211 MLoptions["MLReductionLib"].extend([5])
2212 if "ninja_dir" in tir:
2213 MLoptions["MLReductionLib"].extend([6])
2214 if "collier_dir" in tir:
2215 MLoptions["MLReductionLib"].extend([7])
2216
2217 stability = myStabilityChecker.check_matrix_element_stability(matrix_element,
2218 options=options,param_card=param_card,
2219 keep_folder=keep_folder,
2220 MLOptions=MLoptions)
2221
2222 if stability == None:
2223 return None
2224 else:
2225 stability['loop_optimized_output']=myStabilityChecker.loop_optimized_output
2226 return stability
2227
2228
2229
2230
2231 -def check_timing(process_definition, param_card= None, cuttools="",tir={},
2232 output_path=None, options={}, cmd = FakeInterface(),
2233 MLOptions = {}):
2234 """For a single loop process, give a detailed summary of the generation and
2235 execution timing."""
2236
2237 if 'reuse' not in options:
2238 keep_folder = False
2239 else:
2240 keep_folder = options['reuse']
2241 model=process_definition.get('model')
2242 timing1, matrix_element = generate_loop_matrix_element(process_definition,
2243 keep_folder, output_path=output_path, cmd=cmd)
2244 reusing = isinstance(matrix_element, base_objects.Process)
2245 options['reuse'] = reusing
2246 myTimer = LoopMatrixElementTimer(cuttools_dir=cuttools,model=model,tir_dir=tir,
2247 output_path=output_path, cmd=cmd)
2248
2249 if not myTimer.loop_optimized_output:
2250 MLoptions = {}
2251 else:
2252 MLoptions = MLOptions
2253
2254 if 'COLLIERComputeUVpoles' not in MLoptions:
2255 MLoptions['COLLIERComputeUVpoles']=False
2256 if 'COLLIERComputeIRpoles' not in MLoptions:
2257 MLoptions['COLLIERComputeIRpoles']=False
2258
2259 if 'COLLIERGlobalCache' not in MLoptions:
2260 MLoptions['COLLIERGlobalCache']=-1
2261
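    # If no reduction tool was requested, time the process with Ninja (code 6).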
2262 if 'MLReductionLib' not in MLoptions or \
2263 len(MLoptions['MLReductionLib'])==0:
2264 MLoptions['MLReductionLib'] = [6]
2265
2266 timing2 = myTimer.time_matrix_element(matrix_element, reusing, param_card,
2267 keep_folder = keep_folder, options=options,
2268 MLOptions = MLoptions)
2269
2270 if timing2 == None:
2271 return None
2272 else:
2273
2274 res = dict(list(timing1.items())+list(timing2.items()))
2275 res['loop_optimized_output']=myTimer.loop_optimized_output
2276 res['reduction_tool'] = MLoptions['MLReductionLib'][0]
2277 return res
2278
2279
2280
2281
2282 -def check_processes(processes, param_card = None, quick = [],cuttools="",tir={},
2283 options=None, reuse = False, output_path=None, cmd = FakeInterface()):
2284 """Check processes by generating them with all possible orderings
2285 of particles (which means different diagram building and Helas
2286 calls), and comparing the resulting matrix element values."""
2287
2288 cmass_scheme = cmd.options['complex_mass_scheme']
2289 if isinstance(processes, base_objects.ProcessDefinition):
2290
2291
2292 multiprocess = processes
2293 model = multiprocess.get('model')
2294
2295
2296 if multiprocess.get('perturbation_couplings')==[]:
2297 evaluator = MatrixElementEvaluator(model,
2298 auth_skipping = True, reuse = False, cmd = cmd)
2299 else:
2300 evaluator = LoopMatrixElementEvaluator(cuttools_dir=cuttools,tir_dir=tir,
2301 model=model, auth_skipping = True,
2302 reuse = False, output_path=output_path, cmd = cmd)
2303
2304 results = run_multiprocs_no_crossings(check_process,
2305 multiprocess,
2306 evaluator,
2307 quick,
2308 options)
2309
2310 if "used_lorentz" not in evaluator.stored_quantities:
2311 evaluator.stored_quantities["used_lorentz"] = []
2312
2313 if multiprocess.get('perturbation_couplings')!=[] and not reuse:
2314
2315 clean_up(output_path)
2316
2317 return results, evaluator.stored_quantities["used_lorentz"]
2318
2319 elif isinstance(processes, base_objects.Process):
2320 processes = base_objects.ProcessList([processes])
2321 elif isinstance(processes, base_objects.ProcessList):
2322 pass
2323 else:
2324 raise InvalidCmd("processes is of non-supported format")
2325
2326 if not processes:
2327 raise InvalidCmd("No processes given")
2328
2329 model = processes[0].get('model')
2330
2331
2332 if processes[0].get('perturbation_couplings')==[]:
2333 evaluator = MatrixElementEvaluator(model, param_card,
2334 auth_skipping = True, reuse = False, cmd = cmd)
2335 else:
2336 evaluator = LoopMatrixElementEvaluator(cuttools_dir=cuttools, tir_dir=tir,
2337 model=model,param_card=param_card,
2338 auth_skipping = True, reuse = False,
2339 output_path=output_path, cmd = cmd)
2340
2341
2342
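    # Loop over the explicit process list, skipping any process that is just a
    # crossing of one already checked.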
2343 sorted_ids = []
2344 comparison_results = []
2345
2346
2347 for process in processes:
2348
2349
2350 if check_already_checked([l.get('id') for l in process.get('legs') if \
2351 not l.get('state')],
2352 [l.get('id') for l in process.get('legs') if \
2353 l.get('state')],
2354 sorted_ids, process, model):
2355 continue
2356
2357 res = check_process(process, evaluator, quick, options)
2358 if res:
2359 comparison_results.append(res)
2360
2361 if "used_lorentz" not in evaluator.stored_quantities:
2362 evaluator.stored_quantities["used_lorentz"] = []
2363
2364 if processes[0].get('perturbation_couplings')!=[] and not reuse:
2365
2366 clean_up(output_path)
2367
2368 return comparison_results, evaluator.stored_quantities["used_lorentz"]
2369
2371 """Check the helas calls for a process by generating the process
2372 using all different permutations of the process legs (or, if
2373 quick, use a subset of permutations), and check that the matrix
2374 element is invariant under this."""
2375
2376 model = process.get('model')
2377
2378
2379 for i, leg in enumerate(process.get('legs')):
2380 leg.set('number', i+1)
2381
2382 logger.info("Checking crossings of %s" % \
2383 process.nice_string().replace('Process:', 'process'))
2384
2385 process_matrix_elements = []
2386
2387
2388
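    # In quick mode only a subset of permutations is tested: a permutation is
    # kept only if at least one leg sits in a position it has not occupied in a
    # previously tested permutation.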
2389 if quick:
2390 leg_positions = [[] for leg in process.get('legs')]
2391 quick = list(range(1,len(process.get('legs')) + 1))
2392
2393 values = []
2394
2395
2396 number_checked=0
2397 for legs in itertools.permutations(process.get('legs')):
2398
2399 order = [l.get('number') for l in legs]
2400 if quick:
2401 found_leg = True
2402 for num in quick:
2403
2404
2405 leg_position = legs.index([l for l in legs if \
2406 l.get('number') == num][0])
2407
2408 if not leg_position in leg_positions[num-1]:
2409 found_leg = False
2410 leg_positions[num-1].append(leg_position)
2411
2412 if found_leg:
2413 continue
2414
2415
2416
2417 if quick and process.get('perturbation_couplings') and number_checked >3:
2418 continue
2419
2420 legs = base_objects.LegList(legs)
2421
2422 if order != list(range(1,len(legs) + 1)):
2423 logger.info("Testing permutation: %s" % \
2424 order)
2425
2426 newproc = copy.copy(process)
2427 newproc.set('legs',legs)
2428
2429
2430 try:
2431 if newproc.get('perturbation_couplings')==[]:
2432 amplitude = diagram_generation.Amplitude(newproc)
2433 else:
2434
2435 loop_base_objects.cutting_method = 'optimal' if \
2436 number_checked%2 == 0 else 'default'
2437 amplitude = loop_diagram_generation.LoopAmplitude(newproc)
2438 except InvalidCmd:
2439 result=False
2440 else:
2441 result = amplitude.get('diagrams')
2442
2443 loop_base_objects.cutting_method = 'optimal'
2444
2445 if not result:
2446
2447 logging.info("No diagrams for %s" % \
2448 process.nice_string().replace('Process', 'process'))
2449 break
2450
2451 if order == list(range(1,len(legs) + 1)):
2452
2453 p, w_rambo = evaluator.get_momenta(process, options)
2454
2455
2456 if not isinstance(amplitude,loop_diagram_generation.LoopAmplitude):
2457 matrix_element = helas_objects.HelasMatrixElement(amplitude,
2458 gen_color=False)
2459 else:
2460 matrix_element = loop_helas_objects.LoopHelasMatrixElement(amplitude,
2461 optimized_output=evaluator.loop_optimized_output)
2462
2463
2464
2465
2466 if amplitude.get('process').get('has_born'):
2467
2468
2469 if matrix_element in process_matrix_elements:
2470
2471
2472 continue
2473
2474 process_matrix_elements.append(matrix_element)
2475
2476 res = evaluator.evaluate_matrix_element(matrix_element, p = p,
2477 options=options)
2478 if res == None:
2479 break
2480
2481 values.append(res[0])
2482 number_checked += 1
2483
2484
2485
2486 if abs(max(values)) + abs(min(values)) > 0 and \
2487 2 * abs(max(values) - min(values)) / \
2488 (abs(max(values)) + abs(min(values))) > 0.01:
2489 break
2490
2491
2492 if not values:
2493 return None
2494
2495
2496
2497 diff = 0
2498 if abs(max(values)) + abs(min(values)) > 0:
2499 diff = 2* abs(max(values) - min(values)) / \
2500 (abs(max(values)) + abs(min(values)))
2501
2502
2503 if process.get('perturbation_couplings'):
2504 passed = diff < 1.e-5
2505 else:
2506 passed = diff < 1.e-8
2507
2508 return {"process": process,
2509 "momenta": p,
2510 "values": values,
2511 "difference": diff,
2512 "passed": passed}
2513
2515     """Clean up the possible left-over outputs from 'evaluate_matrix_element' of
2516     the LoopMatrixElementEvaluator (when its argument proliferate is set to True)."""
2517 
2518     if mg_root is None:
2519         return  # nothing to clean up when no output root directory is given
2520
2521 directories = misc.glob('%s*' % temp_dir_prefix, mg_root)
2522 if directories != []:
2523 logger.debug("Cleaning temporary %s* check runs."%temp_dir_prefix)
2524 for dir in directories:
2525
2526 if os.path.isdir(pjoin(dir,'SubProcesses')):
2527 shutil.rmtree(dir)
2528
2537
2538 -def output_profile(myprocdef, stability, timing, output_path, reusing=False):
2539 """Present the results from a timing and stability consecutive check"""
2540
2541
2542 opt = timing['loop_optimized_output']
2543
2544 text = 'Timing result for the '+('optimized' if opt else 'default')+\
2545 ' output:\n'
2546 text += output_timings(myprocdef,timing)
2547
2548 text += '\nStability result for the '+('optimized' if opt else 'default')+\
2549 ' output:\n'
2550 text += output_stability(stability,output_path, reusing=reusing)
2551
2552 mode = 'optimized' if opt else 'default'
2553 logFilePath = pjoin(output_path, 'profile_%s_%s.log'\
2554 %(mode,stability['Process'].shell_string()))
2555 logFile = open(logFilePath, 'w')
2556 logFile.write(text)
2557 logFile.close()
2558 logger.info('Log of this profile check was output to file %s'\
2559 %str(logFilePath))
2560 return text
2561
2563     """Present the result of a stability check in a nice format.
2564     The full info is written to 'stability_<mode>_<proc_shell_string>.log'
2565     under the output folder (output_path)."""
2566
2567 def accuracy(eval_list):
2568 """ Compute the accuracy from different evaluations."""
2569 return (2.0*(max(eval_list)-min(eval_list))/
2570 abs(max(eval_list)+min(eval_list)))
2571
2572 def best_estimate(eval_list):
2573 """ Returns the best estimate from different evaluations."""
2574 return (max(eval_list)+min(eval_list))/2.0
2575
2576 def loop_direction_test_power(eval_list):
2577         """ Compute the loop direction test power P, defined as follows:
2578              P = accuracy(loop_dir_test) / accuracy(all_test)
2579             so that P is large if the loop direction test is effective.
2580             The tuple returned is (log(median(P)),log(min(P)),frac)
2581             where frac is the fraction of events with log10(P) smaller than -3,
2582             i.e. events for which the loop direction test claims an
2583             accuracy three digits better than it really is according to the other
2584             tests."""
2585 powers=[]
2586 for eval in eval_list:
2587 loop_dir_evals = [eval['CTModeA'],eval['CTModeB']]
2588
2589 other_evals = [eval[key] for key in eval.keys() if key not in \
2590 ['CTModeB','Accuracy']]
2591 if accuracy(other_evals)!=0.0 and accuracy(loop_dir_evals)!=0.0:
2592 powers.append(accuracy(loop_dir_evals)/accuracy(other_evals))
2593
2594 n_fail=0
2595 for p in powers:
2596 if (math.log(p)/math.log(10))<-3:
2597 n_fail+=1
2598
2599 if len(powers)==0:
2600 return (None,None,None)
2601
2602 return (math.log(median(powers))/math.log(10),
2603 math.log(min(powers))/math.log(10),
2604 n_fail/len(powers))
2605
2606 def test_consistency(dp_eval_list, qp_eval_list):
2607 """ Computes the consistency test C from the DP and QP evaluations.
2608 C = accuracy(all_DP_test) / abs(best_QP_eval-best_DP_eval)
2609 So a consistent test would have C as close to one as possible.
2610 The tuple returned is (log(median(C)),log(min(C)),log(max(C)))"""
2611 consistencies = []
2612 for dp_eval, qp_eval in zip(dp_eval_list,qp_eval_list):
2613 dp_evals = [dp_eval[key] for key in dp_eval.keys() \
2614 if key!='Accuracy']
2615 qp_evals = [qp_eval[key] for key in qp_eval.keys() \
2616 if key!='Accuracy']
2617 if (abs(best_estimate(qp_evals)-best_estimate(dp_evals)))!=0.0 and \
2618 accuracy(dp_evals)!=0.0:
2619 consistencies.append(accuracy(dp_evals)/(abs(\
2620 best_estimate(qp_evals)-best_estimate(dp_evals))))
2621
2622 if len(consistencies)==0:
2623 return (None,None,None)
2624
2625 return (math.log(median(consistencies))/math.log(10),
2626 math.log(min(consistencies))/math.log(10),
2627 math.log(max(consistencies))/math.log(10))
2628
2629 def median(orig_list):
2630         """ Find the median of a float list (a sorted copy is made internally). """
2631 tmp=copy.copy(orig_list)
2632 tmp.sort()
2633 if len(tmp)%2==0:
2634 return (tmp[int((len(tmp)/2)-1)]+tmp[int(len(tmp)/2)])/2.0
2635 else:
2636 return tmp[int((len(tmp)-1)/2)]
2637
2638
2639 f = format_output
2640 opt = stability['loop_optimized_output']
2641
2642 mode = 'optimized' if opt else 'default'
2643 process = stability['Process']
2644 res_str = "Stability checking for %s (%s mode)\n"\
2645 %(process.nice_string()[9:],mode)
2646
2647 logFile = open(pjoin(output_path, 'stability_%s_%s.log'\
2648 %(mode,process.shell_string())), 'w')
2649
2650 logFile.write('Stability check results\n\n')
2651 logFile.write(res_str)
2652 data_plot_dict={}
2653 accuracy_dict={}
2654 nPSmax=0
2655 max_acc=0.0
2656 min_acc=1.0
2657 if stability['Stability']:
2658 toolnames= list(stability['Stability'].keys())
2659 toolnamestr=" | ".join(tn+
2660 ''.join([' ']*(10-len(tn))) for tn in toolnames)
2661 DP_stability = [[eval['Accuracy'] for eval in stab['DP_stability']] \
2662 for key,stab in stability['Stability'].items()]
2663 med_dp_stab_str=" | ".join([f(median(dp_stab),'%.2e ') for dp_stab in DP_stability])
2664 min_dp_stab_str=" | ".join([f(min(dp_stab),'%.2e ') for dp_stab in DP_stability])
2665 max_dp_stab_str=" | ".join([f(max(dp_stab),'%.2e ') for dp_stab in DP_stability])
2666 UPS = [stab['Unstable_PS_points'] for key,stab in stability['Stability'].items()]
2667 res_str_i = "\n= Tool (DoublePrec for CT)....... %s\n"%toolnamestr
2668 len_PS=["%i"%len(evals)+\
2669 ''.join([' ']*(10-len("%i"%len(evals)))) for evals in DP_stability]
2670 len_PS_str=" | ".join(len_PS)
2671 res_str_i += "|= Number of PS points considered %s\n"%len_PS_str
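        # Note: a smaller 'accuracy' value means a better precision, hence min()
        # feeds the 'Max accuracy' line and max() the 'Min accuracy' line below.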
2672 res_str_i += "|= Median accuracy............... %s\n"%med_dp_stab_str
2673 res_str_i += "|= Max accuracy.................. %s\n"%min_dp_stab_str
2674 res_str_i += "|= Min accuracy.................. %s\n"%max_dp_stab_str
2675 pmedminlist=[]
2676 pfraclist=[]
2677 for key,stab in stability['Stability'].items():
2678 (pmed,pmin,pfrac)=loop_direction_test_power(stab['DP_stability'])
2679 ldtest_str = "%s,%s"%(f(pmed,'%.1f'),f(pmin,'%.1f'))
2680 pfrac_str = f(pfrac,'%.2e')
2681 pmedminlist.append(ldtest_str+''.join([' ']*(10-len(ldtest_str))))
2682 pfraclist.append(pfrac_str+''.join([' ']*(10-len(pfrac_str))))
2683 pmedminlist_str=" | ".join(pmedminlist)
2684 pfraclist_str=" | ".join(pfraclist)
2685 res_str_i += "|= Overall DP loop_dir test power %s\n"%pmedminlist_str
2686 res_str_i += "|= Fraction of evts with power<-3 %s\n"%pfraclist_str
2687 len_UPS=["%i"%len(upup)+\
2688 ''.join([' ']*(10-len("%i"%len(upup)))) for upup in UPS]
2689 len_UPS_str=" | ".join(len_UPS)
2690 res_str_i += "|= Number of Unstable PS points %s\n"%len_UPS_str
2691 res_str_i += \
2692 """
2693 = Legend for the statistics of the stability tests. (all logs below are log_10)
2694     The loop direction test power P is computed as follows:
2695 P = accuracy(loop_dir_test) / accuracy(all_other_test)
2696 So that log(P) is positive if the loop direction test is effective.
2697 The tuple printed out is (log(median(P)),log(min(P)))
2698 The consistency test C is computed when QP evaluations are available:
2699 C = accuracy(all_DP_test) / abs(best_QP_eval-best_DP_eval)
2700 So a consistent test would have log(C) as close to zero as possible.
2701 The tuple printed out is (log(median(C)),log(min(C)),log(max(C)))\n"""
2702 res_str+=res_str_i
2703 for key in stability['Stability'].keys():
2704 toolname=key
2705 stab=stability['Stability'][key]
2706 DP_stability = [eval['Accuracy'] for eval in stab['DP_stability']]
2707
2708 QP_stability = [eval['Accuracy'] if eval!={} else -1.0 for eval in \
2709 stab['QP_stability']]
2710 nPS = len(DP_stability)
2711 if nPS>nPSmax:nPSmax=nPS
2712 UPS = stab['Unstable_PS_points']
2713 UPS_stability_DP = [DP_stability[U[0]] for U in UPS]
2714 UPS_stability_QP = [QP_stability[U[0]] for U in UPS]
2715 EPS = stab['Exceptional_PS_points']
2716 EPS_stability_DP = [DP_stability[E[0]] for E in EPS]
2717 EPS_stability_QP = [QP_stability[E[0]] for E in EPS]
2718 res_str_i = ""
2719
2720 xml_toolname = {'GOLEM95':'GOLEM','IREGI':'IREGI',
2721 'CUTTOOLS':'CUTTOOLS','PJFRY++':'PJFRY',
2722 'NINJA':'NINJA','SAMURAI':'SAMURAI',
2723 'COLLIER':'COLLIER'}[toolname.upper()]
2724 if len(UPS)>0:
2725 res_str_i = "\nDetails of the %d/%d UPS encountered by %s\n"\
2726 %(len(UPS),nPS,toolname)
2727 prefix = 'DP' if toolname=='CutTools' else ''
2728 res_str_i += "|= %s Median inaccuracy.......... %s\n"\
2729 %(prefix,f(median(UPS_stability_DP),'%.2e'))
2730 res_str_i += "|= %s Max accuracy............... %s\n"\
2731 %(prefix,f(min(UPS_stability_DP),'%.2e'))
2732 res_str_i += "|= %s Min accuracy............... %s\n"\
2733 %(prefix,f(max(UPS_stability_DP),'%.2e'))
2734 (pmed,pmin,pfrac)=loop_direction_test_power(\
2735 [stab['DP_stability'][U[0]] for U in UPS])
2736 if toolname=='CutTools':
2737 res_str_i += "|= UPS DP loop_dir test power.... %s,%s\n"\
2738 %(f(pmed,'%.1f'),f(pmin,'%.1f'))
2739 res_str_i += "|= UPS DP fraction with power<-3. %s\n"\
2740 %f(pfrac,'%.2e')
2741 res_str_i += "|= QP Median accuracy............ %s\n"\
2742 %f(median(UPS_stability_QP),'%.2e')
2743 res_str_i += "|= QP Max accuracy............... %s\n"\
2744 %f(min(UPS_stability_QP),'%.2e')
2745 res_str_i += "|= QP Min accuracy............... %s\n"\
2746 %f(max(UPS_stability_QP),'%.2e')
2747 (pmed,pmin,pfrac)=loop_direction_test_power(\
2748 [stab['QP_stability'][U[0]] for U in UPS])
2749 res_str_i += "|= UPS QP loop_dir test power.... %s,%s\n"\
2750 %(f(pmed,'%.1f'),f(pmin,'%.1f'))
2751 res_str_i += "|= UPS QP fraction with power<-3. %s\n"%f(pfrac,'%.2e')
2752 (pmed,pmin,pmax)=test_consistency(\
2753 [stab['DP_stability'][U[0]] for U in UPS],
2754 [stab['QP_stability'][U[0]] for U in UPS])
2755 res_str_i += "|= DP vs QP stab test consistency %s,%s,%s\n"\
2756 %(f(pmed,'%.1f'),f(pmin,'%.1f'),f(pmax,'%.1f'))
2757 if len(EPS)==0:
2758 res_str_i += "= Number of Exceptional PS points : 0\n"
2759 if len(EPS)>0:
2760 res_str_i = "\nDetails of the %d/%d EPS encountered by %s\n"\
2761 %(len(EPS),nPS,toolname)
2762 res_str_i += "|= DP Median accuracy............ %s\n"\
2763 %f(median(EPS_stability_DP),'%.2e')
2764 res_str_i += "|= DP Max accuracy............... %s\n"\
2765 %f(min(EPS_stability_DP),'%.2e')
2766 res_str_i += "|= DP Min accuracy............... %s\n"\
2767 %f(max(EPS_stability_DP),'%.2e')
2768 pmed,pmin,pfrac=loop_direction_test_power(\
2769 [stab['DP_stability'][E[0]] for E in EPS])
2770 res_str_i += "|= EPS DP loop_dir test power.... %s,%s\n"\
2771 %(f(pmed,'%.1f'),f(pmin,'%.1f'))
2772 res_str_i += "|= EPS DP fraction with power<-3. %s\n"\
2773 %f(pfrac,'%.2e')
2774 res_str_i += "|= QP Median accuracy............ %s\n"\
2775 %f(median(EPS_stability_QP),'%.2e')
2776 res_str_i += "|= QP Max accuracy............... %s\n"\
2777 %f(min(EPS_stability_QP),'%.2e')
2778 res_str_i += "|= QP Min accuracy............... %s\n"\
2779 %f(max(EPS_stability_QP),'%.2e')
2780 pmed,pmin,pfrac=loop_direction_test_power(\
2781 [stab['QP_stability'][E[0]] for E in EPS])
2782 res_str_i += "|= EPS QP loop_dir test power.... %s,%s\n"\
2783 %(f(pmed,'%.1f'),f(pmin,'%.1f'))
2784 res_str_i += "|= EPS QP fraction with power<-3. %s\n"%f(pfrac,'%.2e')
2785
2786 logFile.write(res_str_i)
2787
2788 if len(EPS)>0:
2789 logFile.write('\nFull details of the %i EPS encountered by %s.\n'\
2790 %(len(EPS),toolname))
2791 logFile.write('<EPS_data reduction=%s>\n'%xml_toolname.upper())
2792 for i, eps in enumerate(EPS):
2793 logFile.write('\nEPS #%i\n'%(i+1))
2794 logFile.write('\n'.join([' '+' '.join(['%.16E'%pi for pi in p]) \
2795 for p in eps[1]]))
2796 logFile.write('\n DP accuracy : %.4e\n'%DP_stability[eps[0]])
2797 logFile.write(' QP accuracy : %.4e\n'%QP_stability[eps[0]])
2798 logFile.write('</EPS_data>\n')
2799 if len(UPS)>0:
2800 logFile.write('\nFull details of the %i UPS encountered by %s.\n'\
2801 %(len(UPS),toolname))
2802 logFile.write('<UPS_data reduction=%s>\n'%xml_toolname.upper())
2803 for i, ups in enumerate(UPS):
2804 logFile.write('\nUPS #%i\n'%(i+1))
2805 logFile.write('\n'.join([' '+' '.join(['%.16E'%pi for pi in p]) \
2806 for p in ups[1]]))
2807 logFile.write('\n DP accuracy : %.4e\n'%DP_stability[ups[0]])
2808 logFile.write(' QP accuracy : %.4e\n'%QP_stability[ups[0]])
2809 logFile.write('</UPS_data>\n')
2810
2811 logFile.write('\nData entries for the stability plot.\n')
2812         logFile.write('First column is a maximal accuracy delta, second is the '+\
2813                       'fraction of events with DP accuracy worse than delta.\n')
2814 logFile.write('<plot_data reduction=%s>\n'%xml_toolname.upper())
2815
2816 if max(DP_stability)>0.0:
2817 min_digit_acc=int(math.log(max(DP_stability))/math.log(10))
2818 if min_digit_acc>=0:
2819 min_digit_acc = min_digit_acc+1
2820 accuracies=[10**(-17+(i/5.0)) for i in range(5*(17+min_digit_acc)+1)]
2821 else:
2822 logFile.writelines('%.4e %.4e\n'%(accuracies[i], 0.0) for i in \
2823 range(len(accuracies)))
2824 logFile.write('</plot_data>\n')
2825 res_str_i += '\nPerfect accuracy over all the trial PS points. No plot'+\
2826 ' is output then.'
2827 logFile.write('Perfect accuracy over all the trial PS points.')
2828 res_str +=res_str_i
2829 continue
2830
2831 accuracy_dict[toolname]=accuracies
2832 if max(accuracies) > max_acc: max_acc=max(accuracies)
2833 if min(accuracies) < min_acc: min_acc=min(accuracies)
2834 data_plot=[]
2835 for acc in accuracies:
2836 data_plot.append(float(len([d for d in DP_stability if d>acc]))\
2837 /float(len(DP_stability)))
2838 data_plot_dict[toolname]=data_plot
2839
2840 logFile.writelines('%.4e %.4e\n'%(accuracies[i], data_plot[i]) for i in \
2841 range(len(accuracies)))
2842 logFile.write('</plot_data>\n')
2843 logFile.write('\nList of accuracies recorded for the %i evaluations with %s\n'\
2844 %(nPS,toolname))
2845         logFile.write('First column is DP, second is QP (if available).\n\n')
2846 logFile.write('<accuracies reduction=%s>\n'%xml_toolname.upper())
2847 logFile.writelines('%.4e '%DP_stability[i]+('NA\n' if QP_stability[i]==-1.0 \
2848 else '%.4e\n'%QP_stability[i]) for i in range(nPS))
2849 logFile.write('</accuracies>\n')
2850 res_str+=res_str_i
2851 logFile.close()
2852 res_str += "\n= Stability details of the run are output to the file"+\
2853 " stability_%s_%s.log\n"%(mode,process.shell_string())
2854
2855
2856
2857
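    # When a FileHandler is already attached to the madgraph logger (presumably
    # a batch or test run), skip the matplotlib plot and only return the text.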
2858 if any(isinstance(handler,logging.FileHandler) for handler in \
2859 logging.getLogger('madgraph').handlers):
2860 return res_str
2861
2862 try:
2863 import matplotlib.pyplot as plt
2864 colorlist=['b','r','g','y','m','c','k']
2865 for i,key in enumerate(data_plot_dict.keys()):
2866 color=colorlist[i]
2867 data_plot=data_plot_dict[key]
2868 accuracies=accuracy_dict[key]
2869 plt.plot(accuracies, data_plot, color=color, marker='', linestyle='-',\
2870 label=key)
2871 plt.axis([min_acc,max_acc,\
2872 10**(-int(math.log(nPSmax-0.5)/math.log(10))-1), 1])
2873 plt.yscale('log')
2874 plt.xscale('log')
2875 plt.title('Stability plot for %s (%s mode, %d points)'%\
2876 (process.nice_string()[9:],mode,nPSmax))
2877 plt.ylabel('Fraction of events')
2878 plt.xlabel('Maximal precision')
2879 plt.legend()
2880 if not reusing:
2881 logger.info('Some stability statistics will be displayed once you '+\
2882 'close the plot window')
2883 plt.show()
2884 else:
2885 fig_output_file = str(pjoin(output_path,
2886 'stability_plot_%s_%s.png'%(mode,process.shell_string())))
2887 logger.info('Stability plot output to file %s. '%fig_output_file)
2888 plt.savefig(fig_output_file)
2889 return res_str
2890 except Exception as e:
2891 if isinstance(e, ImportError):
2892 res_str += "\n= Install matplotlib to get a "+\
2893 "graphical display of the results of this check."
2894 else:
2895 res_str += "\n= Could not produce the stability plot because of "+\
2896 "the following error: %s"%str(e)
2897 return res_str
2898
2900 """Present the result of a timings check in a nice format """
2901
2902
2903 f = format_output
2904 loop_optimized_output = timings['loop_optimized_output']
2905 reduction_tool = bannermod.MadLoopParam._ID_reduction_tool_map[
2906 timings['reduction_tool']]
2907
2908 res_str = "%s \n"%process.nice_string()
2909 try:
2910 gen_total = timings['HELAS_MODEL_compilation']+\
2911 timings['HelasDiagrams_generation']+\
2912 timings['Process_output']+\
2913 timings['Diagrams_generation']+\
2914 timings['Process_compilation']+\
2915 timings['Initialization']
2916 except TypeError:
2917 gen_total = None
2918 res_str += "\n= Generation time total...... ========== %s\n"%f(gen_total,'%.3gs')
2919 res_str += "|= Diagrams generation....... %s\n"\
2920 %f(timings['Diagrams_generation'],'%.3gs')
2921 res_str += "|= Helas Diagrams generation. %s\n"\
2922 %f(timings['HelasDiagrams_generation'],'%.3gs')
2923 res_str += "|= Process output............ %s\n"\
2924 %f(timings['Process_output'],'%.3gs')
2925 res_str += "|= HELAS+model compilation... %s\n"\
2926 %f(timings['HELAS_MODEL_compilation'],'%.3gs')
2927 res_str += "|= Process compilation....... %s\n"\
2928 %f(timings['Process_compilation'],'%.3gs')
2929 res_str += "|= Initialization............ %s\n"\
2930 %f(timings['Initialization'],'%.3gs')
2931
2932 res_str += "\n= Reduction tool tested...... %s\n"%reduction_tool
2933 res_str += "\n= Helicity sum time / PSpoint ========== %.3gms\n"\
2934 %(timings['run_unpolarized_total']*1000.0)
2935 if loop_optimized_output:
2936 coef_time=timings['run_unpolarized_coefs']*1000.0
2937 loop_time=(timings['run_unpolarized_total']-\
2938 timings['run_unpolarized_coefs'])*1000.0
2939 total=coef_time+loop_time
2940 res_str += "|= Coefs. computation time... %.3gms (%d%%)\n"\
2941 %(coef_time,int(round(100.0*coef_time/total)))
2942 res_str += "|= Loop evaluation time...... %.3gms (%d%%)\n"\
2943 %(loop_time,int(round(100.0*loop_time/total)))
2944 res_str += "\n= One helicity time / PSpoint ========== %.3gms\n"\
2945 %(timings['run_polarized_total']*1000.0)
2946 if loop_optimized_output:
2947 coef_time=timings['run_polarized_coefs']*1000.0
2948 loop_time=(timings['run_polarized_total']-\
2949 timings['run_polarized_coefs'])*1000.0
2950 total=coef_time+loop_time
2951 res_str += "|= Coefs. computation time... %.3gms (%d%%)\n"\
2952 %(coef_time,int(round(100.0*coef_time/total)))
2953 res_str += "|= Loop evaluation time...... %.3gms (%d%%)\n"\
2954 %(loop_time,int(round(100.0*loop_time/total)))
2955 res_str += "\n= Miscellaneous ========================\n"
2956 res_str += "|= Number of hel. computed... %s/%s\n"\
2957 %(f(timings['n_contrib_hel'],'%d'),f(timings['n_tot_hel'],'%d'))
2958 res_str += "|= Number of loop diagrams... %s\n"%f(timings['n_loops'],'%d')
2959 if loop_optimized_output:
2960 res_str += "|= Number of loop groups..... %s\n"\
2961 %f(timings['n_loop_groups'],'%d')
2962 res_str += "|= Number of loop wfs........ %s\n"\
2963 %f(timings['n_loop_wfs'],'%d')
2964 if timings['loop_wfs_ranks']!=None:
2965 for i, r in enumerate(timings['loop_wfs_ranks']):
2966 res_str += "||= # of loop wfs of rank %d.. %d\n"%(i,r)
2967 res_str += "|= Loading time (Color data). ~%.3gms\n"\
2968 %(timings['Booting_time']*1000.0)
2969 res_str += "|= Maximum RAM usage (rss)... %s\n"\
2970 %f(float(timings['ram_usage']/1000.0),'%.3gMb')
2971 res_str += "\n= Output disk size =====================\n"
2972 res_str += "|= Source directory sources.. %s\n"%f(timings['du_source'],'%sb')
2973 res_str += "|= Process sources........... %s\n"%f(timings['du_process'],'%sb')
2974 res_str += "|= Color and helicity data... %s\n"%f(timings['du_color'],'%sb')
2975 res_str += "|= Executable size........... %s\n"%f(timings['du_exe'],'%sb')
2976
2977 return res_str
2978
2980     """Present the results of a comparison in a nice list format.
2981        In short mode, return the number of failed processes.
2982 """
2983 proc_col_size = 17
2984 pert_coupl = comparison_results[0]['process']['perturbation_couplings']
2985 if pert_coupl:
2986 process_header = "Process [virt="+" ".join(pert_coupl)+"]"
2987 else:
2988 process_header = "Process"
2989
2990 if len(process_header) + 1 > proc_col_size:
2991 proc_col_size = len(process_header) + 1
2992
2993 for proc in comparison_results:
2994 if len(proc['process'].base_string()) + 1 > proc_col_size:
2995 proc_col_size = len(proc['process'].base_string()) + 1
2996
2997 col_size = 18
2998
2999 pass_proc = 0
3000 fail_proc = 0
3001 no_check_proc = 0
3002
3003 failed_proc_list = []
3004 no_check_proc_list = []
3005
3006 res_str = fixed_string_length(process_header, proc_col_size) + \
3007 fixed_string_length("Min element", col_size) + \
3008 fixed_string_length("Max element", col_size) + \
3009 fixed_string_length("Relative diff.", col_size) + \
3010 "Result"
3011
3012 for result in comparison_results:
3013 proc = result['process'].base_string()
3014 values = result['values']
3015
3016 if len(values) <= 1:
3017 res_str += '\n' + fixed_string_length(proc, proc_col_size) + \
3018 " * No permutations, process not checked *"
3019 no_check_proc += 1
3020 no_check_proc_list.append(result['process'].nice_string())
3021 continue
3022
3023 passed = result['passed']
3024
3025 res_str += '\n' + fixed_string_length(proc, proc_col_size) + \
3026 fixed_string_length("%1.10e" % min(values), col_size) + \
3027 fixed_string_length("%1.10e" % max(values), col_size) + \
3028 fixed_string_length("%1.10e" % result['difference'],
3029 col_size)
3030 if passed:
3031 pass_proc += 1
3032 res_str += "Passed"
3033 else:
3034 fail_proc += 1
3035 failed_proc_list.append(result['process'].nice_string())
3036 res_str += "Failed"
3037
3038 res_str += "\nSummary: %i/%i passed, %i/%i failed" % \
3039 (pass_proc, pass_proc + fail_proc,
3040 fail_proc, pass_proc + fail_proc)
3041
3042 if fail_proc != 0:
3043 res_str += "\nFailed processes: %s" % ', '.join(failed_proc_list)
3044 if no_check_proc != 0:
3045 res_str += "\nNot checked processes: %s" % ', '.join(no_check_proc_list)
3046
3047 return res_str
3048
3050 """Helper function to fix the length of a string by cutting it
3051 or adding extra space."""
3052
3053 if len(mystr) > length:
3054 return mystr[0:length]
3055 else:
3056 return mystr + " " * (length - len(mystr))
3057
3058
3059
3060
3061
3062 -def check_gauge(processes, param_card = None,cuttools="", tir={}, reuse = False,
3063 options=None, output_path=None, cmd = FakeInterface()):
3064 """Check gauge invariance of the processes by using the BRS check.
3065 For one of the massless external bosons (e.g. gluon or photon),
3066 replace the polarization vector (epsilon_mu) with its momentum (p_mu)
3067 """
3068 cmass_scheme = cmd.options['complex_mass_scheme']
3069 if isinstance(processes, base_objects.ProcessDefinition):
3070
3071
3072 multiprocess = processes
3073
3074 model = multiprocess.get('model')
3075
3076 if multiprocess.get('perturbation_couplings')==[]:
3077 evaluator = MatrixElementEvaluator(model, param_card,cmd= cmd,
3078 auth_skipping = True, reuse = False)
3079 else:
3080 evaluator = LoopMatrixElementEvaluator(cuttools_dir=cuttools,tir_dir=tir,
3081 cmd=cmd,model=model, param_card=param_card,
3082 auth_skipping = False, reuse = False,
3083 output_path=output_path)
3084
3085 if not cmass_scheme and multiprocess.get('perturbation_couplings')==[]:
3086
3087             logger.info('Setting all widths to zero for non-complex-mass-scheme checks')
3088 for particle in evaluator.full_model.get('particles'):
3089 if particle.get('width') != 'ZERO':
3090 evaluator.full_model.get('parameter_dict')[particle.get('width')] = 0.
3091 results = run_multiprocs_no_crossings(check_gauge_process,
3092 multiprocess,
3093 evaluator,
3094 options=options
3095 )
3096
3097 if multiprocess.get('perturbation_couplings')!=[] and not reuse:
3098
3099 clean_up(output_path)
3100
3101 return results
3102
3103 elif isinstance(processes, base_objects.Process):
3104 processes = base_objects.ProcessList([processes])
3105 elif isinstance(processes, base_objects.ProcessList):
3106 pass
3107 else:
3108 raise InvalidCmd("processes is of non-supported format")
3109
3110 assert processes, "No processes given"
3111
3112 model = processes[0].get('model')
3113
3114
3115 if processes[0].get('perturbation_couplings')==[]:
3116 evaluator = MatrixElementEvaluator(model, param_card,
3117 auth_skipping = True, reuse = False,
3118 cmd = cmd)
3119 else:
3120 evaluator = LoopMatrixElementEvaluator(cuttools_dir=cuttools,tir_dir=tir,
3121 model=model, param_card=param_card,
3122 auth_skipping = False, reuse = False,
3123 output_path=output_path, cmd = cmd)
3124 comparison_results = []
3125 comparison_explicit_flip = []
3126
3127
3128 for process in processes:
3129
3130
3131
3132
3133
3134
3135
3136
3137
3138 result = check_gauge_process(process, evaluator,options=options)
3139 if result:
3140 comparison_results.append(result)
3141
3142 if processes[0].get('perturbation_couplings')!=[] and not reuse:
3143
3144 clean_up(output_path)
3145
3146 return comparison_results
3147
3150 """Check gauge invariance for the process, unless it is already done."""
3151
3152 model = process.get('model')
3153
3154
3155 found_gauge = False
3156 for i, leg in enumerate(process.get('legs')):
3157 part = model.get_particle(leg.get('id'))
3158 if part.get('spin') == 3 and part.get('mass').lower() == 'zero':
3159 found_gauge = True
3160 break
3161 if not found_gauge:
3162         logger.info("No Ward identity for %s" % \
3163 process.nice_string().replace('Process', 'process'))
3164
3165 return None
3166
3167 for i, leg in enumerate(process.get('legs')):
3168 leg.set('number', i+1)
3169
3170     logger.info("Checking Ward identities for %s" % \
3171 process.nice_string().replace('Process', 'process'))
3172
3173 legs = process.get('legs')
3174
3175
3176 try:
3177 if process.get('perturbation_couplings')==[]:
3178 amplitude = diagram_generation.Amplitude(process)
3179 else:
3180 amplitude = loop_diagram_generation.LoopAmplitude(process)
3181 except InvalidCmd:
3182 logging.info("No diagrams for %s" % \
3183 process.nice_string().replace('Process', 'process'))
3184 return None
3185 if not amplitude.get('diagrams'):
3186
3187 logging.info("No diagrams for %s" % \
3188 process.nice_string().replace('Process', 'process'))
3189 return None
3190
3191 if not isinstance(amplitude,loop_diagram_generation.LoopAmplitude):
3192 matrix_element = helas_objects.HelasMatrixElement(amplitude,
3193 gen_color = False)
3194 else:
3195 matrix_element = loop_helas_objects.LoopHelasMatrixElement(amplitude,
3196 optimized_output=evaluator.loop_optimized_output)
3197
3198
3199
3200
3201
3202
3203
3204
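    # BRS check: evaluate once with gauge_check=True (one massless external
    # boson's polarization vector replaced by its momentum, cf. the check_gauge
    # docstring) and once normally; output_gauge then compares the two results.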
3205 brsvalue = evaluator.evaluate_matrix_element(matrix_element, gauge_check = True,
3206 output='jamp', options=options)
3207
3208 if not isinstance(amplitude,loop_diagram_generation.LoopAmplitude):
3209 matrix_element = helas_objects.HelasMatrixElement(amplitude,
3210 gen_color = False)
3211
3212 mvalue = evaluator.evaluate_matrix_element(matrix_element, gauge_check = False,
3213 output='jamp', options=options)
3214
3215 if mvalue and mvalue['m2']:
3216 return {'process':process,'value':mvalue,'brs':brsvalue}
3217
3219 """Present the results of a comparison in a nice list format"""
3220
3221 proc_col_size = 17
3222
3223 pert_coupl = comparison_results[0]['process']['perturbation_couplings']
3224
3225
3226 if pert_coupl:
3227 threshold=1e-5
3228 else:
3229 threshold=1e-10
3230
3231 if pert_coupl:
3232 process_header = "Process [virt="+" ".join(pert_coupl)+"]"
3233 else:
3234 process_header = "Process"
3235
3236 if len(process_header) + 1 > proc_col_size:
3237 proc_col_size = len(process_header) + 1
3238
3239 for one_comp in comparison_results:
3240 proc = one_comp['process'].base_string()
3241 mvalue = one_comp['value']
3242 brsvalue = one_comp['brs']
3243 if len(proc) + 1 > proc_col_size:
3244 proc_col_size = len(proc) + 1
3245
3246 col_size = 18
3247
3248 pass_proc = 0
3249 fail_proc = 0
3250
3251 failed_proc_list = []
3252 no_check_proc_list = []
3253
3254 res_str = fixed_string_length(process_header, proc_col_size) + \
3255 fixed_string_length("matrix", col_size) + \
3256 fixed_string_length("BRS", col_size) + \
3257 fixed_string_length("ratio", col_size) + \
3258 "Result"
3259
3260 for one_comp in comparison_results:
3261 proc = one_comp['process'].base_string()
3262 mvalue = one_comp['value']
3263 brsvalue = one_comp['brs']
3264 ratio = (abs(brsvalue['m2'])/abs(mvalue['m2']))
3265 res_str += '\n' + fixed_string_length(proc, proc_col_size) + \
3266 fixed_string_length("%1.10e" % mvalue['m2'], col_size)+ \
3267 fixed_string_length("%1.10e" % brsvalue['m2'], col_size)+ \
3268 fixed_string_length("%1.10e" % ratio, col_size)
3269
3270 if ratio > threshold:
3271 fail_proc += 1
3272 proc_succeed = False
3273 failed_proc_list.append(proc)
3274 res_str += "Failed"
3275 else:
3276 pass_proc += 1
3277 proc_succeed = True
3278 res_str += "Passed"
3279
3280
3281
3282
3283
3284 if len(mvalue['jamp'])!=0:
3285 for k in range(len(mvalue['jamp'][0])):
3286 m_sum = 0
3287 brs_sum = 0
3288
3289 for j in range(len(mvalue['jamp'])):
3290
3291 m_sum += abs(mvalue['jamp'][j][k])**2
3292 brs_sum += abs(brsvalue['jamp'][j][k])**2
3293
3294
3295 if not m_sum:
3296 continue
3297 ratio = abs(brs_sum) / abs(m_sum)
3298
3299 tmp_str = '\n' + fixed_string_length(' JAMP %s'%k , proc_col_size) + \
3300 fixed_string_length("%1.10e" % m_sum, col_size) + \
3301 fixed_string_length("%1.10e" % brs_sum, col_size) + \
3302 fixed_string_length("%1.10e" % ratio, col_size)
3303
3304 if ratio > 1e-15:
3305 if not len(failed_proc_list) or failed_proc_list[-1] != proc:
3306 fail_proc += 1
3307 pass_proc -= 1
3308 failed_proc_list.append(proc)
3309 res_str += tmp_str + "Failed"
3310 elif not proc_succeed:
3311 res_str += tmp_str + "Passed"
3312
3313
3314 res_str += "\nSummary: %i/%i passed, %i/%i failed" % \
3315 (pass_proc, pass_proc + fail_proc,
3316 fail_proc, pass_proc + fail_proc)
3317
3318 if fail_proc != 0:
3319 res_str += "\nFailed processes: %s" % ', '.join(failed_proc_list)
3320
3321 if output=='text':
3322 return res_str
3323 else:
3324 return fail_proc
3325
3326
3327
3328 -def check_lorentz(processes, param_card = None,cuttools="", tir={}, options=None, \
3329 reuse = False, output_path=None, cmd = FakeInterface()):
3330     """ Check if the squared matrix element (summed over helicities) is Lorentz
3331     invariant by boosting the momenta with different values."""
3332
3333 cmass_scheme = cmd.options['complex_mass_scheme']
3334 if isinstance(processes, base_objects.ProcessDefinition):
3335
3336
3337 multiprocess = processes
3338 model = multiprocess.get('model')
3339
3340 if multiprocess.get('perturbation_couplings')==[]:
3341 evaluator = MatrixElementEvaluator(model,
3342 cmd= cmd, auth_skipping = False, reuse = True)
3343 else:
3344 evaluator = LoopMatrixElementEvaluator(cuttools_dir=cuttools,tir_dir=tir,
3345 model=model, auth_skipping = False, reuse = True,
3346 output_path=output_path, cmd = cmd)
3347
3348 if not cmass_scheme and processes.get('perturbation_couplings')==[]:
3349
3350             logger.info('Setting all widths to zero for non-complex-mass-scheme checks')
3351 for particle in evaluator.full_model.get('particles'):
3352 if particle.get('width') != 'ZERO':
3353 evaluator.full_model.get('parameter_dict')[\
3354 particle.get('width')] = 0.
3355
3356 results = run_multiprocs_no_crossings(check_lorentz_process,
3357 multiprocess,
3358 evaluator,
3359 options=options)
3360
3361 if multiprocess.get('perturbation_couplings')!=[] and not reuse:
3362
3363 clean_up(output_path)
3364
3365 return results
3366
3367 elif isinstance(processes, base_objects.Process):
3368 processes = base_objects.ProcessList([processes])
3369 elif isinstance(processes, base_objects.ProcessList):
3370 pass
3371 else:
3372 raise InvalidCmd("processes is of non-supported format")
3373
3374 assert processes, "No processes given"
3375
3376 model = processes[0].get('model')
3377
3378
3379 if processes[0].get('perturbation_couplings')==[]:
3380 evaluator = MatrixElementEvaluator(model, param_card,
3381 auth_skipping = False, reuse = True,
3382 cmd=cmd)
3383 else:
3384 evaluator = LoopMatrixElementEvaluator(cuttools_dir=cuttools, tir_dir=tir,
3385 model=model,param_card=param_card,
3386 auth_skipping = False, reuse = True,
3387 output_path=output_path, cmd = cmd)
3388
3389 comparison_results = []
3390
3391
3392 for process in processes:
3393
3394
3395
3396
3397
3398
3399
3400
3401
3402 result = check_lorentz_process(process, evaluator,options=options)
3403 if result:
3404 comparison_results.append(result)
3405
3406 if processes[0].get('perturbation_couplings')!=[] and not reuse:
3407
3408 clean_up(output_path)
3409
3410 return comparison_results
3411
3414     """Check Lorentz invariance for the process, unless it is already done."""
3415
3416 amp_results = []
3417 model = process.get('model')
3418
3419 for i, leg in enumerate(process.get('legs')):
3420 leg.set('number', i+1)
3421
3422     logger.info("Checking Lorentz transformations for %s" % \
3423 process.nice_string().replace('Process:', 'process'))
3424
3425 legs = process.get('legs')
3426
3427
3428 try:
3429 if process.get('perturbation_couplings')==[]:
3430 amplitude = diagram_generation.Amplitude(process)
3431 else:
3432 amplitude = loop_diagram_generation.LoopAmplitude(process)
3433 except InvalidCmd:
3434 logging.info("No diagrams for %s" % \
3435 process.nice_string().replace('Process', 'process'))
3436 return None
3437
3438 if not amplitude.get('diagrams'):
3439
3440 logging.info("No diagrams for %s" % \
3441 process.nice_string().replace('Process', 'process'))
3442 return None
3443
3444
3445 p, w_rambo = evaluator.get_momenta(process, options)
3446
3447
3448 if not isinstance(amplitude, loop_diagram_generation.LoopAmplitude):
3449 matrix_element = helas_objects.HelasMatrixElement(amplitude,
3450 gen_color = True)
3451 else:
3452 matrix_element = loop_helas_objects.LoopHelasMatrixElement(amplitude,
3453 optimized_output = evaluator.loop_optimized_output)
3454
3455 MLOptions = {'ImprovePS':True,'ForceMP':True}
3456 if not isinstance(amplitude, loop_diagram_generation.LoopAmplitude):
3457 data = evaluator.evaluate_matrix_element(matrix_element, p=p, output='jamp',
3458 auth_skipping = True, options=options)
3459 else:
3460 data = evaluator.evaluate_matrix_element(matrix_element, p=p, output='jamp',
3461 auth_skipping = True, PS_name = 'original', MLOptions=MLOptions,
3462 options = options)
3463
3464 if data and data['m2']:
3465 if not isinstance(amplitude, loop_diagram_generation.LoopAmplitude):
3466 results = [data]
3467 else:
3468 results = [('Original evaluation',data)]
3469 else:
3470 return {'process':process, 'results':'pass'}
3471
3472
3473
3474
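    # Lorentz-invariance battery: re-evaluate the same matrix element on boosted
    # (and, for loop processes, rotated) copies of the original PS point; all
    # evaluations should agree within the numerical accuracy.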
3475 if not isinstance(amplitude, loop_diagram_generation.LoopAmplitude):
3476 for boost in range(1,4):
3477 boost_p = boost_momenta(p, boost)
3478 results.append(evaluator.evaluate_matrix_element(matrix_element,
3479 p=boost_p,output='jamp'))
3480 else:
3481
3482 boost_p = boost_momenta(p, 3)
3483 results.append(('Z-axis boost',
3484 evaluator.evaluate_matrix_element(matrix_element, options=options,
3485 p=boost_p, PS_name='zBoost', output='jamp',MLOptions = MLOptions)))
3486
3487
3488
3489
3490 if not options['events']:
3491 boost_p = boost_momenta(p, 1)
3492 results.append(('X-axis boost',
3493 evaluator.evaluate_matrix_element(matrix_element, options=options,
3494 p=boost_p, PS_name='xBoost', output='jamp',MLOptions = MLOptions)))
3495 boost_p = boost_momenta(p, 2)
3496 results.append(('Y-axis boost',
3497 evaluator.evaluate_matrix_element(matrix_element,options=options,
3498 p=boost_p, PS_name='yBoost', output='jamp',MLOptions = MLOptions)))
3499
3500
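# The two rotations below act only in the transverse plane. With p = [E, px, py, pz],
# the pi/2 rotation about the z axis maps (px, py) -> (-py, px), and the pi/4
# rotation maps (px, py) -> ((px - py)/sqrt(2), (px + py)/sqrt(2)); E and pz are
# untouched, so the resulting |M|^2 must agree with the original one up to
# numerical noise.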
3501 rot_p = [[pm[0],-pm[2],pm[1],pm[3]] for pm in p]
3502 results.append(('Z-axis pi/2 rotation',
3503 evaluator.evaluate_matrix_element(matrix_element,options=options,
3504 p=rot_p, PS_name='Rotation1', output='jamp',MLOptions = MLOptions)))
3505
3506 sq2 = math.sqrt(2.0)
3507 rot_p = [[pm[0],(pm[1]-pm[2])/sq2,(pm[1]+pm[2])/sq2,pm[3]] for pm in p]
3508 results.append(('Z-axis pi/4 rotation',
3509 evaluator.evaluate_matrix_element(matrix_element,options=options,
3510 p=rot_p, PS_name='Rotation2', output='jamp',MLOptions = MLOptions)))
3511
3512
3513 return {'process': process, 'results': results}
3514
3515
3516
3517
3518 def check_unitary_feynman(processes_unit, processes_feynm, param_card=None,
3519 options=None, tir={}, output_path=None,
3520 cuttools="", reuse=False, cmd = FakeInterface()):
3521 """Check gauge invariance of the processes by flipping
3522 the gauge of the model
3523 """
3524
3525 mg_root = cmd._mgme_dir
3526
3527 cmass_scheme = cmd.options['complex_mass_scheme']
3528
3529 if isinstance(processes_unit, base_objects.ProcessDefinition):
3530
3531
3532 multiprocess_unit = processes_unit
3533 model = multiprocess_unit.get('model')
3534
3535
3536
3537 loop_optimized_bu = cmd.options['loop_optimized_output']
3538 if processes_unit.get('squared_orders'):
3539 if processes_unit.get('perturbation_couplings') in [[],['QCD']]:
3540 cmd.options['loop_optimized_output'] = True
3541 else:
3542 raise InvalidCmd("The gauge test cannot be performed for"+
3543 " a process with corrections beyond QCD which also"+
3544 " specifies squared order constraints.")
3545 else:
3546 cmd.options['loop_optimized_output'] = False
3547
3548 aloha.unitary_gauge = True
3549 if processes_unit.get('perturbation_couplings')==[]:
3550 evaluator = MatrixElementEvaluator(model, param_card,
3551 cmd=cmd,auth_skipping = False, reuse = True)
3552 else:
3553 evaluator = LoopMatrixElementEvaluator(cuttools_dir=cuttools,tir_dir=tir,
3554 cmd=cmd, model=model,
3555 param_card=param_card,
3556 auth_skipping = False,
3557 output_path=output_path,
3558 reuse = False)
3559 if not cmass_scheme and multiprocess_unit.get('perturbation_couplings')==[]:
3560 logger.info('Setting all widths to zero for non-complex-mass-scheme checks')
3561 for particle in evaluator.full_model.get('particles'):
3562 if particle.get('width') != 'ZERO':
3563 evaluator.full_model.get('parameter_dict')[particle.get('width')] = 0.
3564
3565 output_u = run_multiprocs_no_crossings(get_value,
3566 multiprocess_unit,
3567 evaluator,
3568 options=options)
3569
3570 clean_added_globals(ADDED_GLOBAL)
3571
3572 if processes_unit.get('perturbation_couplings')!=[]:
3573 clean_up(output_path)
3574
3575 momentum = {}
3576 for data in output_u:
3577 momentum[data['process']] = data['p']
3578
3579 multiprocess_feynm = processes_feynm
3580 model = multiprocess_feynm.get('model')
3581
3582
3583 aloha.unitary_gauge = False
3584
3585
3586 cmd.options['loop_optimized_output'] = True
3587 if processes_feynm.get('perturbation_couplings')==[]:
3588 evaluator = MatrixElementEvaluator(model, param_card,
3589 cmd= cmd, auth_skipping = False, reuse = False)
3590 else:
3591 evaluator = LoopMatrixElementEvaluator(cuttools_dir=cuttools,tir_dir=tir,
3592 cmd= cmd, model=model,
3593 param_card=param_card,
3594 auth_skipping = False,
3595 output_path=output_path,
3596 reuse = False)
3597
3598 if not cmass_scheme and multiprocess_feynm.get('perturbation_couplings')==[]:
3599
3600 for particle in evaluator.full_model.get('particles'):
3601 if particle.get('width') != 'ZERO':
3602 evaluator.full_model.get('parameter_dict')[particle.get('width')] = 0.
3603
3604 output_f = run_multiprocs_no_crossings(get_value, multiprocess_feynm,
3605 evaluator, momentum,
3606 options=options)
3607 output = [processes_unit]
3608 for data in output_f:
3609 local_dico = {}
3610 local_dico['process'] = data['process']
3611 local_dico['value_feynm'] = data['value']
3612 local_dico['value_unit'] = [d['value'] for d in output_u
3613 if d['process'] == data['process']][0]
3614 output.append(local_dico)
3615
3616 if processes_feynm.get('perturbation_couplings')!=[] and not reuse:
3617
3618 clean_up(output_path)
3619
3620
3621 cmd.options['loop_optimized_output'] = loop_optimized_bu
3622
3623 return output
3624
3625
3626
3627
3628 else:
3629 raise InvalidCmd("processes is given in an unsupported format")
3630
3636 """Check complex mass scheme consistency in the offshell region of s-channels
3637 detected for this process, by varying the expansion paramer consistently
3638 with the corresponding width and making sure that the difference between
3639 the complex mass-scheme and the narrow-width approximation is higher order.
3640 """
3641
3642 if not isinstance(process_line, str):
3643 raise InvalidCmd("Proces definition must be given as a stirng for this check")
3644
3645
3646 cmd.do_set('complex_mass_scheme False', log=False)
3647
3648 multiprocess_nwa = cmd.extract_process(process_line)
3649
3650
3651 has_FRdecay = os.path.isfile(pjoin(cmd._curr_model.get('modelpath'),
3652 'decays.py'))
3653
3654
3655 missing_perturbations = cmd._curr_model.get_coupling_orders()-\
3656 set(multiprocess_nwa.get('perturbation_couplings'))
3657
3658 if len(multiprocess_nwa.get('perturbation_couplings'))>0 and \
3659 len(missing_perturbations)>0:
3660 logger.warning("------------------------------------------------------")
3661 logger.warning("The process considered does not specify the following "+
3662 "type of loops to be included : %s"%str(list(missing_perturbations)))
3663 logger.warning("Consequently, the CMS check will be unsuccessful if the"+
3664 " process involves any resonating particle whose LO decay is "+
3665 "mediated by one of these orders.")
3666 logger.warning("You can use the syntax '[virt=all]' to automatically"+
3667 " include all loops supported by the model.")
3668 logger.warning("------------------------------------------------------")
3669
3670 if len(multiprocess_nwa.get('perturbation_couplings'))>0 and \
3671 len(multiprocess_nwa.get('legs'))<=4:
3672 logger.warning("------------------------------------------------------")
3673 logger.warning("Processes with four or less external states are typically not"+\
3674 " sensitive to incorrect Complex Mass Scheme implementations.")
3675 logger.warning("You can test this sensitivity by making sure that the"+
3676 " same check on the leading-order counterpart of this process *fails*"+
3677 " when using the option '--diff_lambda_power=2'.")
3678 logger.warning("If it does not, then consider adding a massless "+
3679 "gauge vector to the external states.")
3680 logger.warning("------------------------------------------------------")
3681
3682 if options['recompute_width']=='auto':
3683 if multiprocess_nwa.get('perturbation_couplings')!=[]:
3684
3685 options['recompute_width'] = 'first_time'
3686 else:
3687 options['recompute_width'] = 'never'
3688
3689
3690 if options['recompute_width'] in ['first_time', 'always'] and \
3691 not has_FRdecay and not 'cached_widths' in options:
3692 logger.info('The LO widths will need to be recomputed but the '+
3693 'model considered does not appear to have a decay module.\nThe widths'+
3694 ' will need to be computed numerically and it will slow down the test.\n'+
3695 'Consider using a param_card already specifying correct LO widths and'+
3696 " adding the option --recompute_width=never when doing this check.")
3697
3698 if options['recompute_width']=='never' and \
3699 any(order in multiprocess_nwa.get('perturbation_couplings') for order in
3700 options['expansion_orders']):
3701 logger.warning('You chose not to recompute the widths while including'+
3702 ' loop corrections. The check will be successful only if the width'+\
3703 ' specified in the default param_card is LO accurate (Remember that'+\
3704 ' the default values of alpha_s and awem1 are set to 0.1 and 10.0'+\
3705 ' respectively).')
3706
3707
3708
3709
3710
3711 if options['recompute_width'] in ['first_time', 'always'] and has_FRdecay:
3712 modelname = cmd._curr_model.get('modelpath+restriction')
3713 with misc.MuteLogger(['madgraph'], ['INFO']):
3714 model = import_ufo.import_model(modelname, decay=True,
3715 complex_mass_scheme=False)
3716 multiprocess_nwa.set('model', model)
3717
3718 run_options = copy.deepcopy(options)
3719
3720
3721 if options['seed'] > 0:
3722 random.seed(options['seed'])
3723
3724
3725 run_options['param_card'] = param_card
3726 if isinstance(cmd, FakeInterface):
3727 raise MadGraph5Error("Check CMS cannot be run with a FakeInterface.")
3728 run_options['cmd'] = cmd
3729 run_options['MLOptions'] = MLOptions
3730 if output_path:
3731 run_options['output_path'] = output_path
3732 else:
3733 run_options['output_path'] = cmd._mgme_dir
3734
3735
3736 run_options['has_FRdecay'] = has_FRdecay
3737
3738
3739 if 'cached_widths' not in run_options:
3740 run_options['cached_widths'] = {}
3741
3742
3743 run_options['cached_param_card'] = {'NWA':[None,None],'CMS':[None,None]}
3744
3745 if options['tweak']['name']:
3746 logger.info("Now running the CMS check for tweak '%s'"\
3747 %options['tweak']['name'])
3748
3749 model = multiprocess_nwa.get('model')
3750
3751 for particle in model.get('particles'):
3752 mass_param = model.get_parameter(particle.get('mass'))
3753 if particle.get('mass')!='ZERO' and 'external' not in mass_param.depend:
3754 if model.get('name') not in ['sm','loop_sm']:
3755 logger.warning("The mass '%s' of particle '%s' is not an external"%\
3756 (model.get_parameter(particle.get('mass')).name,particle.get('name'))+\
3757 " parameter as required by this check. \nMG5_aMC will try to"+\
3758 " modify the model to remedy the situation. No guarantee.")
3759 status = model.change_electroweak_mode(set(['mz','mw','alpha']))
3760 if not status:
3761 raise InvalidCmd('The EW scheme could apparently not be changed'+\
3762 ' so as to have the W-boson mass external. The check cannot'+\
3763 ' proceed.')
3764 break
3765
3766 veto_orders = [order for order in model.get('coupling_orders') if \
3767 order not in options['expansion_orders']]
3768 if len(veto_orders)>0:
3769 logger.warning('You did not define any parameter scaling rule for the'+\
3770 " coupling orders %s. They will be "%','.join(veto_orders)+\
3771 "forced to zero in the tests. Consider adding the scaling rule to"+\
3772 "avoid this. (see option '--cms' in 'help check')")
3773 for order in veto_orders:
3774 multiprocess_nwa.get('orders')[order] = 0
3775 multiprocess_nwa.set('perturbation_couplings', [order for order in
3776 multiprocess_nwa['perturbation_couplings'] if order not in veto_orders])
3777
3778 if multiprocess_nwa.get('perturbation_couplings')==[]:
3779 evaluator = MatrixElementEvaluator(model, param_card,
3780 cmd=cmd,auth_skipping = False, reuse = True)
3781 else:
3782 evaluator = LoopMatrixElementTimer(cuttools_dir=cuttools,tir_dir=tir,
3783 cmd=cmd, model=model,
3784 param_card=param_card,
3785 auth_skipping = False,
3786 output_path=output_path,
3787 reuse = False)
3788
3789 cached_information = []
3790 output_nwa = run_multiprocs_no_crossings(check_complex_mass_scheme_process,
3791 multiprocess_nwa,
3792 evaluator,
3793
3794
3795
3796
3797
3798 opt = cached_information,
3799 options=run_options)
3800
3801
3802 clean_added_globals(ADDED_GLOBAL)
3803
3804
3805 cmd.do_set('complex_mass_scheme True', log=False)
3806
3807
3808 multiprocess_cms = cmd.extract_process(process_line)
3809 model = multiprocess_cms.get('model')
3810
3811 if len(veto_orders)>0:
3812 for order in veto_orders:
3813 multiprocess_cms.get('orders')[order] = 0
3814 multiprocess_cms.set('perturbation_couplings', [order for order in
3815 multiprocess_cms['perturbation_couplings'] if order not in veto_orders])
3816
3817 if multiprocess_cms.get('perturbation_couplings')==[]:
3818 evaluator = MatrixElementEvaluator(model, param_card,
3819 cmd=cmd,auth_skipping = False, reuse = True)
3820 else:
3821 evaluator = LoopMatrixElementTimer(cuttools_dir=cuttools,tir_dir=tir,
3822 cmd=cmd, model=model,
3823 param_card=param_card,
3824 auth_skipping = False,
3825 output_path=output_path,
3826 reuse = False)
3827
3828 output_cms = run_multiprocs_no_crossings(check_complex_mass_scheme_process,
3829 multiprocess_cms,
3830 evaluator,
3831
3832 opt = dict(cached_information),
3833 options=run_options)
3834
3835 if multiprocess_cms.get('perturbation_couplings')!=[] and not options['reuse']:
3836
3837 clean_up(output_path)
3838
3839
3840
3841
3842 result = {'ordered_processes':[],'lambdaCMS':options['lambdaCMS']}
3843
3844 result['perturbation_orders']=multiprocess_nwa.get('perturbation_couplings')
3845 for i, proc_res in enumerate(output_nwa):
3846 result['ordered_processes'].append(proc_res[0])
3847 result[proc_res[0]] = {
3848 'NWA':proc_res[1]['resonances_result'],
3849 'CMS':output_cms[i][1]['resonances_result'],
3850 'born_order':proc_res[1]['born_order'],
3851 'loop_order':proc_res[1]['loop_order']}
3852
3853
3854
3855 options['cached_widths'] = run_options['cached_widths']
3856
3857
3858 result['recompute_width'] = options['recompute_width']
3859 result['has_FRdecay'] = has_FRdecay
3860 result['widths_computed'] = []
3861 cached_widths = sorted(list(options['cached_widths'].items()), key=lambda el: \
3862 abs(el[0][0]))
3863 for (pdg, lambda_value), width in cached_widths:
3864 if lambda_value != 1.0:
3865 continue
3866 result['widths_computed'].append((model.get_particle(pdg).get_name(),
3867 width))
3868
3869
3870 clean_added_globals(ADDED_GLOBAL)
3871
3872 return result
3873
3878 """Check CMS for the process in argument. The options 'opt' is quite important.
3879 When opt is a list, it means that we are doing NWA and we are filling the
3880 list with the following tuple
3881 ('proc_name',({'ParticlePDG':ParticlePDG,
3882 'FinalStateMothersNumbers':set([]),
3883 'PS_point_used':[]},...))
3884 When opt is a dictionary, we are in the CMS mode and it will be reused then.
3885 """
3886
3887
3888
3889 NLO = process.get('perturbation_couplings') != []
3890
3891 def glue_momenta(production, decay):
3892 """ Merge together the kinematics for the production of particle
3893 positioned last in the 'production' array with the 1>N 'decay' kinematic'
3894 provided where the decay particle is first."""
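# Hedged illustration of the convention used here (momenta are placeholders):
#   production = [p_a, p_b, p_1, p_res]    # resonance last
#   decay      = [q_res, q_1, q_2]         # decaying particle first
# gives [p_a, p_b, p_1, boost(q_1), boost(q_2)], where each decay product is
# boosted with the resonance momentum p_res via MadSpin's momentum.boost.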
3895
3896 from MadSpin.decay import momentum
3897
3898 full = production[:-1]
3899
3900
3901
3902
3903
3904 for p in decay[1:]:
3905 bp = momentum(*p).boost(momentum(*production[-1]))
3906 full.append([bp.E,bp.px,bp.py,bp.pz])
3907
3908 return full
3909
3910 def find_resonances(diagrams):
3911 """ Find all the resonances in the matrix element in argument """
3912
3913 model = process['model']
3914 resonances_found = []
3915
3916 for ll, diag in enumerate(diagrams):
3917 for amp in diag.get('amplitudes'):
3918
3919
3920 s_channels, t_channels = amp.\
3921 get_s_and_t_channels(process.get_ninitial(), model, 0)
3922
3923
3924 replacement_dict = {}
3925 for s_channel in s_channels:
3926 new_resonance = {
3927 'ParticlePDG':s_channel.get('legs')[-1].get('id'),
3928 'FSMothersNumbers':[],
3929 'PS_point_used':[]}
3930 for leg in s_channel.get('legs')[:-1]:
3931 if leg.get('number')>0:
3932 new_resonance['FSMothersNumbers'].append(
3933 leg.get('number'))
3934 else:
3935 try:
3936 new_resonance['FSMothersNumbers'].extend(
3937 replacement_dict[leg.get('number')])
3938 except KeyError:
3939 raise Exception('The following diagram '+\
3940 'is malformed:'+diag.nice_string())
3941
3942 replacement_dict[s_channel.get('legs')[-1].get('number')] = \
3943 new_resonance['FSMothersNumbers']
3944 new_resonance['FSMothersNumbers'] = set(
3945 new_resonance['FSMothersNumbers'])
3946 if new_resonance not in resonances_found:
3947 resonances_found.append(new_resonance)
3948
3949
3950 kept_resonances = []
3951 for resonance in resonances_found:
3952
3953 if resonance['ParticlePDG'] == 0:
3954 continue
3955
3956
3957 if abs(resonance['ParticlePDG']) in \
3958 [abs(l.get('id')) for l in process.get('legs')]:
3959 continue
3960
3961 mass_string = evaluator.full_model.get_particle(
3962 resonance['ParticlePDG']).get('mass')
3963 mass = evaluator.full_model.get('parameter_dict')[mass_string].real
3964
3965 if mass==0.0:
3966 continue
3967
3968 width_string = evaluator.full_model.get_particle(
3969 resonance['ParticlePDG']).get('width')
3970 width = evaluator.full_model.get('parameter_dict')[width_string].real
3971
3972
3973 if width==0.0:
3974 continue
3975
3976 final_state_energy = sum(
3977 evaluator.full_model.get('parameter_dict')[
3978 evaluator.full_model.get_particle(l.get('id')).get('mass')].real
3979 for l in process.get('legs') if l.get('number') in
3980 resonance['FSMothersNumbers'])
3981
3982
3983 special_mass = (1.0 + options['offshellness'])*mass
3984
3985
3986 if special_mass<final_state_energy:
3987 raise InvalidCmd('The offshellness specified (%s) is such'\
3988 %options['offshellness']+' that the resulting kinematic is '+\
3989 'impossible for resonance %s %s.'%(evaluator.full_model.
3990 get_particle(resonance['ParticlePDG']).get_name(),
3991 str(list(resonance['FSMothersNumbers']))))
3992 continue
3993
3994
3995 kept_resonances.append(resonance)
3996
3997 for resonance in kept_resonances:
3998
3999 set_PSpoint(resonance, force_other_res_offshell=kept_resonances)
4000
4001 return tuple(kept_resonances)
4002
4003 def set_PSpoint(resonance, force_other_res_offshell=[],
4004 allow_energy_increase=1.5, isolation_cuts=True):
4005 """ Starting from the specified resonance, construct a phase space point
4006 for it and possibly also enforce other resonances to be onshell. Possibly
4007 allow to progressively increase enregy by steps of the integer specified
4008 (negative float to forbid it) and possible enforce default isolation cuts
4009 as well."""
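# Strategy used below: up to max_trial random phase-space points are generated
# with get_PSpoint_for_resonance(); every nstep_for_energy_increase failed trials
# the collision energy and the target offshellness are adjusted by the factor
# 'energy_increase' (if allowed), and a candidate is kept only if all other
# resonances are offshell by the requested amount and, when isolation_cuts is
# True, if it passes pass_isolation_cuts with ptcut set to 5% of the invariant
# mass of the two incoming momenta and drcut = 0.4.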
4010
4011 def invmass(momenta):
4012 """ Computes the invariant mass of a list of momenta."""
4013 ptot = [sum(p[i] for p in momenta) for i in range(4)]
4014 return math.sqrt(ptot[0]**2-ptot[1]**2-ptot[2]**2-ptot[3]**2)
4015
4016 model = evaluator.full_model
4017 def getmass(pdg):
4018 """ Returns the mass of a particle given the current model and its
4019 pdg given in argument."""
4020 return model.get('parameter_dict')[
4021 model.get_particle(pdg).get('mass')].real
4022
4023 N_trials = 0
4024 max_trial = 1e4
4025 nstep_for_energy_increase = 1e3
4026 PS_point_found = None
4027 if options['offshellness'] > 0.0:
4028 offshellness = options['offshellness']
4029 else:
4030
4031
4032
4033
4034 offshellness = (0.25*(options['offshellness']+1.0))-1.0
4035
4036
4037
4038
4039 if options['offshellness'] < 0.0:
4040 energy_increase = math.sqrt(allow_energy_increase)
4041 else:
4042 energy_increase = allow_energy_increase
4043
4044 other_res_offshell = [res for res in force_other_res_offshell if
4045 res!=resonance]
4046
4047
4048
4049 all_other_res_masses = [getmass(res['ParticlePDG'])
4050 for res in other_res_offshell]
4051 resonance_mass = getmass(resonance['ParticlePDG'])
4052
4053 str_res = '%s %s'%(model.get_particle(
4054 resonance['ParticlePDG']).get_name(),
4055 str(list(resonance['FSMothersNumbers'])))
4056 leg_number_to_leg = dict((l.get('number'),l) for l in process.get('legs'))
4057
4058
4059
4060 daughter_masses = sum(getmass(leg_number_to_leg[\
4061 number].get('id')) for number in resonance['FSMothersNumbers'])
4062 min_offshellnes = 4.0*((daughter_masses*1.2)/resonance_mass)-1.0
4063
4064
4065
4066 min_energy = max(sum(getmass(l.get('id')) for l in \
4067 process.get('legs') if l.get('state')==True),
4068 sum(getmass(l.get('id')) for l in \
4069 process.get('legs') if l.get('state')==False))
4070
4071
4072
4073 daughter_offshellnesses = [(1.0+options['offshellness'])*mass
4074 for i, mass in enumerate(all_other_res_masses) if
4075 other_res_offshell[i]['FSMothersNumbers'].issubset(
4076 resonance['FSMothersNumbers'])]
4077
4078 if options['offshellness'] >= 0.0:
4079
4080 if len(daughter_offshellnesses)>0:
4081 max_mass = max(daughter_offshellnesses)
4082
4083 offshellness = max(2.0*(max_mass/resonance_mass)-1.0,
4084 options['offshellness'])
4085
4086 max_mass = max([(1.0+options['offshellness'])*mass for mass in \
4087 all_other_res_masses]+[(1.0+offshellness)*resonance_mass])
4088
4089
4090
4091 target = max(min_energy*1.2,max_mass*2.0)
4092 if target > options['energy']:
4093 logger.warning("The user-defined energy %f seems "%options['energy']+
4094 " insufficient to reach the minimum propagator invariant mass "+
4095 "%f required for the chosen offshellness %f."%(max_mass,
4096 options['offshellness']) + " Energy reset to %f."%target)
4097 options['energy'] = target
4098
4099 else:
4100 if len(daughter_offshellnesses) > 0:
4101 min_mass = min(daughter_offshellnesses)
4102
4103 offshellness = min(0.25*(min_mass/resonance_mass)-1.0,
4104 options['offshellness'])
4105
4106
4107
4108 if (1.0+offshellness)*resonance_mass < daughter_masses*1.2:
4109 msg = 'The resonance %s cannot accommodate'%str_res+\
4110 ' an offshellness of %f because the daughter'%options['offshellness']+\
4111 ' masses are %f.'%daughter_masses
4112 if options['offshellness']<min_offshellnes:
4113 msg += ' Try again with an offshellness'+\
4114 ' smaller in absolute value, of at least %f.'%min_offshellnes
4115 else:
4116 msg += ' Try again with a smaller offshellness (in absolute value).'
4117 raise InvalidCmd(msg)
4118
4119 min_mass = min([(1.0+options['offshellness'])*mass for mass in \
4120 all_other_res_masses]+[(1.0+offshellness)*resonance_mass])
4121
4122
4123 if 2.0*min_mass < options['energy']:
4124 new_energy = max(min_energy*1.2, 2.0*min_mass)
4125 logger.warning("The user-defined energy %f seems "%options['energy']+
4126 " too large to not overshoot the maximum propagator invariant mass "+
4127 "%f required for the chosen offshellness %f."%(min_mass,
4128 options['offshellness']) + " Energy reset to %f."%new_energy)
4129 options['energy'] = new_energy
4130
4131 if options['offshellness'] < 0.0 and options['energy'] >= min_mass:
4132 logger.debug("The target energy is not compatible with the mass"+
4133 " of the external states for this process (%f). It is "%min_mass+
4134 "unlikely that a valid kinematic configuration will be found.")
4135
4136 if options['offshellness']<0.0 and offshellness<options['offshellness'] or \
4137 options['offshellness']>0.0 and offshellness>options['offshellness']:
4138 logger.debug("Offshellness increased to %f"%offshellness+
4139 " so as to try to find a kinematical configuration with"+
4140 " offshellness at least equal to %f"%options['offshellness']+
4141 " for all resonances.")
4142
4143 start_energy = options['energy']
4144 while N_trials<max_trial:
4145 N_trials += 1
4146 if N_trials%nstep_for_energy_increase==0:
4147 if allow_energy_increase > 0.0:
4148 old_offshellness = offshellness
4149 if offshellness > 0.0:
4150 options['energy'] *= energy_increase
4151 offshellness *= energy_increase
4152 else:
4153 options['energy'] = max(options['energy']/energy_increase,
4154 min_energy*1.2)
4155 offshellness = max(min_offshellnes,
4156 ((offshellness+1.0)/energy_increase)-1.0)
4157 if old_offshellness!=offshellness:
4158 logger.debug('Trying to find a valid kinematic'+\
4159 " configuration for resonance '%s'"%str_res+\
4160 ' with increased offshellness %f'%offshellness)
4161
4162 candidate = get_PSpoint_for_resonance(resonance, offshellness)
4163 pass_offshell_test = True
4164 for i, res in enumerate(other_res_offshell):
4165
4166 if offshellness > 0.0:
4167 if invmass([candidate[j-1] for j in res['FSMothersNumbers']]) <\
4168 ((1.0+options['offshellness'])*all_other_res_masses[i]):
4169 pass_offshell_test = False
4170 break
4171 else:
4172 if invmass([candidate[j-1] for j in res['FSMothersNumbers']]) >\
4173 ((1.0+options['offshellness'])*all_other_res_masses[i]):
4174 pass_offshell_test = False
4175 break
4176 if not pass_offshell_test:
4177 continue
4178
4179 if isolation_cuts:
4180
4181 if not evaluator.pass_isolation_cuts(candidate,
4182 ptcut=0.05*invmass([candidate[0],candidate[1]]), drcut=0.4):
4183 continue
4184 PS_point_found = candidate
4185 break
4186
4187
4188 options['energy'] = start_energy
4189
4190 if PS_point_found is None:
4191 err_msg = 'Could not find a valid PS point in %d'%max_trial+\
4192 ' trials. Try increasing the energy, modify the offshellness '+\
4193 'or relax some constraints.'
4194 if options['offshellness']<0.0:
4195 err_msg +='Try with a positive offshellness instead (or a '+\
4196 'negative one of smaller absolute value)'
4197 raise InvalidCmd(err_msg)
4198 else:
4199
4200
4201 resonance['offshellnesses'] = []
4202 all_other_res_masses = [resonance_mass] + all_other_res_masses
4203 other_res_offshell = [resonance] + other_res_offshell
4204 for i, res in enumerate(other_res_offshell):
4205 if i==0:
4206 res_str = 'self'
4207 else:
4208 res_str = '%s %s'%(model.get_particle(
4209 res['ParticlePDG']).get_name(),
4210 str(list(res['FSMothersNumbers'])))
4211 resonance['offshellnesses'].append((res_str,(
4212 (invmass([PS_point_found[j-1] for j in
4213 res['FSMothersNumbers']])/all_other_res_masses[i])-1.0)))
4214
4215 resonance['PS_point_used'] = PS_point_found
4216
4217 def get_PSpoint_for_resonance(resonance, offshellness = options['offshellness']):
4218 """ Assigns a kinematic configuration to the resonance dictionary
4219 given in argument."""
4220
4221
4222 mass_string = evaluator.full_model.get_particle(
4223 resonance['ParticlePDG']).get('mass')
4224 mass = evaluator.full_model.get('parameter_dict')[mass_string].real
4225
4226
4227 special_mass = (1.0 + offshellness)*mass
4228
4229
4230 prod_proc = base_objects.Process({'legs':base_objects.LegList(
4231 copy.copy(leg) for leg in process.get('legs') if
4232 leg.get('number') not in resonance['FSMothersNumbers'])})
4233
4234
4235
4236 prod_proc.get('legs').append(base_objects.Leg({
4237 'number':max(l.get('number') for l in process.get('legs'))+1,
4238 'state':True,
4239 'id':0}))
4240
4241 decay_proc = base_objects.Process({'legs':base_objects.LegList(
4242 copy.copy(leg) for leg in process.get('legs') if leg.get('number')
4243 in resonance['FSMothersNumbers'] and not leg.get('state')==False)})
4244
4245
4246
4247
4248 decay_proc.get('legs').insert(0,base_objects.Leg({
4249 'number':-1,
4250 'state':False,
4251 'id':0}))
4252 prod_kinematic = evaluator.get_momenta(prod_proc, options=options,
4253 special_mass=special_mass)[0]
4254 decay_kinematic = evaluator.get_momenta(decay_proc, options=options,
4255 special_mass=special_mass)[0]
4256 momenta = glue_momenta(prod_kinematic,decay_kinematic)
4257
4258
4259
4260 ordered_momenta = [(prod_proc.get('legs')[i].get('number'),momenta[i])
4261 for i in range(len(prod_proc.get('legs'))-1)]
4262
4263 ordered_momenta += [(decay_proc.get('legs')[-i].get('number'),
4264 momenta[-i]) for i in range(1,len(decay_proc.get('legs')))]
4265
4266
4267 return [m[1] for m in sorted(ordered_momenta, key = lambda el: el[0])]
4268
4269
4270
4271 @misc.mute_logger()
4272 def get_width(PDG, lambdaCMS, param_card):
4273 """ Returns the width to use for particle with absolute PDG 'PDG' and
4274 for the lambdaCMS value 'lambdaCMS', using the cache if possible."""
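# Note on the caching below: the 'never' and 'first_time' modes assume that the
# LO width scales linearly with lambdaCMS, so one evaluation is used to fill the
# cache for every value in options['lambdaCMS'] via
#   width(lam) = width(lambdaCMS) * (lam / lambdaCMS)
# whereas the 'always' mode computes and caches each (PDG, lambdaCMS) pair
# individually.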
4275
4276
4277
4278 if abs(PDG) in [abs(leg.get('id')) for leg in process.get('legs')]:
4279 return 0.0
4280
4281 particle = evaluator.full_model.get_particle(PDG)
4282
4283
4284
4285 if particle.get('ghost') or particle.get('goldstone'):
4286 return 0.0
4287
4288
4289 if particle.get('width')=='ZERO':
4290 return 0.0
4291
4292 if (PDG,lambdaCMS) in options['cached_widths']:
4293 return options['cached_widths'][(PDG,lambdaCMS)]
4294
4295 if options['recompute_width'] == 'never':
4296 width = evaluator.full_model.\
4297 get('parameter_dict')[particle.get('width')].real
4298 else:
4299
4300 if aloha.complex_mass:
4301 raise MadGraph5Error("The width for particle with PDG %d and"%PDG+\
4302 " lambdaCMS=%f should have already been "%lambdaCMS+\
4303 "computed during the NWA run.")
4304
4305
4306 if options['recompute_width'] in ['always','first_time']:
4307 particle_name = particle.get_name()
4308 with misc.TMP_directory(dir=options['output_path']) as path:
4309 param_card.write(pjoin(path,'tmp.dat'))
4310
4311
4312
4313 command = '%s --output=%s'%(particle_name,pjoin(path,'tmp.dat'))+\
4314 ' --path=%s --body_decay=2'%pjoin(path,'tmp.dat')+\
4315 ' --precision_channel=0.001'
4316
4317 param_card.write(pjoin(options['output_path'],'tmp.dat'))
4318
4319
4320
4321 orig_model = options['cmd']._curr_model
4322 orig_helas_model = options['cmd']._curr_helas_model
4323 options['cmd'].do_compute_widths(command, evaluator.full_model)
4324
4325 options['cmd']._curr_model = orig_model
4326 options['cmd']._curr_helas_model = orig_helas_model
4327
4328
4329 evaluator.full_model.set_parameters_and_couplings(
4330 param_card=param_card)
4331 try:
4332 tmp_param_card = check_param_card.ParamCard(pjoin(path,'tmp.dat'))
4333 except:
4334 raise MadGraph5Error('Error occurred during width '+\
4335 'computation with command:\n compute_widths %s'%command)
4336 width = tmp_param_card['decay'].get(PDG).value
4337
4338
4339
4340
4341
4342
4343
4344 if options['recompute_width'] in ['never','first_time']:
4345
4346 for lam in options['lambdaCMS']:
4347 options['cached_widths'][(PDG,lam)]=width*(lam/lambdaCMS)
4348 else:
4349 options['cached_widths'][(PDG,lambdaCMS)] = width
4350
4351 return options['cached_widths'][(PDG,lambdaCMS)]
4352
4353 def get_order(diagrams, diagsName):
4354 """Compute the common summed of coupling orders used for this cms check
4355 in the diagrams specified. When inconsistency occurs, use orderName
4356 in the warning message if throwm."""
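# Hedged example: with options['expansion_orders'] == ['QED'] (hypothetical
# setting), a diagram whose calculate_orders() returns {'QED': 4, 'QCD': 2}
# contributes 4 to the set of sums; if all diagrams agree, that common value is
# returned, otherwise the minimum is used and a warning is issued.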
4357
4358 orders = set([])
4359 for diag in diagrams:
4360 diag_orders = diag.calculate_orders()
4361 orders.add(sum((diag_orders[order] if order in diag_orders else 0)
4362 for order in options['expansion_orders']))
4363 if len(orders)>1:
4364 logger.warning(warning_msg%('%s '%diagsName,str(orders)))
4365 return min(list(orders))
4366 else:
4367 return list(orders)[0]
4368
4369 MLoptions = copy.copy(options['MLOptions'])
4370
4371 MLoptions['DoubleCheckHelicityFilter'] = False
4372
4373
4374 for tweak in options['tweak']['custom']:
4375 if tweak.startswith('seed'):
4376 try:
4377 new_seed = int(tweak[4:])
4378 except ValueError:
4379 raise MadGraph5Error("Seed '%s' is not of the right format 'seed<int>'."%tweak)
4380 random.seed(new_seed)
4381
4382 mode = 'CMS' if aloha.complex_mass else 'NWA'
4383 for i, leg in enumerate(process.get('legs')):
4384 leg.set('number', i+1)
4385
4386 logger.info("Running CMS check for process %s (now doing %s scheme)" % \
4387 ( process.nice_string().replace('Process:', 'process'), mode))
4388
4389 proc_dir = None
4390 resonances = None
4391 warning_msg = "All %sdiagrams do not share the same sum of orders "+\
4392 "%s; found %%s."%(','.join(options['expansion_orders']))+\
4393 " This potentially problematic for the CMS check."
4394 if NLO:
4395
4396
4397
4398 if options['name']=='auto':
4399 proc_name = "%s%s_%s%s__%s__"%(('SAVED' if options['reuse'] else ''),
4400 temp_dir_prefix, '_'.join(process.shell_string().split('_')[1:]),
4401 ('_' if process.get('perturbation_couplings') else '')+
4402 '_'.join(process.get('perturbation_couplings')),mode)
4403 else:
4404 proc_name = "%s%s_%s__%s__"%(('SAVED' if options['reuse'] else ''),
4405 temp_dir_prefix,options['name'], mode)
4406
4407 timing, matrix_element = generate_loop_matrix_element(process,
4408 options['reuse'], output_path=options['output_path'],
4409 cmd = options['cmd'], proc_name=proc_name,
4410 loop_filter=options['loop_filter'])
4411 if matrix_element is None:
4412
4413 return None
4414
4415 reusing = isinstance(matrix_element, base_objects.Process)
4416 proc_dir = pjoin(options['output_path'],proc_name)
4417
4418
4419 infos = evaluator.setup_process(matrix_element, proc_dir,
4420 reusing = reusing, param_card = options['param_card'],
4421 MLOptions=MLoptions)
4422
4423 evaluator.fix_MadLoopParamCard(pjoin(proc_dir,'Cards'),
4424 mp = None, loop_filter = True,MLOptions=MLoptions)
4425
4426
4427 tmp_card_backup = pjoin(proc_dir,'Cards','param_card.dat__TemporaryBackup__')
4428 if os.path.isfile(tmp_card_backup):
4429
4430 logger.info("Last run in process '%s' apparently aborted."%proc_dir+\
4431 " Now reverting 'param_card.dat' to its original value.")
4432 shutil.copy(tmp_card_backup, pjoin(proc_dir, 'Cards','param_card.dat'))
4433 else:
4434
4435 shutil.copy(pjoin(proc_dir,'Cards','param_card.dat'), tmp_card_backup)
4436
4437 tmp_modelfunc_backup = pjoin(proc_dir,'Source','MODEL',
4438 'model_functions.f__TemporaryBackup__')
4439 if os.path.isfile(tmp_modelfunc_backup):
4440
4441 logger.info("Last run in process '%s' apparently aborted."%proc_dir+\
4442 " Now reverting 'model_functions.f' to its original value.")
4443 shutil.copy(tmp_modelfunc_backup, pjoin(proc_dir,'Source','MODEL',
4444 'model_functions.f'))
4445 evaluator.apply_log_tweak(proc_dir, 'recompile')
4446 else:
4447
4448 shutil.copy(pjoin(proc_dir,'Source','MODEL','model_functions.f'),
4449 tmp_modelfunc_backup)
4450
4451
4452 MadLoopInitializer.fix_PSPoint_in_check(pjoin(proc_dir,'SubProcesses'),
4453 read_ps = True, npoints = 1, hel_config = options['helicity'],
4454 split_orders=options['split_orders'])
4455
4456
4457
4458 for dir in misc.glob('P*_*', pjoin(proc_dir,'SubProcesses')):
4459 if not (re.search(r'.*P\d+_\w*$', dir) or not os.path.isdir(dir)):
4460 continue
4461 try:
4462 os.remove(pjoin(dir,'check'))
4463 os.remove(pjoin(dir,'check_sa.o'))
4464 except OSError:
4465 pass
4466
4467 with open(os.devnull, 'w') as devnull:
4468 retcode = subprocess.call(['make','check'],
4469 cwd=dir, stdout=devnull, stderr=devnull)
4470 if retcode != 0:
4471 raise MadGraph5Error("Compilation error with "+\
4472 "'make check' in %s"%dir)
4473
4474
4475 pkl_path = pjoin(proc_dir,'resonance_specs.pkl')
4476 if reusing:
4477
4478
4479 if not os.path.isfile(pkl_path):
4480 raise InvalidCmd('The folder %s could'%proc_dir+\
4481 " not be reused because the resonance specification file "+
4482 "'resonance_specs.pkl' is missing.")
4483 else:
4484 proc_name, born_order, loop_order, resonances = \
4485 save_load_object.load_from_file(pkl_path)
4486
4487
4488 for res in resonances:
4489 set_PSpoint(res, force_other_res_offshell=resonances)
4490
4491
4492 if isinstance(opt, list):
4493 opt.append((proc_name, resonances))
4494 else:
4495 resonances = opt
4496 else:
4497 helas_born_diagrams = matrix_element.get_born_diagrams()
4498 if len(helas_born_diagrams)==0:
4499 logger.warning('The CMS check for loop-induced processes is '+\
4500 'not yet available (nor is it very interesting).')
4501 return None
4502 born_order = get_order(helas_born_diagrams,'Born')
4503 loop_order = get_order(matrix_element.get_loop_diagrams(),'loop')
4504
4505
4506 if isinstance(opt, list):
4507 opt.append((process.base_string(),find_resonances(helas_born_diagrams)))
4508 resonances = opt[-1][1]
4509 else:
4510 resonances = opt
4511
4512
4513 save_load_object.save_to_file(pkl_path, (process.base_string(),
4514 born_order, loop_order,resonances))
4515
4516 else:
4517
4518 try:
4519 amplitude = diagram_generation.Amplitude(process)
4520 except InvalidCmd:
4521 logging.info("No diagrams for %s" % \
4522 process.nice_string().replace('Process', 'process'))
4523 return None
4524 if not amplitude.get('diagrams'):
4525
4526 logging.info("No diagrams for %s" % \
4527 process.nice_string().replace('Process', 'process'))
4528 return None
4529
4530 matrix_element = helas_objects.HelasMatrixElement(amplitude,
4531 gen_color=True)
4532 diagrams = matrix_element.get('diagrams')
4533 born_order = get_order(diagrams,'Born')
4534
4535 loop_order = -1
4536
4537 if isinstance(opt, list):
4538 opt.append((process.base_string(),find_resonances(diagrams)))
4539 resonances = opt[-1][1]
4540 else:
4541 resonances= opt
4542
4543 if len(resonances)==0:
4544 logger.info("No resonance found for process %s."\
4545 %process.base_string())
4546 return None
4547
4548
4549 if not options['cached_param_card'][mode][0]:
4550 if NLO:
4551 param_card = check_param_card.ParamCard(
4552 pjoin(proc_dir,'Cards','param_card.dat'))
4553 else:
4554 param_card = check_param_card.ParamCard(
4555 StringIO.StringIO(evaluator.full_model.write_param_card()))
4556 options['cached_param_card'][mode][0] = param_card
4557 name2block, _ = param_card.analyze_param_card()
4558 options['cached_param_card'][mode][1] = name2block
4559
4560 else:
4561 param_card = options['cached_param_card'][mode][0]
4562 name2block = options['cached_param_card'][mode][1]
4563
4564
4565 if loop_order != -1 and (loop_order+born_order)%2 != 0:
4566 raise MadGraph5Error('The summed squared matrix element '+\
4567 " order '%d' is not even."%(loop_order+born_order))
4568 result = {'born_order':born_order,
4569 'loop_order': (-1 if loop_order==-1 else (loop_order+born_order)/2),
4570 'resonances_result':[]}
4571
4572
4573 if NLO:
4574 try:
4575 shutil.copy(pjoin(proc_dir,'Cards','param_card.dat'),
4576 pjoin(proc_dir,'Cards','param_card.dat__backUp__'))
4577 except:
4578 pass
4579
4580
4581 had_log_tweaks=False
4582 if NLO:
4583 for tweak in options['tweak']['custom']:
4584 if tweak.startswith('seed'):
4585 continue
4586 try:
4587 logstart, logend = tweak.split('->')
4588 except:
4589 raise MadGraph5Error("Tweak '%s' not recognized."%tweak)
4590 if logstart in ['logp','logm', 'log'] and \
4591 logend in ['logp','logm', 'log']:
4592 if NLO:
4593 evaluator.apply_log_tweak(proc_dir, [logstart, logend])
4594 had_log_tweaks = True
4595 else:
4596 raise MadGraph5Error("Tweak '%s' not recognized."%tweak)
4597 if had_log_tweaks:
4598 evaluator.apply_log_tweak(proc_dir, 'recompile')
4599
4600
4601 if options['resonances']=='all':
4602 resonances_to_run = resonances
4603 elif isinstance(options['resonances'],int):
4604 resonances_to_run = resonances[:options['resonances']]
4605 elif isinstance(options['resonances'],list):
4606 resonances_to_run = []
4607 for res in resonances:
4608 for res_selection in options['resonances']:
4609 if abs(res['ParticlePDG'])==res_selection[0] and \
4610 res['FSMothersNumbers']==set(res_selection[1]):
4611 resonances_to_run.append(res)
4612 break
4613 else:
4614 raise InvalidCmd("Resonance selection '%s' not reckognized"%\
4615 str(options['resonances']))
4616
4617
4618
4619 if NLO and options['show_plot']:
4620 widgets = ['ME evaluations:', pbar.Percentage(), ' ',
4621 pbar.Bar(),' ', pbar.ETA(), ' ']
4622 progress_bar = pbar.ProgressBar(widgets=widgets,
4623 maxval=len(options['lambdaCMS'])*len(resonances_to_run), fd=sys.stdout)
4624 progress_bar.update(0)
4625
4626 sys.stdout.flush()
4627 else:
4628 progress_bar = None
4629
4630 for resNumber, res in enumerate(resonances_to_run):
4631
4632
4633 result['resonances_result'].append({'resonance':res,'born':[]})
4634 if NLO:
4635 result['resonances_result'][-1]['finite'] = []
4636
4637 for lambdaNumber, lambdaCMS in enumerate(options['lambdaCMS']):
4638
4639
4640 new_param_card = check_param_card.ParamCard(param_card)
4641
4642 for param, replacement in options['expansion_parameters'].items():
4643
4644
4645 orig_param = param.replace('__tmpprefix__','')
4646 if orig_param not in name2block:
4647
4648
4649
4650 continue
4651 for block, lhaid in name2block[orig_param]:
4652 orig_value = float(param_card[block].get(lhaid).value)
4653 new_value = eval(replacement,
4654 {param:orig_value,'lambdacms':lambdaCMS})
4655 new_param_card[block].get(lhaid).value=new_value
4656
4657
4658
4659
4660
4661
4662
4663
4664 evaluator.full_model.set_parameters_and_couplings(
4665 param_card=new_param_card)
4666
4667 for decay in new_param_card['decay'].keys():
4668 if mode=='CMS':
4669 new_width = get_width(abs(decay[0]), lambdaCMS,
4670 new_param_card)
4671 else:
4672 new_width = 0.0
4673 new_param_card['decay'].get(decay).value= new_width
4674
4675
4676 evaluator.full_model.set_parameters_and_couplings(
4677 param_card=new_param_card)
4678 if NLO:
4679 new_param_card.write(pjoin(proc_dir,'Cards','param_card.dat'))
4680
4681
4682 if lambdaCMS==1.0 and mode=='CMS' and \
4683 options['recompute_width'] in ['always','first_time']:
4684 new_param_card.write(pjoin(proc_dir,
4685 'Cards','param_card.dat_recomputed_widths'))
4686
4687
4688
4689 if mode=='NWA' and (options['recompute_width']=='always' or (
4690 options['recompute_width']=='first_time' and lambdaCMS==1.0)):
4691
4692 tmp_param_card = check_param_card.ParamCard(new_param_card)
4693
4694
4695 for decay in new_param_card['decay'].keys():
4696 particle_name = evaluator.full_model.get_particle(\
4697 abs(decay[0])).get_name()
4698 new_width = get_width(abs(decay[0]),lambdaCMS,new_param_card)
4699 tmp_param_card['decay'].get(decay).value = new_width
4700 if not options['has_FRdecay'] and new_width != 0.0 and \
4701 (abs(decay[0]),lambdaCMS) not in options['cached_widths']:
4702 logger.info('Numerically computed width of particle'+\
4703 ' %s for lambda=%.4g : %-9.6gGeV'%
4704 (particle_name,lambdaCMS,new_width))
4705
4706
4707
4708 if lambdaCMS==1.0 and NLO:
4709 tmp_param_card.write(pjoin(proc_dir,
4710 'Cards','param_card.dat_recomputed_widths'))
4711
4712
4713 for param, replacement in options['tweak']['params'].items():
4714
4715
4716 orig_param = param.replace('__tmpprefix__','')
4717
4718 if orig_param.lower() == 'allwidths':
4719
4720 for decay in new_param_card['decay'].keys():
4721 orig_value = float(new_param_card['decay'].get(decay).value)
4722 new_value = eval(replacement,
4723 {param:orig_value,'lambdacms':lambdaCMS})
4724 new_param_card['decay'].get(decay).value = new_value
4725 continue
4726 if orig_param not in name2block:
4727
4728
4729 continue
4730 for block, lhaid in name2block[orig_param]:
4731 orig_value = float(new_param_card[block].get(lhaid).value)
4732 new_value = eval(replacement,
4733 {param:orig_value,'lambdacms':lambdaCMS})
4734 new_param_card[block].get(lhaid).value=new_value
4735
4736 if options['tweak']['params']:
4737
4738 evaluator.full_model.set_parameters_and_couplings(
4739 param_card=new_param_card)
4740 if NLO:
4741 new_param_card.write(pjoin(proc_dir,'Cards','param_card.dat'))
4742
4743
4744 if NLO:
4745 ME_res = LoopMatrixElementEvaluator.get_me_value(process, 0,
4746 proc_dir, PSpoint=res['PS_point_used'], verbose=False,
4747 format='dict', skip_compilation=True)
4748
4749
4750
4751
4752 result['resonances_result'][-1]['born'].append(ME_res['born'])
4753 result['resonances_result'][-1]['finite'].append(
4754 ME_res['finite']*ME_res['born']*ME_res['alphaS_over_2pi'])
4755 else:
4756 ME_res = evaluator.evaluate_matrix_element(matrix_element,
4757 p=res['PS_point_used'], auth_skipping=False, output='m2')[0]
4758 result['resonances_result'][-1]['born'].append(ME_res)
4759 if not progress_bar is None:
4760 progress_bar.update(resNumber*len(options['lambdaCMS'])+\
4761 (lambdaNumber+1))
4762
4763 sys.stdout.flush()
4764
4765
4766 log_reversed = False
4767 for tweak in options['tweak']['custom']:
4768 if tweak.startswith('log') and had_log_tweaks:
4769 if log_reversed:
4770 continue
4771 if NLO:
4772 evaluator.apply_log_tweak(proc_dir, 'default')
4773 evaluator.apply_log_tweak(proc_dir, 'recompile')
4774 log_reversed = True
4775
4776
4777 evaluator.full_model.set_parameters_and_couplings(param_card=param_card)
4778 if NLO:
4779 try:
4780 shutil.copy(pjoin(proc_dir,'Cards','param_card.dat__backUp__'),
4781 pjoin(proc_dir,'Cards','param_card.dat'))
4782 except:
4783 param_card.write(pjoin(proc_dir,'Cards','param_card.dat'))
4784
4785
4786
4787 try:
4788 os.remove(pjoin(proc_dir,'Cards','param_card.dat__TemporaryBackup__'))
4789 os.remove(pjoin(proc_dir,'Source','MODEL',
4790 'model_functions.f__TemporaryBackup__'))
4791 except:
4792 pass
4793
4794 return (process.nice_string().replace('Process:', '').strip(),result)
4795
4796 -def get_value(process, evaluator, p=None, options=None):
4797 """Return the value/momentum for a phase space point"""
4798
4799 for i, leg in enumerate(process.get('legs')):
4800 leg.set('number', i+1)
4801
4802 logger.info("Checking %s in %s gauge" % \
4803 ( process.nice_string().replace('Process:', 'process'),
4804 'unitary' if aloha.unitary_gauge else 'feynman'))
4805
4806 legs = process.get('legs')
4807
4808
4809 try:
4810 if process.get('perturbation_couplings')==[]:
4811 amplitude = diagram_generation.Amplitude(process)
4812 else:
4813 amplitude = loop_diagram_generation.LoopAmplitude(process)
4814 except InvalidCmd:
4815 logging.info("No diagrams for %s" % \
4816 process.nice_string().replace('Process', 'process'))
4817 return None
4818
4819 if not amplitude.get('diagrams'):
4820
4821 logging.info("No diagrams for %s" % \
4822 process.nice_string().replace('Process', 'process'))
4823 return None
4824
4825 if not p:
4826
4827 p, w_rambo = evaluator.get_momenta(process, options)
4828
4829
4830 if not isinstance(amplitude, loop_diagram_generation.LoopAmplitude):
4831 matrix_element = helas_objects.HelasMatrixElement(amplitude,
4832 gen_color = True)
4833 else:
4834 matrix_element = loop_helas_objects.LoopHelasMatrixElement(amplitude,
4835 gen_color = True, optimized_output = evaluator.loop_optimized_output)
4836
4837 mvalue = evaluator.evaluate_matrix_element(matrix_element, p=p,
4838 output='jamp',options=options)
4839
4840 if mvalue and mvalue['m2']:
4841 return {'process':process.base_string(),'value':mvalue,'p':p}
4842
4844 """Present the results of a comparison in a nice list format for loop
4845 processes. It detail the results from each lorentz transformation performed.
4846 """
4847
4848 process = comparison_results[0]['process']
4849 results = comparison_results[0]['results']
4850
4851
4852 threshold_rotations = 1e-6
4853
4854
4855
4856 threshold_boosts = 1e-3
4857 res_str = "%s" % process.base_string()
4858
4859 transfo_col_size = 17
4860 col_size = 18
4861 transfo_name_header = 'Transformation name'
4862
4863 if len(transfo_name_header) + 1 > transfo_col_size:
4864 transfo_col_size = len(transfo_name_header) + 1
4865
4866 for transfo_name, value in results:
4867 if len(transfo_name) + 1 > transfo_col_size:
4868 transfo_col_size = len(transfo_name) + 1
4869
4870 res_str += '\n' + fixed_string_length(transfo_name_header, transfo_col_size) + \
4871 fixed_string_length("Value", col_size) + \
4872 fixed_string_length("Relative diff.", col_size) + "Result"
4873
4874 ref_value = results[0]
4875 res_str += '\n' + fixed_string_length(ref_value[0], transfo_col_size) + \
4876 fixed_string_length("%1.10e" % ref_value[1]['m2'], col_size)
4877
4878
4879 all_pass = True
4880 for res in results[1:]:
4881 threshold = threshold_boosts if 'BOOST' in res[0].upper() else \
4882 threshold_rotations
4883 rel_diff = abs((ref_value[1]['m2']-res[1]['m2'])\
4884 /((ref_value[1]['m2']+res[1]['m2'])/2.0))
4885 this_pass = rel_diff <= threshold
4886 if not this_pass:
4887 all_pass = False
4888 res_str += '\n' + fixed_string_length(res[0], transfo_col_size) + \
4889 fixed_string_length("%1.10e" % res[1]['m2'], col_size) + \
4890 fixed_string_length("%1.10e" % rel_diff, col_size) + \
4891 ("Passed" if this_pass else "Failed")
4892 if all_pass:
4893 res_str += '\n' + 'Summary: passed'
4894 else:
4895 res_str += '\n' + 'Summary: failed'
4896
4897 return res_str
4898
4900 """Present the results of a comparison in a nice list format
4901 if output='fail' return the number of failed process -- for test--
4902 """
4903
4904
4905 if comparison_results[0]['process']['perturbation_couplings']!=[]:
4906 return output_lorentz_inv_loop(comparison_results, output)
4907
4908 proc_col_size = 17
4909
4910 threshold=1e-10
4911 process_header = "Process"
4912
4913 if len(process_header) + 1 > proc_col_size:
4914 proc_col_size = len(process_header) + 1
4915
4916 for proc, values in comparison_results:
4917 if len(proc) + 1 > proc_col_size:
4918 proc_col_size = len(proc) + 1
4919
4920 col_size = 18
4921
4922 pass_proc = 0
4923 fail_proc = 0
4924 no_check_proc = 0
4925
4926 failed_proc_list = []
4927 no_check_proc_list = []
4928
4929 res_str = fixed_string_length(process_header, proc_col_size) + \
4930 fixed_string_length("Min element", col_size) + \
4931 fixed_string_length("Max element", col_size) + \
4932 fixed_string_length("Relative diff.", col_size) + \
4933 "Result"
4934
4935 for one_comp in comparison_results:
4936 proc = one_comp['process'].base_string()
4937 data = one_comp['results']
4938
4939 if data == 'pass':
4940 no_check_proc += 1
4941 no_check_proc_list.append(proc)
4942 continue
4943
4944 values = [data[i]['m2'] for i in range(len(data))]
4945
4946 min_val = min(values)
4947 max_val = max(values)
4948 diff = (max_val - min_val) / abs(max_val)
4949
4950 res_str += '\n' + fixed_string_length(proc, proc_col_size) + \
4951 fixed_string_length("%1.10e" % min_val, col_size) + \
4952 fixed_string_length("%1.10e" % max_val, col_size) + \
4953 fixed_string_length("%1.10e" % diff, col_size)
4954
4955 if diff < threshold:
4956 pass_proc += 1
4957 proc_succeed = True
4958 res_str += "Passed"
4959 else:
4960 fail_proc += 1
4961 proc_succeed = False
4962 failed_proc_list.append(proc)
4963 res_str += "Failed"
4964
4965
4966
4967
4968
4969 if len(data[0]['jamp'])!=0:
4970 for k in range(len(data[0]['jamp'][0])):
4971 sum = [0] * len(data)
4972
4973 for j in range(len(data[0]['jamp'])):
4974
4975 values = [abs(data[i]['jamp'][j][k])**2 for i in range(len(data))]
4976 sum = [sum[i] + values[i] for i in range(len(values))]
4977
4978
4979 min_val = min(sum)
4980 max_val = max(sum)
4981 if not max_val:
4982 continue
4983 diff = (max_val - min_val) / max_val
4984
4985 tmp_str = '\n' + fixed_string_length(' JAMP %s'%k , proc_col_size) + \
4986 fixed_string_length("%1.10e" % min_val, col_size) + \
4987 fixed_string_length("%1.10e" % max_val, col_size) + \
4988 fixed_string_length("%1.10e" % diff, col_size)
4989
4990 if diff > 1e-10:
4991 if not len(failed_proc_list) or failed_proc_list[-1] != proc:
4992 fail_proc += 1
4993 pass_proc -= 1
4994 failed_proc_list.append(proc)
4995 res_str += tmp_str + "Failed"
4996 elif not proc_succeed:
4997 res_str += tmp_str + "Passed"
4998
4999
5000
5001 res_str += "\nSummary: %i/%i passed, %i/%i failed" % \
5002 (pass_proc, pass_proc + fail_proc,
5003 fail_proc, pass_proc + fail_proc)
5004
5005 if fail_proc != 0:
5006 res_str += "\nFailed processes: %s" % ', '.join(failed_proc_list)
5007 if no_check_proc:
5008 res_str += "\nNot checked processes: %s" % ', '.join(no_check_proc_list)
5009
5010 if output == 'text':
5011 return res_str
5012 else:
5013 return fail_proc
5014
5016 """Present the results of a comparison in a nice list format
5017 if output='fail' return the number of failed process -- for test--
5018 """
5019
5020 proc_col_size = 17
5021
5022
5023
5024 pert_coupl = comparison_results[0]['perturbation_couplings']
5025 comparison_results = comparison_results[1:]
5026
5027 if pert_coupl:
5028 process_header = "Process [virt="+" ".join(pert_coupl)+"]"
5029 else:
5030 process_header = "Process"
5031
5032 if len(process_header) + 1 > proc_col_size:
5033 proc_col_size = len(process_header) + 1
5034
5035 for data in comparison_results:
5036 proc = data['process']
5037 if len(proc) + 1 > proc_col_size:
5038 proc_col_size = len(proc) + 1
5039
5040 pass_proc = 0
5041 fail_proc = 0
5042 no_check_proc = 0
5043
5044 failed_proc_list = []
5045 no_check_proc_list = []
5046
5047 col_size = 18
5048
5049 res_str = fixed_string_length(process_header, proc_col_size) + \
5050 fixed_string_length("Unitary", col_size) + \
5051 fixed_string_length("Feynman", col_size) + \
5052 fixed_string_length("Relative diff.", col_size) + \
5053 "Result"
5054
5055 for one_comp in comparison_results:
5056 proc = one_comp['process']
5057 data = [one_comp['value_unit'], one_comp['value_feynm']]
5058
5059
5060 if data[0] == 'pass':
5061 no_check_proc += 1
5062 no_check_proc_list.append(proc)
5063 continue
5064
5065 values = [data[i]['m2'] for i in range(len(data))]
5066
5067 min_val = min(values)
5068 max_val = max(values)
5069
5070
5071 diff = (max_val - min_val) / abs(max_val)
5072
5073 res_str += '\n' + fixed_string_length(proc, proc_col_size) + \
5074 fixed_string_length("%1.10e" % values[0], col_size) + \
5075 fixed_string_length("%1.10e" % values[1], col_size) + \
5076 fixed_string_length("%1.10e" % diff, col_size)
5077
5078 if diff < 1e-8:
5079 pass_proc += 1
5080 proc_succeed = True
5081 res_str += "Passed"
5082 else:
5083 fail_proc += 1
5084 proc_succeed = False
5085 failed_proc_list.append(proc)
5086 res_str += "Failed"
5087
5088
5089
5090
5091
5092 if len(data[0]['jamp'])>0:
5093 for k in range(len(data[0]['jamp'][0])):
5094 sum = [0, 0]
5095
5096 for j in range(len(data[0]['jamp'])):
5097
5098 values = [abs(data[i]['jamp'][j][k])**2 for i in range(len(data))]
5099 sum = [sum[i] + values[i] for i in range(len(values))]
5100
5101
5102 min_val = min(sum)
5103 max_val = max(sum)
5104 if not max_val:
5105 continue
5106 diff = (max_val - min_val) / max_val
5107
5108 tmp_str = '\n' + fixed_string_length(' JAMP %s'%k , col_size) + \
5109 fixed_string_length("%1.10e" % sum[0], col_size) + \
5110 fixed_string_length("%1.10e" % sum[1], col_size) + \
5111 fixed_string_length("%1.10e" % diff, col_size)
5112
5113 if diff > 1e-10:
5114 if not len(failed_proc_list) or failed_proc_list[-1] != proc:
5115 fail_proc += 1
5116 pass_proc -= 1
5117 failed_proc_list.append(proc)
5118 res_str += tmp_str + "Failed"
5119 elif not proc_succeed:
5120 res_str += tmp_str + "Passed"
5121
5122
5123
5124 res_str += "\nSummary: %i/%i passed, %i/%i failed" % \
5125 (pass_proc, pass_proc + fail_proc,
5126 fail_proc, pass_proc + fail_proc)
5127
5128 if fail_proc != 0:
5129 res_str += "\nFailed processes: %s" % ', '.join(failed_proc_list)
5130 if no_check_proc:
5131 res_str += "\nNot checked processes: %s" % ', '.join(no_check_proc_list)
5132
5133
5134 if output == 'text':
5135 return res_str
5136 else:
5137 return fail_proc
5138
5139 def CMS_save_path(extension, cms_res, used_model, opts, output_path=None):
5140 """Creates a suitable filename for saving these results."""
5141
5142 if opts['name']=='auto' and opts['analyze']!='None':
5143
5144 return '%s.%s'%(os.path.splitext(opts['analyze'].split(',')[0])\
5145 [0],extension)
5146
5147 if opts['name']!='auto':
5148 basename = opts['name']
5149 else:
5150 prefix = 'cms_check_'
5151
5152 if len(cms_res['ordered_processes'])==1:
5153 proc = cms_res['ordered_processes'][0]
5154 replacements = [('=>','gt'),('<=','lt'),('/','_no_'),
5155 (' ',''),('+','p'),('-','m'),
5156 ('~','x'), ('>','_'),('=','eq'),('^2','squared')]
5157
5158 try:
5159 proc=proc[:proc.index('[')]
5160 except ValueError:
5161 pass
5162
5163 for key, value in replacements:
5164 proc = proc.replace(key,value)
5165
5166 basename =prefix+proc+'_%s_'%used_model.get('name')+\
5167 ( ('_'+'_'.join(cms_res['perturbation_orders'])) if \
5168 cms_res['perturbation_orders']!=[] else '')
5169
5170 else:
5171 basename = prefix+datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%Ss")
5172
5173 suffix = '_%s'%opts['tweak']['name'] if opts['tweak']['name']!='' else ''
5174 if output_path:
5175 return pjoin(output_path,'%s%s.%s'%(basename,suffix,extension))
5176 else:
5177 return '%s%s.%s'%(basename,suffix,extension)
5178
5180 """ Outputs nicely the outcome of the complex mass scheme check performed
5181 by varying the width in the offshell region of resonances found for each process.
5182 Output just specifies whether text should be returned or a list of failed
5183 processes. Use 'concise_text' for a concise report of the results."""
5184
5185 pert_orders=result['perturbation_orders']
5186
5187
5188
5189
5190
5191
5192
5193
5194
5195 diff_lambda_power = options['diff_lambda_power']
5196
5197
5198
5199
5200
5201
5202
5203 if 'has_FRdecay' in result:
5204 has_FRdecay = result['has_FRdecay']
5205 else:
5206 has_FRdecay = False
5207
5208 if not pert_orders:
5209 CMS_test_threshold = 1e-3
5210 else:
5211
5212
5213
5214
5215
5216
5217 if not has_FRdecay and ('recomputed_with' not in result or \
5218 result['recompute_width'] in ['always','first_time']):
5219 CMS_test_threshold = 2e-2*(1.0e-4/min(result['lambdaCMS']))
5220 else:
5221
5222
5223 CMS_test_threshold = 2e-2*(1.0e-5/min(result['lambdaCMS']))
5224
5225
5226
5227
5228 consideration_threshold = min(CMS_test_threshold/10.0, 0.05)
5229
5230
5231 group_val = 3
5232
5233
5234
5235
5236 diff_zero_threshold = 1e-3
5237
5238
5239 lambda_range = options['lambda_plot_range']
5240
5241
5242
5243
5244
5245
5246
5247
5248
5249
5250 res_str = ''
5251
5252 concise_str = ''
5253 concise_data = '%%(process)-%ds%%(asymptot)-15s%%(cms_check)-25s%%(status)-25s\n'
5254 concise_repl_dict = {'Header':{'process':'Process',
5255 'asymptot':'Asymptote',
5256 'cms_check':'Deviation from asymptote',
5257 'status':'Result'}}
5258
5259
5260
5261
5262
5263 useLatexParticleName = 'built-in'
5264 name2tex = {'e+':r'e^+','w+':r'W^+','a':r'\gamma','g':'g',
5265 'e-':r'e^-','w-':r'W^-','z':'Z','h':'H',
5266 'mu+':r'\mu^+',
5267 'mu-':r'\mu^-',
5268 'ta+':r'\tau^+',
5269 'ta-':r'\tau^-'}
5270 for p in ['e','m','t']:
5271 d = {'e':'e','m':r'\mu','t':r'\tau'}
5272 name2tex['v%s'%p]=r'\nu_{%s}'%d[p]
5273 name2tex['v%s~'%p]=r'\bar{\nu_{%s}}'%d[p]
5274
5275 for p in ['u','d','c','s','b','t']:
5276 name2tex[p]=p
5277 name2tex['%s~'%p]=r'\bar{%s}'%p
5278
5279 def format_particle_name(particle, latex=useLatexParticleName):
5280 p_name = particle
5281 if latex=='model':
5282 try:
5283 texname = model.get_particle(particle).get('texname')
5284 if texname and texname!='none':
5285 p_name = r'$\displaystyle %s$'%texname
5286 except:
5287 pass
5288 elif latex=='built-in':
5289 try:
5290 p_name = r'$\displaystyle %s$'%name2tex[particle]
5291 except:
5292 pass
5293 return p_name
5294
5295 def resonance_str(resonance, latex=useLatexParticleName):
5296 """ Provides a concise string to characterize the resonance """
5297 particle_name = model.get_particle(resonance['ParticlePDG']).get_name()
5298 mothersID=['%d'%n for n in sorted(resonance['FSMothersNumbers'])]
5299 return r"%s [%s]"%(format_particle_name(particle_name,latex=latex),
5300 ','.join(mothersID))
5301
5302 def format_title(process, resonance):
5303 """ Format the plot title given the process and resonance """
5304
5305 process_string = []
5306 for particle in process.split():
5307 if '<=' in particle:
5308 particle = particle.replace('<=',r'$\displaystyle <=$')
5309 if '^2' in particle:
5310 particle = particle.replace('^2',r'$\displaystyle ^2$')
5311 if particle=='$$':
5312 process_string.append(r'\$\$')
5313 continue
5314 if particle=='>':
5315 process_string.append(r'$\displaystyle \rightarrow$')
5316 continue
5317 if particle=='/':
5318 process_string.append(r'$\displaystyle /$')
5319 continue
5320 process_string.append(format_particle_name(particle))
5321
5322 if resonance=='':
5323 return r'CMS check for %s' %(' '.join(process_string))
5324 else:
5325 return r'CMS check for %s ( resonance %s )'\
5326 %(' '.join(process_string),resonance)
5327
5328 def guess_lambdaorder(ME_values_list, lambda_values, expected=None,
5329 proc=None, res=None):
5330 """ Guess the lambda scaling from a list of ME values and return it.
5331 Also compare with the expected result if specified and trigger a
5332 warning if not in agreement."""
5333
5334 bpowers = []
5335 for i, lambdaCMS in enumerate(lambda_values[1:]):
5336 bpowers.append(round(math.log(ME_values_list[0]/ME_values_list[i+1],\
5337 lambda_values[0]/lambdaCMS)))
5338
5339
5340 bpower = sorted([(el, bpowers.count(el)) for el in set(bpowers)],
5341 key = lambda elem: elem[1], reverse=True)[0][0]
5342 if not expected:
5343 return bpower
5344 if bpower != expected:
5345 logger.warning('The apparent scaling of the squared amplitude '+
5346 'is inconsistent with the expected value '+
5347 '(expected %i vs detected %i). %i will be used.'%(expected,bpower,bpower)+
5348 ' This happened for process %s and resonance %s.'%(proc, res))
5349 return bpower
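# Worked example (illustrative numbers only): with ME_values_list = [4.0, 1.0, 0.25]
# at lambda_values = [1.0, 0.5, 0.25], the pairwise estimates are
# round(log(4.0/1.0, 1.0/0.5)) = 2 and round(log(4.0/0.25, 1.0/0.25)) = 2, so the
# most frequent exponent, 2, is returned as the scaling power.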
5350
5351 def check_stability(ME_values, lambda_values, lambda_scaling, values_name):
5352 """ Checks if the values passed in argument are stable and return the
5353 stability check outcome warning if it is not precise enough. """
5354
5355 values = sorted([
5356 abs(val*(lambda_values[0]/lambda_values[i])**lambda_scaling) for \
5357 i, val in enumerate(ME_values)])
5358 median = values[len(values)//2]
5359 max_diff = max(abs(values[0]-median),abs(values[-1]-median))
5360 stability = max_diff/median
5361 stab_threshold = 1e-2
5362 if stability >= stab_threshold:
5363 return "== WARNING: Stability check failed for '%s' with stability %.2e.\n"\
5364 %(values_name, stability)
5365 else:
5366 return None
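# Worked example (illustrative numbers only): with ME_values = [1.00, 0.255, 0.0640],
# lambda_values = [1.0, 0.5, 0.25] and lambda_scaling = 2, the rescaled values are
# [1.00, 1.02, 1.024]; their median is 1.02, max_diff = 0.02 and the stability is
# ~2.0e-2, which exceeds stab_threshold = 1e-2 and therefore triggers the warning.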
5367
5368 if options['analyze']=='None':
5369 if options['reuse']:
5370 save_path = CMS_save_path('pkl', result, model, options,
5371 output_path=output_path)
5372 buff = "\nThe results of this check have been stored on disk and their "+\
5373 "analysis can be rerun at any time with the MG5_aMC command:\n "+\
5374 " check cms --analyze=%s\n"%save_path
5375 res_str += buff
5376 concise_str += buff
5377 save_load_object.save_to_file(save_path, result)
5378 elif len(result['ordered_processes'])>0:
5379 buff = "\nUse the following syntax if you want to store "+\
5380 "the raw results on disk.\n"+\
5381 " check cms -reuse <proc_def> <options>\n"
5382 res_str += buff
5383 concise_str += buff
5384
5385
5386
5387
5388
5389 checks = []
5390 for process in result['ordered_processes']:
5391 checks.extend([(process,resID) for resID in \
5392 range(len(result[process]['CMS']))])
5393
5394 if options['reuse']:
5395 logFile = open(CMS_save_path(
5396 'log', result, model, options, output_path=output_path),'w')
5397
5398 lambdaCMS_list=result['lambdaCMS']
5399
5400
5401 failed_procs = []
5402
5403
5404 bar = lambda char: char*47
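# bar('=') returns a 47-character rule; the separators built below are
# bar + 8 characters + bar = 102 characters wide, and the bar(2) expression used for
# centering simply evaluates to 2*47 = 94 because the lambda multiplies its argument
# by 47.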
5405
5406
5407 if 'widths_computed' in result:
5408 res_str += '\n%s%s%s\n'%(bar('='),' Widths ',bar('='))
5409 if result['recompute_width'] == 'never':
5410 res_str += '| Widths extracted from the param_card.dat'
5411 else:
5412 res_str += '| Widths computed %s'%('analytically' if has_FRdecay
5413 else 'numerically')
5414 if result['recompute_width'] == 'first_time':
5415 res_str += r' for \lambda = 1'
5416 elif result['recompute_width'] == 'always':
5417 res_str += r' for all \lambda values'
5418 res_str += " using mode '--recompute_width=%s'.\n"%result['recompute_width']
5419 for particle_name, width in result['widths_computed']:
5420 res_str += '| %-10s = %-11.6gGeV\n'%('Width(%s)'%particle_name,width)
5421 res_str += '%s%s%s\n'%(bar('='),'='*8,bar('='))
5422
5423
5424
5425
5426 nstab_points=group_val
5427
5428 differences_target = {}
5429 for process, resID in checks:
5430
5431
5432 concise_repl_dict[process] = {'process':process,
5433 'asymptot':'N/A',
5434 'cms_check':'N/A',
5435 'status':'N/A'}
5436 proc_res = result[process]
5437 cms_res = proc_res['CMS'][resID]
5438 nwa_res = proc_res['NWA'][resID]
5439 resonance = resonance_str(cms_res['resonance'], latex='none')
5440 cms_born=cms_res['born']
5441 nwa_born=nwa_res['born']
5442
5443 res_str += '\n%s%s%s\n'%(bar('='),'='*8,bar('='))
5444
5445 proc_title = "%s (resonance %s)"%(process,resonance)
5446 centering = (bar(2)+8-len(proc_title))//2
5447 res_str += "%s%s\n"%(' '*centering,proc_title)
5448
5449 res_str += '%s%s%s\n'%(bar('-'),'-'*8,bar('-'))
5450
5451
5452 if diff_lambda_power!=1:
5453 res_str += "== WARNING diff_lambda_power is not 1 but = %g\n"%diff_lambda_power
5454 res_str += '%s%s%s\n'%(bar('-'),'-'*8,bar('-'))
5455
5456 born_power = guess_lambdaorder(nwa_born,lambdaCMS_list,
5457 expected=proc_res['born_order'], proc=process, res=resonance)
5458 stab_cms_born = check_stability(cms_born[-nstab_points:],
5459 lambdaCMS_list[-nstab_points:], born_power, 'CMS Born')
5460 if stab_cms_born:
5461 res_str += stab_cms_born
5462 stab_nwa_born = check_stability(nwa_born[-nstab_points:],
5463 lambdaCMS_list[-nstab_points:], born_power, 'NWA Born')
5464 if stab_nwa_born:
5465 res_str += stab_nwa_born
5466
5467 res_str += "== Kinematic configuration in GeV (E,px,py,pz)\n"
5468 for i, p in enumerate(cms_res['resonance']['PS_point_used']):
5469 res_str += " | p%-2.d = "%(i+1)
5470 for pi in p:
5471 res_str += '%-24.17g'%pi if pi<0.0 else ' %-23.17g'%pi
5472 res_str += "\n"
5473
5474 res_str += "== Offshellnesses of all detected resonances\n"
5475 for res_name, offshellness in cms_res['resonance']['offshellnesses']:
5476 res_str += " | %-15s = %f\n"%(res_name, offshellness)
5477 res_str += '%s%s%s\n'%(bar('-'),'-'*8,bar('-'))
5478
5479 if not pert_orders:
5480 res_str += "== Born scaling lambda^n_born. nborn = %d\n"%born_power
5481 else:
5482 cms_finite=cms_res['finite']
5483 nwa_finite=nwa_res['finite']
5484 loop_power = guess_lambdaorder(nwa_finite,lambdaCMS_list,
5485 expected=proc_res['loop_order'], proc=process, res=resonance)
5486 res_str += "== Scaling lambda^n. nborn, nloop = %d, %d\n"\
5487 %(born_power,loop_power)
5488 stab_cms_finite = check_stability(cms_finite[-nstab_points:],
5489 lambdaCMS_list[-nstab_points:], loop_power, 'CMS finite')
5490 if stab_cms_finite:
5491 res_str += stab_cms_finite
5492 stab_nwa_finite = check_stability(nwa_finite[-nstab_points:],
5493 lambdaCMS_list[-nstab_points:], loop_power, 'NWA finite')
5494 if stab_nwa_finite:
5495 res_str += stab_nwa_finite
5496
5497 CMSData = []
5498 NWAData = []
5499 DiffData = []
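# The quantities compared below are the lambda-rescaled matrix elements:
#   tree-level check:  CMS = M^(0)_CMS/lambda^n_born,  NWA = M^(0)_NWA/lambda^n_born
#   loop-level check:  CMS = (M^(1)_CMS + M^(0)_CMS - M^(0)_NWA)/(lambda*M^(0)_NWA),
#                      NWA = M^(1)_NWA/(lambda*M^(0)_NWA)
# together with their difference Delta = (CMS - NWA)/lambda^diff_lambda_power, which
# must tend to a constant when lambda goes to zero for the CMS check to pass.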
5500 for idata, lam in enumerate(lambdaCMS_list):
5501 if not pert_orders:
5502 new_cms=cms_born[idata]/(lam**born_power)
5503 new_nwa=nwa_born[idata]/(lam**born_power)
5504 else:
5505 new_cms=(cms_finite[idata]+cms_born[idata]-nwa_born[idata])/(lam*nwa_born[idata])
5506 new_nwa=nwa_finite[idata]/(lam*nwa_born[idata])
5507 new_diff=(new_cms-new_nwa)/(lam**diff_lambda_power)
5508 CMSData.append(new_cms)
5509 NWAData.append(new_nwa)
5510 DiffData.append(new_diff)
5511
5512
5513
5514
5515
5516
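# The asymptotic CMS-NWA difference is first estimated as the median of the normalized
# differences restricted to the central 60% of the lambda points (the first and last
# 20% are dropped). For example, with 30 lambda points trim_range = int(0.2*30) = 6
# and the median of the 18 remaining entries is used as low_diff_median.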
5517 trim_range=int(((1.0-0.6)/2.0)*len(DiffData))
5518 low_diff_median = sorted(DiffData[trim_range:-trim_range])\
5519 [(len(DiffData)-2*trim_range)//2]
5520
5521
5522
5523
5524
5525
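# Starting from the largest lambda values, sliding groups of group_val consecutive
# points are scanned until the group median agrees with low_diff_median to within
# consideration_threshold (relative to 'reference'); the CMS check is then restricted
# to the lambda values from that group onwards. If no such group is found, the left
# (small-lambda) half of the points is used instead, as warned below.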
5526 current_median = 0
5527
5528 scan_index = 0
5529 reference = abs(sorted(NWAData)[len(NWAData)//2])
5530 if low_diff_median!= 0.0:
5531 if abs(reference/low_diff_median)<diff_zero_threshold:
5532 reference = abs(low_diff_median)
5533 while True:
5534 scanner = DiffData[scan_index:group_val+scan_index]
5535 current_median = sorted(scanner)[len(scanner)//2]
5536
5537
5538 if abs(current_median-low_diff_median)/reference<\
5539 consideration_threshold:
5540 break
5541 scan_index += 1
5542 if (group_val+scan_index)>=len(DiffData):
5543
5544
5545 logger.warning('The median scanning failed during the CMS check '+
5546 'for process %s. '%proc_title+\
5547 'This means that the difference plot has no stable '+\
5548 'intermediate region and MG5_aMC will arbitrarily consider the '+\
5549 'left half of the values.')
5550 scan_index = -1
5551 break
5552
5553 if scan_index == -1:
5554 cms_check_data_range = len(DiffData)//2
5555 else:
5556 cms_check_data_range = scan_index + group_val
5557
5558 res_str += "== Data range considered (min, max, n_val) = (%.1e, %.1e, %d)\n"\
5559 %(lambdaCMS_list[-1],lambdaCMS_list[scan_index],
5560 len(lambdaCMS_list)-scan_index)
5561
5562 CMScheck_values = DiffData[cms_check_data_range:]
5563
5564
5565
5566
5567 if scan_index >= 0:
5568
5569 scan_index = len(CMScheck_values)
5570 used_group_val = max(3,group_val)
5571 unstability_found = True
5572 while True:
5573 scanner = CMScheck_values[scan_index-used_group_val:scan_index]
5574 maxdiff = max(abs(scan-low_diff_median) for scan in scanner)
5575 if maxdiff/reference<consideration_threshold:
5576 break
5577 if (scan_index-used_group_val)==0:
5578
5579
5580 unstability_found = False
5581 break
5582
5583 scan_index -= 1
5584
5585
5586 if unstability_found:
5587 unstab_check=CMScheck_values[scan_index:]
5588 relative_array = [val > CMScheck_values[scan_index-1] for
5589 val in unstab_check]
5590 upper = relative_array.count(True)
5591 lower = relative_array.count(False)
5592 if not ((lower==0 and upper>=0) or (lower>=0 and upper==0)):
5593 logger.warning(
5594 """For process %s, a numerically unstable region was detected starting from lambda < %.1e.
5595 Look at the plot in this region (and possibly add more points using the option --lambdaCMS).
5596 If this is indeed a stability issue, then either decrease MLStabThreshold in MadLoop or decrease the
5597 minimum value of lambda to be considered in the CMS check."""\
5598 %(proc_title, lambdaCMS_list[cms_check_data_range+scan_index-1]))
5599
5600
5601
5602
5603 scan_index = 0
5604 max_diff = 0.0
5605 res_str += "== Ref. value used in the ratios (Born NWA) = %s\n"\
5606 %('%.3g'%reference)
5607 res_str += "== Asymptotic difference value detected = %s\n"\
5608 %('%.3g'%low_diff_median)
5609 concise_repl_dict[process]['asymptot'] = '%.3e'%low_diff_median
5610
5611
5612 differences_target[(process,resID)]= low_diff_median
5613
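# The CMS check value is the maximum, over all sliding windows of group_val points in
# the retained range, of |median(window) - low_diff_median|/reference; it is reported
# in percent and compared against CMS_test_threshold.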
5614 while True:
5615 current_vals = CMScheck_values[scan_index:scan_index+group_val]
5616 max_diff = max(max_diff, abs(low_diff_median-
5617 sorted(current_vals)[len(current_vals)//2])/reference)
5618 if (scan_index+group_val)>=len(CMScheck_values):
5619 break
5620 scan_index += 1
5621
5622
5623 cms_check = (max_diff*100.0, '>' if max_diff>CMS_test_threshold else '<',
5624 CMS_test_threshold*100.0)
5625 res_str += "== CMS check result (threshold) = %.3g%% (%s%.3g%%)\n"%cms_check
5626 concise_repl_dict[process]['cms_check'] = \
5627 "%-10s (%s%.3g%%)"%('%.3g%%'%cms_check[0],cms_check[1],cms_check[2])
5628
5629 if max_diff>CMS_test_threshold:
5630 failed_procs.append((process,resonance))
5631 res_str += "%s %s %s\n"%(bar('='),
5632 'FAILED' if max_diff>CMS_test_threshold else 'PASSED',bar('='))
5633 concise_repl_dict[process]['status'] = 'Failed' if max_diff>CMS_test_threshold \
5634 else 'Passed'
5635
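# The concise table relies on two-stage %-formatting: the first substitution fixes the
# width of the process column (e.g. concise_data % 24 yields the template
# '%(process)-24s%(asymptot)-15s%(cms_check)-25s%(status)-25s\n'), and the second one
# fills that template with the per-process dictionary entries built above.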
5636 if output=='concise_text':
5637
5638 max_proc_size = max(
5639 [len(process) for process in result['ordered_processes']]+[10])
5640
5641 res_str = concise_str
5642 res_str += '\n'+concise_data%(max_proc_size+4)%concise_repl_dict['Header']
5643 for process in result['ordered_processes']:
5644 res_str += (concise_data%(max_proc_size+4)%concise_repl_dict[process])
5645
5646 if len(checks):
5647 res_str += "Summary: %i/%i passed"%(len(checks)-len(failed_procs),len(checks))+\
5648 ('.\n' if not failed_procs else ', failed checks are for:\n')
5649 else:
5650 return "\nNo CMS check to perform: the process either has no diagrams or does not "+\
5651 "feature any massive s-channel resonance."
5652
5653 for process, resonance in failed_procs:
5654 res_str += "> %s, %s\n"%(process, resonance)
5655
5656 if output=='concise_text':
5657 res_str += '\nMore detailed information on this check is available with the command:\n'
5658 res_str += ' MG5_aMC>display checks\n'
5659
5660
5661
5662
5663 if not options['show_plot']:
5664 if options['reuse']:
5665 logFile.write(res_str)
5666 logFile.close()
5667 if output.endswith('text'):
5668 return res_str
5669 else:
5670 return failed_procs
5671
5672 fig_output_file = CMS_save_path('pdf', result, model, options,
5673 output_path=output_path)
5674 base_fig_name = fig_output_file[:-4]
5675 suffix = 1
5676 while os.path.isfile(fig_output_file):
5677 fig_output_file = '%s__%d__.pdf'%(base_fig_name,suffix)
5678 suffix+=1
5679
5680 process_data_plot_dict={}
5681
5682
5683
5684 all_res = [(result, None)]
5685 for i, add_res in enumerate(options['analyze'].split(',')[1:]):
5686 specs =re.match(r'^(?P<filename>.*)\((?P<title>.*)\)$', add_res)
5687 if specs:
5688 filename = specs.group('filename')
5689 title = specs.group('title')
5690 else:
5691 filename = add_res
5692 title = '#%d'%(i+1)
5693
5694 new_result = save_load_object.load_from_file(filename)
5695 if new_result is None:
5696 raise InvalidCmd('The complex mass scheme check result'+
5697 " file below could not be read.\n %s"%filename)
5698 if len(new_result['ordered_processes'])!=len(result['ordered_processes']) \
5699 or len(new_result['lambdaCMS'])!=len(result['lambdaCMS']):
5700 raise InvalidCmd('The complex mass scheme check result'+
5701 " file below does not seem compatible.\n %s"%filename)
5702 all_res.append((new_result,title))
5703
5704
5705 for process, resID in checks:
5706 data1=[]
5707 data2=[]
5708 info ={}
5709 for res in all_res:
5710 proc_res = res[0][process]
5711 cms_res = proc_res['CMS'][resID]
5712 nwa_res = proc_res['NWA'][resID]
5713 resonance = resonance_str(cms_res['resonance'])
5714 if options['resonances']!=1:
5715 info['title'] = format_title(process, resonance)
5716 else:
5717 info['title'] = format_title(process, '')
5718
5719 cms_born=cms_res['born']
5720 nwa_born=nwa_res['born']
5721 if len(cms_born) != len(lambdaCMS_list) or\
5722 len(nwa_born) != len(lambdaCMS_list):
5723 raise MadGraph5Error('Inconsistent list of results w.r.t. the'+\
5724 ' lambdaCMS values specified for process %s'%process)
5725 if pert_orders:
5726 cms_finite=cms_res['finite']
5727 nwa_finite=nwa_res['finite']
5728 if len(cms_finite) != len(lambdaCMS_list) or\
5729 len(nwa_finite) != len(lambdaCMS_list):
5730 raise MadGraph5Error('Inconsistent list of results w.r.t. the'+\
5731 ' lambdaCMS values specified for process %s'%process)
5732
5733 bpower = guess_lambdaorder(nwa_born,lambdaCMS_list,
5734 expected=proc_res['born_order'], proc=process, res=resonance)
5735
5736 CMSData = []
5737 NWAData = []
5738 DiffData = []
5739 for idata, lam in enumerate(lambdaCMS_list):
5740 if not pert_orders:
5741 new_cms = cms_born[idata]/lam**bpower
5742 new_nwa = nwa_born[idata]/lam**bpower
5743 else:
5744 new_cms=cms_finite[idata]+cms_born[idata]-nwa_born[idata]
5745 new_nwa=nwa_finite[idata]
5746 new_cms /= lam*nwa_born[idata]
5747 new_nwa /= lam*nwa_born[idata]
5748 new_diff=(new_cms-new_nwa)/(lam**diff_lambda_power)
5749 CMSData.append(new_cms)
5750 NWAData.append(new_nwa)
5751 DiffData.append(new_diff)
5752 if res[1] is None:
5753 if not pert_orders:
5754 data1.append([r'$\displaystyle CMS\;=\;\mathcal{M}_{CMS}^{(0)}/\lambda^%d$'%bpower,CMSData])
5755 data1.append([r'$\displaystyle NWA\;=\;\mathcal{M}_{NWA}^{(0)}/\lambda^%d$'%bpower,NWAData])
5756 else:
5757 data1.append([r'$\displaystyle CMS\;=\;(\mathcal{M}^{(1)}_{CMS}+\mathcal{M}_{CMS}^{(0)}-\mathcal{M}^{(0)}_{NWA})/(\lambda\cdot\mathcal{M}^{(0)}_{NWA})$',CMSData])
5758 data1.append([r'$\displaystyle NWA\;=\;\mathcal{M}^{(1)}_{NWA}/(\lambda\cdot\mathcal{M}^{(0)}_{NWA})$',NWAData])
5759 data2.append([r'$\displaystyle\Delta\;=\;(CMS-NWA)/\lambda%s$'\
5760 %('' if diff_lambda_power==1 else r'^{%g}'%diff_lambda_power)
5761 ,DiffData])
5762 data2.append([r'Detected asymptote',[differences_target[(process,resID)]
5763 for i in range(len(lambdaCMS_list))]])
5764 else:
5765 data1.append([r'$\displaystyle CMS$ %s'%res[1].replace('_',' ').replace('#',r'\#'), CMSData])
5766 data1.append([r'$\displaystyle NWA$ %s'%res[1].replace('_',' ').replace('#',r'\#'), NWAData])
5767 data2.append([r'$\displaystyle\Delta$ %s'%res[1].replace('_',' ').replace('#',r'\#'), DiffData])
5768
5769 process_data_plot_dict[(process,resID)]=(data1,data2, info)
5770
5771
5772 try:
5773 import matplotlib.pyplot as plt
5774 from matplotlib.backends.backend_pdf import PdfPages
5775 logger.info('Rendering plots... (this can take some time because of the latex labels)')
5776
5777 res_str += \
5778 """\n-----------------------------------------------------------------------------------------------
5779 | In the plots, the Complex Mass Scheme check is successful if the normalized difference |
5780 | between the CMS and NWA result (lower inset) tends to a constant when \lambda goes to zero. |
5781 -----------------------------------------------------------------------------------------------\n"""
5782
5783
5784 if lambda_range[1]>0:
5785 min_lambda_index = -1
5786 for i, lam in enumerate(lambdaCMS_list):
5787 if lam<=lambda_range[1]:
5788 min_lambda_index = i
5789 break
5790 else:
5791 min_lambda_index = 0
5792 if lambda_range[0]>0:
5793 max_lambda_index = -1
5794 for i, lam in enumerate(lambdaCMS_list):
5795 if lam<=lambda_range[0]:
5796 max_lambda_index=i-1
5797 break
5798 else:
5799 max_lambda_index=len(lambdaCMS_list)-1
5800
5801 if max_lambda_index==-1 or min_lambda_index==-1 or \
5802 min_lambda_index==max_lambda_index:
5803 raise InvalidCmd('Invalid lambda plotting range: (%.1e,%.1e)'%\
5804 (lambda_range[0],lambda_range[1]))
5805
5806 if lambda_range[0]>0.0 or lambda_range[1]>0.0:
5807 lambdaCMS_list = lambdaCMS_list[min_lambda_index:max_lambda_index+1]
5808
5809 plt.rc('text', usetex=True)
5810 plt.rc('font', family='serif')
5811 pp=PdfPages(fig_output_file)
5812 if len(checks)==0 or len(process_data_plot_dict[checks[0]][1])<=7:
5813 colorlist=['b','r','g','k','c','m','y']
5814 else:
5815 import matplotlib.colors as colors
5816 import matplotlib.cm as mplcm
5818
5819
5820 cm = plt.get_cmap('gist_rainbow')
5821 cNorm = colors.Normalize(vmin=0, vmax=(len(data2)-1))
5822 scalarMap = mplcm.ScalarMappable(norm=cNorm, cmap=cm)
5823
5824 colorlist = [scalarMap.to_rgba(i*0.9) for i in range(len(data2))]
5825
5826
5827
5828
5829
5830
5831 legend_size = 10
5832 for iproc, (process, resID) in enumerate(checks):
5833 data1,data2, info=process_data_plot_dict[(process,resID)]
5834
5835 if lambda_range[0]>0.0 or lambda_range[1]>0.0:
5836 for i in range(len(data1)):
5837 data1[i][1]=data1[i][1][min_lambda_index:max_lambda_index+1]
5838 for i in range(len(data2)):
5839 data2[i][1]=data2[i][1][min_lambda_index:max_lambda_index+1]
5840 plt.figure(iproc+1)
5841 plt.subplot(211)
5842 minvalue=1e+99
5843 maxvalue=-1e+99
5844 for i, d1 in enumerate(data1):
5845
5846 color=colorlist[i//2]
5847 data_plot=d1[1]
5848 minvalue=min(min(data_plot),minvalue)
5849 maxvalue=max(max(data_plot),maxvalue)
5850 plt.plot(lambdaCMS_list, data_plot, color=color, marker='', \
5851 linestyle=('-' if i%2==0 else '--'),
5852 label=(d1[0] if (i%2==0 or i==1) else '_nolegend_'))
5853 ymin = minvalue-(maxvalue-minvalue)/5.
5854 ymax = maxvalue+(maxvalue-minvalue)/5.
5855
5856 plt.yscale('linear')
5857 plt.xscale('log')
5858 plt.title(info['title'],fontsize=12,y=1.08)
5859 plt.ylabel(r'$\displaystyle \mathcal{M}$')
5860
5861 if ymax*len(data1)-sum(max(d1[1][-len(d1[1])//2:]) \
5862 for d1 in data1) > 0.5*(ymax-ymin)*len(data1):
5863 plt.legend(prop={'size':legend_size},loc='upper left', frameon=False)
5864 else:
5865 plt.legend(prop={'size':legend_size},loc='lower left', frameon=False)
5866
5867 plt.axis([min(lambdaCMS_list),max(lambdaCMS_list), ymin, ymax])
5868
5869 plt.subplot(212)
5870 minvalue=1e+99
5871 maxvalue=-1e+99
5872
5873 try:
5874 asymptot_index = [d2[0] for d2 in data2].index('Detected asymptote')
5875 plt.plot(lambdaCMS_list, data2[asymptot_index][1],
5876 color='0.75', marker='', linestyle='-', label='')
5877 except ValueError:
5878 pass
5879
5880 color_ID = -1
5881 for d2 in data2:
5882
5883 if d2[0]=='Detected asymptote':
5884 continue
5885 color_ID += 1
5886 color=colorlist[color_ID]
5887 data_plot=d2[1]
5888 minvalue=min(min(data_plot),minvalue)
5889 maxvalue=max(max(data_plot),maxvalue)
5890 plt.plot(lambdaCMS_list, data_plot, color=color, marker='',\
5891 linestyle='-', label=d2[0])
5892 ymin = minvalue-(maxvalue-minvalue)/5.
5893 ymax = maxvalue+(maxvalue-minvalue)/5.
5894
5895 plt.yscale('linear')
5896 plt.xscale('log')
5897 plt.ylabel(r'$\displaystyle \Delta$')
5898 plt.xlabel(r'$\displaystyle \lambda$')
5899
5900
5901 sd = [sorted(d2[1][-len(d2[1])//2:]) for d2 in data2]
5902 left_stability = sum(abs(s[0]-s[-1]) for s in sd)
5903 sd = [sorted(d2[1][:-len(d2[1])//2]) for d2 in data2]
5904 right_stability = sum(abs(s[0]-s[-1]) for s in sd)
5905 left_stable = False if right_stability==0.0 else \
5906 (left_stability/right_stability)<0.1
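# Legend placement heuristic: the curves are deemed 'left stable' when the spread of
# their small-lambda halves is less than 10% of the spread of their large-lambda
# halves; the legend of the lower inset is then anchored on the left (small-lambda)
# side, and on the right otherwise.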
5907
5908 if left_stable:
5909 if ymax*len(data2)-sum(max(d2[1][-len(d2[1])//2:]) \
5910 for d2 in data2) > 0.5*(ymax-ymin)*len(data2):
5911 plt.legend(prop={'size':legend_size},loc='upper left', frameon=False)
5912 else:
5913 plt.legend(prop={'size':legend_size},loc='lower left', frameon=False)
5914 else:
5915 if ymax*len(data2)-sum(max(d2[1][:-len(d2[1])//2]) \
5916 for d2 in data2) > 0.5*(ymax-ymin)*len(data2):
5917 plt.legend(prop={'size':legend_size},loc='upper right', frameon=False)
5918 else:
5919 plt.legend(prop={'size':legend_size},loc='lower right', frameon=False)
5920
5921 plt.axis([min(lambdaCMS_list),max(lambdaCMS_list),\
5922 minvalue-(maxvalue-minvalue)/5., maxvalue+(maxvalue-minvalue)/5.])
5923
5924 plt.savefig(pp,format='pdf')
5925
5926 pp.close()
5927
5928 if len(checks)>0:
5929 logger.info('Complex Mass Scheme check plot output to file %s. '%fig_output_file)
5930
5931 if sys.platform.startswith('linux'):
5932 misc.call(["xdg-open", fig_output_file])
5933 elif sys.platform.startswith('darwin'):
5934 misc.call(["open", fig_output_file])
5935
5936 plt.close("all")
5937
5938 except Exception as e:
5939 if isinstance(e, ImportError):
5940 res_str += "\n= Install matplotlib to get a "+\
5941 "graphical display of the results of the cms check."
5942 else:
5943 general_error = "\n= Could not produce the cms check plot because of "+\
5944 "the following error: %s"%str(e)
5945 try:
5946 import six.moves.tkinter
5947 if isinstance(e, six.moves.tkinter.TclError):
5948 res_str += "\n= Plots are not generated because your system"+\
5949 " does not support graphical display."
5950 else:
5951 res_str += general_error
5952 except:
5953 res_str += general_error
5954
5955 if options['reuse']:
5956 logFile.write(res_str)
5957 logFile.close()
5958
5959 if output.endswith('text'):
5960 return res_str
5961 else:
5962 return failed_procs
5963