Package madgraph :: Package madevent :: Module sum_html
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.madevent.sum_html

  1  ################################################################################ 
  2  # 
  3  # Copyright (c) 2011 The MadGraph5_aMC@NLO Development team and Contributors 
  4  # 
  5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
  6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
  7  # high-energy processes in the Standard Model and beyond. 
  8  # 
  9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
 10  # distribution. 
 11  # 
 12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
 13  # 
 14  ################################################################################ 
 15  from __future__ import division 
 16  from __future__ import absolute_import 
 17  import os 
 18  import math 
 19  import logging 
 20  import re 
 21  import xml.dom.minidom as minidom 
 22  from six.moves import range 
 23   
 24  logger = logging.getLogger('madevent.stdout') # -> stdout 
 25   
 26  pjoin = os.path.join 
 27  try: 
 28      import madgraph 
 29  except ImportError: 
 30      import internal.cluster as cluster 
 31      import internal.misc as misc 
 32      from internal import MadGraph5Error 
 33  else: 
 34      import madgraph.various.cluster as cluster 
 35      import madgraph.various.misc as misc 
 36      from madgraph import MadGraph5Error 
 37   
38 -class RunStatistics(dict):
39 """ A class to store statistics about a MadEvent run. """ 40
41 - def __init__(self, *args, **opts):
42 """ Initialize the run dictionary. For now, the same as a regular 43 dictionary, except that we specify some default statistics. """ 44 45 madloop_statistics = { 46 'unknown_stability' : 0, 47 'stable_points' : 0, 48 'unstable_points' : 0, 49 'exceptional_points' : 0, 50 'DP_usage' : 0, 51 'QP_usage' : 0, 52 'DP_init_usage' : 0, 53 'QP_init_usage' : 0, 54 'CutTools_DP_usage' : 0, 55 'CutTools_QP_usage' : 0, 56 'PJFry_usage' : 0, 57 'Golem_usage' : 0, 58 'IREGI_usage' : 0, 59 'Samurai_usage' : 0, 60 'Ninja_usage' : 0, 61 'Ninja_QP_usage' : 0, 62 'COLLIER_usage' : 0, 63 'max_precision' : 1.0e99, 64 'min_precision' : 0.0, 65 'averaged_timing' : 0.0, 66 'n_madloop_calls' : 0, 67 'cumulative_timing' : 0.0, 68 'skipped_subchannel' : 0 # number of times that a computation have been 69 # discarded due to abnormal weight. 70 } 71 72 for key, value in madloop_statistics.items(): 73 self[key] = value 74 75 super(dict,self).__init__(*args, **opts)
76
77 - def aggregate_statistics(self, new_stats):
78 """ Update the current statitistics with the new_stats specified.""" 79 80 if isinstance(new_stats,RunStatistics): 81 new_stats = [new_stats, ] 82 elif isinstance(new_stats,list): 83 if any(not isinstance(_,RunStatistics) for _ in new_stats): 84 raise MadGraph5Error("The 'new_stats' argument of the function "+\ 85 "'updtate_statistics' must be a (possibly list of) "+\ 86 "RunStatistics instance.") 87 88 keys = set([]) 89 for stat in [self,]+new_stats: 90 keys |= set(stat.keys()) 91 92 new_stats = new_stats+[self,] 93 for key in keys: 94 # Define special rules 95 if key=='max_precision': 96 # The minimal precision corresponds to the maximal value for PREC 97 self[key] = min( _[key] for _ in new_stats if key in _) 98 elif key=='min_precision': 99 # The maximal precision corresponds to the minimal value for PREC 100 self[key] = max( _[key] for _ in new_stats if key in _) 101 elif key=='averaged_timing': 102 n_madloop_calls = sum(_['n_madloop_calls'] for _ in new_stats if 103 'n_madloop_calls' in _) 104 if n_madloop_calls > 0 : 105 self[key] = sum(_[key]*_['n_madloop_calls'] for _ in 106 new_stats if (key in _ and 'n_madloop_calls' in _) )/n_madloop_calls 107 else: 108 # Now assume all other quantities are cumulative 109 self[key] = sum(_[key] for _ in new_stats if key in _)
110
111 - def load_statistics(self, xml_node):
112 """ Load the statistics from an xml node. """ 113 114 def getData(Node): 115 return Node.childNodes[0].data
116 117 u_return_code = xml_node.getElementsByTagName('u_return_code') 118 u_codes = [int(_) for _ in getData(u_return_code[0]).split(',')] 119 self['CutTools_DP_usage'] = u_codes[1] 120 self['PJFry_usage'] = u_codes[2] 121 self['IREGI_usage'] = u_codes[3] 122 self['Golem_usage'] = u_codes[4] 123 self['Samurai_usage'] = u_codes[5] 124 self['Ninja_usage'] = u_codes[6] 125 self['COLLIER_usage'] = u_codes[7] 126 self['Ninja_QP_usage'] = u_codes[8] 127 self['CutTools_QP_usage'] = u_codes[9] 128 t_return_code = xml_node.getElementsByTagName('t_return_code') 129 t_codes = [int(_) for _ in getData(t_return_code[0]).split(',')] 130 self['DP_usage'] = t_codes[1] 131 self['QP_usage'] = t_codes[2] 132 self['DP_init_usage'] = t_codes[3] 133 self['DP_init_usage'] = t_codes[4] 134 h_return_code = xml_node.getElementsByTagName('h_return_code') 135 h_codes = [int(_) for _ in getData(h_return_code[0]).split(',')] 136 self['unknown_stability'] = h_codes[1] 137 self['stable_points'] = h_codes[2] 138 self['unstable_points'] = h_codes[3] 139 self['exceptional_points'] = h_codes[4] 140 average_time = xml_node.getElementsByTagName('average_time') 141 avg_time = float(getData(average_time[0])) 142 self['averaged_timing'] = avg_time 143 cumulated_time = xml_node.getElementsByTagName('cumulated_time') 144 cumul_time = float(getData(cumulated_time[0])) 145 self['cumulative_timing'] = cumul_time 146 max_prec = xml_node.getElementsByTagName('max_prec') 147 max_prec = float(getData(max_prec[0])) 148 # The minimal precision corresponds to the maximal value for PREC 149 self['min_precision'] = max_prec 150 min_prec = xml_node.getElementsByTagName('min_prec') 151 min_prec = float(getData(min_prec[0])) 152 # The maximal precision corresponds to the minimal value for PREC 153 self['max_precision'] = min_prec 154 n_evals = xml_node.getElementsByTagName('n_evals') 155 n_evals = int(getData(n_evals[0])) 156 self['n_madloop_calls'] = n_evals
157
158 - def nice_output(self,G, no_warning=False):
159 """Returns a one-line string summarizing the run statistics 160 gathered for the channel G.""" 161 162 # Do not return anythign for now if there is no madloop calls. This can 163 # change of course if more statistics are gathered, unrelated to MadLoop. 164 if self['n_madloop_calls']==0: 165 return '' 166 167 stability = [ 168 ('tot#',self['n_madloop_calls']), 169 ('unkwn#',self['unknown_stability']), 170 ('UPS%',float(self['unstable_points'])/self['n_madloop_calls']), 171 ('EPS#',self['exceptional_points'])] 172 173 stability = [_ for _ in stability if _[1] > 0 or _[0] in ['UPS%','EPS#']] 174 stability = [(_[0],'%i'%_[1]) if isinstance(_[1], int) else 175 (_[0],'%.3g'%(100.0*_[1])) for _ in stability] 176 177 tools_used = [ 178 ('CT_DP',float(self['CutTools_DP_usage'])/self['n_madloop_calls']), 179 ('CT_QP',float(self['CutTools_QP_usage'])/self['n_madloop_calls']), 180 ('PJFry',float(self['PJFry_usage'])/self['n_madloop_calls']), 181 ('Golem',float(self['Golem_usage'])/self['n_madloop_calls']), 182 ('IREGI',float(self['IREGI_usage'])/self['n_madloop_calls']), 183 ('Samurai',float(self['Samurai_usage'])/self['n_madloop_calls']), 184 ('COLLIER',float(self['COLLIER_usage'])/self['n_madloop_calls']), 185 ('Ninja_DP',float(self['Ninja_usage'])/self['n_madloop_calls']), 186 ('Ninja_QP',float(self['Ninja_QP_usage'])/self['n_madloop_calls'])] 187 188 tools_used = [(_[0],'%.3g'%(100.0*_[1])) for _ in tools_used if _[1] > 0.0 ] 189 190 to_print = [('%s statistics:'%(G if isinstance(G,str) else 191 str(os.path.join(list(G))))\ 192 +(' %s,'%misc.format_time(int(self['cumulative_timing'])) if 193 int(self['cumulative_timing']) > 0 else '') 194 +((' Avg. ML timing = %i ms'%int(1.0e3*self['averaged_timing'])) if 195 self['averaged_timing'] > 0.001 else 196 (' Avg. ML timing = %i mus'%int(1.0e6*self['averaged_timing']))) \ 197 +', Min precision = %.2e'%self['min_precision']) 198 ,' -> Stability %s'%dict(stability) 199 ,' -> Red. 
tools usage in %% %s'%dict(tools_used) 200 # I like the display above better after all 201 # ,'Stability %s'%(str([_[0] for _ in stability]), 202 # str([_[1] for _ in stability])) 203 # ,'Red. tools usage in %% %s'%(str([_[0] for _ in tools_used]), 204 # str([_[1] for _ in tools_used])) 205 ] 206 207 if self['skipped_subchannel'] > 0 and not no_warning: 208 to_print.append("WARNING: Some event with large weight have been "+\ 209 "discarded. This happened %s times." % self['skipped_subchannel']) 210 211 return ('\n'.join(to_print)).replace("'"," ")
212
213 - def has_warning(self):
214 """return if any stat needs to be reported as a warning 215 When this is True, the print_warning doit retourner un warning 216 """ 217 218 if self['n_madloop_calls'] > 0: 219 fraction = self['exceptional_points']/float(self['n_madloop_calls']) 220 else: 221 fraction = 0.0 222 223 if self['skipped_subchannel'] > 0: 224 return True 225 elif fraction > 1.0e-4: 226 return True 227 else: 228 return False
229
230 - def get_warning_text(self):
231 """get a string with all the identified warning""" 232 233 to_print = [] 234 if self['skipped_subchannel'] > 0: 235 to_print.append("Some event with large weight have been discarded."+\ 236 " This happens %s times." % self['skipped_subchannel']) 237 if self['n_madloop_calls'] > 0: 238 fraction = self['exceptional_points']/float(self['n_madloop_calls']) 239 if fraction > 1.0e-4: 240 to_print.append("Some PS with numerical instability have been set "+\ 241 "to a zero matrix-element (%.3g%%)" % (100.0*fraction)) 242 243 return ('\n'.join(to_print)).replace("'"," ")
244
class OneResult(object):
    """Integration result of a single channel: cross sections, errors,
    unweighting information and the per-iteration history, as read from
    one results.dat file."""

    def __init__(self, name):
        """Initialize all data """

        self.run_statistics = RunStatistics()
        self.name = name
        self.parent_name = ''
        self.axsec = 0       # Absolute cross section = Sum(abs(wgt))
        self.xsec = 0        # Real cross section = Sum(wgt)
        self.xerru = 0       # uncorrelated error
        self.xerrc = 0       # correlated error
        self.nevents = 0
        self.nw = 0          # number of events after the primary unweighting
        self.maxit = 0       # number of iterations
        self.nunwgt = 0      # number of unweighted events
        self.luminosity = 0
        self.mfactor = 1     # number of times that this channel occurs (due to symmetry)
        self.ysec_iter = []
        self.yerr_iter = []
        self.yasec_iter = []
        self.eff_iter = []
        self.maxwgt_iter = []
        self.maxwgt = 0      # weight used for the secondary unweighting.
        self.th_maxwgt = 0   # weight that should have been used for the secondary
                             # unweighting (this can differ if we force maxweight)
        self.th_nunwgt = 0   # associated number of events with th_maxwgt
                             # (theoretical, does not correspond to written events)
        return

    #@cluster.multiple_try(nb_try=5,sleep=20)
    def read_results(self, filepath):
        """read results.dat and fullfill information

        filepath: path of the results.dat file, or an already-open
        readable/seekable stream positioned at its start."""

        if isinstance(filepath, str):
            finput = open(filepath)
            we_opened = True
        elif hasattr(filepath, 'read'):
            # Bugfix: the original tested isinstance(filepath, file); the
            # 'file' builtin does not exist on python3 so any stream argument
            # raised NameError.
            finput = filepath
            we_opened = False
        else:
            raise Exception("filepath should be a path or a file descriptor")

        i = 0
        found_xsec_line = False
        for line in finput:
            # Exit as soon as we hit the xml part. Not elegant, but the part
            # below should eventually be xml anyway.
            if '<' in line:
                break
            i += 1
            if i == 1:
                def secure_float(d):
                    """float() that also accepts fortran-style numbers such
                    as '1.23456-123' where the 'E' marker was dropped.
                    Returns None when the token cannot be parsed at all."""
                    try:
                        return float(d)
                    except ValueError:
                        m = re.search(r'''([+-]?[\d.]*)([+-]\d*)''', d)
                        if m:
                            return float(m.group(1))*10**(float(m.group(2)))
                        return

                data = [secure_float(d) for d in line.split()]
                try:
                    self.axsec, self.xerru, self.xerrc, self.nevents, self.nw,\
                        self.maxit, self.nunwgt, self.luminosity, self.wgt, \
                        self.xsec = data[:10]
                except ValueError:
                    # Malformed summary line: report with the associated log
                    # when available (silently best-effort otherwise, as the
                    # original code did).
                    log = pjoin(os.path.dirname(filepath), 'log.txt')
                    if os.path.exists(log):
                        if 'end code not correct' in line:
                            error_code = data[4]
                            raise Exception("Reported error: End code %s \n Full associated log: \n%s"\
                                            % (error_code, open(log).read()))
                        else:
                            raise Exception("Wrong formatting in results.dat: %s \n Full associated log: \n%s"\
                                            % (line, open(log).read()))
                if len(data) > 10:
                    self.maxwgt = data[10]
                if len(data) > 12:
                    self.th_maxwgt, self.th_nunwgt = data[11:13]
                if self.mfactor > 1:
                    self.luminosity /= self.mfactor
                continue
            try:
                l, sec, err, eff, maxwgt, asec = line.split()
                found_xsec_line = True
            except ValueError:
                # Bugfix(idiom): was a bare 'except:'; only a wrong field
                # count in an iteration line is expected here.
                break
            self.ysec_iter.append(secure_float(sec))
            self.yerr_iter.append(secure_float(err))
            self.yasec_iter.append(secure_float(asec))
            self.eff_iter.append(secure_float(eff))
            self.maxwgt_iter.append(secure_float(maxwgt))

        # Re-scan the file for the trailing xml section (run statistics, ...)
        finput.seek(0)
        xml = []
        for line in finput:
            if re.match('^.*<.*>', line):
                xml.append(line)
                break
        for line in finput:
            xml.append(line)

        # Bugfix: do not leak the handle when we opened the file ourselves.
        if we_opened:
            finput.close()

        if xml:
            self.parse_xml_results('\n'.join(xml))

        # this is for amcatnlo: the number of events has to be read from
        # another file ('nevts' sitting next to results.dat)
        if self.nevents == 0 and self.nunwgt == 0 and isinstance(filepath, str) and \
                os.path.exists(pjoin(os.path.split(filepath)[0], 'nevts')):
            nevts = int((open(pjoin(os.path.split(filepath)[0], 'nevts')).read()).split()[0])
            self.nevents = nevts
            self.nunwgt = nevts

    def parse_xml_results(self, xml):
        """ Parse the xml part of the results.dat file."""

        dom = minidom.parseString(xml)

        statistics_node = dom.getElementsByTagName("run_statistics")

        if statistics_node:
            try:
                self.run_statistics.load_statistics(statistics_node[0])
            # Bugfix: the original wrote 'except ValueError as IndexError',
            # which only catches ValueError while shadowing the IndexError
            # builtin; incomplete statistics raise IndexError too.
            except (ValueError, IndexError):
                logger.warning('Fail to read run statistics from results.dat')

    def set_mfactor(self, value):
        """Set the channel symmetry factor (stored as int)."""
        self.mfactor = int(value)

    def change_iterations_number(self, nb_iter):
        """Change the number of iterations for this process: the first
        len - nb_iter iterations are merged into a single bin."""

        if len(self.ysec_iter) <= nb_iter:
            return

        # Combine the first iterations into a single bin
        nb_to_rm = len(self.ysec_iter) - nb_iter
        ysec = [0]
        yerr = [0]
        for i in range(nb_to_rm):
            ysec[0] += self.ysec_iter[i]
            yerr[0] += self.yerr_iter[i]**2
        # NOTE(review): dividing by nb_to_rm + 1 although only nb_to_rm
        # entries were summed looks like an off-by-one — kept as-is since the
        # only in-file caller uses nb_iter=0 and relies on this behaviour.
        ysec[0] /= (nb_to_rm+1)
        yerr[0] = math.sqrt(yerr[0]) / (nb_to_rm + 1)

        # Bugfix: the original assigned ysec[i] for i >= 1, which raises
        # IndexError since ysec holds a single element; append instead.
        for i in range(1, nb_iter):
            ysec.append(self.ysec_iter[nb_to_rm + i])
            yerr.append(self.yerr_iter[nb_to_rm + i])

        self.ysec_iter = ysec
        self.yerr_iter = yerr

    def get(self, name):
        """Generic getter. Cross sections and their errors are rescaled by
        the channel symmetry factor mfactor; 'xerr' and 'eff' are derived
        quantities."""

        if name in ['xsec', 'xerru', 'xerrc']:
            return getattr(self, name) * self.mfactor
        elif name in ['luminosity']:
            #misc.sprint("use unsafe luminosity definition")
            return getattr(self, name)  # / self.mfactor
        elif name == 'eff':
            # Bugfix: no 'xerr' attribute exists; it is a derived quantity
            # only reachable through get().
            return self.get('xerr')*math.sqrt(self.nevents/(self.xsec+1e-99))
        elif name == 'xerr':
            return math.sqrt(self.xerru**2+self.xerrc**2)
        elif name == 'name':
            return pjoin(self.parent_name, self.name)
        else:
            return getattr(self, name)
class Combine_results(list, OneResult):
    """A list of OneResult (or nested Combine_results) objects that itself
    behaves as a single OneResult holding the combined values of its
    children."""

    def __init__(self, name):
        """Initialize as an empty list carrying OneResult's attributes."""

        list.__init__(self)
        OneResult.__init__(self, name)

    def add_results(self, name, filepath, mfactor=1):
        """Read the data in 'filepath' (a results.dat) and append the
        resulting OneResult; returns it. Any read failure is logged with
        the offending path and re-raised."""
        try:
            oneresult = OneResult(name)
            oneresult.set_mfactor(mfactor)
            oneresult.read_results(filepath)
            oneresult.parent_name = self.name
            self.append(oneresult)
            return oneresult
        except Exception:
            logger.critical("Error when reading %s" % filepath)
            raise

    def compute_values(self, update_statistics=False):
        """compute the value associated to this combination (sums over the
        children; uncorrelated errors added in quadrature)."""

        self.compute_iterations()
        self.axsec = sum([one.axsec for one in self])
        self.xsec = sum([one.xsec for one in self])
        self.xerrc = sum([one.xerrc for one in self])
        self.xerru = math.sqrt(sum([one.xerru**2 for one in self]))

        self.nevents = sum([one.nevents for one in self])
        self.nw = sum([one.nw for one in self])
        # NOTE(review): yerr_iter is left empty by compute_iterations, so
        # maxit ends up 0 here — kept as the original behaviour.
        self.maxit = len(self.yerr_iter)
        self.nunwgt = sum([one.nunwgt for one in self])
        self.wgt = 0
        self.luminosity = min([0]+[one.luminosity for one in self])
        if update_statistics:
            self.run_statistics.aggregate_statistics([_.run_statistics for _ in self])

    def compute_average(self, error=None):
        """compute the value associated to this combination as an average
        over the jobs; inconsistent jobs (more than 25 sigma below the
        mean) are dropped and the average recomputed with a forced error."""

        nbjobs = len(self)
        if not nbjobs:
            return
        max_xsec = max(one.xsec for one in self)
        min_xsec = min(one.xsec for one in self)
        self.axsec = sum([one.axsec for one in self]) / nbjobs
        self.xsec = sum([one.xsec for one in self]) / nbjobs
        self.xerrc = sum([one.xerrc for one in self]) / nbjobs
        self.xerru = math.sqrt(sum([one.xerru**2 for one in self])) / nbjobs
        if error:
            self.xerrc = error
            self.xerru = error

        self.nevents = sum([one.nevents for one in self])
        self.nw = 0       # sum([one.nw for one in self])
        self.maxit = 0    # len(self.yerr_iter)
        self.nunwgt = sum([one.nunwgt for one in self])
        self.wgt = 0
        self.luminosity = sum([one.luminosity for one in self])
        self.ysec_iter = []
        self.yerr_iter = []
        self.th_maxwgt = 0.0
        self.th_nunwgt = 0
        for result in self:
            self.ysec_iter += result.ysec_iter
            self.yerr_iter += result.yerr_iter
            self.yasec_iter += result.yasec_iter
            self.eff_iter += result.eff_iter
            self.maxwgt_iter += result.maxwgt_iter

        # check full consistency (iterate over a copy since we may remove)
        onefail = False
        for one in list(self):
            if one.xsec < (self.xsec - 25 * one.xerru):
                if not onefail:
                    logger.debug('multi run are inconsistent: %s < %s - 25* %s: assign error %s', one.xsec, self.xsec, one.xerru, error if error else max_xsec-min_xsec)
                onefail = True
                self.remove(one)
        if onefail:
            if error:
                return self.compute_average(error)
            else:
                return self.compute_average((max_xsec-min_xsec)/2.)

    def compute_iterations(self):
        """Compute iterations to have a chi-square on the stability of the
        integral"""

        # synchronize all children to a single combined iteration bin
        for oneresult in self:
            oneresult.change_iterations_number(0)

    # html template of a full results page, filled in get_html()
    template_file = \
"""
%(diagram_link)s
<BR>
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;<b>s= %(cross).5g &#177 %(error).3g (%(unit)s)</b><br><br>
<table class="sortable" id='tablesort'>
<tr><th>Graph</th>
    <th> %(result_type)s</th>
    <th>Error</th>
    <th>Events (K)</th>
    <th>Unwgt</th>
    <th>Luminosity</th>
</tr>
%(table_lines)s
</table>
</center>
<br><br><br>
"""
    # html template for one line of the table above
    table_line_template = \
"""
<tr><td align=right>%(P_title)s</td>
    <td align=right><a id="%(P_link)s" href=%(P_link)s onClick="check_link('%(P_link)s','%(mod_P_link)s','%(P_link)s')"> %(cross)s </a> </td>
    <td align=right>  %(error)s</td>
    <td align=right>  %(events)s</td>
    <td align=right>  %(unweighted)s</td>
    <td align=right>  %(luminosity)s</td>
</tr>
"""

    def get_html(self, run, unit, me_dir=''):
        """write html output for run 'run'.

        Bugfix: the default for me_dir used to be a mutable empty list,
        which can never work since me_dir is used as a path component."""

        # store value for global cross-section per P* group
        P_grouping = {}

        tables_line = ''
        for oneresult in self:
            if oneresult.name.startswith('P'):
                title = '<a href=../../SubProcesses/%(P)s/diagrams.html>%(P)s</a>' \
                        % {'P': oneresult.name}
                P = oneresult.name.split('_', 1)[0]
                if P in P_grouping:
                    P_grouping[P] += float(oneresult.xsec)
                else:
                    P_grouping[P] = float(oneresult.xsec)
            else:
                title = oneresult.name

            if not isinstance(oneresult, Combine_results):
                # this is for the (aMC@)NLO logs
                if os.path.exists(pjoin(me_dir, 'Events', run, 'alllogs_1.html')):
                    link = '../../Events/%(R)s/alllogs_1.html#/%(P)s/%(G)s' % \
                           {'P': os.path.basename(self.name),
                            'G': oneresult.name,
                            'R': run}
                    mod_link = link
                elif os.path.exists(pjoin(me_dir, 'Events', run, 'alllogs_0.html')):
                    link = '../../Events/%(R)s/alllogs_0.html#/%(P)s/%(G)s' % \
                           {'P': os.path.basename(self.name),
                            'G': oneresult.name,
                            'R': run}
                    mod_link = link
                else:
                    # this is for madevent runs
                    link = '../../SubProcesses/%(P)s/%(G)s/%(R)s_log.txt' % \
                           {'P': os.path.basename(self.name),
                            'G': oneresult.name,
                            'R': run}
                    mod_link = '../../SubProcesses/%(P)s/%(G)s/log.txt' % \
                           {'P': os.path.basename(self.name),
                            'G': oneresult.name}
                    if not os.path.exists(link) and not os.path.exists(mod_link):
                        # fall back on a unique *.log in the channel dir, or
                        # an in-page anchor when none/ambiguous
                        P = os.path.basename(self.name)
                        base = pjoin(me_dir, 'SubProcesses', P, os.path.dirname(link))
                        pos = [pjoin(base, c) for c in os.listdir(base) if c.endswith('.log')]
                        if len(pos) == 1:
                            link = pos[0]
                        else:
                            link = '#%s' % oneresult.name
                        mod_link = link

                dico = {'P_title': title,
                        'P_link': link,
                        'mod_P_link': mod_link,
                        'cross': '%.4g' % oneresult.xsec,
                        'error': '%.3g' % oneresult.xerru,
                        'events': oneresult.nevents/1000.0,
                        'unweighted': oneresult.nunwgt,
                        'luminosity': '%.3g' % oneresult.luminosity
                        }

                tables_line += self.table_line_template % dico

        # one summary line per P* group
        for P_name, cross in P_grouping.items():
            dico = {'P_title': '%s sum' % P_name,
                    'P_link': './results.html',
                    'mod_P_link': '',
                    'cross': cross,
                    'error': '',
                    'events': '',
                    'unweighted': '',
                    'luminosity': ''
                    }
            tables_line += self.table_line_template % dico

        if self.name.startswith('P'):
            title = '<dt><a name=%(P)s href=../../SubProcesses/%(P)s/diagrams.html>%(P)s</a></dt><dd>' \
                    % {'P': self.name}
        else:
            title = ''

        dico = {'cross': self.xsec,
                'abscross': self.axsec,
                'error': self.xerru,
                'unit': unit,
                'result_type': 'Cross-Section',
                'table_lines': tables_line,
                'diagram_link': title
                }

        html_text = self.template_file % dico
        return html_text

    def write_results_dat(self, output_path):
        """write a correctly formatted results.dat"""

        def fstr(nb):
            # fortran-friendly '0.xxxxxE+yy' mantissa formatting; NaN -> 0
            data = '%E' % nb
            if data == 'NAN':
                nb, power = 0, 0
            else:
                nb, power = data.split('E')
                nb = float(nb) / 10
                power = int(power) + 1
            return '%.5fE%+03i' % (nb, power)

        line = '%s %s %s %i %i %i %i %s %s %s %s %s %i\n' % (fstr(self.axsec), fstr(self.xerru),
            fstr(self.xerrc), self.nevents, self.nw, self.maxit, self.nunwgt,
            fstr(self.luminosity), fstr(self.wgt), fstr(self.xsec), fstr(self.maxwgt),
            fstr(self.th_maxwgt), self.th_nunwgt)
        # Bugfix: do not leak the output file handle.
        with open(output_path, 'w') as fsock:
            fsock.writelines(line)
            for i in range(len(self.ysec_iter)):
                line = '%s %s %s %s %s %s\n' % (i+1, self.ysec_iter[i], self.yerr_iter[i],
                    self.eff_iter[i], self.maxwgt_iter[i], self.yasec_iter[i])
                fsock.writelines(line)
675 676 677 678 results_header = """ 679 <head> 680 <title>Process results</title> 681 <script type="text/javascript" src="../sortable.js"></script> 682 <link rel=stylesheet href="../mgstyle.css" type="text/css"> 683 </head> 684 <body> 685 <script type="text/javascript"> 686 function UrlExists(url) { 687 var http = new XMLHttpRequest(); 688 http.open('HEAD', url, false); 689 try{ 690 http.send() 691 } 692 catch(err){ 693 return 1==2; 694 } 695 return http.status!=404; 696 } 697 function check_link(url,alt, id){ 698 var obj = document.getElementById(id); 699 if ( ! UrlExists(url)){ 700 if ( ! UrlExists(alt)){ 701 obj.href = alt; 702 return true; 703 } 704 obj.href = alt; 705 return false; 706 } 707 obj.href = url; 708 return 1==1; 709 } 710 </script> 711 """ 712
def collect_result(cmd, folder_names=None, jobs=None, main_dir=None):
    """Scan every P* subprocess directory of the current run and aggregate
    the per-channel results.dat information into a Combine_results tree.

    cmd: the madevent/amcatnlo command interface (provides get_Pdir,
         get_Gdir and the current run name).
    folder_names: optional list of channel-directory patterns in which '*'
         is replaced by the channel name read from symfact.dat.
    jobs: optional list of job dictionaries (amcatnlo mode) with 'p_dir'
         and 'dirname' entries.
    main_dir: optional alternative root from which to read results.dat.
    Returns the top-level Combine_results (one child per P* directory).
    """

    # Bugfix(idiom): avoid a mutable default argument.
    if folder_names is None:
        folder_names = []

    run = cmd.results.current['run_name']
    # renamed from 'all' to avoid shadowing the builtin
    all_results = Combine_results(run)

    for Pdir in cmd.get_Pdir():
        P_comb = Combine_results(Pdir)

        if jobs:
            # note: job['p_dir'] is a substring (basename) of the full Pdir
            # path, hence the containment test rather than equality
            for job in [j for j in jobs if j['p_dir'] in Pdir]:
                P_comb.add_results(os.path.basename(job['dirname']),
                                   pjoin(job['dirname'], 'results.dat'))
        elif folder_names:
            try:
                with open(pjoin(Pdir, 'symfact.dat')) as symfact:
                    for line in symfact:
                        name, mfactor = line.split()
                        # a negative mfactor flags a channel handled by symmetry
                        if float(mfactor) < 0:
                            continue
                        if os.path.exists(pjoin(Pdir, 'ajob.no_ps.log')):
                            continue

                        for folder in folder_names:
                            if 'G' in folder:
                                gdir = folder.replace('*', name)
                            else:
                                gdir = folder.replace('*', '_G' + name)
                            P_comb.add_results(gdir, pjoin(Pdir, gdir, 'results.dat'), mfactor)
                # NOTE(review): the original repeated an 'if jobs:' loop here,
                # but this branch is only reached when jobs is empty, so that
                # code was unreachable and has been dropped.
            except IOError:
                continue
        else:
            G_dir, mfactors = cmd.get_Gdir(Pdir, symfact=True)
            for G in G_dir:
                if not folder_names:
                    if main_dir:
                        path = pjoin(main_dir, os.path.basename(Pdir),
                                     os.path.basename(G), 'results.dat')
                    else:
                        path = pjoin(G, 'results.dat')
                    P_comb.add_results(os.path.basename(G), path, mfactors[G])

        P_comb.compute_values()
        all_results.append(P_comb)
    all_results.compute_values()

    return all_results
def make_all_html_results(cmd, folder_names=None, jobs=None):
    """Collect all results of the current run, write the HTML summary page
    and the combined results.dat files, and return (xsec, xerru).

    folder_names and jobs have been added for the amcatnlo runs.
    """
    # Bugfix(idiom): avoid mutable default arguments.
    if folder_names is None:
        folder_names = []
    if jobs is None:
        jobs = []

    run = cmd.results.current['run_name']
    if not os.path.exists(pjoin(cmd.me_dir, 'HTML', run)):
        os.mkdir(pjoin(cmd.me_dir, 'HTML', run))

    unit = cmd.results.unit
    P_text = ""
    Presults = collect_result(cmd, folder_names=folder_names, jobs=jobs)

    for P_comb in Presults:
        P_text += P_comb.get_html(run, unit, cmd.me_dir)
        P_comb.compute_values()
        # decay-width mode (single initial particle): also write per-run files
        if cmd.proc_characteristics['ninitial'] == 1:
            P_comb.write_results_dat(pjoin(cmd.me_dir, 'SubProcesses', P_comb.name,
                                           '%s_results.dat' % run))

    Presults.write_results_dat(pjoin(cmd.me_dir, 'SubProcesses', 'results.dat'))

    # Bugfix: close the output file handle (the original leaked it).
    with open(pjoin(cmd.me_dir, 'HTML', run, 'results.html'), 'w') as fsock:
        fsock.write(results_header)
        fsock.write('%s <dl>' % Presults.get_html(run, unit, cmd.me_dir))
        fsock.write('%s </dl></body>' % P_text)

    return Presults.xsec, Presults.xerru
792