1
2
3
4
5
6
7
8
9
10
11
12
13
14
15 """ A python file to replace the fortran script gen_ximprove.
16 This script analyses the result of the survey/ previous refine and
17 creates the jobs for the following script.
18 """
19 from __future__ import division
20
21 import collections
22 import os
23 import glob
24 import logging
25 import math
26 import re
27 import subprocess
28 import shutil
29
30 try:
31 import madgraph
32 except ImportError:
33 MADEVENT = True
34 import internal.sum_html as sum_html
35 import internal.banner as bannermod
36 import internal.misc as misc
37 import internal.files as files
38 import internal.cluster as cluster
39 import internal.combine_grid as combine_grid
40 import internal.combine_runs as combine_runs
41 import internal.lhe_parser as lhe_parser
42 else:
43 MADEVENT= False
44 import madgraph.madevent.sum_html as sum_html
45 import madgraph.various.banner as bannermod
46 import madgraph.various.misc as misc
47 import madgraph.iolibs.files as files
48 import madgraph.various.cluster as cluster
49 import madgraph.madevent.combine_grid as combine_grid
50 import madgraph.madevent.combine_runs as combine_runs
51 import madgraph.various.lhe_parser as lhe_parser
52
53 logger = logging.getLogger('madgraph.madevent.gen_ximprove')
54 pjoin = os.path.join
57 """a class to call the fortran gensym executable and handle it's output
58 in order to create the various job that are needed for the survey"""
59
60
61 @ staticmethod
64
65 combining_job = 2
66 splitted_grid = False
67 min_iterations = 3
68 mode= "survey"
69
70
72
73 try:
74 super(gensym, self).__init__(cmd, opt)
75 except TypeError:
76 pass
77
78
79 self.run_statistics = {}
80
81 self.cmd = cmd
82 self.run_card = cmd.run_card
83 self.me_dir = cmd.me_dir
84
85
86
87 self.cross = collections.defaultdict(int)
88 self.abscross = collections.defaultdict(int)
89 self.sigma = collections.defaultdict(int)
90 self.chi2 = collections.defaultdict(int)
91
92 self.splitted_grid = False
93 if self.cmd.proc_characteristics['loop_induced']:
94 nexternal = self.cmd.proc_characteristics['nexternal']
95 self.splitted_grid = max(2, (nexternal-2)**2)
96 if hasattr(self.cmd, "opts") and self.cmd.opts['accuracy'] == 0.1:
97 self.cmd.opts['accuracy'] = 0.02
98
99 if isinstance(cmd.cluster, cluster.MultiCore) and self.splitted_grid > 1:
100 self.splitted_grid = int(cmd.cluster.nb_core**0.5)
101 if self.splitted_grid == 1 and cmd.cluster.nb_core >1:
102 self.splitted_grid = 2
103
104
105 if self.run_card['survey_splitting'] != -1:
106 self.splitted_grid = self.run_card['survey_splitting']
107
108 self.splitted_Pdir = {}
109 self.splitted_for_dir = lambda x,y: self.splitted_grid
110 self.combining_job_for_Pdir = lambda x: self.combining_job
111 self.lastoffset = {}
112
114 """ """
115
116 self.subproc = [l.strip() for l in open(pjoin(self.me_dir,'SubProcesses',
117 'subproc.mg'))]
118 subproc = self.subproc
119
120 P_zero_result = []
121
122 nb_tot_proc = len(subproc)
123 for nb_proc,subdir in enumerate(subproc):
124 job_list = {}
125 self.cmd.update_status('Compiling for process %s/%s. <br> (previous processes already running)' % \
126 (nb_proc+1,nb_tot_proc), level=None)
127
128 subdir = subdir.strip()
129 Pdir = pjoin(self.me_dir, 'SubProcesses',subdir)
130 logger.info(' %s ' % subdir)
131
132
133 for match in misc.glob('*ajob*', Pdir):
134 if os.path.basename(match)[:4] in ['ajob', 'wait', 'run.', 'done']:
135 os.remove(match)
136 for match in misc.glob('G*', Pdir):
137 if os.path.exists(pjoin(match,'results.dat')):
138 os.remove(pjoin(match, 'results.dat'))
139 if os.path.exists(pjoin(match, 'ftn25')):
140 os.remove(pjoin(match, 'ftn25'))
141
142
143 self.cmd.compile(['gensym'], cwd=Pdir)
144 if not os.path.exists(pjoin(Pdir, 'gensym')):
145 raise Exception, 'Error make gensym not successful'
146
147
148 p = misc.Popen(['./gensym'], stdout=subprocess.PIPE,
149 stderr=subprocess.STDOUT, cwd=Pdir)
150
151 (stdout, _) = p.communicate('')
152
153 if os.path.exists(pjoin(self.me_dir,'error')):
154 files.mv(pjoin(self.me_dir,'error'), pjoin(Pdir,'ajob.no_ps.log'))
155 P_zero_result.append(subdir)
156 continue
157
158 jobs = stdout.split()
159 job_list[Pdir] = jobs
160 try:
161
162 [float(s) for s in jobs]
163 except Exception:
164 logger.debug("unformated string found in gensym. Please check:\n %s" % stdout)
165 done=False
166 job_list[Pdir] = []
167 lines = stdout.split('\n')
168 for l in lines:
169 try:
170 [float(s) for s in l.split()]
171 except:
172 continue
173 else:
174 if done:
175 raise Exception, 'Parsing error in gensym: %s' % stdout
176 job_list[Pdir] = l.split()
177 done = True
178 if not done:
179 raise Exception, 'Parsing error in gensym: %s' % stdout
180
181 self.cmd.compile(['madevent'], cwd=Pdir)
182 self.submit_to_cluster(job_list)
183 return job_list, P_zero_result
184
185
187 """ """
188
189 if self.run_card['job_strategy'] > 0:
190 if len(job_list) >1:
191 for path, dirs in job_list.items():
192 self.submit_to_cluster({path:dirs})
193 return
194 path, value = job_list.items()[0]
195 nexternal = self.cmd.proc_characteristics['nexternal']
196 current = open(pjoin(path, "nexternal.inc")).read()
197 ext = re.search(r"PARAMETER \(NEXTERNAL=(\d+)\)", current).group(1)
198
199 if self.run_card['job_strategy'] == 2:
200 self.splitted_grid = 2
201 if nexternal == int(ext):
202 to_split = 2
203 else:
204 to_split = 0
205 if hasattr(self, 'splitted_Pdir'):
206 self.splitted_Pdir[path] = to_split
207 else:
208 self.splitted_Pdir = {path: to_split}
209 self.splitted_for_dir = lambda x,y : self.splitted_Pdir[x]
210 elif self.run_card['job_strategy'] == 1:
211 if nexternal == int(ext):
212 combine = 1
213 else:
214 combine = self.combining_job
215 if hasattr(self, 'splitted_Pdir'):
216 self.splitted_Pdir[path] = combine
217 else:
218 self.splitted_Pdir = {path: combine}
219 self.combining_job_for_Pdir = lambda x : self.splitted_Pdir[x]
220
221 if not self.splitted_grid:
222 return self.submit_to_cluster_no_splitting(job_list)
223 elif self.cmd.cluster_mode == 0:
224 return self.submit_to_cluster_no_splitting(job_list)
225 elif self.cmd.cluster_mode == 2 and self.cmd.options['nb_core'] == 1:
226 return self.submit_to_cluster_no_splitting(job_list)
227 else:
228 return self.submit_to_cluster_splitted(job_list)
229
230
232 """submit the survey without the parralelization.
233 This is the old mode which is still usefull in single core"""
234
235
236 self.write_parameter(parralelization=False, Pdirs=job_list.keys())
237
238
239
240 for Pdir, jobs in job_list.items():
241 jobs = list(jobs)
242 i=0
243 while jobs:
244 i+=1
245 to_submit = ['0']
246 for _ in range(self.combining_job_for_Pdir(Pdir)):
247 if jobs:
248 to_submit.append(jobs.pop(0))
249
250 self.cmd.launch_job(pjoin(self.me_dir, 'SubProcesses', 'survey.sh'),
251 argument=to_submit,
252 cwd=pjoin(self.me_dir,'SubProcesses' , Pdir))
253
254
256 """prepare the input_file for submitting the channel"""
257
258
259 if 'SubProcesses' not in Pdir:
260 Pdir = pjoin(self.me_dir, 'SubProcesses', Pdir)
261
262
263 self.splitted_Pdir[(Pdir, G)] = int(nb_job)
264
265
266
267 run_card = self.cmd.run_card
268 options = {'event' : submit_ps,
269 'maxiter': 1,
270 'miniter': 1,
271 'accuracy': self.cmd.opts['accuracy'],
272 'helicity': run_card['nhel_survey'] if 'nhel_survey' in run_card \
273 else run_card['nhel'],
274 'gridmode': -2,
275 'channel' : G
276 }
277
278 Gdir = pjoin(Pdir, 'G%s' % G)
279 self.write_parameter_file(pjoin(Gdir, 'input_app.txt'), options)
280
281
282 assert os.path.exists(pjoin(Gdir, "ftn25"))
283
284
285
286
287 packet = cluster.Packet((Pdir, G, step+1),
288 self.combine_iteration,
289 (Pdir, G, step+1))
290
291 if step ==0:
292 self.lastoffset[(Pdir, G)] = 0
293
294
295 for i in xrange(int(nb_job)):
296 name = "G%s_%s" % (G,i+1)
297 self.lastoffset[(Pdir, G)] += 1
298 offset = self.lastoffset[(Pdir, G)]
299 self.cmd.launch_job(pjoin(self.me_dir, 'SubProcesses', 'refine_splitted.sh'),
300 argument=[name, 'G%s'%G, offset],
301 cwd= Pdir,
302 packet_member=packet)
303
304
306 """ submit the version of the survey with splitted grid creation
307 """
308
309
310
311
312 for Pdir, jobs in job_list.items():
313 if self.splitted_for_dir(Pdir, jobs[0]) <= 1:
314 return self.submit_to_cluster_no_splitting({Pdir:jobs})
315
316 self.write_parameter(parralelization=True, Pdirs=[Pdir])
317
318
319 for job in jobs:
320 packet = cluster.Packet((Pdir, job, 1), self.combine_iteration, (Pdir, job, 1))
321 for i in range(self.splitted_for_dir(Pdir, job)):
322 self.cmd.launch_job(pjoin(self.me_dir, 'SubProcesses', 'survey.sh'),
323 argument=[i+1, job],
324 cwd=pjoin(self.me_dir,'SubProcesses' , Pdir),
325 packet_member=packet)
326
328
329 grid_calculator, cross, error = self.combine_grid(Pdir, G, step)
330
331
332 nb_events = grid_calculator.target_evt
333
334 Gdirs = []
335 for i in range(self.splitted_for_dir(Pdir, G)):
336 path = pjoin(Pdir, "G%s_%s" % (G, i+1))
337 Gdirs.append(path)
338
339
340
341
342
343 need_submit = False
344 if step < self.min_iterations and cross != 0:
345 if step == 1:
346 need_submit = True
347 else:
348 across = self.abscross[(Pdir,G)]/(self.sigma[(Pdir,G)]+1e-99)
349 tot_across = self.get_current_axsec()
350 if across / tot_across < 1e-6:
351 need_submit = False
352 elif error < self.cmd.opts['accuracy'] / 100:
353 need_submit = False
354 else:
355 need_submit = True
356
357 elif step >= self.cmd.opts['iterations']:
358 need_submit = False
359 elif self.cmd.opts['accuracy'] < 0:
360
361 raise Exception, "Not Implemented"
362 elif self.abscross[(Pdir,G)] == 0:
363 need_submit = False
364 else:
365 across = self.abscross[(Pdir,G)]/(self.sigma[(Pdir,G)]+1e-99)
366 tot_across = self.get_current_axsec()
367 if across == 0:
368 need_submit = False
369 elif across / tot_across < 1e-5:
370 need_submit = False
371 elif error > self.cmd.opts['accuracy']:
372 need_submit = True
373 else:
374 need_submit = False
375
376
377 if cross:
378 grid_calculator.write_grid_for_submission(Pdir,G,
379 self.splitted_for_dir(Pdir, G),
380 nb_events,mode=self.mode,
381 conservative_factor=5.0)
382
383 xsec_format = '.%ig'%(max(3,int(math.log10(1.0/float(error)))+2)
384 if float(cross)!=0.0 and float(error)!=0.0 else 8)
385 if need_submit:
386 message = "%%s/G%%s is at %%%s +- %%.3g pb. Now submitting iteration #%s."%(xsec_format, step+1)
387 logger.info(message%\
388 (os.path.basename(Pdir), G, float(cross),
389 float(error)*float(cross)))
390 self.resubmit_survey(Pdir,G, Gdirs, step)
391 elif cross:
392 logger.info("Survey finished for %s/G%s at %s"%(
393 os.path.basename(Pdir),G,('%%%s +- %%.3g pb'%xsec_format))%
394 (float(cross), float(error)*float(cross)))
395
396 newGpath = pjoin(self.me_dir,'SubProcesses' , Pdir, 'G%s' % G)
397 if not os.path.exists(newGpath):
398 os.mkdir(newGpath)
399
400
401 files.cp(pjoin(Gdirs[0], 'ftn25'),
402 pjoin(self.me_dir,'SubProcesses' , Pdir, 'G%s' % G, 'ftn26'))
403
404
405 fsock = open(pjoin(newGpath, 'events.lhe'), 'w')
406 for Gdir in Gdirs:
407 fsock.write(open(pjoin(Gdir, 'events.lhe')).read())
408
409
410 files.cp(pjoin(Gdirs[0], 'log.txt'),
411 pjoin(self.me_dir,'SubProcesses' , Pdir, 'G%s' % G))
412
413
414
415 self.write_results(grid_calculator, cross, error, Pdir, G, step)
416 else:
417 logger.info("Survey finished for %s/G%s [0 cross]", os.path.basename(Pdir),G)
418
419 Gdir = pjoin(self.me_dir,'SubProcesses' , Pdir, 'G%s' % G)
420 if not os.path.exists(Gdir):
421 os.mkdir(Gdir)
422
423 files.cp(pjoin(Gdirs[0], 'log.txt'), Gdir)
424
425 self.write_results(grid_calculator, cross, error, Pdir, G, step)
426
427 return 0
428
def combine_grid(self, Pdir, G, step, exclude_sub_jobs=()):
    """Merge the results of the splitted subjobs of channel G in Pdir.

    Reads each subjob's ``results.dat`` and ``grid_information`` file,
    accumulates the cross-section statistics of this iteration into
    ``self.cross/abscross/sigma/chi2``, and returns
    ``(grid_calculator, cross, error)``.

    :param Pdir: path of the P* subprocess directory.
    :param G: channel identifier (subdirectories are ``G<G>_<i>``).
    :param step: iteration number (1-based once running).
    :param exclude_sub_jobs: indices of subjobs to ignore.  Used when a
        numerical instability is detected in one of them; a warning is
        issued when this occurs.  (Default changed from a mutable ``[]``
        to a tuple to avoid the shared-default pitfall; read-only here.)
    """

    grid_calculator = combine_grid.grid_information(self.run_card['nhel'])

    # Collect the per-subjob results and grid information.
    for i in range(self.splitted_for_dir(Pdir, G)):
        if i in exclude_sub_jobs:
            continue
        path = pjoin(Pdir, "G%s_%s" % (G, i+1))
        fsock = misc.mult_try_open(pjoin(path, 'results.dat'))
        one_result = grid_calculator.add_results_information(fsock)
        fsock.close()
        if one_result.axsec == 0:
            # Subjob found no phase-space points with weight: flag it and
            # do not read its (meaningless) grid.
            grid_calculator.onefail = True
            continue
        fsock = misc.mult_try_open(pjoin(path, 'grid_information'))
        grid_calculator.add_one_grid_information(fsock)
        fsock.close()
        os.remove(pjoin(path, 'results.dat'))

    cross, across, sigma = grid_calculator.get_cross_section()

    # Try to detect a single PS point which ruins the integration
    # (typically a numerical instability of a loop evaluation).
    maxwgt = grid_calculator.get_max_wgt(0.01)
    if maxwgt:
        nunwgt = grid_calculator.get_nunwgt(maxwgt)

        apply_instability_security = False
        rel_contrib = 0.0
        # Skip the check on the very first survey iteration where the
        # accumulated statistics are not yet meaningful.
        if (self.__class__ != gensym or step > 1):
            # Relative contribution of this channel to its P directory.
            Pdir_across = 0.0
            Gdir_across = 0.0
            for (mPdir, mG) in self.abscross.keys():
                if mPdir == Pdir:
                    Pdir_across += (self.abscross[(mPdir, mG)] /
                                    (self.sigma[(mPdir, mG)] + 1e-99))
                    if mG == G:
                        Gdir_across += (self.abscross[(mPdir, mG)] /
                                        (self.sigma[(mPdir, mG)] + 1e-99))
            rel_contrib = abs(Gdir_across / (Pdir_across + 1e-99))
            if rel_contrib > (1.0e-8) and \
               nunwgt < 2 and len(grid_calculator.results) > 1:
                apply_instability_security = True

        if apply_instability_security:
            # Find the subjob carrying the suspiciously large weight.
            th_maxwgt = [(r.th_maxwgt, i)
                         for i, r in enumerate(grid_calculator.results)]
            th_maxwgt.sort()
            ratio = th_maxwgt[-1][0] / th_maxwgt[-2][0]
            if ratio > 1e4:
                logger.warning(
""""One Event with large weight have been found (ratio = %.3g) in channel G%s (with rel.contrib=%.3g).
This is likely due to numerical instabilities. The associated job is discarded to recover.
For offline investigation, the problematic discarded events are stored in:
%s""" % (ratio, G, rel_contrib, pjoin(Pdir, 'DiscardedUnstableEvents')))
                exclude_sub_jobs = list(exclude_sub_jobs)
                exclude_sub_jobs.append(th_maxwgt[-1][1])
                grid_calculator.results.run_statistics['skipped_subchannel'] += 1

                # Keep the discarded events for offline investigation.
                gPath = pjoin(Pdir, "G%s_%s" % (G, th_maxwgt[-1][1] + 1))
                if os.path.isfile(pjoin(gPath, 'events.lhe')):
                    lhe_file = lhe_parser.EventFile(pjoin(gPath, 'events.lhe'))
                    discardedPath = pjoin(Pdir, 'DiscardedUnstableEvents')
                    if not os.path.exists(discardedPath):
                        os.mkdir(discardedPath)
                    if os.path.isdir(discardedPath):
                        evtRecord = open(pjoin(discardedPath,
                                               'discarded_G%s.dat' % G), 'a')
                        lhe_file.seek(0)
                        try:
                            evtRecord.write('\n' + str(max(lhe_file,
                                             key=lambda evt: abs(evt.wgt))))
                        except Exception:
                            # Parsing failed: dump the raw file instead.
                            # Bug fix: was ``pjoin(...).read()`` which called
                            # .read() on a string and raised AttributeError.
                            lhe_file.close()
                            evtRecord.write(open(pjoin(gPath,
                                                       'events.lhe')).read())
                        evtRecord.close()

                # Redo the combination with the faulty subjob excluded.
                return self.combine_grid(Pdir, G, step, exclude_sub_jobs)

    # Accumulate this iteration's result (weighted by 1/sigma^2).
    if across != 0:
        if sigma != 0:
            self.cross[(Pdir, G)] += cross**3 / sigma**2
            self.abscross[(Pdir, G)] += across * cross**2 / sigma**2
            self.sigma[(Pdir, G)] += cross**2 / sigma**2
            self.chi2[(Pdir, G)] += cross**4 / sigma**2

            cross = self.cross[(Pdir, G)] / self.sigma[(Pdir, G)]
            if step > 1:
                error = math.sqrt(abs((self.chi2[(Pdir, G)] / cross**2 -
                        self.sigma[(Pdir, G)]) / (step - 1)) / self.sigma[(Pdir, G)])
            else:
                error = sigma / cross
        else:
            # No spread information: store the raw values as-is.
            self.cross[(Pdir, G)] = cross
            self.abscross[(Pdir, G)] = across
            self.sigma[(Pdir, G)] = 0
            self.chi2[(Pdir, G)] = 0
            cross = self.cross[(Pdir, G)]
            error = 0
    else:
        error = 0

    # Aggregate the run statistics (MadLoop stability info, ...).
    grid_calculator.results.compute_values(update_statistics=True)
    key = (str(os.path.basename(Pdir)), G)
    if key in self.run_statistics:
        self.run_statistics[key].aggregate_statistics(
                                   grid_calculator.results.run_statistics)
    else:
        self.run_statistics[key] = grid_calculator.results.run_statistics

    self.warnings_from_statistics(G, grid_calculator.results.run_statistics)
    stats_msg = grid_calculator.results.run_statistics.nice_output(
                     '/'.join([os.path.basename(Pdir), 'G%s' % G]))
    if stats_msg:
        logger.log(5, stats_msg)

    # Clean up the per-subjob grid files (a missing file, errno 2, is fine).
    for i in range(self.splitted_for_dir(Pdir, G)):
        path = pjoin(Pdir, "G%s_%s" % (G, i+1))
        try:
            os.remove(pjoin(path, 'grid_information'))
        except OSError as oneerror:
            if oneerror.errno != 2:
                raise
    return grid_calculator, cross, error
571
573 """Possible warn user for worrying MadLoop stats for this channel."""
574
575 if stats['n_madloop_calls']==0:
576 return
577
578 EPS_fraction = float(stats['exceptional_points'])/stats['n_madloop_calls']
579
580 msg = "Channel %s has encountered a fraction of %.3g\n"+ \
581 "of numerically unstable loop matrix element computations\n"+\
582 "(which could not be rescued using quadruple precision).\n"+\
583 "The results might not be trusted."
584
585 if 0.01 > EPS_fraction > 0.001:
586 logger.warning(msg%(G,EPS_fraction))
587 elif EPS_fraction > 0.01:
588 logger.critical((msg%(G,EPS_fraction)).replace('might', 'can'))
589 raise Exception, (msg%(G,EPS_fraction)).replace('might', 'can')
590
592
593 across = 0
594 for (Pdir,G) in self.abscross:
595 across += self.abscross[(Pdir,G)]/(self.sigma[(Pdir,G)]+1e-99)
596 return across
597
def write_results(self, grid_calculator, cross, error, Pdir, G, step):
    """Write the ``results.dat`` summary file for channel G of Pdir.

    :param grid_calculator: combined grid information of the channel
        (only accessed when ``cross`` is non-zero).
    :param cross: combined cross-section of the channel.
    :param error: relative error on ``cross``.
    :param Pdir: P* subprocess directory (name or path; joined under me_dir).
    :param G: channel identifier (written in directory ``G<G>``).
    :param step: iteration number, stored as ``maxit``.
    """

    if cross == 0:
        # Channel gave no contribution: write an all-zero record.
        abscross, nw, luminosity = 0, 0, 0
        maxit, nunwgt, wgt, nevents = 0, 0, 0, 0
        maxwgt = 0
        error = 0
    else:
        grid_calculator.results.compute_values()
        abscross = self.abscross[(Pdir, G)] / self.sigma[(Pdir, G)]
        nw = grid_calculator.results.nw
        maxit = step
        # NOTE: the historical code read grid_calculator.results.wgt and
        # immediately overwrote it with 0; the effective value is 0.
        wgt = 0
        nevents = grid_calculator.results.nevents
        maxwgt = grid_calculator.get_max_wgt()
        nunwgt = grid_calculator.get_nunwgt()
        luminosity = nunwgt / cross

    def fstr(nb):
        """Format a number in Fortran-like scientific notation with the
        mantissa scaled to [0, 1), e.g. 1.0 -> '0.10000E+01'."""
        data = '%E' % nb
        nb, power = data.split('E')
        nb = float(nb) / 10
        power = int(power) + 1
        return '%.5fE%+03i' % (nb, power)

    line = '%s %s %s %i %i %i %i %s %s %s %s 0.0 0\n' % \
           (fstr(cross), fstr(error * cross), fstr(error * cross),
            nevents, nw, maxit, nunwgt,
            fstr(luminosity), fstr(wgt), fstr(abscross), fstr(maxwgt))

    fsock = open(os.path.join(self.me_dir, 'SubProcesses', Pdir, 'G%s' % G,
                              'results.dat'), 'w')
    try:
        fsock.write(line)
    finally:
        # Ensure the handle is released even if the write fails.
        fsock.close()
634
636 """submit the next iteration of the survey"""
637
638
639 run_card = self.cmd.run_card
640 options = {'event' : 2**(step) * self.cmd.opts['points'] / self.splitted_grid,
641 'maxiter': 1,
642 'miniter': 1,
643 'accuracy': self.cmd.opts['accuracy'],
644 'helicity': run_card['nhel_survey'] if 'nhel_survey' in run_card \
645 else run_card['nhel'],
646 'gridmode': -2,
647 'channel' : ''
648 }
649
650 if int(options['helicity']) == 1:
651 options['event'] = options['event'] * 2**(self.cmd.proc_characteristics['nexternal']//3)
652
653 for Gdir in Gdirs:
654 self.write_parameter_file(pjoin(Gdir, 'input_app.txt'), options)
655
656
657
658 packet = cluster.Packet((Pdir, G, step+1), self.combine_iteration, \
659 (Pdir, G, step+1))
660 nb_step = len(Gdirs) * (step+1)
661 for i,subdir in enumerate(Gdirs):
662 subdir = subdir.rsplit('_',1)[1]
663 subdir = int(subdir)
664 offset = nb_step+i+1
665 offset=str(offset)
666 tag = "%s.%s" % (subdir, offset)
667
668 self.cmd.launch_job(pjoin(self.me_dir, 'SubProcesses', 'survey.sh'),
669 argument=[tag, G],
670 cwd=pjoin(self.me_dir,'SubProcesses' , Pdir),
671 packet_member=packet)
672
673
674
675
677 """ """
678
679 template =""" %(event)s %(maxiter)s %(miniter)s !Number of events and max and min iterations
680 %(accuracy)s !Accuracy
681 %(gridmode)s !Grid Adjustment 0=none, 2=adjust
682 1 !Suppress Amplitude 1=yes
683 %(helicity)s !Helicity Sum/event 0=exact
684 %(channel)s """
685 options['event'] = int(options['event'])
686 open(path, 'w').write(template % options)
687
688
689
691 """Write the parameter of the survey run"""
692
693 run_card = self.cmd.run_card
694
695 options = {'event' : self.cmd.opts['points'],
696 'maxiter': self.cmd.opts['iterations'],
697 'miniter': self.min_iterations,
698 'accuracy': self.cmd.opts['accuracy'],
699 'helicity': run_card['nhel_survey'] if 'nhel_survey' in run_card \
700 else run_card['nhel'],
701 'gridmode': 2,
702 'channel': ''
703 }
704
705 if int(options['helicity'])== 1:
706 options['event'] = options['event'] * 2**(self.cmd.proc_characteristics['nexternal']//3)
707
708 if parralelization:
709 options['gridmode'] = -2
710 options['maxiter'] = 1
711 options['miniter'] = 1
712 options['event'] /= self.splitted_grid
713
714 if not Pdirs:
715 Pdirs = self.subproc
716
717 for Pdir in Pdirs:
718 path =pjoin(Pdir, 'input_app.txt')
719 self.write_parameter_file(path, options)
720
724
725
726
727 gen_events_security = 1.2
728 combining_job = 0
729 max_request_event = 1000
730 max_event_in_iter = 5000
731 min_event_in_iter = 1000
732 max_splitting = 130
733 min_iter = 3
734 max_iter = 9
735 keep_grid_for_refine = False
736
737
738 @ staticmethod
741
742
744 """Choose in which type of refine we want to be"""
745
746 if cmd.proc_characteristics['loop_induced']:
747 return super(gen_ximprove, cls).__new__(gen_ximprove_share, cmd, opt)
748 elif gen_ximprove.format_variable(cmd.run_card['gridpack'], bool):
749 raise Exception, "Not implemented"
750 elif cmd.run_card["job_strategy"] == 2:
751 return super(gen_ximprove, cls).__new__(gen_ximprove_share, cmd, opt)
752 else:
753 return super(gen_ximprove, cls).__new__(gen_ximprove_v4, cmd, opt)
754
755
757
758 try:
759 super(gen_ximprove, self).__init__(cmd, opt)
760 except TypeError:
761 pass
762
763 self.run_statistics = {}
764 self.cmd = cmd
765 self.run_card = cmd.run_card
766 run_card = self.run_card
767 self.me_dir = cmd.me_dir
768
769
770 self.gridpack = run_card['gridpack']
771 self.nhel = run_card['nhel']
772 if "nhel_refine" in run_card:
773 self.nhel = run_card["nhel_refine"]
774
775 if self.run_card['refine_evt_by_job'] != -1:
776 self.max_request_event = run_card['refine_evt_by_job']
777
778
779
780 self.gen_events = True
781 self.min_iter = 3
782 self.parralel = False
783
784 self.err_goal = 0.01
785 self.max_np = 9
786 self.split_channels = False
787
788 self.nreq = 2000
789 self.iseed = 4321
790 self.ngran = 1
791
792
793 self.results = 0
794
795 if isinstance(opt, dict):
796 self.configure(opt)
797 elif isinstance(opt, bannermod.GridpackCard):
798 self.configure_gridpack(opt)
799
802
817
818
839
841 """not needed but for gridpack --which is not handle here for the moment"""
842 return
843
844
846 """return the list of channel that need to be improved"""
847
848 assert self.err_goal >=1
849 self.err_goal = int(self.err_goal)
850
851 goal_lum = self.err_goal/(self.results.axsec+1e-99)
852 logger.info('Effective Luminosity %s pb^-1', goal_lum)
853
854 all_channels = sum([list(P) for P in self.results],[])
855 all_channels.sort(cmp= lambda x,y: 1 if y.get('luminosity') - \
856 x.get('luminosity') > 0 else -1)
857
858 to_refine = []
859 for C in all_channels:
860 if C.get('axsec') == 0:
861 continue
862 if goal_lum/(C.get('luminosity')+1e-99) >= 1 + (self.gen_events_security-1)/2:
863 logger.debug("channel %s is at %s (%s) (%s pb)", C.name, C.get('luminosity'), goal_lum/(C.get('luminosity')+1e-99), C.get('xsec'))
864 to_refine.append(C)
865 elif C.get('xerr') > max(C.get('axsec'),
866 (1/(100*math.sqrt(self.err_goal)))*all_channels[-1].get('axsec')):
867 to_refine.append(C)
868
869 logger.info('need to improve %s channels' % len(to_refine))
870 return goal_lum, to_refine
871
873 """update the html from this object since it contains all the information"""
874
875
876 run = self.cmd.results.current['run_name']
877 if not os.path.exists(pjoin(self.cmd.me_dir, 'HTML', run)):
878 os.mkdir(pjoin(self.cmd.me_dir, 'HTML', run))
879
880 unit = self.cmd.results.unit
881 P_text = ""
882 if self.results:
883 Presults = self.results
884 else:
885 self.results = sum_html.collect_result(self.cmd, None)
886 Presults = self.results
887
888 for P_comb in Presults:
889 P_text += P_comb.get_html(run, unit, self.cmd.me_dir)
890
891 Presults.write_results_dat(pjoin(self.cmd.me_dir,'SubProcesses', 'results.dat'))
892
893 fsock = open(pjoin(self.cmd.me_dir, 'HTML', run, 'results.html'),'w')
894 fsock.write(sum_html.results_header)
895 fsock.write('%s <dl>' % Presults.get_html(run, unit, self.cmd.me_dir))
896 fsock.write('%s </dl></body>' % P_text)
897
898 self.cmd.results.add_detail('cross', Presults.xsec)
899 self.cmd.results.add_detail('error', Presults.xerru)
900
901 return Presults.xsec, Presults.xerru
902
925
927
928 for path in misc.glob(pjoin('*', '*','multijob.dat'), pjoin(self.me_dir, 'SubProcesses')):
929 open(path,'w').write('0\n')
930
932 """ """
933 if nb_split <=1:
934 return
935 f = open(pjoin(self.me_dir, 'SubProcesses', Channel.get('name'), 'multijob.dat'), 'w')
936 f.write('%i\n' % nb_split)
937 f.close()
938
948
949 alphabet = "abcdefghijklmnopqrstuvwxyz"
951 """generate the script in order to generate a given number of event"""
952
953
954
955 goal_lum, to_refine = self.find_job_for_event()
956
957
958 self.reset_multijob()
959
960 jobs = []
961
962
963
964 if self.combining_job >1:
965
966 new_order = []
967 if self.combining_job % 2 == 0:
968 for i in range(len(to_refine) //2):
969 new_order.append(to_refine[i])
970 new_order.append(to_refine[-i-1])
971 if len(to_refine) % 2:
972 new_order.append(to_refine[i+1])
973 else:
974 for i in range(len(to_refine) //3):
975 new_order.append(to_refine[i])
976 new_order.append(to_refine[-2*i-1])
977 new_order.append(to_refine[-2*i-2])
978 if len(to_refine) % 3 == 1:
979 new_order.append(to_refine[i+1])
980 elif len(to_refine) % 3 == 2:
981 new_order.append(to_refine[i+2])
982
983 assert set([id(C) for C in to_refine]) == set([id(C) for C in new_order])
984 to_refine = new_order
985
986
987
988 for C in to_refine:
989
990 needed_event = goal_lum*C.get('axsec')
991 nb_split = int(max(1,((needed_event-1)// self.max_request_event) +1))
992 if not self.split_channels:
993 nb_split = 1
994 if nb_split > self.max_splitting:
995 nb_split = self.max_splitting
996 nb_split=max(1, nb_split)
997
998
999
1000 if C.get('nunwgt') > 0:
1001 nevents = needed_event / nb_split * (C.get('nevents') / C.get('nunwgt'))
1002
1003 nevents = int(nevents / (2**self.min_iter-1))
1004 else:
1005 nevents = self.max_event_in_iter
1006
1007 if nevents < self.min_event_in_iter:
1008 nb_split = int(nb_split * nevents / self.min_event_in_iter) + 1
1009 nevents = self.min_event_in_iter
1010
1011
1012 nevents = max(self.min_event_in_iter, min(self.max_event_in_iter, nevents))
1013 logger.debug("%s : need %s event. Need %s split job of %s points", C.name, needed_event, nb_split, nevents)
1014
1015
1016
1017 self.write_multijob(C, nb_split)
1018
1019 packet = cluster.Packet((C.parent_name, C.name),
1020 combine_runs.CombineRuns,
1021 (pjoin(self.me_dir, 'SubProcesses', C.parent_name)),
1022 {"subproc": C.name, "nb_split":nb_split})
1023
1024
1025
1026 info = {'name': self.cmd.results.current['run_name'],
1027 'script_name': 'unknown',
1028 'directory': C.name,
1029 'P_dir': C.parent_name,
1030 'offset': 1,
1031 'nevents': nevents,
1032 'maxiter': self.max_iter,
1033 'miniter': self.min_iter,
1034 'precision': -goal_lum/nb_split,
1035 'nhel': self.run_card['nhel'],
1036 'channel': C.name.replace('G',''),
1037 'grid_refinment' : 0,
1038 'base_directory': '',
1039 'packet': packet,
1040 }
1041
1042 if nb_split == 1:
1043 jobs.append(info)
1044 else:
1045 for i in range(nb_split):
1046 new_info = dict(info)
1047 new_info['offset'] = i+1
1048 new_info['directory'] += self.alphabet[i % 26] + str((i+1)//26)
1049 if self.keep_grid_for_refine:
1050 new_info['base_directory'] = info['directory']
1051 jobs.append(new_info)
1052
1053 self.create_ajob(pjoin(self.me_dir, 'SubProcesses', 'refine.sh'), jobs)
1054
1055
1057 """create the ajob"""
1058
1059 if not jobs:
1060 return
1061
1062
1063 P2job= collections.defaultdict(list)
1064 for j in jobs:
1065 P2job[j['P_dir']].append(j)
1066 if len(P2job) >1:
1067 for P in P2job.values():
1068 self.create_ajob(template, P)
1069 return
1070
1071
1072 path = pjoin(self.me_dir, 'SubProcesses' ,jobs[0]['P_dir'])
1073
1074 template_text = open(template, 'r').read()
1075
1076
1077 if self.combining_job > 1:
1078 skip1=0
1079 n_channels = len(jobs)
1080 nb_sub = n_channels // self.combining_job
1081 nb_job_in_last = n_channels % self.combining_job
1082 if nb_job_in_last:
1083 nb_sub +=1
1084 skip1 = self.combining_job - nb_job_in_last
1085 if skip1 > nb_sub:
1086 self.combining_job -=1
1087 return self.create_ajob(template, jobs)
1088 combining_job = self.combining_job
1089 else:
1090
1091
1092 skip1=0
1093 combining_job =1
1094 nb_sub = len(jobs)
1095
1096
1097 nb_use = 0
1098 for i in range(nb_sub):
1099 script_number = i+1
1100 if i < skip1:
1101 nb_job = combining_job -1
1102 else:
1103 nb_job = combining_job
1104 fsock = open(pjoin(path, 'ajob%i' % script_number), 'w')
1105 for j in range(nb_use, nb_use + nb_job):
1106 if j> len(jobs):
1107 break
1108 info = jobs[j]
1109 info['script_name'] = 'ajob%i' % script_number
1110 info['keeplog'] = 'false'
1111 if "base_directory" not in info:
1112 info["base_directory"] = "./"
1113 fsock.write(template_text % info)
1114 nb_use += nb_job
1115
1117 """create the ajob to achieve a give precision on the total cross-section"""
1118
1119
1120 assert self.err_goal <=1
1121 xtot = abs(self.results.xsec)
1122 logger.info("Working on precision: %s %%" %(100*self.err_goal))
1123 all_channels = sum([list(P) for P in self.results if P.mfactor],[])
1124 limit = self.err_goal * xtot / len(all_channels)
1125 to_refine = []
1126 rerr = 0
1127 for C in all_channels:
1128 cerr = C.mfactor*(C.xerru + len(all_channels)*C.xerrc)
1129 if cerr > abs(limit):
1130 to_refine.append(C)
1131 else:
1132 rerr += cerr
1133 rerr *=rerr
1134 if not len(to_refine):
1135 return
1136
1137
1138 limit = math.sqrt((self.err_goal * xtot)**2 - rerr/math.sqrt(len(to_refine)))
1139 for C in to_refine[:]:
1140 cerr = C.mfactor*(C.xerru + len(to_refine)*C.xerrc)
1141 if cerr < limit:
1142 to_refine.remove(C)
1143
1144
1145 logger.info('need to improve %s channels' % len(to_refine))
1146
1147
1148 jobs = []
1149
1150
1151
1152 for C in to_refine:
1153
1154
1155 yerr = C.mfactor*(C.xerru+len(to_refine)*C.xerrc)
1156 nevents = 0.2*C.nevents*(yerr/limit)**2
1157
1158 nb_split = int((nevents*(C.nunwgt/C.nevents)/self.max_request_event/ (2**self.min_iter-1))**(2/3))
1159 nb_split = max(nb_split, 1)
1160
1161 if nb_split > self.max_splitting:
1162 nb_split = self.max_splitting
1163
1164 if nb_split >1:
1165 nevents = nevents / nb_split
1166 self.write_multijob(C, nb_split)
1167
1168 nevents = min(self.min_event_in_iter, max(self.max_event_in_iter, nevents))
1169
1170
1171
1172 info = {'name': self.cmd.results.current['run_name'],
1173 'script_name': 'unknown',
1174 'directory': C.name,
1175 'P_dir': C.parent_name,
1176 'offset': 1,
1177 'nevents': nevents,
1178 'maxiter': self.max_iter,
1179 'miniter': self.min_iter,
1180 'precision': yerr/math.sqrt(nb_split)/(C.get('xsec')+ yerr),
1181 'nhel': self.run_card['nhel'],
1182 'channel': C.name.replace('G',''),
1183 'grid_refinment' : 1
1184 }
1185
1186 if nb_split == 1:
1187 jobs.append(info)
1188 else:
1189 for i in range(nb_split):
1190 new_info = dict(info)
1191 new_info['offset'] = i+1
1192 new_info['directory'] += self.alphabet[i % 26] + str((i+1)//26)
1193 jobs.append(new_info)
1194 self.create_ajob(pjoin(self.me_dir, 'SubProcesses', 'refine.sh'), jobs)
1195
1197 """update the html from this object since it contains all the information"""
1198
1199
1200 run = self.cmd.results.current['run_name']
1201 if not os.path.exists(pjoin(self.cmd.me_dir, 'HTML', run)):
1202 os.mkdir(pjoin(self.cmd.me_dir, 'HTML', run))
1203
1204 unit = self.cmd.results.unit
1205 P_text = ""
1206 if self.results:
1207 Presults = self.results
1208 else:
1209 self.results = sum_html.collect_result(self.cmd, None)
1210 Presults = self.results
1211
1212 for P_comb in Presults:
1213 P_text += P_comb.get_html(run, unit, self.cmd.me_dir)
1214
1215 Presults.write_results_dat(pjoin(self.cmd.me_dir,'SubProcesses', 'results.dat'))
1216
1217 fsock = open(pjoin(self.cmd.me_dir, 'HTML', run, 'results.html'),'w')
1218 fsock.write(sum_html.results_header)
1219 fsock.write('%s <dl>' % Presults.get_html(run, unit, self.cmd.me_dir))
1220 fsock.write('%s </dl></body>' % P_text)
1221
1222 self.cmd.results.add_detail('cross', Presults.xsec)
1223 self.cmd.results.add_detail('error', Presults.xerru)
1224
1225 return Presults.xsec, Presults.xerru
1226
1251
1266
1268 """Doing the refine in multicore. Each core handle a couple of PS point."""
1269
1270 nb_ps_by_job = 2000
1271 mode = "refine"
1272 gen_events_security = 1.15
1273
1274
1275
1277
1278 super(gen_ximprove_share, self).__init__(*args, **opts)
1279 self.generated_events = {}
1280 self.splitted_for_dir = lambda x,y : self.splitted_Pdir[(x,y)]
1281
1282
1284 """generate the script in order to generate a given number of event"""
1285
1286
1287
1288 goal_lum, to_refine = self.find_job_for_event()
1289 self.goal_lum = goal_lum
1290
1291
1292 total_ps_points = 0
1293 channel_to_ps_point = []
1294 for C in to_refine:
1295
1296 try:
1297 os.remove(pjoin(self.me_dir, "SubProcesses",C.parent_name, C.name, "events.lhe"))
1298 except:
1299 pass
1300
1301
1302 needed_event = goal_lum*C.get('axsec')
1303 if needed_event == 0:
1304 continue
1305
1306 if C.get('nunwgt') > 0:
1307 nevents = needed_event * (C.get('nevents') / C.get('nunwgt'))
1308
1309 nevents = int(nevents / (2**self.min_iter-1))
1310 else:
1311 nb_split = int(max(1,((needed_event-1)// self.max_request_event) +1))
1312 if not self.split_channels:
1313 nb_split = 1
1314 if nb_split > self.max_splitting:
1315 nb_split = self.max_splitting
1316 nevents = self.max_event_in_iter * self.max_splitting
1317 else:
1318 nevents = self.max_event_in_iter * nb_split
1319
1320 if nevents > self.max_splitting*self.max_event_in_iter:
1321 logger.warning("Channel %s/%s has a very low efficiency of unweighting. Might not be possible to reach target" % \
1322 (C.name, C.parent_name))
1323 nevents = self.max_event_in_iter * self.max_splitting
1324
1325 total_ps_points += nevents
1326 channel_to_ps_point.append((C, nevents))
1327
1328 if self.cmd.options["run_mode"] == 1:
1329 if self.cmd.options["cluster_size"]:
1330 nb_ps_by_job = total_ps_points /int(self.cmd.options["cluster_size"])
1331 else:
1332 nb_ps_by_job = self.nb_ps_by_job
1333 elif self.cmd.options["run_mode"] == 2:
1334 remain = total_ps_points % self.cmd.options["nb_core"]
1335 if remain:
1336 nb_ps_by_job = 1 + (total_ps_points - remain) / self.cmd.options["nb_core"]
1337 else:
1338 nb_ps_by_job = total_ps_points / self.cmd.options["nb_core"]
1339 else:
1340 nb_ps_by_job = self.nb_ps_by_job
1341
1342 nb_ps_by_job = int(max(nb_ps_by_job, 500))
1343
1344 for C, nevents in channel_to_ps_point:
1345 if nevents % nb_ps_by_job:
1346 nb_job = 1 + int(nevents // nb_ps_by_job)
1347 else:
1348 nb_job = int(nevents // nb_ps_by_job)
1349 submit_ps = min(nevents, nb_ps_by_job)
1350 if nb_job == 1:
1351 submit_ps = max(submit_ps, self.min_event_in_iter)
1352 self.create_resubmit_one_iter(C.parent_name, C.name[1:], submit_ps, nb_job, step=0)
1353 needed_event = goal_lum*C.get('xsec')
1354 logger.debug("%s/%s : need %s event. Need %s split job of %s points", C.parent_name, C.name, needed_event, nb_job, submit_ps)
1355
1356
1358
1359 grid_calculator, cross, error = self.combine_grid(Pdir, G, step)
1360
1361
1362 Gdirs = []
1363 for i in range(self.splitted_for_dir(Pdir, G)):
1364 path = pjoin(Pdir, "G%s_%s" % (G, i+1))
1365 Gdirs.append(path)
1366 assert len(grid_calculator.results) == len(Gdirs) == self.splitted_for_dir(Pdir, G)
1367
1368
1369
1370 needed_event = cross * self.goal_lum
1371 if needed_event == 0:
1372 return 0
1373
1374
1375 if self.err_goal >=1:
1376 if needed_event > self.gen_events_security * self.err_goal:
1377 needed_event = int(self.gen_events_security * self.err_goal)
1378
1379 if (Pdir, G) in self.generated_events:
1380 old_nunwgt, old_maxwgt = self.generated_events[(Pdir, G)]
1381 else:
1382 old_nunwgt, old_maxwgt = 0, 0
1383
1384 if old_nunwgt == 0 and os.path.exists(pjoin(Pdir,"G%s" % G, "events.lhe")):
1385
1386 lhe = lhe_parser.EventFile(pjoin(Pdir,"G%s" % G, "events.lhe"))
1387 old_nunwgt = lhe.unweight(None, trunc_error=0.005, log_level=0)
1388 old_maxwgt = lhe.max_wgt
1389
1390
1391
1392 maxwgt = max(grid_calculator.get_max_wgt(), old_maxwgt)
1393 new_evt = grid_calculator.get_nunwgt(maxwgt)
1394 efficiency = new_evt / sum([R.nevents for R in grid_calculator.results])
1395 nunwgt = old_nunwgt * old_maxwgt / maxwgt
1396 nunwgt += new_evt
1397
1398
1399 one_iter_nb_event = max(grid_calculator.get_nunwgt(),1)
1400 drop_previous_iteration = False
1401
1402 n_target_one_iter = (needed_event-one_iter_nb_event) / ( one_iter_nb_event/ sum([R.nevents for R in grid_calculator.results]))
1403 n_target_combined = (needed_event-nunwgt) / efficiency
1404 if n_target_one_iter < n_target_combined:
1405
1406
1407 drop_previous_iteration = True
1408 nunwgt = one_iter_nb_event
1409 maxwgt = grid_calculator.get_max_wgt()
1410 new_evt = nunwgt
1411 efficiency = ( one_iter_nb_event/ sum([R.nevents for R in grid_calculator.results]))
1412
1413 try:
1414 if drop_previous_iteration:
1415 raise IOError
1416 output_file = open(pjoin(Pdir,"G%s" % G, "events.lhe"), 'a')
1417 except IOError:
1418 output_file = open(pjoin(Pdir,"G%s" % G, "events.lhe"), 'w')
1419
1420 misc.call(["cat"] + [pjoin(d, "events.lhe") for d in Gdirs],
1421 stdout=output_file)
1422 output_file.close()
1423
1424
1425 if nunwgt < 0.6 * needed_event and step > self.min_iter:
1426 lhe = lhe_parser.EventFile(output_file.name)
1427 old_nunwgt =nunwgt
1428 nunwgt = lhe.unweight(None, trunc_error=0.01, log_level=0)
1429
1430
1431 self.generated_events[(Pdir, G)] = (nunwgt, maxwgt)
1432
1433
1434
1435 if nunwgt >= int(0.96*needed_event)+1:
1436
1437 logger.info("found enough event for %s/G%s" % (os.path.basename(Pdir), G))
1438 self.write_results(grid_calculator, cross, error, Pdir, G, step, efficiency)
1439 return 0
1440 elif step >= self.max_iter:
1441 logger.debug("fail to find enough event")
1442 self.write_results(grid_calculator, cross, error, Pdir, G, step, efficiency)
1443 return 0
1444
1445 nb_split_before = len(grid_calculator.results)
1446 nevents = grid_calculator.results[0].nevents
1447 if nevents == 0:
1448 nevents = max(g.nevents for g in grid_calculator.results)
1449
1450 need_ps_point = (needed_event - nunwgt)/(efficiency+1e-99)
1451 need_job = need_ps_point // nevents + 1
1452
1453 if step < self.min_iter:
1454
1455 job_at_first_iter = nb_split_before/2**(step-1)
1456 expected_total_job = job_at_first_iter * (2**self.min_iter-1)
1457 done_job = job_at_first_iter * (2**step-1)
1458 expected_remaining_job = expected_total_job - done_job
1459
1460 logger.debug("efficiency status (smaller is better): %s", need_job/expected_remaining_job)
1461
1462 need_job = min(need_job, expected_remaining_job*1.25)
1463
1464 nb_job = (need_job-0.5)//(2**(self.min_iter-step)-1) + 1
1465 nb_job = max(1, nb_job)
1466 grid_calculator.write_grid_for_submission(Pdir,G,
1467 self.splitted_for_dir(Pdir, G), nb_job*nevents ,mode=self.mode,
1468 conservative_factor=self.max_iter)
1469 logger.info("%s/G%s is at %i/%i (%.2g%%) event. Resubmit %i job at iteration %i." \
1470 % (os.path.basename(Pdir), G, int(nunwgt),int(needed_event)+1,
1471 (float(nunwgt)/needed_event)*100.0 if needed_event>0.0 else 0.0,
1472 nb_job, step))
1473 self.create_resubmit_one_iter(Pdir, G, nevents, nb_job, step)
1474
1475
1476 elif step < self.max_iter:
1477 if step + 1 == self.max_iter:
1478 need_job = 1.20 * need_job
1479
1480 nb_job = int(min(need_job, nb_split_before*1.5))
1481 grid_calculator.write_grid_for_submission(Pdir,G,
1482 self.splitted_for_dir(Pdir, G), nb_job*nevents ,mode=self.mode,
1483 conservative_factor=self.max_iter)
1484
1485
1486 logger.info("%s/G%s is at %i/%i ('%.2g%%') event. Resubmit %i job at iteration %i." \
1487 % (os.path.basename(Pdir), G, int(nunwgt),int(needed_event)+1,
1488 (float(nunwgt)/needed_event)*100.0 if needed_event>0.0 else 0.0,
1489 nb_job, step))
1490 self.create_resubmit_one_iter(Pdir, G, nevents, nb_job, step)
1491
1492
1493
1494 return 0
1495
1496
def write_results(self, grid_calculator, cross, error, Pdir, G, step, efficiency):
    """Write the fortran-style ``results.dat`` file for channel G of Pdir.

    The single line written mirrors the layout expected by the fortran
    tools: cross, error(+), error(-), nevents, nw, maxit, nunwgt,
    luminosity, wgt, abscross, then three placeholder fields.

    :param grid_calculator: combined-grid object (only its results are
        finalized here; unused when ``cross`` is 0)
    :param cross: channel cross-section (0 flags a dead channel)
    :param error: relative error on the cross-section
    :param Pdir: P* subprocess directory name
    :param G: channel identifier (used to build the G<G> directory name)
    :param step: current iteration number, stored as ``maxit``
    :param efficiency: unweighting efficiency used to estimate ``nw``
    """

    if cross == 0:
        # dead channel: every field of the summary line is zero
        abscross, nw, luminosity = 0, 0, 0
        # fix: the original unpacked 'wgt' twice in this tuple assignment
        wgt, maxit, nunwgt, nevents = 0, 0, 0, 0
        error = 0
    else:
        grid_calculator.results.compute_values()
        # average |cross section| accumulated over the iterations
        abscross = self.abscross[(Pdir, G)] / self.sigma[(Pdir, G)]
        nunwgt, wgt = self.generated_events[(Pdir, G)]
        nw = int(nunwgt / efficiency)
        nunwgt = int(nunwgt)
        maxit = step
        nevents = nunwgt
        luminosity = nunwgt / cross

    def fstr(nb):
        """Format a float the fortran way: 0.xxxxxE+yy (mantissa < 1)."""
        data = '%E' % nb
        mantissa, power = data.split('E')
        mantissa = float(mantissa) / 10
        power = int(power) + 1
        return '%.5fE%+03i' % (mantissa, power)

    line = '%s %s %s %i %i %i %i %s %s %s 0.0 0.0 0\n' % \
           (fstr(cross), fstr(error * cross), fstr(error * cross),
            nevents, nw, maxit, nunwgt,
            fstr(luminosity), fstr(wgt), fstr(abscross))

    # context manager guarantees the handle is closed even on error
    # (the original left the file to be closed by the garbage collector)
    with open(os.path.join(self.me_dir, 'SubProcesses', Pdir, 'G%s' % G,
                           'results.dat'), 'w') as fsock:
        fsock.write(line)
1531