Package madgraph :: Package madevent :: Module sum_html
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.madevent.sum_html

  1  ################################################################################ 
  2  # 
  3  # Copyright (c) 2011 The MadGraph5_aMC@NLO Development team and Contributors 
  4  # 
  5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
  6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
  7  # high-energy processes in the Standard Model and beyond. 
  8  # 
  9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
 10  # distribution. 
 11  # 
 12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
 13  # 
 14  ################################################################################ 
 15  from __future__ import division 
 16  from __future__ import absolute_import 
 17  import os 
 18  import math 
 19  import logging 
 20  import re 
 21  import xml.dom.minidom as minidom 
 22  from six.moves import range 
 23   
 24  logger = logging.getLogger('madevent.stdout') # -> stdout 
 25   
 26  pjoin = os.path.join 
 27  try: 
 28      import madgraph 
 29  except ImportError: 
 30      import internal.cluster as cluster 
 31      import internal.misc as misc 
 32      from internal import MadGraph5Error 
 33  else: 
 34      import madgraph.various.cluster as cluster 
 35      import madgraph.various.misc as misc 
 36      from madgraph import MadGraph5Error 
 37   
class RunStatistics(dict):
    """ A class to store statistics about a MadEvent run. """

    def __init__(self, *args, **opts):
        """ Initialize the run dictionary. For now, the same as a regular
        dictionary, except that we specify some default statistics.
        Any entries supplied through *args/**opts override the defaults."""

        # Default MadLoop-related counters and extrema.
        madloop_statistics = {
            'unknown_stability'   : 0,
            'stable_points'       : 0,
            'unstable_points'     : 0,
            'exceptional_points'  : 0,
            'DP_usage'            : 0,
            'QP_usage'            : 0,
            'DP_init_usage'       : 0,
            'QP_init_usage'       : 0,
            'CutTools_DP_usage'   : 0,
            'CutTools_QP_usage'   : 0,
            'PJFry_usage'         : 0,
            'Golem_usage'         : 0,
            'IREGI_usage'         : 0,
            'Samurai_usage'       : 0,
            'Ninja_usage'         : 0,
            'Ninja_QP_usage'      : 0,
            'COLLIER_usage'       : 0,
            # Extrema start at their worst possible value so that
            # aggregate_statistics' min/max rules work on first update.
            'max_precision'       : 1.0e99,
            'min_precision'       : 0.0,
            'averaged_timing'     : 0.0,
            'n_madloop_calls'     : 0,
            'cumulative_timing'   : 0.0,
            'skipped_subchannel'  : 0  # number of times that a computation has been
                                       # discarded due to abnormal weight.
        }

        for key, value in madloop_statistics.items():
            self[key] = value

        # BUGFIX: the original called super(dict, self).__init__(...), which
        # resolves past dict to object and raises TypeError as soon as any
        # constructor argument is given. Initialize through the proper MRO.
        super(RunStatistics, self).__init__(*args, **opts)
76
77 - def aggregate_statistics(self, new_stats):
78 """ Update the current statitistics with the new_stats specified.""" 79 80 if isinstance(new_stats,RunStatistics): 81 new_stats = [new_stats, ] 82 elif isinstance(new_stats,list): 83 if any(not isinstance(_,RunStatistics) for _ in new_stats): 84 raise MadGraph5Error("The 'new_stats' argument of the function "+\ 85 "'updtate_statistics' must be a (possibly list of) "+\ 86 "RunStatistics instance.") 87 88 keys = set([]) 89 for stat in [self,]+new_stats: 90 keys |= set(stat.keys()) 91 92 new_stats = new_stats+[self,] 93 for key in keys: 94 # Define special rules 95 if key=='max_precision': 96 # The minimal precision corresponds to the maximal value for PREC 97 self[key] = min( _[key] for _ in new_stats if key in _) 98 elif key=='min_precision': 99 # The maximal precision corresponds to the minimal value for PREC 100 self[key] = max( _[key] for _ in new_stats if key in _) 101 elif key=='averaged_timing': 102 n_madloop_calls = sum(_['n_madloop_calls'] for _ in new_stats if 103 'n_madloop_calls' in _) 104 if n_madloop_calls > 0 : 105 self[key] = sum(_[key]*_['n_madloop_calls'] for _ in 106 new_stats if (key in _ and 'n_madloop_calls' in _) )/n_madloop_calls 107 else: 108 # Now assume all other quantities are cumulative 109 self[key] = sum(_[key] for _ in new_stats if key in _)
110
111 - def load_statistics(self, xml_node):
112 """ Load the statistics from an xml node. """ 113 114 def getData(Node): 115 return Node.childNodes[0].data
116 117 u_return_code = xml_node.getElementsByTagName('u_return_code') 118 u_codes = [int(_) for _ in getData(u_return_code[0]).split(',')] 119 self['CutTools_DP_usage'] = u_codes[1] 120 self['PJFry_usage'] = u_codes[2] 121 self['IREGI_usage'] = u_codes[3] 122 self['Golem_usage'] = u_codes[4] 123 self['Samurai_usage'] = u_codes[5] 124 self['Ninja_usage'] = u_codes[6] 125 self['COLLIER_usage'] = u_codes[7] 126 self['Ninja_QP_usage'] = u_codes[8] 127 self['CutTools_QP_usage'] = u_codes[9] 128 t_return_code = xml_node.getElementsByTagName('t_return_code') 129 t_codes = [int(_) for _ in getData(t_return_code[0]).split(',')] 130 self['DP_usage'] = t_codes[1] 131 self['QP_usage'] = t_codes[2] 132 self['DP_init_usage'] = t_codes[3] 133 self['DP_init_usage'] = t_codes[4] 134 h_return_code = xml_node.getElementsByTagName('h_return_code') 135 h_codes = [int(_) for _ in getData(h_return_code[0]).split(',')] 136 self['unknown_stability'] = h_codes[1] 137 self['stable_points'] = h_codes[2] 138 self['unstable_points'] = h_codes[3] 139 self['exceptional_points'] = h_codes[4] 140 average_time = xml_node.getElementsByTagName('average_time') 141 avg_time = float(getData(average_time[0])) 142 self['averaged_timing'] = avg_time 143 cumulated_time = xml_node.getElementsByTagName('cumulated_time') 144 cumul_time = float(getData(cumulated_time[0])) 145 self['cumulative_timing'] = cumul_time 146 max_prec = xml_node.getElementsByTagName('max_prec') 147 max_prec = float(getData(max_prec[0])) 148 # The minimal precision corresponds to the maximal value for PREC 149 self['min_precision'] = max_prec 150 min_prec = xml_node.getElementsByTagName('min_prec') 151 min_prec = float(getData(min_prec[0])) 152 # The maximal precision corresponds to the minimal value for PREC 153 self['max_precision'] = min_prec 154 n_evals = xml_node.getElementsByTagName('n_evals') 155 n_evals = int(getData(n_evals[0])) 156 self['n_madloop_calls'] = n_evals
157
    def nice_output(self, G, no_warning=False):
        """Returns a one-line string summarizing the run statistics
        gathered for the channel G."""

        # Do not return anythign for now if there is no madloop calls. This can
        # change of course if more statistics are gathered, unrelated to MadLoop.
        if self['n_madloop_calls']==0:
            return ''

        # (label, value) pairs for the stability summary; '%'-labelled entries
        # are fractions of the total number of MadLoop calls.
        stability = [
          ('tot#',self['n_madloop_calls']),
          ('unkwn#',self['unknown_stability']),
          ('UPS%',float(self['unstable_points'])/self['n_madloop_calls']),
          ('EPS#',self['exceptional_points'])]

        # Drop zero entries except the two alarming categories, then render
        # integer counts as-is and fractions as percentages.
        stability = [_ for _ in stability if _[1] > 0 or _[0] in ['UPS%','EPS#']]
        stability = [(_[0],'%i'%_[1]) if isinstance(_[1], int) else
                     (_[0],'%.3g'%(100.0*_[1])) for _ in stability]

        # Fraction of the MadLoop calls served by each reduction tool.
        tools_used = [
          ('CT_DP',float(self['CutTools_DP_usage'])/self['n_madloop_calls']),
          ('CT_QP',float(self['CutTools_QP_usage'])/self['n_madloop_calls']),
          ('PJFry',float(self['PJFry_usage'])/self['n_madloop_calls']),
          ('Golem',float(self['Golem_usage'])/self['n_madloop_calls']),
          ('IREGI',float(self['IREGI_usage'])/self['n_madloop_calls']),
          ('Samurai',float(self['Samurai_usage'])/self['n_madloop_calls']),
          ('COLLIER',float(self['COLLIER_usage'])/self['n_madloop_calls']),
          ('Ninja_DP',float(self['Ninja_usage'])/self['n_madloop_calls']),
          ('Ninja_QP',float(self['Ninja_QP_usage'])/self['n_madloop_calls'])]

        # Only report tools that were actually used, as percentages.
        tools_used = [(_[0],'%.3g'%(100.0*_[1])) for _ in tools_used if _[1] > 0.0 ]

        # NOTE(review): when G is not a string, os.path.join(list(G)) is called
        # with a single list argument -- confirm G is then path-like; this
        # would raise TypeError on a genuine tuple of components.
        to_print = [('%s statistics:'%(G if isinstance(G,str) else
                    str(os.path.join(list(G))))\
                    +(' %s,'%misc.format_time(int(self['cumulative_timing'])) if
                    int(self['cumulative_timing']) > 0 else '')
                    +((' Avg. ML timing = %i ms'%int(1.0e3*self['averaged_timing'])) if
                    self['averaged_timing'] > 0.001 else
                    (' Avg. ML timing = %i mus'%int(1.0e6*self['averaged_timing']))) \
                    +', Min precision = %.2e'%self['min_precision'])
                    ,' -> Stability %s'%dict(stability)
                    ,' -> Red. tools usage in %% %s'%dict(tools_used)
        # I like the display above better after all
        #           ,'Stability %s'%(str([_[0] for _ in stability]),
        #                            str([_[1] for _ in stability]))
        #           ,'Red. tools usage in %% %s'%(str([_[0] for _ in tools_used]),
        #                            str([_[1] for _ in tools_used]))
                    ]

        if self['skipped_subchannel'] > 0 and not no_warning:
            to_print.append("WARNING: Some event with large weight have been "+\
              "discarded. This happened %s times." % self['skipped_subchannel'])

        # Single quotes are stripped since this summary is embedded in
        # contexts where they would need escaping.
        return ('\n'.join(to_print)).replace("'"," ")
212
213 - def has_warning(self):
214 """return if any stat needs to be reported as a warning 215 When this is True, the print_warning doit retourner un warning 216 """ 217 218 if self['n_madloop_calls'] > 0: 219 fraction = self['exceptional_points']/float(self['n_madloop_calls']) 220 else: 221 fraction = 0.0 222 223 if self['skipped_subchannel'] > 0: 224 return True 225 elif fraction > 1.0e-4: 226 return True 227 else: 228 return False
229
230 - def get_warning_text(self):
231 """get a string with all the identified warning""" 232 233 to_print = [] 234 if self['skipped_subchannel'] > 0: 235 to_print.append("Some event with large weight have been discarded."+\ 236 " This happens %s times." % self['skipped_subchannel']) 237 if self['n_madloop_calls'] > 0: 238 fraction = self['exceptional_points']/float(self['n_madloop_calls']) 239 if fraction > 1.0e-4: 240 to_print.append("Some PS with numerical instability have been set "+\ 241 "to a zero matrix-element (%.3g%%)" % (100.0*fraction)) 242 243 return ('\n'.join(to_print)).replace("'"," ")
244
class OneResult(object):
    """Cross-section information read from a single channel results.dat."""

    def __init__(self, name):
        """Create an empty result container for channel *name*."""

        self.run_statistics = RunStatistics()
        self.name = name
        self.parent_name = ''
        # --- integrated quantities ---
        self.axsec = 0       # absolute cross section = Sum(abs(wgt))
        self.xsec = 0        # real cross section = Sum(wgt)
        self.xerru = 0       # uncorrelated error
        self.xerrc = 0       # correlated error
        # --- event bookkeeping ---
        self.nevents = 0
        self.nw = 0          # number of events after the primary unweighting
        self.maxit = 0
        self.nunwgt = 0      # number of unweighted events
        self.luminosity = 0
        self.mfactor = 1     # number of times this channel occurs (symmetry)
        # --- per-iteration history ---
        self.ysec_iter = []
        self.yerr_iter = []
        self.yasec_iter = []
        self.eff_iter = []
        self.maxwgt_iter = []
        # --- secondary unweighting ---
        self.maxwgt = 0      # weight used for the secondary unweighting
        self.th_maxwgt = 0   # weight that should have been used for the
                             # secondary unweighting (when maxweight is forced)
        self.th_nunwgt = 0   # event count associated with th_maxwgt
                             # (theoretical, not a number of written events)
        self.timing = 0
    #@cluster.multiple_try(nb_try=5,sleep=20)
    def read_results(self, filepath):
        """read results.dat and fulfill the information.

        The first plain-text line carries the integrated quantities, the
        following lines the per-iteration history; everything from the
        first markup tag onward is handed to parse_xml_results.
        *filepath* may be a path or an open file object."""

        if isinstance(filepath, str):
            finput = open(filepath)
        elif hasattr(filepath, 'read') and hasattr(filepath, 'name'):
            finput = filepath
        else:
            raise Exception("filepath should be a path or a file descriptor")

        i=0
        # NOTE(review): found_xsec_line is set below but never used.
        found_xsec_line = False
        for line in finput:
            # Exit as soon as we hit the xml part. Not elegant, but the part
            # below should eventually be xml anyway.
            if '<' in line:
                break
            i+=1
            if i == 1:
                def secure_float(d):
                    # Parse a float, recovering Fortran-style exponents with a
                    # dropped 'E' (e.g. '1.0-123' -> 1.0e-123); returns None
                    # when the token is unparseable.
                    try:
                        return float(d)
                    except ValueError:
                        m=re.search(r'''([+-]?[\d.]*)([+-]\d*)''', d)
                        if m:
                            return float(m.group(1))*10**(float(m.group(2)))
                        return

                # First line: the integrated quantities, in fixed order.
                data = [secure_float(d) for d in line.split()]
                try:
                    self.axsec, self.xerru, self.xerrc, self.nevents, self.nw,\
                        self.maxit, self.nunwgt, self.luminosity, self.wgt, \
                        self.xsec = data[:10]
                except ValueError:
                    # Malformed first line: report with the associated log.
                    # NOTE(review): when log.txt does not exist the error is
                    # silently swallowed and parsing continues -- confirm this
                    # best-effort behaviour is intended.
                    log = pjoin(os.path.dirname(filepath), 'log.txt')
                    if os.path.exists(log):
                        if 'end code not correct' in line:
                            error_code = data[4]
                            log = pjoin(os.path.dirname(filepath), 'log.txt')
                            raise Exception("Reported error: End code %s \n Full associated log: \n%s"\
                                            % (error_code, open(log).read()))
                        else:
                            log = pjoin(os.path.dirname(filepath), 'log.txt')
                            raise Exception("Wrong formatting in results.dat: %s \n Full associated log: \n%s"\
                                            % (line, open(log).read()))
                # Optional trailing fields (secondary unweighting info).
                if len(data) > 10:
                    self.maxwgt = data[10]
                if len(data) >12:
                    self.th_maxwgt, self.th_nunwgt = data[11:13]
                # Symmetric channels share the luminosity.
                if self.mfactor > 1:
                    self.luminosity /= self.mfactor
                continue
            # Subsequent lines: one entry per iteration.
            try:
                l, sec, err, eff, maxwgt, asec = line.split()
                found_xsec_line = True
            except:
                break
            self.ysec_iter.append(secure_float(sec))
            self.yerr_iter.append(secure_float(err))
            self.yasec_iter.append(secure_float(asec))
            self.eff_iter.append(secure_float(eff))
            self.maxwgt_iter.append(secure_float(maxwgt))

        # Rewind and collect everything from the first tagged line onward.
        finput.seek(0)
        xml = []
        for line in finput:
            if re.match('^.*<.*>',line):
                xml.append(line)
                break
        for line in finput:
            xml.append(line)

        if xml:
            self.parse_xml_results('\n'.join(xml))

        # this is for amcatnlo: the number of events has to be read from another file
        if self.nevents == 0 and self.nunwgt == 0 and isinstance(filepath, str) and \
           os.path.exists(pjoin(os.path.split(filepath)[0], 'nevts')):
            nevts = int((open(pjoin(os.path.split(filepath)[0], 'nevts')).read()).split()[0])
            self.nevents = nevts
            self.nunwgt = nevts
358
359 - def parse_xml_results(self, xml):
360 """ Parse the xml part of the results.dat file.""" 361 362 dom = minidom.parseString(xml) 363 364 statistics_node = dom.getElementsByTagName("run_statistics") 365 366 if statistics_node: 367 try: 368 self.run_statistics.load_statistics(statistics_node[0]) 369 except ValueError as IndexError: 370 logger.warning('Fail to read run statistics from results.dat') 371 else: 372 lo_statistics_node = dom.getElementsByTagName("lo_statistics")[0] 373 timing = lo_statistics_node.getElementsByTagName('cumulated_time')[0] 374 timing= timing.firstChild.nodeValue 375 self.timing = 0.3 + float(timing) #0.3 is the typical latency of bash script/...
376 377
378 - def set_mfactor(self, value):
379 self.mfactor = int(value)
380
    def change_iterations_number(self, nb_iter):
        """Change the number of iterations for this process.

        The first len - nb_iter iterations are merged into a single bin; the
        remaining ones are kept. In this module it is only ever called with
        nb_iter=0 (see compute_iterations), which collapses everything into
        one bin."""

        # Nothing to do when we already have at most nb_iter iterations.
        if len(self.ysec_iter) <= nb_iter:
            return

        # Combine the first iterations into a single bin
        nb_to_rm = len(self.ysec_iter) - nb_iter
        ysec = [0]
        yerr = [0]
        for i in range(nb_to_rm):
            ysec[0] += self.ysec_iter[i]
            yerr[0] += self.yerr_iter[i]**2
        # NOTE(review): nb_to_rm entries are summed but the divisor is
        # nb_to_rm+1 -- confirm whether the first remaining iteration was
        # meant to be included in the merged bin.
        ysec[0] /= (nb_to_rm+1)
        yerr[0] = math.sqrt(yerr[0]) / (nb_to_rm + 1)

        # NOTE(review): ysec/yerr have length 1, so ysec[i] = ... raises
        # IndexError for any nb_iter >= 2 (should presumably append). This
        # loop body is never reached for the nb_iter=0 call used in practice.
        for i in range(1, nb_iter):
            ysec[i] = self.ysec_iter[nb_to_rm + i]
            yerr[i] = self.yerr_iter[nb_to_rm + i]

        self.ysec_iter = ysec
        self.yerr_iter = yerr
403
404 - def get(self, name):
405 406 if name in ['xsec', 'xerru','xerrc']: 407 return getattr(self, name) * self.mfactor 408 elif name in ['luminosity']: 409 #misc.sprint("use unsafe luminosity definition") 410 #raise Exception 411 return getattr(self, name) #/ self.mfactor 412 elif (name == 'eff'): 413 return self.xerr*math.sqrt(self.nevents/(self.xsec+1e-99)) 414 elif name == 'xerr': 415 return math.sqrt(self.xerru**2+self.xerrc**2) 416 elif name == 'name': 417 return pjoin(self.parent_name, self.name) 418 else: 419 return getattr(self, name)
420
# A list of OneResult instances that is itself usable as a OneResult: the
# aggregated quantities are (re)computed from the contained results by
# compute_values / compute_average.
class Combine_results(list, OneResult):

    def __init__(self, name):
        # Initialize both bases explicitly: list and OneResult take
        # different constructor arguments, so no cooperative super() here.
        list.__init__(self)
        OneResult.__init__(self, name)
427
428 - def add_results(self, name, filepath, mfactor=1):
429 """read the data in the file""" 430 try: 431 oneresult = OneResult(name) 432 oneresult.set_mfactor(mfactor) 433 oneresult.read_results(filepath) 434 oneresult.parent_name = self.name 435 self.append(oneresult) 436 return oneresult 437 except Exception: 438 logger.critical("Error when reading %s" % filepath) 439 raise
440 441
442 - def compute_values(self, update_statistics=False):
443 """compute the value associate to this combination""" 444 445 self.compute_iterations() 446 self.axsec = sum([one.axsec for one in self]) 447 self.xsec = sum([one.xsec for one in self]) 448 self.xerrc = sum([one.xerrc for one in self]) 449 self.xerru = math.sqrt(sum([one.xerru**2 for one in self])) 450 451 self.nevents = sum([one.nevents for one in self]) 452 self.nw = sum([one.nw for one in self]) 453 self.maxit = len(self.yerr_iter) # 454 self.nunwgt = sum([one.nunwgt for one in self]) 455 self.wgt = 0 456 self.luminosity = min([0]+[one.luminosity for one in self]) 457 self.timing = sum([one.timing for one in self]) 458 if update_statistics: 459 self.run_statistics.aggregate_statistics([_.run_statistics for _ in self])
460
    def compute_average(self, error=None):
        """compute the value associate to this combination.

        Contrary to compute_values, the quantities are averaged over the
        jobs (used when several jobs ran the same channel). Jobs whose
        cross section is inconsistent with the average are dropped and the
        average is recomputed recursively with a spread-based *error*."""

        nbjobs = len(self)
        if not nbjobs:
            return
        # Spread of the individual results, used as fallback error below.
        max_xsec = max(one.xsec for one in self)
        min_xsec = min(one.xsec for one in self)
        self.axsec = sum([one.axsec for one in self]) / nbjobs
        self.xsec = sum([one.xsec for one in self]) /nbjobs
        self.xerrc = sum([one.xerrc for one in self]) /nbjobs
        self.xerru = math.sqrt(sum([one.xerru**2 for one in self])) /nbjobs
        self.timing = sum([one.timing for one in self]) #no average here
        # An explicit error (from a previous inconsistency pass) overrides
        # the statistical estimate.
        if error:
            self.xerrc = error
            self.xerru = error

        self.nevents = sum([one.nevents for one in self])
        self.nw = 0#sum([one.nw for one in self])
        self.maxit = 0#len(self.yerr_iter) #
        self.nunwgt = sum([one.nunwgt for one in self])
        self.wgt = 0
        self.luminosity = sum([one.luminosity for one in self])
        # Concatenate the iteration histories of all jobs.
        self.ysec_iter = []
        self.yerr_iter = []
        self.th_maxwgt = 0.0
        self.th_nunwgt = 0
        for result in self:
            self.ysec_iter+=result.ysec_iter
            self.yerr_iter+=result.yerr_iter
            self.yasec_iter += result.yasec_iter
            self.eff_iter += result.eff_iter
            self.maxwgt_iter += result.maxwgt_iter

        #check full consistency
        # Iterate over a copy since inconsistent entries are removed in place;
        # if any job is more than 25 sigma below the mean, drop it and redo
        # the average with the spread-based error.
        onefail = False
        for one in list(self):
            if one.xsec < (self.xsec - 25* one.xerru):
                if not onefail:
                    logger.debug('multi run are inconsistent: %s < %s - 25* %s: assign error %s', one.xsec, self.xsec, one.xerru, error if error else max_xsec-min_xsec)
                onefail = True
                self.remove(one)
        if onefail:
            if error:
                return self.compute_average(error)
            else:
                return self.compute_average((max_xsec-min_xsec)/2.)
508 509 510
511 - def compute_iterations(self):
512 """Compute iterations to have a chi-square on the stability of the 513 integral""" 514 515 #iter = [len(a.ysec_iter) for a in self] 516 #if iter: 517 # nb_iter = min(iter) 518 #else: 519 # nb_iter = 0 520 #nb_iter = misc.mmin([len(a.ysec_iter) for a in self], 0) 521 #misc.sprint(nb_iter) 522 # syncronize all iterations to a single one 523 for oneresult in self: 524 oneresult.change_iterations_number(0)
525 526 # compute value error for each iteration 527 #for i in range(nb_iter): 528 # value = [one.ysec_iter[i] for one in self] 529 # error = [one.yerr_iter[i]**2 for one in self] 530 # 531 # # store the value for the iteration 532 # raise Exception 533 # self.ysec_iter.append(sum(value)) 534 # self.yerr_iter.append(math.sqrt(sum(error))) 535 536 537 template_file = \ 538 """ 539 %(diagram_link)s 540 <BR> 541 &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;<b>s= %(cross).5g &#177 %(error).3g (%(unit)s)</b><br><br> 542 <table class="sortable" id='tablesort'> 543 <tr><th>Graph</th> 544 <th> %(result_type)s</th> 545 <th>Error</th> 546 <th>Events (K)</th> 547 <th>Unwgt</th> 548 <th>Luminosity</th> 549 </tr> 550 %(table_lines)s 551 </table> 552 </center> 553 <br><br><br> 554 """ 555 table_line_template = \ 556 """ 557 <tr><td align=right>%(P_title)s</td> 558 <td align=right><a id="%(P_link)s" href=%(P_link)s > %(cross)s </a> </td> 559 <td align=right> %(error)s</td> 560 <td align=right> %(events)s</td> 561 <td align=right> %(unweighted)s</td> 562 <td align=right> %(luminosity)s</td> 563 </tr> 564 """ 565
    def get_html(self,run, unit, me_dir = []):
        """write html output.

        Builds the summary table (template_file) for this combination with
        one row per contained OneResult, linking each row to the relevant
        log file. NOTE(review): the default me_dir=[] is a mutable default
        and would crash pjoin below -- callers always pass a real path."""

        # cumulated cross-section per P* directory (for the '<P> sum' rows)
        P_grouping = {}

        tables_line = ''
        for oneresult in self:
            if oneresult.name.startswith('P'):
                title = '<a href=../../SubProcesses/%(P)s/diagrams.html>%(P)s</a>' \
                                                      % {'P':oneresult.name}
                P = oneresult.name.split('_',1)[0]
                if P in P_grouping:
                    P_grouping[P] += float(oneresult.xsec)
                else:
                    P_grouping[P] = float(oneresult.xsec)
            else:
                title = oneresult.name

            # Individual rows are only written for leaf results; nested
            # Combine_results only contribute to the P_grouping sums above.
            if not isinstance(oneresult, Combine_results):
                # this is for the (aMC@)NLO logs
                if os.path.exists(pjoin(me_dir, 'Events', run, 'alllogs_1.html')):
                    link = '../../Events/%(R)s/alllogs_1.html#/%(P)s/%(G)s' % \
                                      {'P': os.path.basename(self.name),
                                       'G': oneresult.name,
                                       'R': run}
                    mod_link = link
                elif os.path.exists(pjoin(me_dir, 'Events', run, 'alllogs_0.html')):
                    link = '../../Events/%(R)s/alllogs_0.html#/%(P)s/%(G)s' % \
                                      {'P': os.path.basename(self.name),
                                       'G': oneresult.name,
                                       'R': run}
                    mod_link = link
                else:
                    # this is for madevent runs
                    link = '../../SubProcesses/%(P)s/%(G)s/%(R)s_log.txt' % \
                                      {'P': os.path.basename(self.name),
                                       'G': oneresult.name,
                                       'R': run}
                    mod_link = '../../SubProcesses/%(P)s/%(G)s/log.txt' % \
                                      {'P': os.path.basename(self.name),
                                       'G': oneresult.name}
                    # Fall back to any unique *.log in the channel directory,
                    # or to a plain anchor when nothing is found.
                    if not os.path.exists(link) and not os.path.exists(mod_link):
                        P = os.path.basename(self.name)
                        base = pjoin(me_dir, 'SubProcesses', P, os.path.dirname(link))
                        pos = [pjoin(base,c) for c in os.listdir(base) if c.endswith('.log')]
                        if len(pos) == 1:
                            link = pos[0]
                        else:
                            link = '#%s' % oneresult.name
                        mod_link = link

                dico = {'P_title': title,
                        'P_link': link,
                        'mod_P_link': mod_link,
                        'cross': '%.4g' % oneresult.xsec,
                        'error': '%.3g' % oneresult.xerru,
                        'events': oneresult.nevents/1000.0,
                        'unweighted': oneresult.nunwgt,
                        'luminosity': '%.3g' % oneresult.luminosity
                       }

                tables_line += self.table_line_template % dico

        # Summary rows: one cumulated cross-section per P* directory.
        for P_name, cross in P_grouping.items():
            dico = {'P_title': '%s sum' % P_name,
                    'P_link': './results.html',
                    'mod_P_link':'',
                    'cross': cross,
                    'error': '',
                    'events': '',
                    'unweighted': '',
                    'luminosity': ''
                   }
            tables_line += self.table_line_template % dico

        if self.name.startswith('P'):
            title = '<dt><a name=%(P)s href=../../SubProcesses/%(P)s/diagrams.html>%(P)s</a></dt><dd>' \
                                                      % {'P':self.name}
        else:
            title = ''

        dico = {'cross': self.xsec,
                'abscross': self.axsec,
                'error': self.xerru,
                'unit': unit,
                'result_type': 'Cross-Section',
                'table_lines': tables_line,
                'diagram_link': title
               }

        html_text = self.template_file % dico
        return html_text
659
660 - def write_results_dat(self, output_path):
661 """write a correctly formatted results.dat""" 662 663 def fstr(nb): 664 data = '%E' % nb 665 if data == 'NAN': 666 nb, power = 0,0 667 else: 668 nb, power = data.split('E') 669 nb = float(nb) /10 670 power = int(power) + 1 671 return '%.5fE%+03i' %(nb,power)
672 673 line = '%s %s %s %i %i %i %i %s %s %s %s %s %i\n' % (fstr(self.axsec), fstr(self.xerru), 674 fstr(self.xerrc), self.nevents, self.nw, self.maxit, self.nunwgt, 675 fstr(self.luminosity), fstr(self.wgt), fstr(self.xsec), fstr(self.maxwgt), 676 fstr(self.th_maxwgt), self.th_nunwgt) 677 fsock = open(output_path,'w') 678 fsock.writelines(line) 679 for i in range(len(self.ysec_iter)): 680 line = '%s %s %s %s %s %s\n' % (i+1, self.ysec_iter[i], self.yerr_iter[i], 681 self.eff_iter[i], self.maxwgt_iter[i], self.yasec_iter[i]) 682 fsock.writelines(line) 683 684 if self.timing: 685 text = """<lo_statistics>\n<cumulated_time> %s </cumulated_time>\n</lo_statistics>""" 686 fsock.writelines(text % self.timing)
687 688 689 690 results_header = """ 691 <head> 692 <title>Process results</title> 693 <script type="text/javascript" src="../sortable.js"></script> 694 <link rel=stylesheet href="../mgstyle.css" type="text/css"> 695 </head> 696 <body> 697 <script type="text/javascript"> 698 function UrlExists(url) { 699 var http = new XMLHttpRequest(); 700 http.open('HEAD', url, false); 701 try{ 702 http.send() 703 } 704 catch(err){ 705 return 1==2; 706 } 707 return http.status!=404; 708 } 709 </script> 710 """ 711
def collect_result(cmd, folder_names=[], jobs=None, main_dir=None):
    """Collect the results.dat of every channel of every P* directory.

    cmd          -- the madevent interface (provides run name, P and G dirs)
    folder_names -- optional channel-folder patterns ('*' is replaced by the
                    symfact channel name)
    jobs         -- optional list of job dictionaries ('p_dir', 'dirname')
    main_dir     -- optional alternative root holding the results.dat files
    Returns a Combine_results containing one Combine_results per P directory.
    """

    run = cmd.results.current['run_name']
    # Renamed from 'all' to avoid shadowing the builtin.
    all_results = Combine_results(run)

    for Pdir in cmd.get_Pdir():
        P_comb = Combine_results(Pdir)

        if jobs:
            # NOTE(review): membership is tested with 'in' (substring) here
            # but with '==' in the folder_names branch below -- confirm.
            for job in [j for j in jobs if j['p_dir'] in Pdir]:
                P_comb.add_results(os.path.basename(job['dirname']),
                                   pjoin(job['dirname'], 'results.dat'))
        elif folder_names:
            try:
                for line in open(pjoin(Pdir, 'symfact.dat')):
                    name, mfactor = line.split()
                    if float(mfactor) < 0:
                        continue
                    if os.path.exists(pjoin(Pdir, 'ajob.no_ps.log')):
                        continue

                    for folder in folder_names:
                        # Renamed from 'dir' to avoid shadowing the builtin.
                        channel_dir = (folder.replace('*', name) if 'G' in folder
                                       else folder.replace('*', '_G' + name))
                        P_comb.add_results(channel_dir,
                                           pjoin(Pdir, channel_dir, 'results.dat'),
                                           mfactor)
                if jobs:
                    for job in [j for j in jobs if j['p_dir'] == Pdir]:
                        P_comb.add_results(os.path.basename(job['dirname']),
                                           pjoin(job['dirname'], 'results.dat'))
            except IOError:
                # No symfact.dat for this P directory: skip it.
                continue
        else:
            G_dir, mfactors = cmd.get_Gdir(Pdir, symfact=True)
            for G in G_dir:
                if not folder_names:
                    if main_dir:
                        path = pjoin(main_dir, os.path.basename(Pdir),
                                     os.path.basename(G), 'results.dat')
                    else:
                        path = pjoin(G, 'results.dat')
                    P_comb.add_results(os.path.basename(G), path, mfactors[G])

        P_comb.compute_values()
        all_results.append(P_comb)
    all_results.compute_values()

    # BUGFIX: 'all_channels' was assigned inside the try block but used
    # after the except as well, so a failure raised NameError in the timing
    # reset loop below. Default to an empty list so the loop is always safe.
    all_channels = []
    try:
        all_channels = sum([list(P) for P in all_results], [])
        timings = sum(x.timing for x in all_channels)
        logger.info('sum of cpu time of last step: %s', misc.format_time(timings))
    except Exception as error:
        # Best effort only: the timing report is purely informational.
        logger.debug(str(error))

    # Reset the per-channel timers so the next step starts from zero.
    for x in all_channels:
        x.timing = 0

    return all_results
772 773
def make_all_html_results(cmd, folder_names = [], jobs=[]):
    """Collect all channel results and write the HTML summary pages.

    folder_names and jobs have been added for the amcatnlo runs.
    Returns (cross-section, uncorrelated error) of the full combination.
    """
    run = cmd.results.current['run_name']
    if not os.path.exists(pjoin(cmd.me_dir, 'HTML', run)):
        os.mkdir(pjoin(cmd.me_dir, 'HTML', run))

    unit = cmd.results.unit
    P_text = ""
    Presults = collect_result(cmd, folder_names=folder_names, jobs=jobs)

    for P_comb in Presults:
        P_text += P_comb.get_html(run, unit, cmd.me_dir)
        P_comb.compute_values()
        # For decay processes (one initial particle) also write a per-P
        # results file for this run.
        if cmd.proc_characteristics['ninitial'] == 1:
            P_comb.write_results_dat(pjoin(cmd.me_dir, 'SubProcesses',
                                           P_comb.name, '%s_results.dat' % run))

    Presults.write_results_dat(pjoin(cmd.me_dir, 'SubProcesses', 'results.dat'))

    # BUGFIX: the output handle was never closed; use a context manager so
    # results.html is flushed deterministically.
    with open(pjoin(cmd.me_dir, 'HTML', run, 'results.html'), 'w') as fsock:
        fsock.write(results_header)
        fsock.write('%s <dl>' % Presults.get_html(run, unit, cmd.me_dir))
        fsock.write('%s </dl></body>' % P_text)

    return Presults.xsec, Presults.xerru
799