Source Code for Module madgraph.interface.amcatnlo_run_interface

################################################################################
#
# Copyright (c) 2011 The MadGraph5_aMC@NLO Development team and Contributors
#
# This file is a part of the MadGraph5_aMC@NLO project, an application which
# automatically generates Feynman diagrams and matrix elements for arbitrary
# high-energy processes in the Standard Model and beyond.
#
# It is subject to the MadGraph5_aMC@NLO license which should accompany this
# distribution.
#
# For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch
#
################################################################################
  15  """A user friendly command line interface to access MadGraph5_aMC@NLO features. 
  16     Uses the cmd package for command interpretation and tab completion. 
  17  """ 
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
import atexit
import glob
import logging
import math
import optparse
import os
import pydoc
import random
import re
import shutil
import subprocess
import sys
import traceback
import time
import signal
import tarfile
import copy
import datetime
import six
# six exposes a StringIO attribute, so aliasing the module keeps later
# StringIO.StringIO(...) calls working under both Python 2 and Python 3
StringIO = six
from six.moves import range
from six.moves import zip
try:
    import cPickle as pickle
except ImportError:
    import pickle

try:
    import readline
    GNU_SPLITTING = ('GNU' in readline.__doc__)
except Exception:
    GNU_SPLITTING = True

root_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0]
root_path = os.path.split(root_path)[0]
sys.path.insert(0, os.path.join(root_path, 'bin'))

# useful shortcut
pjoin = os.path.join
# Special logger for the Cmd Interface
logger = logging.getLogger('madgraph.stdout')  # -> stdout
logger_stderr = logging.getLogger('madgraph.stderr')  # -> stderr

try:
    import madgraph
except ImportError:
    aMCatNLO = True
    import internal.extended_cmd as cmd
    import internal.common_run_interface as common_run
    import internal.banner as banner_mod
    import internal.misc as misc
    from internal import InvalidCmd, MadGraph5Error
    import internal.files as files
    import internal.cluster as cluster
    import internal.save_load_object as save_load_object
    import internal.gen_crossxhtml as gen_crossxhtml
    import internal.sum_html as sum_html
    import internal.shower_card as shower_card
    import internal.FO_analyse_card as analyse_card
    import internal.lhe_parser as lhe_parser
else:
    # import from madgraph directory
    aMCatNLO = False
    import madgraph.interface.extended_cmd as cmd
    import madgraph.interface.common_run_interface as common_run
    import madgraph.iolibs.files as files
    import madgraph.iolibs.save_load_object as save_load_object
    import madgraph.madevent.gen_crossxhtml as gen_crossxhtml
    import madgraph.madevent.sum_html as sum_html
    import madgraph.various.banner as banner_mod
    import madgraph.various.cluster as cluster
    import madgraph.various.misc as misc
    import madgraph.various.shower_card as shower_card
    import madgraph.various.FO_analyse_card as analyse_card
    import madgraph.various.lhe_parser as lhe_parser
    from madgraph import InvalidCmd, aMCatNLOError, MadGraph5Error, MG5DIR


class aMCatNLOError(Exception):
    pass


def compile_dir(*arguments):
    """compile the directory p_dir
    arguments is the tuple (me_dir, p_dir, mode, options, tests, exe, run_mode)
    this function need not be a class method in order to do
    the compilation on multicore"""

    if len(arguments) == 1:
        (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments[0]
    elif len(arguments) == 7:
        (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments
    else:
        raise aMCatNLOError('incorrect number of arguments')
    logger.info(' Compiling %s...' % p_dir)

    this_dir = pjoin(me_dir, 'SubProcesses', p_dir)

    try:
        # compile everything
        # compile and run tests
        for test in tests:
            # skip check_poles for LOonly dirs
            if test == 'check_poles' and os.path.exists(pjoin(this_dir, 'parton_lum_0.f')):
                continue
            if test == 'test_ME' or test == 'test_MC':
                test_exe = 'test_soft_col_limits'
            else:
                test_exe = test
            misc.compile([test_exe], cwd=this_dir, job_specs=False)
            input = pjoin(me_dir, '%s_input.txt' % test)
            # this can be improved/better written to handle the output
            misc.call(['./%s' % (test_exe)], cwd=this_dir,
                      stdin=open(input), stdout=open(pjoin(this_dir, '%s.log' % test), 'w'),
                      close_fds=True)
            if test == 'check_poles' and os.path.exists(pjoin(this_dir, 'MadLoop5_resources')):
                tf = tarfile.open(pjoin(this_dir, 'MadLoop5_resources.tar.gz'), 'w:gz',
                                  dereference=True)
                tf.add(pjoin(this_dir, 'MadLoop5_resources'), arcname='MadLoop5_resources')
                tf.close()

        if not options['reweightonly']:
            misc.compile(['gensym'], cwd=this_dir, job_specs=False)
            misc.call(['./gensym'], cwd=this_dir,
                      stdout=open(pjoin(this_dir, 'gensym.log'), 'w'),
                      close_fds=True)
        # compile madevent_mintMC/mintFO
        misc.compile([exe], cwd=this_dir, job_specs=False)
        if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            misc.compile(['reweight_xsec_events'], cwd=this_dir, job_specs=False)

        logger.info(' %s done.' % p_dir)
        return 0
    except MadGraph5Error as msg:
        return msg

def check_compiler(options, block=False):
    """check that the current fortran compiler is gfortran 4.6 or later.
    If block, stop the execution, otherwise just print a warning"""

    msg = 'In order to be able to run MadGraph5_aMC@NLO at NLO, you need to have ' + \
          'gfortran 4.6 or later installed.\n%s has been detected.\n' + \
          'Note that you can still run all MadEvent runs without any problem!'
    # first check that gfortran is installed
    if options['fortran_compiler']:
        compiler = options['fortran_compiler']
    elif misc.which('gfortran'):
        compiler = 'gfortran'
    else:
        compiler = ''

    if 'gfortran' not in compiler:
        if block:
            raise aMCatNLOError(msg % compiler)
        else:
            logger.warning(msg % compiler)
    else:
        curr_version = misc.get_gfortran_version(compiler)
        curr_version = curr_version.split('.')
        if len(curr_version) == 1:
            curr_version.append('0')

        if int(curr_version[0]) < 5:
            if int(curr_version[0]) == 4 and int(curr_version[1]) > 5:
                return
            # join the split version back into a printable string
            version_str = '.'.join(str(v) for v in curr_version)
            if block:
                raise aMCatNLOError(msg % (compiler + ' ' + version_str))
            else:
                logger.warning(msg % (compiler + ' ' + version_str))


#===============================================================================
# CmdExtended
#===============================================================================
class CmdExtended(common_run.CommonRunCmd):
    """Particularisation of the cmd command for aMCatNLO"""

    # suggested list of commands
    next_possibility = {
        'start': [],
    }

    debug_output = 'ME5_debug'
    error_debug = 'Please report this bug on https://bugs.launchpad.net/mg5amcnlo\n'
    error_debug += 'More information is found in \'%(debug)s\'.\n'
    error_debug += 'Please attach this file to your report.'

    config_debug = 'If you need help with this issue please contact us on https://answers.launchpad.net/mg5amcnlo\n'

    keyboard_stop_msg = """stopping all operations
            in order to quit MadGraph5_aMC@NLO please enter exit"""

    # Define the Error
    InvalidCmd = InvalidCmd
    ConfigurationError = aMCatNLOError

    def __init__(self, me_dir, options, *arg, **opt):
        """Init history and line continuation"""

        # Tag allowing/forbidding questions
        self.force = False

        # If possible, build an info line with current version number
        # and date, from the VERSION text file
        info = misc.get_pkg_info()
        info_line = ""
        if info and 'version' in info and 'date' in info:
            len_version = len(info['version'])
            len_date = len(info['date'])
            if len_version + len_date < 30:
                info_line = "#*         VERSION %s %s %s         *\n" % \
                            (info['version'],
                             (30 - len_version - len_date) * ' ',
                             info['date'])
        else:
            version = open(pjoin(root_path, 'MGMEVersion.txt')).readline().strip()
            info_line = "#*         VERSION %s %s                *\n" % \
                        (version, (24 - len(version)) * ' ')

        # Create a header for the history file.
        # Remember to fill in time at writeout time!
        self.history_header = \
            '#************************************************************\n' + \
            '#*                    MadGraph5_aMC@NLO                     *\n' + \
            '#*                                                          *\n' + \
            "#*                *                       *                 *\n" + \
            "#*                  *        * *        *                   *\n" + \
            "#*                    * * * * 5 * * * *                     *\n" + \
            "#*                  *        * *        *                   *\n" + \
            "#*                *                       *                 *\n" + \
            "#*                                                          *\n" + \
            "#*                                                          *\n" + \
            info_line + \
            "#*                                                          *\n" + \
            "#*    The MadGraph5_aMC@NLO Development Team - Find us at   *\n" + \
            "#*    https://server06.fynu.ucl.ac.be/projects/madgraph     *\n" + \
            "#*                           and                            *\n" + \
            "#*                 http://amcatnlo.cern.ch                  *\n" + \
            '#*                                                          *\n' + \
            '#************************************************************\n' + \
            '#*                                                          *\n' + \
            '#*              Command File for aMCatNLO                   *\n' + \
            '#*                                                          *\n' + \
            '#*     run as ./bin/aMCatNLO.py filename                    *\n' + \
            '#*                                                          *\n' + \
            '#************************************************************\n'

        if info_line:
            info_line = info_line[1:]

        logger.info(
            "************************************************************\n" + \
            "*                                                          *\n" + \
            "*           W E L C O M E  to  M A D G R A P H 5           *\n" + \
            "*                      a M C @ N L O                       *\n" + \
            "*                                                          *\n" + \
            "*                *                       *                 *\n" + \
            "*                  *        * *        *                   *\n" + \
            "*                    * * * * 5 * * * *                     *\n" + \
            "*                  *        * *        *                   *\n" + \
            "*                *                       *                 *\n" + \
            "*                                                          *\n" + \
            info_line + \
            "*                                                          *\n" + \
            "*    The MadGraph5_aMC@NLO Development Team - Find us at   *\n" + \
            "*                 http://amcatnlo.cern.ch                  *\n" + \
            "*                                                          *\n" + \
            "*               Type 'help' for in-line help.              *\n" + \
            "*                                                          *\n" + \
            "************************************************************")
        super(CmdExtended, self).__init__(me_dir, options, *arg, **opt)

    def get_history_header(self):
        """return the history header"""
        return self.history_header % misc.get_time_info()

    def stop_on_keyboard_stop(self):
        """action to perform to close nicely on a keyboard interrupt"""
        try:
            if hasattr(self, 'cluster'):
                logger.info('rm jobs on queue')
                self.cluster.remove()
            if hasattr(self, 'results'):
                self.update_status('Stop by the user', level=None, makehtml=True, error=True)
                self.add_error_log_in_html(KeyboardInterrupt)
        except Exception:
            pass

    def postcmd(self, stop, line):
        """Update the status of the run when finishing an interactive command"""

        # relaxing the tag forbidding questions
        self.force = False

        if not self.use_rawinput:
            return stop

        arg = line.split()
        if len(arg) == 0:
            return stop
        elif str(arg[0]) in ['exit', 'quit', 'EOF']:
            return stop

        try:
            self.update_status('Command \'%s\' done.<br> Waiting for instruction.' % arg[0],
                               level=None, error=True)
        except Exception:
            misc.sprint('self.update_status fails', log=logger)
            pass

    def nice_user_error(self, error, line):
        """If a ME run is currently running add a link in the html output"""

        self.add_error_log_in_html()
        cmd.Cmd.nice_user_error(self, error, line)

    def nice_config_error(self, error, line):
        """If a ME run is currently running add a link in the html output"""

        self.add_error_log_in_html()
        cmd.Cmd.nice_config_error(self, error, line)

    def nice_error_handling(self, error, line):
        """If a ME run is currently running add a link in the html output"""

        self.add_error_log_in_html()
        cmd.Cmd.nice_error_handling(self, error, line)


#===============================================================================
# HelpToCmd
#===============================================================================
class HelpToCmd(object):
    """The series of help routines for the aMCatNLOCmd"""

    def help_launch(self):
        """help for launch command"""
        _launch_parser.print_help()

    def help_banner_run(self):
        logger.info("syntax: banner_run Path|RUN [--run_options]")
        logger.info("-- Reproduce a run following a given banner")
        logger.info("   One of the following arguments is required:")
        logger.info("   Path should be the path of a valid banner.")
        logger.info("   RUN should be the name of a run of the current directory")
        self.run_options_help([('-f', 'answer all questions by default'),
                               ('--name=X', 'Define the name associated with the new run')])

    def help_compile(self):
        """help for compile command"""
        _compile_parser.print_help()

    def help_generate_events(self):
        """help for generate_events command
        just call help_launch"""
        _generate_events_parser.print_help()

    def help_calculate_xsect(self):
        """help for calculate_xsect command"""
        _calculate_xsect_parser.print_help()

    def help_shower(self):
        """help for shower command"""
        _shower_parser.print_help()

    def help_open(self):
        logger.info("syntax: open FILE")
        logger.info("-- open a file with the appropriate editor.")
        logger.info('   If FILE belongs to index.html, param_card.dat, run_card.dat')
        logger.info('   the path to the last created/used directory is used')

    def run_options_help(self, data):
        if data:
            logger.info('-- local options:')
            for name, info in data:
                logger.info('      %s : %s' % (name, info))

        logger.info("-- session options:")
        logger.info("      Note that those options will be kept for the current session")
        logger.info("      --cluster : Submit to the cluster. Current cluster: %s" % self.options['cluster_type'])
        logger.info("      --multicore : Run in multi-core configuration")
        logger.info("      --nb_core=X : limit the number of cores to use to X.")


#===============================================================================
# CheckValidForCmd
#===============================================================================
class CheckValidForCmd(object):
    """The series of check routines for the aMCatNLOCmd"""

    def check_shower(self, args, options):
        """Check the validity of the line. args[0] is the run_directory"""

        if options['force']:
            self.force = True

        if len(args) == 0:
            self.help_shower()
            raise self.InvalidCmd('Invalid syntax, please specify the run name')
        if not os.path.isdir(pjoin(self.me_dir, 'Events', args[0])):
            raise self.InvalidCmd('Directory %s does not exist' % \
                                  pjoin(os.getcwd(), 'Events', args[0]))

        self.set_run_name(args[0], level='shower')
        args[0] = pjoin(self.me_dir, 'Events', args[0])

    def check_plot(self, args):
        """Check the arguments for the plot command
        plot run_name modes"""

        madir = self.options['madanalysis_path']
        td = self.options['td_path']

        if not madir or not td:
            logger.info('Retrying to read the configuration file to find madanalysis/td')
            self.set_configuration()

        madir = self.options['madanalysis_path']
        td = self.options['td_path']

        if not madir:
            error_msg = 'No MadAnalysis path correctly set. '
            error_msg += 'Please use the set command to define the path and retry. '
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)
        if not td:
            error_msg = 'No path to the td directory correctly set. '
            error_msg += 'Please use the set command to define the path and retry. '
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)

        if len(args) == 0:
            if not hasattr(self, 'run_name') or not self.run_name:
                self.help_plot()
                raise self.InvalidCmd('No run name currently defined. Please add this information.')
            args.append('all')
            return

        if args[0] not in self._plot_mode:
            self.set_run_name(args[0], level='plot')
            del args[0]
            if len(args) == 0:
                args.append('all')
        elif not self.run_name:
            self.help_plot()
            raise self.InvalidCmd('No run name currently defined. Please add this information.')

        for arg in args:
            if arg not in self._plot_mode and arg != self.run_name:
                self.help_plot()
                raise self.InvalidCmd('unknown option %s' % arg)

    def check_pgs(self, arg):
        """Check the arguments for the pgs command
        syntax: pgs [NAME]
        Note that other options are already removed at this point
        """

        # If no pythia-pgs path
        if not self.options['pythia-pgs_path']:
            logger.info('Retrying to read the configuration file to find the pythia-pgs path')
            self.set_configuration()

        if not self.options['pythia-pgs_path'] or not \
                os.path.exists(pjoin(self.options['pythia-pgs_path'], 'src')):
            error_msg = 'No pythia-pgs path correctly set. '
            error_msg += 'Please use the set command to define the path and retry. '
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)

        tag = [a for a in arg if a.startswith('--tag=')]
        if tag:
            arg.remove(tag[0])
            tag = tag[0][6:]

        if len(arg) == 0 and not self.run_name:
            if self.results.lastrun:
                arg.insert(0, self.results.lastrun)
            else:
                raise self.InvalidCmd('No run name currently defined. Please add this information.')

        if len(arg) == 1 and self.run_name == arg[0]:
            arg.pop(0)

        if not len(arg) and \
                not os.path.exists(pjoin(self.me_dir, 'Events', 'pythia_events.hep')):
            self.help_pgs()
            raise self.InvalidCmd('''No file pythia_events.hep currently available
            Please specify a valid run_name''')

        lock = None
        if len(arg) == 1:
            prev_tag = self.set_run_name(arg[0], tag, 'pgs')
            filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name))

            if not filenames:
                raise self.InvalidCmd('No events file corresponding to %s run with tag %s. ' % (self.run_name, prev_tag))
            else:
                input_file = filenames[0]
                output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
                lock = cluster.asyncrone_launch('gunzip', stdout=open(output_file, 'w'),
                                                argument=['-c', input_file],
                                                close_fds=True)
        else:
            if tag:
                self.run_card['run_tag'] = tag
            self.set_run_name(self.run_name, tag, 'pgs')

        return lock

    def check_delphes(self, arg):
        """Check the arguments for the delphes command
        syntax: delphes [NAME]
        Note that other options are already removed at this point
        """

        # If no delphes path
        if not self.options['delphes_path']:
            logger.info('Retrying to read the configuration file to find the delphes path')
            self.set_configuration()

        if not self.options['delphes_path']:
            error_msg = 'No delphes path correctly set. '
            error_msg += 'Please use the set command to define the path and retry. '
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)

        tag = [a for a in arg if a.startswith('--tag=')]
        if tag:
            arg.remove(tag[0])
            tag = tag[0][6:]

        if len(arg) == 0 and not self.run_name:
            if self.results.lastrun:
                arg.insert(0, self.results.lastrun)
            else:
                raise self.InvalidCmd('No run name currently defined. Please add this information.')

        if len(arg) == 1 and self.run_name == arg[0]:
            arg.pop(0)

        if not len(arg) and \
                not os.path.exists(pjoin(self.me_dir, 'Events', 'pythia_events.hep')):
            self.help_pgs()
            raise self.InvalidCmd('''No file pythia_events.hep currently available
            Please specify a valid run_name''')

        if len(arg) == 1:
            prev_tag = self.set_run_name(arg[0], tag, 'delphes')
            filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events'))

            if not filenames:
                raise self.InvalidCmd('No events file corresponding to %s run with tag %s.:%s ' \
                                      % (self.run_name, prev_tag,
                                         pjoin(self.me_dir, 'Events', self.run_name, '%s_pythia_events.hep.gz' % prev_tag)))
            else:
                input_file = filenames[0]
                output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
                lock = cluster.asyncrone_launch('gunzip', stdout=open(output_file, 'w'),
                                                argument=['-c', input_file],
                                                close_fds=True)
        else:
            if tag:
                self.run_card['run_tag'] = tag
            self.set_run_name(self.run_name, tag, 'delphes')

    def check_calculate_xsect(self, args, options):
        """check the validity of the line. args is ORDER,
        ORDER being LO or NLO. If no mode is passed, NLO is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if options['force']:
            self.force = True

        if not args:
            args.append('NLO')
            return

        if len(args) > 1:
            self.help_calculate_xsect()
            raise self.InvalidCmd('Invalid syntax: too many arguments')

        elif len(args) == 1:
            if not args[0] in ['NLO', 'LO']:
                raise self.InvalidCmd('%s is not a valid mode, please use "LO" or "NLO"' % args[0])
            mode = args[0]

        # check for incompatible options/modes
        if options['multicore'] and options['cluster']:
            raise self.InvalidCmd('options -m (--multicore) and -c (--cluster)' + \
                                  ' are not compatible. Please choose one.')

    def check_generate_events(self, args, options):
        """check the validity of the line. args is ORDER,
        ORDER being LO or NLO. If no mode is passed, NLO is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if not args:
            args.append('NLO')
            return

        if len(args) > 1:
            self.help_generate_events()
            raise self.InvalidCmd('Invalid syntax: too many arguments')

        elif len(args) == 1:
            if not args[0] in ['NLO', 'LO']:
                raise self.InvalidCmd('%s is not a valid mode, please use "LO" or "NLO"' % args[0])
            mode = args[0]

        # check for incompatible options/modes
        if options['multicore'] and options['cluster']:
            raise self.InvalidCmd('options -m (--multicore) and -c (--cluster)' + \
                                  ' are not compatible. Please choose one.')

    def check_banner_run(self, args):
        """check the validity of the line"""

        if len(args) == 0:
            self.help_banner_run()
            raise self.InvalidCmd('banner_run requires at least one argument.')

        tag = [a[6:] for a in args if a.startswith('--tag=')]

        if os.path.exists(args[0]):
            type = 'banner'
            format = self.detect_card_type(args[0])
            if format != 'banner':
                raise self.InvalidCmd('The file is not a valid banner.')
        elif tag:
            args[0] = pjoin(self.me_dir, 'Events', args[0], '%s_%s_banner.txt' % \
                            (args[0], tag))
            if not os.path.exists(args[0]):
                raise self.InvalidCmd('No banner associated with this name and tag.')
        else:
            name = args[0]
            type = 'run'
            banners = misc.glob('*_banner.txt', pjoin(self.me_dir, 'Events', args[0]))
            if not banners:
                raise self.InvalidCmd('No banner associated with this name.')
            elif len(banners) == 1:
                args[0] = banners[0]
            else:
                # list the tags and propose those to the user
                tags = [os.path.basename(p)[len(args[0]) + 1:-11] for p in banners]
                tag = self.ask('which tag do you want to use?', tags[0], tags)
                args[0] = pjoin(self.me_dir, 'Events', args[0], '%s_%s_banner.txt' % \
                                (args[0], tag))

        run_name = [arg[7:] for arg in args if arg.startswith('--name=')]
        if run_name:
            try:
                self.exec_cmd('remove %s all banner -f' % run_name)
            except Exception:
                pass
            self.set_run_name(args[0], tag=None, level='parton', reload_card=True)
        elif type == 'banner':
            self.set_run_name(self.find_available_run_name(self.me_dir))
        elif type == 'run':
            if not self.results[name].is_empty():
                run_name = self.find_available_run_name(self.me_dir)
                logger.info('Run %s is not empty so will use run_name: %s' % \
                            (name, run_name))
                self.set_run_name(run_name)
            else:
                try:
                    self.exec_cmd('remove %s all banner -f' % run_name)
                except Exception:
                    pass
                self.set_run_name(name)

    def check_launch(self, args, options):
        """check the validity of the line. args is MODE
        MODE being LO, NLO, aMC@NLO or aMC@LO. If no mode is passed, auto is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if options['force']:
            self.force = True

        if not args:
            args.append('auto')
            return

        if len(args) > 1:
            self.help_launch()
            raise self.InvalidCmd('Invalid syntax: too many arguments')

        elif len(args) == 1:
            if not args[0] in ['LO', 'NLO', 'aMC@NLO', 'aMC@LO', 'auto']:
                raise self.InvalidCmd('%s is not a valid mode, please use "LO", "NLO", "aMC@NLO" or "aMC@LO"' % args[0])
            mode = args[0]

        # check for incompatible options/modes
        if options['multicore'] and options['cluster']:
            raise self.InvalidCmd('options -m (--multicore) and -c (--cluster)' + \
                                  ' are not compatible. Please choose one.')
        if mode == 'NLO' and options['reweightonly']:
            raise self.InvalidCmd('option -r (--reweightonly) needs mode "aMC@NLO" or "aMC@LO"')

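    # ----------------------------------------------------------------------
    # Illustrative note (not part of the module): command lines accepted by
    # check_launch above, as typed at the interactive prompt:
    #
    #   launch                  -> mode 'auto' (interactive switch menu)
    #   launch aMC@NLO -m       -> NLO+PS event generation on multicore
    #   launch LO -f            -> fixed-order LO run, no questions asked
    #
    # The -m/--multicore and -c/--cluster options are mutually exclusive, and
    # -r/--reweightonly requires one of the event-generation modes.
    # ----------------------------------------------------------------------
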
    def check_compile(self, args, options):
        """check the validity of the line. args is MODE
        MODE being FO or MC. If no mode is passed, MC is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if options['force']:
            self.force = True

        if not args:
            args.append('MC')
            return

        if len(args) > 1:
            self.help_compile()
            raise self.InvalidCmd('Invalid syntax: too many arguments')

        elif len(args) == 1:
            if not args[0] in ['MC', 'FO']:
                raise self.InvalidCmd('%s is not a valid mode, please use "FO" or "MC"' % args[0])
            mode = args[0]

        # check for incompatible options/modes


#===============================================================================
# CompleteForCmd
#===============================================================================
class CompleteForCmd(CheckValidForCmd):
    """The series of completion routines for the MadGraphCmd"""

    def complete_launch(self, text, line, begidx, endidx):
        """auto-completion for launch command"""

        args = self.split_arg(line[0:begidx])
        if len(args) == 1:
            # return mode
            return self.list_completion(text, ['LO', 'NLO', 'aMC@NLO', 'aMC@LO'], line)
        elif len(args) == 2 and line[begidx - 1] == '@':
            return self.list_completion(text, ['LO', 'NLO'], line)
        else:
            opts = []
            for opt in _launch_parser.option_list:
                opts += opt._long_opts + opt._short_opts
            return self.list_completion(text, opts, line)

    def complete_banner_run(self, text, line, begidx, endidx, formatting=True):
        "Complete the banner run command"
        try:
            args = self.split_arg(line[0:begidx], error=False)

            if args[-1].endswith(os.path.sep):
                return self.path_completion(text,
                                            os.path.join('.', *[a for a in args
                                                                if a.endswith(os.path.sep)]))

            if len(args) > 1:
                # only options are possible
                tags = misc.glob('%s_*_banner.txt' % args[1], pjoin(self.me_dir, 'Events', args[1]))
                tags = ['%s' % os.path.basename(t)[len(args[1]) + 1:-11] for t in tags]

                if args[-1] != '--tag=':
                    tags = ['--tag=%s' % t for t in tags]
                else:
                    return self.list_completion(text, tags)
                return self.list_completion(text, tags + ['--name=', '-f'], line)

            # First argument
            possibilites = {}

            comp = self.path_completion(text, os.path.join('.', *[a for a in args
                                                                  if a.endswith(os.path.sep)]))
            if os.path.sep in line:
                return comp
            else:
                possibilites['Path from ./'] = comp

            run_list = misc.glob(pjoin('*', '*_banner.txt'), pjoin(self.me_dir, 'Events'))
            run_list = [n.rsplit('/', 2)[1] for n in run_list]
            possibilites['RUN Name'] = self.list_completion(text, run_list)

            return self.deal_multiple_categories(possibilites, formatting)

        except Exception as error:
            print(error)

    def complete_compile(self, text, line, begidx, endidx):
        """auto-completion for compile command"""

        args = self.split_arg(line[0:begidx])
        if len(args) == 1:
            # return mode
            return self.list_completion(text, ['FO', 'MC'], line)
        else:
            opts = []
            for opt in _compile_parser.option_list:
                opts += opt._long_opts + opt._short_opts
            return self.list_completion(text, opts, line)

    def complete_calculate_xsect(self, text, line, begidx, endidx):
        """auto-completion for calculate_xsect command"""

        args = self.split_arg(line[0:begidx])
        if len(args) == 1:
            # return mode
            return self.list_completion(text, ['LO', 'NLO'], line)
        else:
            opts = []
            for opt in _calculate_xsect_parser.option_list:
                opts += opt._long_opts + opt._short_opts
            return self.list_completion(text, opts, line)

    def complete_generate_events(self, text, line, begidx, endidx):
        """auto-completion for generate_events command
        just calls the completion for launch"""
        return self.complete_launch(text, line, begidx, endidx)

    def complete_shower(self, text, line, begidx, endidx):
        args = self.split_arg(line[0:begidx])
        if len(args) == 1:
            # return valid run_name
            data = misc.glob(pjoin('*', 'events.lhe.gz'), pjoin(self.me_dir, 'Events'))
            data = [n.rsplit('/', 2)[1] for n in data]
            tmp1 = self.list_completion(text, data)
            if not self.run_name:
                return tmp1

    def complete_plot(self, text, line, begidx, endidx):
        """Complete the plot command"""

        args = self.split_arg(line[0:begidx], error=False)

        if len(args) == 1:
            # return valid run_name
            data = misc.glob(pjoin('*', 'events.lhe*'), pjoin(self.me_dir, 'Events'))
            data = [n.rsplit('/', 2)[1] for n in data]
            tmp1 = self.list_completion(text, data)
            if not self.run_name:
                return tmp1

        if len(args) > 1:
            return self.list_completion(text, self._plot_mode)

    def complete_pgs(self, text, line, begidx, endidx):
        "Complete the pgs command"
        args = self.split_arg(line[0:begidx], error=False)
        if len(args) == 1:
            # return valid run_name
            data = misc.glob(pjoin('*', 'events_*.hep.gz'),
                             pjoin(self.me_dir, 'Events'))
            data = [n.rsplit('/', 2)[1] for n in data]
            tmp1 = self.list_completion(text, data)
            if not self.run_name:
                return tmp1
            else:
                tmp2 = self.list_completion(text, self._run_options + ['-f',
                                            '--tag=', '--no_default'], line)
                return tmp1 + tmp2
        else:
            return self.list_completion(text, self._run_options + ['-f',
                                        '--tag=', '--no_default'], line)

    complete_delphes = complete_pgs


class aMCatNLOAlreadyRunning(InvalidCmd):
    pass

class AskRunNLO(cmd.ControlSwitch):

    to_control = [('order', 'Type of perturbative computation'),
                  ('fixed_order', 'No MC@[N]LO matching / event generation'),
                  ('shower', 'Shower the generated events'),
                  ('madspin', 'Decay onshell particles'),
                  ('reweight', 'Add weights to events for new hypotheses'),
                  ('madanalysis', 'Run MadAnalysis5 on the events generated')]

    quit_on = cmd.ControlSwitch.quit_on + ['onlyshower']

    def __init__(self, question, line_args=[], mode=None, force=False,
                 *args, **opt):

        self.me_dir = opt['mother_interface'].me_dir
        self.check_available_module(opt['mother_interface'].options)
        self.last_mode = opt['mother_interface'].last_mode
        self.proc_characteristics = opt['mother_interface'].proc_characteristics
        self.run_card = banner_mod.RunCard(pjoin(self.me_dir, 'Cards', 'run_card.dat'),
                                           consistency='warning')
        super(AskRunNLO, self).__init__(self.to_control, opt['mother_interface'],
                                        *args, **opt)

    @property
    def answer(self):

        out = super(AskRunNLO, self).answer
        if out['shower'] == 'HERWIG7':
            out['shower'] = 'HERWIGPP'

        if out['shower'] not in self.get_allowed('shower') or out['shower'] == 'OFF':
            out['runshower'] = False
        else:
            out['runshower'] = True
        return out

    def check_available_module(self, options):

        self.available_module = set()
        if options['madanalysis5_path']:
            self.available_module.add('MA5')
        if not aMCatNLO or ('mg5_path' in options and options['mg5_path']):
            self.available_module.add('MadSpin')
        if misc.has_f2py() or options['f2py_compiler']:
            self.available_module.add('reweight')
        if options['pythia8_path']:
            self.available_module.add('PY8')
        if options['hwpp_path'] and options['thepeg_path'] and options['hepmc_path']:
            self.available_module.add('HW7')

        MCatNLO_libdir = pjoin(self.me_dir, 'MCatNLO', 'lib')
        if os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libstdhep.a'))):
            self.available_module.add('StdHEP')

    #
    # shortcuts
    #
    def ans_lo(self, value):
        """function called if the user types lo=value, or lo (then value is None)"""

        if value is None:
            self.switch['order'] = 'LO'
            self.switch['fixed_order'] = 'ON'
            self.set_switch('shower', 'OFF')
        else:
            logger.warning('Invalid command: lo=%s' % value)

    def ans_nlo(self, value):
        if value is None:
            self.switch['order'] = 'NLO'
            self.switch['fixed_order'] = 'ON'
            self.set_switch('shower', 'OFF')
        else:
            logger.warning('Invalid command: nlo=%s' % value)

    def ans_amc__at__nlo(self, value):
        if value is None:
            self.switch['order'] = 'NLO'
            self.switch['fixed_order'] = 'OFF'
            self.set_switch('shower', 'ON')
        else:
            logger.warning('Invalid command: aMC@NLO=%s' % value)

    def ans_amc__at__lo(self, value):
        if value is None:
            self.switch['order'] = 'LO'
            self.switch['fixed_order'] = 'OFF'
            self.set_switch('shower', 'ON')
        else:
            logger.warning('Invalid command: aMC@LO=%s' % value)

    def ans_noshower(self, value):
        if value is None:
            self.switch['order'] = 'NLO'
            self.switch['fixed_order'] = 'OFF'
            self.set_switch('shower', 'OFF')
        else:
            logger.warning('Invalid command: noshower=%s' % value)

    def ans_onlyshower(self, value):
        if value is None:
            self.switch['mode'] = 'onlyshower'
            self.switch['madspin'] = 'OFF'
            self.switch['reweight'] = 'OFF'
        else:
            logger.warning('Invalid command: onlyshower=%s' % value)

    def ans_noshowerlo(self, value):
        if value is None:
            self.switch['order'] = 'LO'
            self.switch['fixed_order'] = 'OFF'
            self.set_switch('shower', 'OFF')
        else:
            logger.warning('Invalid command: noshowerlo=%s' % value)

    def ans_madanalysis5(self, value):
        """shortcut madanalysis5 -> madanalysis"""

        if value is None:
            return self.onecmd('madanalysis')
        else:
            self.set_switch('madanalysis', value)

    #
    # ORDER
    #
    def get_allowed_order(self):
        return ["LO", "NLO"]

    def set_default_order(self):

        if self.last_mode in ['LO', 'aMC@LO', 'noshowerLO']:
            self.switch['order'] = 'LO'
            return
        self.switch['order'] = 'NLO'

    def set_switch_off_order(self):
        return

    #
    # Fixed order
    #
    def get_allowed_fixed_order(self):
        """ """
        if self.proc_characteristics['ninitial'] == 1:
            return ['ON']
        else:
            return ['ON', 'OFF']

    def set_default_fixed_order(self):

        if self.last_mode in ['LO', 'NLO']:
            self.switch['fixed_order'] = 'ON'
            return
        if self.proc_characteristics['ninitial'] == 1:
            self.switch['fixed_order'] = 'ON'
        else:
            self.switch['fixed_order'] = 'OFF'

    def color_for_fixed_order(self, switch_value):

        if switch_value in ['OFF']:
            return self.green % switch_value
        else:
            return self.red % switch_value

    def color_for_shower(self, switch_value):

        if switch_value in ['ON']:
            return self.green % switch_value
        elif switch_value in self.get_allowed('shower'):
            return self.green % switch_value
        else:
            return self.red % switch_value

    def consistency_fixed_order_shower(self, vfix, vshower):
        """consistency_XX_YY(val_XX, val_YY)
           -> XX is the new key set by the user to a new value val_XX
           -> YY is another key set by the user.
           -> return value should be None or "replace_YY"
        """

        if vfix == 'ON' and vshower != 'OFF':
            return 'OFF'
        return None

    consistency_fixed_order_madspin = consistency_fixed_order_shower
    consistency_fixed_order_reweight = consistency_fixed_order_shower

    def consistency_fixed_order_madanalysis(self, vfix, vma5):

        if vfix == 'ON' and vma5 == 'ON':
            return 'OFF'
        return None

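    # ----------------------------------------------------------------------
    # Illustrative sketch (not part of the module): the consistency_XX_YY
    # hooks implement a small protocol used by cmd.ControlSwitch. When the
    # user sets key XX to val_XX while key YY currently holds val_YY, the
    # hook returns None to keep YY, or the replacement value for YY. The
    # switch state and the 'ask' instance below are hypothetical.
    #
    #   switch = {'fixed_order': 'ON', 'shower': 'PYTHIA8'}
    #   new = ask.consistency_fixed_order_shower('ON', switch['shower'])
    #   if new is not None:
    #       switch['shower'] = new   # -> 'OFF': a fixed-order run cannot
    #                                #    be showered
    # ----------------------------------------------------------------------
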
    def consistency_shower_fixed_order(self, vshower, vfix):
        """consistency_XX_YY(val_XX, val_YY)
           -> XX is the new key set by the user to a new value val_XX
           -> YY is another key set by the user.
           -> return value should be None or "replace_YY"
        """

        if vshower != 'OFF' and vfix == 'ON':
            return 'OFF'
        return None

    consistency_madspin_fixed_order = consistency_shower_fixed_order
    consistency_reweight_fixed_order = consistency_shower_fixed_order
    consistency_madanalysis_fixed_order = consistency_shower_fixed_order

    #
    # Shower
    #
    def get_allowed_shower(self):
        """ """

        if hasattr(self, 'allowed_shower'):
            return self.allowed_shower

        if not misc.which('bc'):
            return ['OFF']

        if self.proc_characteristics['ninitial'] == 1:
            self.allowed_shower = ['OFF']
            return ['OFF']
        else:
            if 'StdHEP' in self.available_module:
                allowed = ['HERWIG6', 'OFF', 'PYTHIA6Q', 'PYTHIA6PT']
            else:
                allowed = ['OFF']
            if 'PY8' in self.available_module:
                allowed.append('PYTHIA8')
            if 'HW7' in self.available_module:
                allowed.append('HERWIGPP')

            self.allowed_shower = allowed

            return allowed

    def check_value_shower(self, value):
        """ """

        if value.upper() in self.get_allowed_shower():
            return True
        if value.upper() in ['PYTHIA8', 'HERWIGPP']:
            return True
        if value.upper() == 'ON':
            return self.run_card['parton_shower']
        if value.upper() in ['P8', 'PY8', 'PYTHIA_8']:
            return 'PYTHIA8'
        if value.upper() in ['PY6', 'P6', 'PY6PT', 'PYTHIA_6', 'PYTHIA_6PT', 'PYTHIA6PT', 'PYTHIA6_PT']:
            return 'PYTHIA6PT'
        if value.upper() in ['PY6Q', 'PYTHIA_6Q', 'PYTHIA6Q', 'PYTHIA6_Q']:
            return 'PYTHIA6Q'
        if value.upper() in ['HW7', 'HERWIG7']:
            return 'HERWIG7'
        if value.upper() in ['HW++', 'HWPP', 'HERWIG++']:
            return 'HERWIGPP'
        if value.upper() in ['HW6', 'HERWIG_6']:
            return 'HERWIG6'


    def set_default_shower(self):

        if self.last_mode in ['LO', 'NLO', 'noshower', 'noshowerLO']:
            self.switch['shower'] = 'OFF'
            return

        if self.proc_characteristics['ninitial'] == 1:
            self.switch['shower'] = 'OFF'
            return

        if not misc.which('bc'):
            logger.warning('bc command not available. This forbids running the shower. '
                           'Please install it if you want to run the shower (e.g. sudo apt-get install bc).')
            self.switch['shower'] = 'OFF'
            return

        if os.path.exists(pjoin(self.me_dir, 'Cards', 'shower_card.dat')):
            self.switch['shower'] = self.run_card['parton_shower']
            #self.switch['shower'] = 'ON'
            self.switch['fixed_order'] = "OFF"
        else:
            self.switch['shower'] = 'OFF'

    def consistency_shower_madanalysis(self, vshower, vma5):
        """MA5 only possible with (N)LO+PS if shower is run"""

        if vshower == 'OFF' and vma5 == 'ON':
            return 'OFF'
        return None

    def consistency_madanalysis_shower(self, vma5, vshower):

        if vma5 == 'ON' and vshower == 'OFF':
            return 'ON'
        return None

    def get_cardcmd_for_shower(self, value):
        """adapt the run_card according to this setup. return the list of commands to run"""

        if value != 'OFF':
            return ['set parton_shower %s' % self.switch['shower']]
        return []

    #
    # madspin
    #
    def get_allowed_madspin(self):
        """ """

        if hasattr(self, 'allowed_madspin'):
            return self.allowed_madspin

        self.allowed_madspin = []

        if 'MadSpin' not in self.available_module:
            return self.allowed_madspin
        if self.proc_characteristics['ninitial'] == 1:
            self.available_module.remove('MadSpin')
            self.allowed_madspin = ['OFF']
            return self.allowed_madspin
        else:
            self.allowed_madspin = ['OFF', 'ON', 'onshell']
            return self.allowed_madspin

    def check_value_madspin(self, value):
        """handle aliases and valid options not present in get_allowed_madspin;
        remember that this mode should always be OFF for 1>N ('ON' not in allowed values)"""

        if value.upper() in self.get_allowed_madspin():
            if value == value.upper():
                return True
            else:
                return value.upper()
        elif value.lower() in self.get_allowed_madspin():
            if value == value.lower():
                return True
            else:
                return value.lower()

        if 'MadSpin' not in self.available_module or \
                'ON' not in self.get_allowed_madspin():
            return False

        if value.lower() in ['madspin', 'full']:
            return 'full'
        elif value.lower() in ['none']:
            return 'none'

    def set_default_madspin(self):

        if 'MadSpin' in self.available_module:
            if os.path.exists(pjoin(self.me_dir, 'Cards', 'madspin_card.dat')):
                self.switch['madspin'] = 'ON'
            else:
                self.switch['madspin'] = 'OFF'
        else:
            self.switch['madspin'] = 'Not Avail.'

    def get_cardcmd_for_madspin(self, value):
        """set some commands to run before allowing the user to modify the cards."""

        if value == 'onshell':
            return ["edit madspin_card --replace_line='set spinmode' --before_line='decay' set spinmode onshell"]
        elif value in ['full', 'madspin']:
            return ["edit madspin_card --replace_line='set spinmode' --before_line='decay' set spinmode madspin"]
        elif value == 'none':
            return ["edit madspin_card --replace_line='set spinmode' --before_line='decay' set spinmode none"]
        else:
            return []

    #
    # reweight
    #
    def get_allowed_reweight(self):
        """set the valid (visible) options for reweight"""

        if hasattr(self, 'allowed_reweight'):
            return getattr(self, 'allowed_reweight')

        self.allowed_reweight = []
        if 'reweight' not in self.available_module:
            return self.allowed_reweight
        if self.proc_characteristics['ninitial'] == 1:
            self.available_module.remove('reweight')
            self.allowed_reweight.append('OFF')
            return self.allowed_reweight
        else:
            self.allowed_reweight = ['OFF', 'ON', 'NLO', 'NLO_TREE', 'LO']
            return self.allowed_reweight

    def set_default_reweight(self):
        """initialise the switch for reweight"""

        if 'reweight' in self.available_module:
            if os.path.exists(pjoin(self.me_dir, 'Cards', 'reweight_card.dat')):
                self.switch['reweight'] = 'ON'
            else:
                self.switch['reweight'] = 'OFF'
        else:
            self.switch['reweight'] = 'Not Avail.'

    def get_cardcmd_for_reweight(self, value):
        """adapt the run_card according to this setup. return the list of commands to run"""

        if value == 'LO':
            return ["edit reweight_card --replace_line='change mode' --before_line='launch' change mode LO"]
        elif value == 'NLO':
            return ["edit reweight_card --replace_line='change mode' --before_line='launch' change mode NLO",
                    "set store_rwgt_info T"]
        elif value == 'NLO_TREE':
            return ["edit reweight_card --replace_line='change mode' --before_line='launch' change mode NLO_tree",
                    "set store_rwgt_info T"]
        return []

    #
    # MadAnalysis5
    #
    def get_allowed_madanalysis(self):

        if hasattr(self, 'allowed_madanalysis'):
            return self.allowed_madanalysis

        self.allowed_madanalysis = []

        if 'MA5' not in self.available_module:
            return self.allowed_madanalysis

        if self.proc_characteristics['ninitial'] == 1:
            self.available_module.remove('MA5')
            self.allowed_madanalysis = ['OFF']
            return self.allowed_madanalysis
        else:
            self.allowed_madanalysis = ['OFF', 'ON']
            return self.allowed_madanalysis

    def set_default_madanalysis(self):
        """initialise the switch for madanalysis"""

        if 'MA5' not in self.available_module:
            self.switch['madanalysis'] = 'Not Avail.'
        elif os.path.exists(pjoin(self.me_dir, 'Cards', 'madanalysis5_hadron_card.dat')):
            self.switch['madanalysis'] = 'ON'
        else:
            self.switch['madanalysis'] = 'OFF'

    def check_value_madanalysis(self, value):
        """check that an entry is valid. return the valid entry in case of a shortcut"""

        if value.upper() in self.get_allowed('madanalysis'):
            return True
        value = value.lower()
        if value == 'hadron':
            return 'ON' if 'ON' in self.get_allowed_madanalysis() else False
        else:
            return False


#===============================================================================
# aMCatNLOCmd
#===============================================================================
class aMCatNLOCmd(CmdExtended, HelpToCmd, CompleteForCmd, common_run.CommonRunCmd):
    """The command line processor of MadGraph"""

    # Truth values
    true = ['T', '.true.', True, 'true']
    # Options and formats available
    _run_options = ['--cluster', '--multicore', '--nb_core=', '--nb_core=2', '-c', '-m']
    _generate_options = ['-f', '--laststep=parton', '--laststep=pythia', '--laststep=pgs', '--laststep=delphes']
    _calculate_decay_options = ['-f', '--accuracy=0.']
    _set_options = ['stdout_level', 'fortran_compiler', 'cpp_compiler', 'timeout']
    _plot_mode = ['all', 'parton', 'shower', 'pgs', 'delphes']
    _clean_mode = _plot_mode + ['channel', 'banner']
    _display_opts = ['run_name', 'options', 'variable']
    # survey options, dict from name to type, default value, and help text
    # Variables to store object information
    web = False
    cluster_mode = 0
    queue = 'madgraph'
    nb_core = None
    make_opts_var = {}

    next_possibility = {
        'start': ['generate_events [OPTIONS]', 'calculate_crossx [OPTIONS]', 'launch [OPTIONS]',
                  'help generate_events'],
        'generate_events': ['generate_events [OPTIONS]', 'shower'],
        'launch': ['launch [OPTIONS]', 'shower'],
        'shower': ['generate_events [OPTIONS]']
    }

    ############################################################################
    def __init__(self, me_dir=None, options={}, *completekey, **stdin):
        """add information to the cmd"""

        self.start_time = 0
        CmdExtended.__init__(self, me_dir, options, *completekey, **stdin)
        #common_run.CommonRunCmd.__init__(self, me_dir, options)

        self.mode = 'aMCatNLO'
        self.nb_core = 0
        self.prompt = "%s>" % os.path.basename(pjoin(self.me_dir))

        self.load_results_db()
        self.results.def_web_mode(self.web)
        # check that the compiler is gfortran 4.6 or later if virtuals have been exported
        proc_card = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read()

        if not '[real=QCD]' in proc_card:
            check_compiler(self.options, block=True)

    ############################################################################
    def do_shower(self, line):
        """run the shower on a given parton level file"""
        argss = self.split_arg(line)
        (options, argss) = _launch_parser.parse_args(argss)
        # check argument validity and normalise arguments
        options = options.__dict__
        options['reweightonly'] = False
        self.check_shower(argss, options)
        evt_file = pjoin(os.getcwd(), argss[0], 'events.lhe')
        self.ask_run_configuration('onlyshower', options)
        self.run_mcatnlo(evt_file, options)

        self.update_status('', level='all', update_results=True)

    ############################################################################
    def do_plot(self, line):
        """Create the plots for a given run"""

        # Since in principle, all plots are already done automatically
        args = self.split_arg(line)
        # Check argument validity
        self.check_plot(args)
        logger.info('plot for run %s' % self.run_name)

        if not self.force:
            self.ask_edit_cards([], args, plot=True)

        if any([arg in ['parton'] for arg in args]):
            filename = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')
            if os.path.exists(filename + '.gz'):
                misc.gunzip(filename)
            if os.path.exists(filename):
                logger.info('Found events.lhe file for run %s' % self.run_name)
                shutil.move(filename, pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'))
                self.create_plot('parton')
                shutil.move(pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'), filename)
                misc.gzip(filename)

        if any([arg in ['all', 'parton'] for arg in args]):
            filename = pjoin(self.me_dir, 'Events', self.run_name, 'MADatNLO.top')
            if os.path.exists(filename):
                logger.info('Found MADatNLO.top file for run %s' % \
                            self.run_name)
                output = pjoin(self.me_dir, 'HTML', self.run_name, 'plots_parton.html')
                plot_dir = pjoin(self.me_dir, 'HTML', self.run_name, 'plots_parton')

                if not os.path.isdir(plot_dir):
                    os.makedirs(plot_dir)
                top_file = pjoin(plot_dir, 'plots.top')
                files.cp(filename, top_file)
                madir = self.options['madanalysis_path']
                tag = self.run_card['run_tag']
                td = self.options['td_path']
                misc.call(['%s/plot' % self.dirbin, madir, td],
                          stdout=open(pjoin(plot_dir, 'plot.log'), 'a'),
                          stderr=subprocess.STDOUT,
                          cwd=plot_dir)

                misc.call(['%s/plot_page-pl' % self.dirbin,
                           os.path.basename(plot_dir),
                           'parton'],
                          stdout=open(pjoin(plot_dir, 'plot.log'), 'a'),
                          stderr=subprocess.STDOUT,
                          cwd=pjoin(self.me_dir, 'HTML', self.run_name))
                shutil.move(pjoin(self.me_dir, 'HTML', self.run_name, 'plots.html'),
                            output)

                os.remove(pjoin(self.me_dir, 'Events', 'plots.top'))

        if any([arg in ['all', 'shower'] for arg in args]):
            filenames = misc.glob('events_*.lhe.gz', pjoin(self.me_dir, 'Events', self.run_name))
            if len(filenames) != 1:
                filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name))
                if len(filenames) != 1:
                    logger.info('No shower level file found for run %s' % \
                                self.run_name)
                    return
                filename = filenames[0]
                misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events', 'pythia_events.hep'))

                if not os.path.exists(pjoin(self.me_dir, 'Cards', 'pythia_card.dat')):
                    if aMCatNLO and not self.options['mg5_path']:
                        raise Exception("plotting NLO HEP file needs MG5 utilities")

                    files.cp(pjoin(self.options['mg5_path'], 'Template', 'LO', 'Cards', 'pythia_card_default.dat'),
                             pjoin(self.me_dir, 'Cards', 'pythia_card.dat'))
                self.run_hep2lhe()
            else:
                filename = filenames[0]
                misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events', 'pythia_events.hep'))

            self.create_plot('shower')
            lhe_file_name = filename.replace('.hep.gz', '.lhe')
            shutil.move(pjoin(self.me_dir, 'Events', 'pythia_events.lhe'),
                        lhe_file_name)
            misc.gzip(lhe_file_name)

        if any([arg in ['all', 'pgs'] for arg in args]):
            filename = pjoin(self.me_dir, 'Events', self.run_name,
                             '%s_pgs_events.lhco' % self.run_tag)
            if os.path.exists(filename + '.gz'):
                misc.gunzip(filename)
            if os.path.exists(filename):
                self.create_plot('PGS')
                misc.gzip(filename)
            else:
                logger.info('No valid files for pgs plot')

        if any([arg in ['all', 'delphes'] for arg in args]):
            filename = pjoin(self.me_dir, 'Events', self.run_name,
                             '%s_delphes_events.lhco' % self.run_tag)
            if os.path.exists(filename + '.gz'):
                misc.gunzip(filename)
            if os.path.exists(filename):
                #shutil.move(filename, pjoin(self.me_dir, 'Events','delphes_events.lhco'))
                self.create_plot('Delphes')
                #shutil.move(pjoin(self.me_dir, 'Events','delphes_events.lhco'), filename)
                misc.gzip(filename)
            else:
                logger.info('No valid files for delphes plot')

    ############################################################################
    def do_calculate_xsect(self, line):
        """Main commands: calculate the LO/NLO cross-section, using madevent_mintFO
        this function wraps the do_launch one"""

        self.start_time = time.time()
        argss = self.split_arg(line)
        # check argument validity and normalise arguments
        (options, argss) = _calculate_xsect_parser.parse_args(argss)
        options = options.__dict__
        options['reweightonly'] = False
        options['parton'] = True
        self.check_calculate_xsect(argss, options)
        self.do_launch(line, options, argss)

    ############################################################################
    def do_banner_run(self, line):
        """Make a run from the banner file"""

        args = self.split_arg(line)
        # check the validity of the arguments
        self.check_banner_run(args)

        # Remove previous cards
        for name in ['shower_card.dat', 'madspin_card.dat']:
            try:
                os.remove(pjoin(self.me_dir, 'Cards', name))
            except Exception:
                pass

        banner_mod.split_banner(args[0], self.me_dir, proc_card=False)

        # Check if we want to modify the run
        if not self.force:
            ans = self.ask('Do you want to modify the Cards/Run Type?', 'n', ['y', 'n'])
            if ans == 'n':
                self.force = True

        # Compute run mode:
        if self.force:
            mode_status = {'order': 'NLO', 'fixed_order': False, 'madspin': False, 'shower': True}
            banner = banner_mod.Banner(args[0])
            for line in banner['run_settings']:
                if '=' in line:
                    mode, value = [t.strip() for t in line.split('=')]
                    mode_status[mode] = value
        else:
            mode_status = {}

        # Call Generate events
        self.do_launch('-n %s %s' % (self.run_name, '-f' if self.force else ''),
                       switch=mode_status)

    ############################################################################
    def do_generate_events(self, line):
        """Main commands: generate events
        this function just wraps the do_launch one"""
        self.do_launch(line)

    ############################################################################
    def do_treatcards(self, line, amcatnlo=True, mode=''):
        """Advanced commands: create the correct run_card.inc from the NLO format"""
        # check that no 'Auto' entries are present in the file
        self.check_param_card(pjoin(self.me_dir, 'Cards', 'param_card.dat'))

        # propagate the FO_card entries FO_LHE_weight_ratio and FO_LHE_postprocessing
        # to the run_card. These variables are system-only in the run_card and
        # cannot be set in EditCard since they are not written in the run_card directly.
        if mode in ['LO', 'NLO']:
            FO_card = analyse_card.FOAnalyseCard(pjoin(self.me_dir, 'Cards', 'FO_analyse_card.dat'))
            name = 'fo_lhe_weight_ratio'
            if name in FO_card:
                self.run_card.set(name, FO_card[name], user=False)
            name = 'fo_lhe_postprocessing'
            if name in FO_card:
                self.run_card.set(name, FO_card[name], user=False)

        return super(aMCatNLOCmd, self).do_treatcards(line, amcatnlo)

    ############################################################################
    def set_configuration(self, amcatnlo=True, **opt):
        """assign all configuration variables from file;
        loop over the different config files if config_file is not defined"""
        return super(aMCatNLOCmd, self).set_configuration(amcatnlo=amcatnlo, **opt)

    ############################################################################
1646 - def do_launch(self, line, options={}, argss=[], switch={}):
1647 """Main commands: launch the full chain 1648 options and args are relevant if the function is called from other 1649 functions, such as generate_events or calculate_xsect 1650 mode gives the list of switch needed for the computation (usefull for banner_run) 1651 """ 1652 1653 if not argss and not options: 1654 self.start_time = time.time() 1655 argss = self.split_arg(line) 1656 # check argument validity and normalise argument 1657 (options, argss) = _launch_parser.parse_args(argss) 1658 options = options.__dict__ 1659 self.check_launch(argss, options) 1660 1661 1662 if 'run_name' in list(options.keys()) and options['run_name']: 1663 self.run_name = options['run_name'] 1664 # if a dir with the given run_name already exists 1665 # remove it and warn the user 1666 if os.path.isdir(pjoin(self.me_dir, 'Events', self.run_name)): 1667 logger.warning('Removing old run information in \n'+ 1668 pjoin(self.me_dir, 'Events', self.run_name)) 1669 files.rm(pjoin(self.me_dir, 'Events', self.run_name)) 1670 self.results.delete_run(self.run_name) 1671 else: 1672 self.run_name = '' # will be set later 1673 1674 if options['multicore']: 1675 self.cluster_mode = 2 1676 elif options['cluster']: 1677 self.cluster_mode = 1 1678 1679 if not switch: 1680 mode = argss[0] 1681 1682 if mode in ['LO', 'NLO']: 1683 options['parton'] = True 1684 mode = self.ask_run_configuration(mode, options) 1685 else: 1686 mode = self.ask_run_configuration('auto', options, switch) 1687 1688 self.results.add_detail('run_mode', mode) 1689 1690 self.update_status('Starting run', level=None, update_results=True) 1691 1692 if self.options['automatic_html_opening']: 1693 misc.open_file(os.path.join(self.me_dir, 'crossx.html')) 1694 self.options['automatic_html_opening'] = False 1695 1696 if '+' in mode: 1697 mode = mode.split('+')[0] 1698 self.compile(mode, options) 1699 evt_file = self.run(mode, options) 1700 1701 if self.run_card['nevents'] == 0 and not mode in ['LO', 'NLO']: 1702 logger.info('No event file generated: grids have been set-up with a '\ 1703 'relative precision of %s' % self.run_card['req_acc']) 1704 return 1705 1706 if not mode in ['LO', 'NLO']: 1707 assert evt_file == pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'), '%s != %s' %(evt_file, pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz')) 1708 1709 if self.run_card['systematics_program'] == 'systematics': 1710 self.exec_cmd('systematics %s %s ' % (self.run_name, ' '.join(self.run_card['systematics_arguments']))) 1711 1712 self.exec_cmd('reweight -from_cards', postcmd=False) 1713 self.exec_cmd('decay_events -from_cards', postcmd=False) 1714 evt_file = pjoin(self.me_dir,'Events', self.run_name, 'events.lhe') 1715 1716 if not mode in ['LO', 'NLO', 'noshower', 'noshowerLO'] \ 1717 and not options['parton']: 1718 self.run_mcatnlo(evt_file, options) 1719 self.exec_cmd('madanalysis5_hadron --no_default', postcmd=False, printcmd=False) 1720 1721 elif mode == 'noshower': 1722 logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical. 1723 Please, shower the Les Houches events before using them for physics analyses.""") 1724 1725 1726 self.update_status('', level='all', update_results=True) 1727 if self.run_card['ickkw'] == 3 and \ 1728 (mode in ['noshower'] or \ 1729 (('PYTHIA8' not in self.run_card['parton_shower'].upper()) and (mode in ['aMC@NLO']))): 1730 logger.warning("""You are running with FxFx merging enabled. 
1731 To be able to merge samples of various multiplicities without double counting, 1732 you have to remove some events after showering 'by hand'. 1733 Please read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""") 1734 1735 self.store_result() 1736 #check if the param_card defines a scan. 1737 if self.param_card_iterator: 1738 cpath = pjoin(self.me_dir,'Cards','param_card.dat') 1739 param_card_iterator = self.param_card_iterator 1740 self.param_card_iterator = [] #avoid to next generate go trough here 1741 param_card_iterator.store_entry(self.run_name, self.results.current['cross'], 1742 error=self.results.current['error'], 1743 param_card_path=cpath) 1744 orig_name = self.run_name 1745 #go trough the scal 1746 with misc.TMP_variable(self, 'allow_notification_center', False): 1747 for i,card in enumerate(param_card_iterator): 1748 card.write(cpath) 1749 self.check_param_card(cpath, dependent=True) 1750 if not options['force']: 1751 options['force'] = True 1752 if options['run_name']: 1753 options['run_name'] = '%s_%s' % (orig_name, i+1) 1754 if not argss: 1755 argss = [mode, "-f"] 1756 elif argss[0] == "auto": 1757 argss[0] = mode 1758 self.do_launch("", options=options, argss=argss, switch=switch) 1759 #self.exec_cmd("launch -f ",precmd=True, postcmd=True,errorhandling=False) 1760 param_card_iterator.store_entry(self.run_name, self.results.current['cross'], 1761 error=self.results.current['error'], 1762 param_card_path=cpath) 1763 #restore original param_card 1764 param_card_iterator.write(pjoin(self.me_dir,'Cards','param_card.dat')) 1765 name = misc.get_scan_name(orig_name, self.run_name) 1766 path = pjoin(self.me_dir, 'Events','scan_%s.txt' % name) 1767 logger.info("write all cross-section results in %s" % path, '$MG:BOLD') 1768 param_card_iterator.write_summary(path) 1769 1770 if self.allow_notification_center: 1771 misc.apple_notify('Run %s finished' % os.path.basename(self.me_dir), 1772 '%s: %s +- %s ' % (self.results.current['run_name'], 1773 self.results.current['cross'], 1774 self.results.current['error']))
1775 1776 1777 ############################################################################
1778 - def do_compile(self, line):
1779 """Advanced commands: just compile the executables """ 1780 argss = self.split_arg(line) 1781 # check argument validity and normalise argument 1782 (options, argss) = _compile_parser.parse_args(argss) 1783 options = options.__dict__ 1784 options['reweightonly'] = False 1785 options['nocompile'] = False 1786 self.check_compile(argss, options) 1787 1788 mode = {'FO': 'NLO', 'MC': 'aMC@NLO'}[argss[0]] 1789 self.ask_run_configuration(mode, options) 1790 self.compile(mode, options) 1791 1792 1793 self.update_status('', level='all', update_results=True)
1794 1795
1796 - def update_random_seed(self):
1797 """Update random number seed with the value from the run_card. 1798 If this is 0, update the number according to a fresh one""" 1799 iseed = self.run_card['iseed'] 1800 if iseed == 0: 1801 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit')) 1802 iseed = int(randinit.read()[2:]) + 1 1803 randinit.close() 1804 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'), 'w') 1805 randinit.write('r=%d' % iseed) 1806 randinit.close()
1807 1808
1809 - def run(self, mode, options):
1810 """runs aMC@NLO. Returns the name of the event file created""" 1811 logger.info('Starting run') 1812 1813 if not 'only_generation' in list(options.keys()): 1814 options['only_generation'] = False 1815 1816 # for second step in applgrid mode, do only the event generation step 1817 if mode in ['LO', 'NLO'] and self.run_card['iappl'] == 2 and not options['only_generation']: 1818 options['only_generation'] = True 1819 self.get_characteristics(pjoin(self.me_dir, 'SubProcesses', 'proc_characteristics')) 1820 self.setup_cluster_or_multicore() 1821 self.update_random_seed() 1822 #find and keep track of all the jobs 1823 folder_names = {'LO': ['born_G*'], 'NLO': ['all_G*'], 1824 'aMC@LO': ['GB*'], 'aMC@NLO': ['GF*']} 1825 folder_names['noshower'] = folder_names['aMC@NLO'] 1826 folder_names['noshowerLO'] = folder_names['aMC@LO'] 1827 p_dirs = [d for d in \ 1828 open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d] 1829 #Clean previous results 1830 self.clean_previous_results(options,p_dirs,folder_names[mode]) 1831 1832 mcatnlo_status = ['Setting up grids', 'Computing upper envelope', 'Generating events'] 1833 1834 1835 if options['reweightonly']: 1836 event_norm=self.run_card['event_norm'] 1837 nevents=self.run_card['nevents'] 1838 return self.reweight_and_collect_events(options, mode, nevents, event_norm) 1839 1840 if mode in ['LO', 'NLO']: 1841 # this is for fixed order runs 1842 mode_dict = {'NLO': 'all', 'LO': 'born'} 1843 logger.info('Doing fixed order %s' % mode) 1844 req_acc = self.run_card['req_acc_FO'] 1845 1846 # Re-distribute the grids for the 2nd step of the applgrid 1847 # running 1848 if self.run_card['iappl'] == 2: 1849 self.applgrid_distribute(options,mode_dict[mode],p_dirs) 1850 1851 # create a list of dictionaries "jobs_to_run" with all the 1852 # jobs that need to be run 1853 integration_step=-1 1854 jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \ 1855 req_acc,mode_dict[mode],integration_step,mode,fixed_order=True) 1856 self.prepare_directories(jobs_to_run,mode) 1857 1858 # loop over the integration steps. After every step, check 1859 # if we have the required accuracy. If this is the case, 1860 # stop running, else do another step. 1861 while True: 1862 integration_step=integration_step+1 1863 self.run_all_jobs(jobs_to_run,integration_step) 1864 self.collect_log_files(jobs_to_run,integration_step) 1865 jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \ 1866 jobs_to_collect,integration_step,mode,mode_dict[mode]) 1867 if not jobs_to_run: 1868 # there are no more jobs to run (jobs_to_run is empty) 1869 break 1870 # We are done. 1871 self.finalise_run_FO(folder_names[mode],jobs_to_collect) 1872 self.update_status('Run complete', level='parton', update_results=True) 1873 return 1874 1875 elif mode in ['aMC@NLO','aMC@LO','noshower','noshowerLO']: 1876 if self.ninitial == 1: 1877 raise aMCatNLOError('Decay processes can only be run at fixed order.') 1878 mode_dict = {'aMC@NLO': 'all', 'aMC@LO': 'born',\ 1879 'noshower': 'all', 'noshowerLO': 'born'} 1880 shower = self.run_card['parton_shower'].upper() 1881 nevents = self.run_card['nevents'] 1882 req_acc = self.run_card['req_acc'] 1883 if nevents == 0 and req_acc < 0 : 1884 raise aMCatNLOError('Cannot determine the required accuracy from the number '\ 1885 'of events, because 0 events requested. 
Please set '\
1886                                      'the "req_acc" parameter in the run_card to a value '\
1887                                      'between 0 and 1') 
1888              elif req_acc >1 or req_acc == 0 : 
1889                  raise aMCatNLOError('Required accuracy ("req_acc" in the run_card) should '\
1890                                      'be larger than 0 and smaller than 1, '\
1891                                      'or set to -1 for automatic determination. Current '\
1892                                      'value is %f' % req_acc) 
1893              # For more than 1M events, set req_acc to 0.001 (except when it was explicitly set in the run_card) 
1894              elif req_acc < 0 and nevents > 1000000 : 
1895                  req_acc=0.001 
1896   
1897              shower_list = ['HERWIG6', 'HERWIGPP', 'PYTHIA6Q', 'PYTHIA6PT', 'PYTHIA8'] 
1898   
1899              if not shower in shower_list: 
1900                  raise aMCatNLOError('%s is not a valid parton shower. '\
1901                                      'Please use one of the following: %s' \
1902                                      % (shower, ', '.join(shower_list))) 
1903   
1904              # check that PYTHIA6PT is not used for processes with FSR 
1905              if shower == 'PYTHIA6PT' and self.proc_characteristics['has_fsr']: 
1906                  raise aMCatNLOError('PYTHIA6PT does not support processes with FSR') 
1907   
1908              if mode in ['aMC@NLO', 'aMC@LO']: 
1909                  logger.info('Doing %s matched to parton shower' % mode[4:]) 
1910              elif mode in ['noshower','noshowerLO']: 
1911                  logger.info('Generating events without running the shower.') 
1912              elif options['only_generation']: 
1913                  logger.info('Generating events starting from existing results') 
1914   
1915              jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \
1916                                  req_acc,mode_dict[mode],1,mode,fixed_order=False) 
1917              # Make sure to update all the jobs to be ready for the event generation step 
1918              if options['only_generation']: 
1919                  jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
1920                                  jobs_to_collect,1,mode,mode_dict[mode],fixed_order=False) 
1921              else: 
1922                  self.prepare_directories(jobs_to_run,mode,fixed_order=False) 
1923   
1924   
1925              # Main loop over the three MINT generation steps: 
1926              for mint_step, status in enumerate(mcatnlo_status): 
1927                  if options['only_generation'] and mint_step < 2: 
1928                      continue 
1929                  self.update_status(status, level='parton') 
1930                  self.run_all_jobs(jobs_to_run,mint_step,fixed_order=False) 
1931                  self.collect_log_files(jobs_to_run,mint_step) 
1932                  jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
1933                                  jobs_to_collect,mint_step,mode,mode_dict[mode],fixed_order=False) 
1934                  if mint_step+1==2 and nevents==0: 
1935                      self.print_summary(options,2,mode) 
1936                      return 
1937   
1938              # Sanity check on the event files. If error the jobs are resubmitted 
1939              self.check_event_files(jobs_to_collect) 
1940   
1941              if self.cluster_mode == 1: 
1942                  # if cluster run, wait 10 sec so that event files are transferred back 
1943                  self.update_status( 
1944                      'Waiting while files are transferred back from the cluster nodes', 
1945                      level='parton') 
1946                  time.sleep(10) 
1947   
1948              event_norm=self.run_card['event_norm'] 
1949              return self.reweight_and_collect_events(options, mode, nevents, event_norm) 
1950
1951 - def create_jobs_to_run(self,options,p_dirs,req_acc,run_mode,\ 1952 integration_step,mode,fixed_order=True):
1953 """Creates a list of dictionaries with all the jobs to be run""" 1954 jobs_to_run=[] 1955 if not options['only_generation']: 1956 # Fresh, new run. Check all the P*/channels.txt files 1957 # (created by the 'gensym' executable) to set-up all the 1958 # jobs using the default inputs. 1959 npoints = self.run_card['npoints_FO_grid'] 1960 niters = self.run_card['niters_FO_grid'] 1961 for p_dir in p_dirs: 1962 try: 1963 with open(pjoin(self.me_dir,'SubProcesses',p_dir,'channels.txt')) as chan_file: 1964 channels=chan_file.readline().split() 1965 except IOError: 1966 logger.warning('No integration channels found for contribution %s' % p_dir) 1967 continue 1968 if fixed_order: 1969 lch=len(channels) 1970 maxchannels=20 # combine up to 20 channels in a single job 1971 if self.run_card['iappl'] != 0: maxchannels=1 1972 njobs=(int(lch/maxchannels)+1 if lch%maxchannels!= 0 \ 1973 else int(lch/maxchannels)) 1974 for nj in range(1,njobs+1): 1975 job={} 1976 job['p_dir']=p_dir 1977 job['channel']=str(nj) 1978 job['nchans']=(int(lch/njobs)+1 if nj <= lch%njobs else int(lch/njobs)) 1979 job['configs']=' '.join(channels[:job['nchans']]) 1980 del channels[:job['nchans']] 1981 job['split']=0 1982 if req_acc == -1: 1983 job['accuracy']=0 1984 job['niters']=niters 1985 job['npoints']=npoints 1986 elif req_acc > 0: 1987 job['accuracy']=0.05 1988 job['niters']=6 1989 job['npoints']=-1 1990 else: 1991 raise aMCatNLOError('No consistent "req_acc_FO" set. Use a value '+ 1992 'between 0 and 1 or set it equal to -1.') 1993 job['mint_mode']=0 1994 job['run_mode']=run_mode 1995 job['wgt_frac']=1.0 1996 job['wgt_mult']=1.0 1997 jobs_to_run.append(job) 1998 if channels: 1999 raise aMCatNLOError('channels is not empty %s' % channels) 2000 else: 2001 for channel in channels: 2002 job={} 2003 job['p_dir']=p_dir 2004 job['channel']=channel 2005 job['split']=0 2006 job['accuracy']=0.03 2007 job['niters']=12 2008 job['npoints']=-1 2009 job['mint_mode']=0 2010 job['run_mode']=run_mode 2011 job['wgt_frac']=1.0 2012 jobs_to_run.append(job) 2013 jobs_to_collect=copy.copy(jobs_to_run) # These are all jobs 2014 else: 2015 # if options['only_generation'] is true, just read the current jobs from file 2016 try: 2017 with open(pjoin(self.me_dir,"SubProcesses","job_status.pkl"),'rb') as f: 2018 jobs_to_collect=pickle.load(f) 2019 for job in jobs_to_collect: 2020 job['dirname']=pjoin(self.me_dir,'SubProcesses',job['dirname'].rsplit('/SubProcesses/',1)[1]) 2021 jobs_to_run=copy.copy(jobs_to_collect) 2022 except: 2023 raise aMCatNLOError('Cannot reconstruct saved job status in %s' % \ 2024 pjoin(self.me_dir,'SubProcesses','job_status.pkl')) 2025 # Update cross sections and determine which jobs to run next 2026 if fixed_order: 2027 jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, 2028 jobs_to_collect,integration_step,mode,run_mode) 2029 # Update the integration_step to make sure that nothing will be overwritten 2030 integration_step=1 2031 for job in jobs_to_run: 2032 while os.path.exists(pjoin(job['dirname'],'res_%s.dat' % integration_step)): 2033 integration_step=integration_step+1 2034 integration_step=integration_step-1 2035 else: 2036 self.append_the_results(jobs_to_collect,integration_step) 2037 return jobs_to_run,jobs_to_collect,integration_step
2038
2039 - def prepare_directories(self,jobs_to_run,mode,fixed_order=True):
2040 """Set-up the G* directories for running""" 2041 name_suffix={'born' :'B' , 'all':'F'} 2042 for job in jobs_to_run: 2043 if job['split'] == 0: 2044 if fixed_order : 2045 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 2046 job['run_mode']+'_G'+job['channel']) 2047 else: 2048 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 2049 'G'+name_suffix[job['run_mode']]+job['channel']) 2050 else: 2051 if fixed_order : 2052 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 2053 job['run_mode']+'_G'+job['channel']+'_'+str(job['split'])) 2054 else: 2055 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 2056 'G'+name_suffix[job['run_mode']]+job['channel']+'_'+str(job['split'])) 2057 job['dirname']=dirname 2058 if not os.path.isdir(dirname): 2059 os.makedirs(dirname) 2060 self.write_input_file(job,fixed_order) 2061 # link or copy the grids from the base directory to the split directory: 2062 if not fixed_order: 2063 if job['split'] != 0: 2064 for f in ['grid.MC_integer','mint_grids','res_1']: 2065 if not os.path.isfile(pjoin(job['dirname'],f)): 2066 files.ln(pjoin(job['dirname'].rsplit("_",1)[0],f),job['dirname']) 2067 else: 2068 if job['split'] != 0: 2069 for f in ['grid.MC_integer','mint_grids']: 2070 files.cp(pjoin(job['dirname'].rsplit("_",1)[0],f),job['dirname'])
2071 2072
2073 - def write_input_file(self,job,fixed_order):
2074 """write the input file for the madevent_mint* executable in the appropriate directory""" 2075 if fixed_order: 2076 content= \ 2077 """NPOINTS = %(npoints)s 2078 NITERATIONS = %(niters)s 2079 ACCURACY = %(accuracy)s 2080 ADAPT_GRID = 2 2081 MULTICHANNEL = 1 2082 SUM_HELICITY = 1 2083 NCHANS = %(nchans)s 2084 CHANNEL = %(configs)s 2085 SPLIT = %(split)s 2086 WGT_MULT= %(wgt_mult)s 2087 RUN_MODE = %(run_mode)s 2088 RESTART = %(mint_mode)s 2089 """ \ 2090 % job 2091 else: 2092 content = \ 2093 """-1 12 ! points, iterations 2094 %(accuracy)s ! desired fractional accuracy 2095 1 -0.1 ! alpha, beta for Gsoft 2096 -1 -0.1 ! alpha, beta for Gazi 2097 1 ! Suppress amplitude (0 no, 1 yes)? 2098 1 ! Exact helicity sum (0 yes, n = number/event)? 2099 %(channel)s ! Enter Configuration Number: 2100 %(mint_mode)s ! MINT imode: 0 to set-up grids, 1 to perform integral, 2 generate events 2101 1 1 1 ! if imode is 1: Folding parameters for xi_i, phi_i and y_ij 2102 %(run_mode)s ! all, born, real, virt 2103 """ \ 2104 % job 2105 with open(pjoin(job['dirname'], 'input_app.txt'), 'w') as input_file: 2106 input_file.write(content)
2107 2108
2109 - def run_all_jobs(self,jobs_to_run,integration_step,fixed_order=True):
2110 """Loops over the jobs_to_run and executes them using the function 'run_exe'""" 2111 if fixed_order: 2112 if integration_step == 0: 2113 self.update_status('Setting up grids', level=None) 2114 else: 2115 self.update_status('Refining results, step %i' % integration_step, level=None) 2116 self.ijob = 0 2117 name_suffix={'born' :'B', 'all':'F'} 2118 if fixed_order: 2119 run_type="Fixed order integration step %s" % integration_step 2120 else: 2121 run_type="MINT step %s" % integration_step 2122 self.njobs=len(jobs_to_run) 2123 for job in jobs_to_run: 2124 executable='ajob1' 2125 if fixed_order: 2126 arguments=[job['channel'],job['run_mode'], \ 2127 str(job['split']),str(integration_step)] 2128 else: 2129 arguments=[job['channel'],name_suffix[job['run_mode']], \ 2130 str(job['split']),str(integration_step)] 2131 self.run_exe(executable,arguments,run_type, 2132 cwd=pjoin(self.me_dir,'SubProcesses',job['p_dir'])) 2133 2134 if self.cluster_mode == 2: 2135 time.sleep(1) # security to allow all jobs to be launched 2136 self.wait_for_complete(run_type)
2137 2138
2139 - def collect_the_results(self,options,req_acc,jobs_to_run,jobs_to_collect,\ 2140 integration_step,mode,run_mode,fixed_order=True):
2141 """Collect the results, make HTML pages, print the summary and 2142 determine if there are more jobs to run. Returns the list 2143 of the jobs that still need to be run, as well as the 2144 complete list of jobs that need to be collected to get the 2145 final answer. 2146 """ 2147 # Get the results of the current integration/MINT step 2148 self.append_the_results(jobs_to_run,integration_step) 2149 self.cross_sect_dict = self.write_res_txt_file(jobs_to_collect,integration_step) 2150 # Update HTML pages 2151 if fixed_order: 2152 cross, error = self.make_make_all_html_results(folder_names=['%s*' % run_mode], 2153 jobs=jobs_to_collect) 2154 else: 2155 name_suffix={'born' :'B' , 'all':'F'} 2156 cross, error = self.make_make_all_html_results(folder_names=['G%s*' % name_suffix[run_mode]]) 2157 self.results.add_detail('cross', cross) 2158 self.results.add_detail('error', error) 2159 # Combine grids from split fixed order jobs 2160 if fixed_order: 2161 jobs_to_run=self.combine_split_order_run(jobs_to_run) 2162 # Set-up jobs for the next iteration/MINT step 2163 jobs_to_run_new=self.update_jobs_to_run(req_acc,integration_step,jobs_to_run,fixed_order) 2164 # IF THERE ARE NO MORE JOBS, WE ARE DONE!!! 2165 if fixed_order: 2166 # Write the jobs_to_collect directory to file so that we 2167 # can restart them later (with only-generation option) 2168 with open(pjoin(self.me_dir,"SubProcesses","job_status.pkl"),'wb') as f: 2169 pickle.dump(jobs_to_collect,f) 2170 # Print summary 2171 if (not jobs_to_run_new) and fixed_order: 2172 # print final summary of results (for fixed order) 2173 scale_pdf_info=self.collect_scale_pdf_info(options,jobs_to_collect) 2174 self.print_summary(options,integration_step,mode,scale_pdf_info,done=True) 2175 return jobs_to_run_new,jobs_to_collect 2176 elif jobs_to_run_new: 2177 # print intermediate summary of results 2178 scale_pdf_info=[] 2179 self.print_summary(options,integration_step,mode,scale_pdf_info,done=False) 2180 else: 2181 # When we are done for (N)LO+PS runs, do not print 2182 # anything yet. This will be done after the reweighting 2183 # and collection of the events 2184 scale_pdf_info=[] 2185 # Prepare for the next integration/MINT step 2186 if (not fixed_order) and integration_step+1 == 2 : 2187 # Write the jobs_to_collect directory to file so that we 2188 # can restart them later (with only-generation option) 2189 with open(pjoin(self.me_dir,"SubProcesses","job_status.pkl"),'wb') as f: 2190 pickle.dump(jobs_to_collect,f) 2191 # next step is event generation (mint_step 2) 2192 jobs_to_run_new,jobs_to_collect_new= \ 2193 self.check_the_need_to_split(jobs_to_run_new,jobs_to_collect) 2194 self.prepare_directories(jobs_to_run_new,mode,fixed_order) 2195 self.write_nevents_unweighted_file(jobs_to_collect_new,jobs_to_collect) 2196 self.write_nevts_files(jobs_to_run_new) 2197 else: 2198 if fixed_order and self.run_card['iappl'] == 0 \ 2199 and self.run_card['req_acc_FO'] > 0: 2200 jobs_to_run_new,jobs_to_collect= \ 2201 self.split_jobs_fixed_order(jobs_to_run_new,jobs_to_collect) 2202 self.prepare_directories(jobs_to_run_new,mode,fixed_order) 2203 jobs_to_collect_new=jobs_to_collect 2204 return jobs_to_run_new,jobs_to_collect_new
2205 2206
2207 - def write_nevents_unweighted_file(self,jobs,jobs0events):
2208 """writes the nevents_unweighted file in the SubProcesses directory. 2209 We also need to write the jobs that will generate 0 events, 2210 because that makes sure that the cross section from those channels 2211 is taken into account in the event weights (by collect_events.f). 2212 """ 2213 content=[] 2214 for job in jobs: 2215 path=pjoin(job['dirname'].split('/')[-2],job['dirname'].split('/')[-1]) 2216 lhefile=pjoin(path,'events.lhe') 2217 content.append(' %s %d %9e %9e' % \ 2218 (lhefile.ljust(40),job['nevents'],job['resultABS']*job['wgt_frac'],job['wgt_frac'])) 2219 for job in jobs0events: 2220 if job['nevents']==0: 2221 path=pjoin(job['dirname'].split('/')[-2],job['dirname'].split('/')[-1]) 2222 lhefile=pjoin(path,'events.lhe') 2223 content.append(' %s %d %9e %9e' % \ 2224 (lhefile.ljust(40),job['nevents'],job['resultABS'],1.)) 2225 with open(pjoin(self.me_dir,'SubProcesses',"nevents_unweighted"),'w') as f: 2226 f.write('\n'.join(content)+'\n')
2227
2228 - def write_nevts_files(self,jobs):
2229 """write the nevts files in the SubProcesses/P*/G*/ directories""" 2230 for job in jobs: 2231 with open(pjoin(job['dirname'],'nevts'),'w') as f: 2232 if self.run_card['event_norm'].lower()=='bias': 2233 f.write('%i %f\n' % (job['nevents'],self.cross_sect_dict['xseca'])) 2234 else: 2235 f.write('%i\n' % job['nevents'])
2236
2237 - def combine_split_order_run(self,jobs_to_run):
2238 """Combines jobs and grids from split jobs that have been run""" 2239 # combine the jobs that need to be combined in job 2240 # groups. Simply combine the ones that have the same p_dir and 2241 # same channel. 2242 jobgroups_to_combine=[] 2243 jobs_to_run_new=[] 2244 for job in jobs_to_run: 2245 if job['split'] == 0: 2246 job['combined']=1 2247 jobs_to_run_new.append(job) # this jobs wasn't split 2248 elif job['split'] == 1: 2249 jobgroups_to_combine.append([j for j in jobs_to_run if j['p_dir'] == job['p_dir'] and \ 2250 j['channel'] == job['channel']]) 2251 else: 2252 continue 2253 for job_group in jobgroups_to_combine: 2254 # Combine the grids (mint-grids & MC-integer grids) first 2255 self.combine_split_order_grids(job_group) 2256 jobs_to_run_new.append(self.combine_split_order_jobs(job_group)) 2257 return jobs_to_run_new
2258
2259 - def combine_split_order_jobs(self,job_group):
2260 """combine the jobs in job_group and return a single summed job""" 2261 # first copy one of the jobs in 'jobs' 2262 sum_job=copy.copy(job_group[0]) 2263 # update the information to have a 'non-split' job: 2264 sum_job['dirname']=pjoin(sum_job['dirname'].rsplit('_',1)[0]) 2265 sum_job['split']=0 2266 sum_job['wgt_mult']=1.0 2267 sum_job['combined']=len(job_group) 2268 # information to be summed: 2269 keys=['niters_done','npoints_done','niters','npoints',\ 2270 'result','resultABS','time_spend'] 2271 keys2=['error','errorABS'] 2272 # information to be summed in quadrature: 2273 for key in keys2: 2274 sum_job[key]=math.pow(sum_job[key],2) 2275 # Loop over the jobs and sum the information 2276 for i,job in enumerate(job_group): 2277 if i==0 : continue # skip the first 2278 for key in keys: 2279 sum_job[key]+=job[key] 2280 for key in keys2: 2281 sum_job[key]+=math.pow(job[key],2) 2282 for key in keys2: 2283 sum_job[key]=math.sqrt(sum_job[key]) 2284 sum_job['err_percABS'] = sum_job['errorABS']/sum_job['resultABS']*100. 2285 sum_job['err_perc'] = sum_job['error']/sum_job['result']*100. 2286 sum_job['niters']=int(sum_job['niters_done']/len(job_group)) 2287 sum_job['niters_done']=int(sum_job['niters_done']/len(job_group)) 2288 return sum_job
2289 2290
2291 - def combine_split_order_grids(self,job_group):
2292 """Combines the mint_grids and MC-integer grids from the split order 2293 jobs (fixed order only). 2294 """ 2295 files_mint_grids=[] 2296 files_MC_integer=[] 2297 location=None 2298 for job in job_group: 2299 files_mint_grids.append(pjoin(job['dirname'],'mint_grids')) 2300 files_MC_integer.append(pjoin(job['dirname'],'grid.MC_integer')) 2301 if not location: 2302 location=pjoin(job['dirname'].rsplit('_',1)[0]) 2303 else: 2304 if location != pjoin(job['dirname'].rsplit('_',1)[0]) : 2305 raise aMCatNLOError('Not all jobs have the same location. '\ 2306 +'Cannot combine them.') 2307 # Needed to average the grids (both xgrids, ave_virt and 2308 # MC_integer grids), but sum the cross section info. The 2309 # latter is only the only line that contains integers. 2310 for j,fs in enumerate([files_mint_grids,files_MC_integer]): 2311 linesoffiles=[] 2312 for f in fs: 2313 with open(f,'r+') as fi: 2314 linesoffiles.append(fi.readlines()) 2315 to_write=[] 2316 for rowgrp in zip(*linesoffiles): 2317 try: 2318 # check that last element on the line is an 2319 # integer (will raise ValueError if not the 2320 # case). If integer, this is the line that 2321 # contains information that needs to be 2322 # summed. All other lines can be averaged. 2323 is_integer = [[int(row.strip().split()[-1])] for row in rowgrp] 2324 floatsbyfile = [[float(a) for a in row.strip().split()] for row in rowgrp] 2325 floatgrps = list(zip(*floatsbyfile)) 2326 special=[] 2327 for i,floatgrp in enumerate(floatgrps): 2328 if i==0: # sum X-sec 2329 special.append(sum(floatgrp)) 2330 elif i==1: # sum unc in quadrature 2331 special.append(math.sqrt(sum([err**2 for err in floatgrp]))) 2332 elif i==2: # average number of PS per iteration 2333 special.append(int(sum(floatgrp)/len(floatgrp))) 2334 elif i==3: # sum the number of iterations 2335 special.append(int(sum(floatgrp))) 2336 elif i==4: # average the nhits_in_grids 2337 special.append(int(sum(floatgrp)/len(floatgrp))) 2338 else: 2339 raise aMCatNLOError('"mint_grids" files not in correct format. '+\ 2340 'Cannot combine them.') 2341 to_write.append(" ".join(str(s) for s in special) + "\n") 2342 except ValueError: 2343 # just average all 2344 floatsbyfile = [[float(a) for a in row.strip().split()] for row in rowgrp] 2345 floatgrps = list(zip(*floatsbyfile)) 2346 averages = [sum(floatgrp)/len(floatgrp) for floatgrp in floatgrps] 2347 to_write.append(" ".join(str(a) for a in averages) + "\n") 2348 # write the data over the master location 2349 if j==0: 2350 with open(pjoin(location,'mint_grids'),'w') as f: 2351 f.writelines(to_write) 2352 elif j==1: 2353 with open(pjoin(location,'grid.MC_integer'),'w') as f: 2354 f.writelines(to_write)
2355 2356
2357 - def split_jobs_fixed_order(self,jobs_to_run,jobs_to_collect):
2358 """Looks in the jobs_to_run to see if there is the need to split the 2359 jobs, depending on the expected time they take. Updates 2360 jobs_to_run and jobs_to_collect to replace the split-job by 2361 its splits. 2362 """ 2363 # determine the number jobs we should have (this is per p_dir) 2364 if self.options['run_mode'] ==2: 2365 nb_submit = int(self.options['nb_core']) 2366 elif self.options['run_mode'] ==1: 2367 nb_submit = int(self.options['cluster_size']) 2368 else: 2369 nb_submit =1 2370 # total expected aggregated running time 2371 time_expected=0 2372 for job in jobs_to_run: 2373 time_expected+=job['time_spend']*(job['niters']*job['npoints'])/ \ 2374 (job['niters_done']*job['npoints_done']) 2375 # this means that we must expect the following per job (in 2376 # ideal conditions) 2377 time_per_job=time_expected/(nb_submit*(1+len(jobs_to_run)/2)) 2378 jobs_to_run_new=[] 2379 jobs_to_collect_new=copy.copy(jobs_to_collect) 2380 for job in jobs_to_run: 2381 # remove current job from jobs_to_collect. Make sure 2382 # to remove all the split ones in case the original 2383 # job had been a split one (before it was re-combined) 2384 for j in [j for j in jobs_to_collect_new if j['p_dir'] == job['p_dir'] and \ 2385 j['channel'] == job['channel']]: 2386 jobs_to_collect_new.remove(j) 2387 time_expected=job['time_spend']*(job['niters']*job['npoints'])/ \ 2388 (job['niters_done']*job['npoints_done']) 2389 # if the time expected for this job is (much) larger than 2390 # the time spend in the previous iteration, and larger 2391 # than the expected time per job, split it 2392 if time_expected > max(2*job['time_spend']/job['combined'],time_per_job): 2393 # determine the number of splits needed 2394 nsplit=min(max(int(time_expected/max(2*job['time_spend']/job['combined'],time_per_job)),2),nb_submit) 2395 for i in range(1,nsplit+1): 2396 job_new=copy.copy(job) 2397 job_new['split']=i 2398 job_new['wgt_mult']=1./float(nsplit) 2399 job_new['dirname']=job['dirname']+'_%i' % job_new['split'] 2400 job_new['accuracy']=min(job['accuracy']*math.sqrt(float(nsplit)),0.1) 2401 if nsplit >= job['niters']: 2402 job_new['npoints']=int(job['npoints']*job['niters']/nsplit) 2403 job_new['niters']=1 2404 else: 2405 job_new['npoints']=int(job['npoints']/nsplit) 2406 jobs_to_collect_new.append(job_new) 2407 jobs_to_run_new.append(job_new) 2408 else: 2409 jobs_to_collect_new.append(job) 2410 jobs_to_run_new.append(job) 2411 return jobs_to_run_new,jobs_to_collect_new
2412 2413
2414 - def check_the_need_to_split(self,jobs_to_run,jobs_to_collect):
2415 """Looks in the jobs_to_run to see if there is the need to split the 2416 event generation step. Updates jobs_to_run and 2417 jobs_to_collect to replace the split-job by its 2418 splits. Also removes jobs that do not need any events. 2419 """ 2420 nevt_job=self.run_card['nevt_job'] 2421 if nevt_job > 0: 2422 jobs_to_collect_new=copy.copy(jobs_to_collect) 2423 for job in jobs_to_run: 2424 nevents=job['nevents'] 2425 if nevents == 0: 2426 jobs_to_collect_new.remove(job) 2427 elif nevents > nevt_job: 2428 jobs_to_collect_new.remove(job) 2429 if nevents % nevt_job != 0 : 2430 nsplit=int(nevents/nevt_job)+1 2431 else: 2432 nsplit=int(nevents/nevt_job) 2433 for i in range(1,nsplit+1): 2434 job_new=copy.copy(job) 2435 left_over=nevents % nsplit 2436 if i <= left_over: 2437 job_new['nevents']=int(nevents/nsplit)+1 2438 job_new['wgt_frac']=float(job_new['nevents'])/float(nevents) 2439 else: 2440 job_new['nevents']=int(nevents/nsplit) 2441 job_new['wgt_frac']=float(job_new['nevents'])/float(nevents) 2442 job_new['split']=i 2443 job_new['dirname']=job['dirname']+'_%i' % job_new['split'] 2444 jobs_to_collect_new.append(job_new) 2445 jobs_to_run_new=copy.copy(jobs_to_collect_new) 2446 else: 2447 jobs_to_run_new=copy.copy(jobs_to_collect) 2448 for job in jobs_to_collect: 2449 if job['nevents'] == 0: 2450 jobs_to_run_new.remove(job) 2451 jobs_to_collect_new=copy.copy(jobs_to_run_new) 2452 2453 return jobs_to_run_new,jobs_to_collect_new
2454 2455
2456 - def update_jobs_to_run(self,req_acc,step,jobs,fixed_order=True):
2457 """ 2458 For (N)LO+PS: determines the number of events and/or the required 2459 accuracy per job. 2460 For fixed order: determines which jobs need higher precision and 2461 returns those with the newly requested precision. 2462 """ 2463 err=self.cross_sect_dict['errt'] 2464 tot=self.cross_sect_dict['xsect'] 2465 errABS=self.cross_sect_dict['erra'] 2466 totABS=self.cross_sect_dict['xseca'] 2467 jobs_new=[] 2468 if fixed_order: 2469 if req_acc == -1: 2470 if step+1 == 1: 2471 npoints = self.run_card['npoints_FO'] 2472 niters = self.run_card['niters_FO'] 2473 for job in jobs: 2474 job['mint_mode']=-1 2475 job['niters']=niters 2476 job['npoints']=npoints 2477 jobs_new.append(job) 2478 elif step+1 == 2: 2479 pass 2480 elif step+1 > 2: 2481 raise aMCatNLOError('Cannot determine number of iterations and PS points '+ 2482 'for integration step %i' % step ) 2483 elif ( req_acc > 0 and err/abs(tot) > req_acc*1.2 ) or step <= 0: 2484 req_accABS=req_acc*abs(tot)/totABS # overal relative required accuracy on ABS Xsec. 2485 for job in jobs: 2486 job['mint_mode']=-1 2487 # Determine relative required accuracy on the ABS for this job 2488 job['accuracy']=req_accABS*math.sqrt(totABS/job['resultABS']) 2489 # If already accurate enough, skip the job (except when doing the first 2490 # step for the iappl=2 run: we need to fill all the applgrid grids!) 2491 if (job['accuracy'] > job['errorABS']/job['resultABS'] and step != 0) \ 2492 and not (step==-1 and self.run_card['iappl'] == 2): 2493 continue 2494 # Update the number of PS points based on errorABS, ncall and accuracy 2495 itmax_fl=job['niters_done']*math.pow(job['errorABS']/ 2496 (job['accuracy']*job['resultABS']),2) 2497 if itmax_fl <= 4.0 : 2498 job['niters']=max(int(round(itmax_fl)),2) 2499 job['npoints']=job['npoints_done']*2 2500 elif itmax_fl > 4.0 and itmax_fl <= 16.0 : 2501 job['niters']=4 2502 job['npoints']=int(round(job['npoints_done']*itmax_fl/4.0))*2 2503 else: 2504 if itmax_fl > 100.0 : itmax_fl=50.0 2505 job['niters']=int(round(math.sqrt(itmax_fl))) 2506 job['npoints']=int(round(job['npoints_done']*itmax_fl/ 2507 round(math.sqrt(itmax_fl))))*2 2508 # Add the job to the list of jobs that need to be run 2509 jobs_new.append(job) 2510 return jobs_new 2511 elif step+1 <= 2: 2512 nevents=self.run_card['nevents'] 2513 # Total required accuracy for the upper bounding envelope 2514 if req_acc<0: 2515 req_acc2_inv=nevents 2516 else: 2517 req_acc2_inv=1/(req_acc*req_acc) 2518 if step+1 == 1 or step+1 == 2 : 2519 # determine the req. accuracy for each of the jobs for Mint-step = 1 2520 for job in jobs: 2521 accuracy=min(math.sqrt(totABS/(req_acc2_inv*job['resultABS'])),0.2) 2522 job['accuracy']=accuracy 2523 if step+1 == 2: 2524 # Randomly (based on the relative ABS Xsec of the job) determine the 2525 # number of events each job needs to generate for MINT-step = 2. 2526 r=self.get_randinit_seed() 2527 random.seed(r) 2528 totevts=nevents 2529 for job in jobs: 2530 job['nevents'] = 0 2531 while totevts : 2532 target = random.random() * totABS 2533 crosssum = 0. 2534 i = 0 2535 while i<len(jobs) and crosssum < target: 2536 job = jobs[i] 2537 crosssum += job['resultABS'] 2538 i += 1 2539 totevts -= 1 2540 i -= 1 2541 jobs[i]['nevents'] += 1 2542 for job in jobs: 2543 job['mint_mode']=step+1 # next step 2544 return jobs 2545 else: 2546 return []
2547 2548
2549 - def get_randinit_seed(self):
2550 """ Get the random number seed from the randinit file """ 2551 with open(pjoin(self.me_dir,"SubProcesses","randinit")) as randinit: 2552 # format of the file is "r=%d". 2553 iseed = int(randinit.read()[2:]) 2554 return iseed
2555 2556
2557 - def append_the_results(self,jobs,integration_step):
2558 """Appends the results for each of the jobs in the job list""" 2559 error_found=False 2560 for job in jobs: 2561 try: 2562 if integration_step >= 0 : 2563 with open(pjoin(job['dirname'],'res_%s.dat' % integration_step)) as res_file: 2564 results=res_file.readline().split() 2565 else: 2566 # should only be here when doing fixed order with the 'only_generation' 2567 # option equal to True. Take the results from the final run done. 2568 with open(pjoin(job['dirname'],'res.dat')) as res_file: 2569 results=res_file.readline().split() 2570 except IOError: 2571 if not error_found: 2572 error_found=True 2573 error_log=[] 2574 error_log.append(pjoin(job['dirname'],'log.txt')) 2575 continue 2576 job['resultABS']=float(results[0]) 2577 job['errorABS']=float(results[1]) 2578 job['result']=float(results[2]) 2579 job['error']=float(results[3]) 2580 job['niters_done']=int(results[4]) 2581 job['npoints_done']=int(results[5]) 2582 job['time_spend']=float(results[6]) 2583 job['err_percABS'] = job['errorABS']/job['resultABS']*100. 2584 job['err_perc'] = job['error']/job['result']*100. 2585 if error_found: 2586 raise aMCatNLOError('An error occurred during the collection of results.\n' + 2587 'Please check the .log files inside the directories which failed:\n' + 2588 '\n'.join(error_log)+'\n')
2589 2590 2591
2592 - def write_res_txt_file(self,jobs,integration_step):
2593 """writes the res.txt files in the SubProcess dir""" 2594 jobs.sort(key = lambda job: -job['errorABS']) 2595 content=[] 2596 content.append('\n\nCross section per integration channel:') 2597 for job in jobs: 2598 content.append('%(p_dir)20s %(channel)15s %(result)10.8e %(error)6.4e %(err_perc)6.4f%% ' % job) 2599 content.append('\n\nABS cross section per integration channel:') 2600 for job in jobs: 2601 content.append('%(p_dir)20s %(channel)15s %(resultABS)10.8e %(errorABS)6.4e %(err_percABS)6.4f%% ' % job) 2602 totABS=0 2603 errABS=0 2604 tot=0 2605 err=0 2606 for job in jobs: 2607 totABS+= job['resultABS']*job['wgt_frac'] 2608 errABS+= math.pow(job['errorABS'],2)*job['wgt_frac'] 2609 tot+= job['result']*job['wgt_frac'] 2610 err+= math.pow(job['error'],2)*job['wgt_frac'] 2611 if jobs: 2612 content.append('\nTotal ABS and \nTotal: \n %10.8e +- %6.4e (%6.4e%%)\n %10.8e +- %6.4e (%6.4e%%) \n' %\ 2613 (totABS, math.sqrt(errABS), math.sqrt(errABS)/totABS *100.,\ 2614 tot, math.sqrt(err), math.sqrt(err)/tot *100.)) 2615 with open(pjoin(self.me_dir,'SubProcesses','res_%s.txt' % integration_step),'w') as res_file: 2616 res_file.write('\n'.join(content)) 2617 randinit=self.get_randinit_seed() 2618 return {'xsect':tot,'xseca':totABS,'errt':math.sqrt(err),\ 2619 'erra':math.sqrt(errABS),'randinit':randinit}
2620 2621
2622 - def collect_scale_pdf_info(self,options,jobs):
2623 """read the scale_pdf_dependence.dat files and collects there results""" 2624 scale_pdf_info=[] 2625 if any(self.run_card['reweight_scale']) or any(self.run_card['reweight_PDF']) or \ 2626 len(self.run_card['dynamical_scale_choice']) > 1 or len(self.run_card['lhaid']) > 1: 2627 evt_files=[] 2628 evt_wghts=[] 2629 for job in jobs: 2630 evt_files.append(pjoin(job['dirname'],'scale_pdf_dependence.dat')) 2631 evt_wghts.append(job['wgt_frac']) 2632 scale_pdf_info = self.pdf_scale_from_reweighting(evt_files,evt_wghts) 2633 return scale_pdf_info
2634 2635
2636 - def combine_plots_FO(self,folder_name,jobs):
2637 """combines the plots and puts then in the Events/run* directory""" 2638 devnull = open(os.devnull, 'w') 2639 2640 if self.analyse_card['fo_analysis_format'].lower() == 'topdrawer': 2641 topfiles = [] 2642 for job in jobs: 2643 if job['dirname'].endswith('.top'): 2644 topfiles.append(job['dirname']) 2645 else: 2646 topfiles.append(pjoin(job['dirname'],'MADatNLO.top')) 2647 misc.call(['./combine_plots_FO.sh'] + topfiles, \ 2648 stdout=devnull, 2649 cwd=pjoin(self.me_dir, 'SubProcesses')) 2650 files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.top'), 2651 pjoin(self.me_dir, 'Events', self.run_name)) 2652 logger.info('The results of this run and the TopDrawer file with the plots' + \ 2653 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name)) 2654 elif self.analyse_card['fo_analysis_format'].lower() == 'hwu': 2655 out=pjoin(self.me_dir,'Events',self.run_name,'MADatNLO') 2656 self.combine_plots_HwU(jobs,out) 2657 try: 2658 misc.call(['gnuplot','MADatNLO.gnuplot'],\ 2659 stdout=devnull,stderr=devnull,\ 2660 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 2661 except Exception: 2662 pass 2663 logger.info('The results of this run and the HwU and GnuPlot files with the plots' + \ 2664 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name)) 2665 elif self.analyse_card['fo_analysis_format'].lower() == 'root': 2666 rootfiles = [] 2667 for job in jobs: 2668 if job['dirname'].endswith('.root'): 2669 rootfiles.append(job['dirname']) 2670 else: 2671 rootfiles.append(pjoin(job['dirname'],'MADatNLO.root')) 2672 misc.call(['./combine_root.sh'] + folder_name + rootfiles, \ 2673 stdout=devnull, 2674 cwd=pjoin(self.me_dir, 'SubProcesses')) 2675 files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.root'), 2676 pjoin(self.me_dir, 'Events', self.run_name)) 2677 logger.info('The results of this run and the ROOT file with the plots' + \ 2678 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name)) 2679 elif self.analyse_card['fo_analysis_format'].lower() == 'lhe': 2680 self.combine_FO_lhe(jobs) 2681 logger.info('The results of this run and the LHE File (to be used for plotting only)' + \ 2682 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name)) 2683 else: 2684 logger.info('The results of this run' + \ 2685 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
2686
2687 - def combine_FO_lhe(self,jobs):
2688 """combine the various lhe file generated in each directory. 2689 They are two steps: 2690 1) banner 2691 2) reweight each sample by the factor written at the end of each file 2692 3) concatenate each of the new files (gzip those). 2693 """ 2694 2695 logger.info('Combining lhe events for plotting analysis') 2696 start = time.time() 2697 self.run_card['fo_lhe_postprocessing'] = [i.lower() for i in self.run_card['fo_lhe_postprocessing']] 2698 output = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz') 2699 if os.path.exists(output): 2700 os.remove(output) 2701 2702 2703 2704 2705 # 1. write the banner 2706 text = open(pjoin(jobs[0]['dirname'],'header.txt'),'r').read() 2707 i1, i2 = text.find('<initrwgt>'),text.find('</initrwgt>') 2708 self.banner['initrwgt'] = text[10+i1:i2] 2709 # 2710 # <init> 2711 # 2212 2212 6.500000e+03 6.500000e+03 0 0 247000 247000 -4 1 2712 # 8.430000e+02 2.132160e+00 8.430000e+02 1 2713 # <generator name='MadGraph5_aMC@NLO' version='2.5.2'>please cite 1405.0301 </generator> 2714 # </init> 2715 2716 cross = sum(j['result'] for j in jobs) 2717 error = math.sqrt(sum(j['error'] for j in jobs)) 2718 self.banner['init'] = "0 0 0e0 0e0 0 0 0 0 -4 1\n %s %s %s 1" % (cross, error, cross) 2719 self.banner.write(output[:-3], close_tag=False) 2720 misc.gzip(output[:-3]) 2721 2722 2723 2724 fsock = lhe_parser.EventFile(output,'a') 2725 if 'nogrouping' in self.run_card['fo_lhe_postprocessing']: 2726 fsock.eventgroup = False 2727 else: 2728 fsock.eventgroup = True 2729 2730 if 'norandom' in self.run_card['fo_lhe_postprocessing']: 2731 for job in jobs: 2732 dirname = job['dirname'] 2733 #read last line 2734 lastline = misc.BackRead(pjoin(dirname,'events.lhe')).readline() 2735 nb_event, sumwgt, cross = [float(i) for i in lastline.split()] 2736 # get normalisation ratio 2737 ratio = cross/sumwgt 2738 lhe = lhe_parser.EventFile(pjoin(dirname,'events.lhe')) 2739 lhe.eventgroup = True # read the events by eventgroup 2740 for eventsgroup in lhe: 2741 neweventsgroup = [] 2742 for i,event in enumerate(eventsgroup): 2743 event.rescale_weights(ratio) 2744 if i>0 and 'noidentification' not in self.run_card['fo_lhe_postprocessing'] \ 2745 and event == neweventsgroup[-1]: 2746 neweventsgroup[-1].wgt += event.wgt 2747 for key in event.reweight_data: 2748 neweventsgroup[-1].reweight_data[key] += event.reweight_data[key] 2749 else: 2750 neweventsgroup.append(event) 2751 fsock.write_events(neweventsgroup) 2752 lhe.close() 2753 os.remove(pjoin(dirname,'events.lhe')) 2754 else: 2755 lhe = [] 2756 lenlhe = [] 2757 misc.sprint('need to combine %s event file' % len(jobs)) 2758 globallhe = lhe_parser.MultiEventFile() 2759 globallhe.eventgroup = True 2760 for job in jobs: 2761 dirname = job['dirname'] 2762 lastline = misc.BackRead(pjoin(dirname,'events.lhe')).readline() 2763 nb_event, sumwgt, cross = [float(i) for i in lastline.split()] 2764 lastlhe = globallhe.add(pjoin(dirname,'events.lhe'),cross, 0, cross, 2765 nb_event=int(nb_event), scale=cross/sumwgt) 2766 for eventsgroup in globallhe: 2767 neweventsgroup = [] 2768 for i,event in enumerate(eventsgroup): 2769 event.rescale_weights(event.sample_scale) 2770 if i>0 and 'noidentification' not in self.run_card['fo_lhe_postprocessing'] \ 2771 and event == neweventsgroup[-1]: 2772 neweventsgroup[-1].wgt += event.wgt 2773 for key in event.reweight_data: 2774 neweventsgroup[-1].reweight_data[key] += event.reweight_data[key] 2775 else: 2776 neweventsgroup.append(event) 2777 fsock.write_events(neweventsgroup) 2778 globallhe.close() 2779 
fsock.write('</LesHouchesEvents>\n') 2780 fsock.close() 2781 misc.sprint('combining lhe file done in ', time.time()-start) 2782 for job in jobs: 2783 dirname = job['dirname'] 2784 os.remove(pjoin(dirname,'events.lhe')) 2785 2786 2787 2788 misc.sprint('combining lhe file done in ', time.time()-start)
2789 2790 2791 2792 2793 2794
2795 - def combine_plots_HwU(self,jobs,out,normalisation=None):
2796 """Sums all the plots in the HwU format.""" 2797 logger.debug('Combining HwU plots.') 2798 2799 command = [sys.executable] 2800 command.append(pjoin(self.me_dir, 'bin', 'internal','histograms.py')) 2801 for job in jobs: 2802 if job['dirname'].endswith('.HwU'): 2803 command.append(job['dirname']) 2804 else: 2805 command.append(pjoin(job['dirname'],'MADatNLO.HwU')) 2806 command.append("--out="+out) 2807 command.append("--gnuplot") 2808 command.append("--band=[]") 2809 command.append("--lhapdf-config="+self.options['lhapdf']) 2810 if normalisation: 2811 command.append("--multiply="+(','.join([str(n) for n in normalisation]))) 2812 command.append("--sum") 2813 command.append("--keep_all_weights") 2814 command.append("--no_open") 2815 2816 p = misc.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, cwd=self.me_dir) 2817 2818 while p.poll() is None: 2819 line = p.stdout.readline().decode() 2820 #misc.sprint(type(line)) 2821 if any(t in line for t in ['INFO:','WARNING:','CRITICAL:','ERROR:','KEEP:']): 2822 print(line[:-1]) 2823 elif __debug__ and line: 2824 logger.debug(line[:-1])
2825 2826
2827 - def applgrid_combine(self,cross,error,jobs):
2828 """Combines the APPLgrids in all the SubProcess/P*/all_G*/ directories""" 2829 logger.debug('Combining APPLgrids \n') 2830 applcomb=pjoin(self.options['applgrid'].rstrip('applgrid-config'), 2831 'applgrid-combine') 2832 all_jobs=[] 2833 for job in jobs: 2834 all_jobs.append(job['dirname']) 2835 ngrids=len(all_jobs) 2836 nobs =len([name for name in os.listdir(all_jobs[0]) if name.endswith("_out.root")]) 2837 for obs in range(0,nobs): 2838 gdir = [pjoin(job,"grid_obs_"+str(obs)+"_out.root") for job in all_jobs] 2839 # combine APPLgrids from different channels for observable 'obs' 2840 if self.run_card["iappl"] == 1: 2841 misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",self.run_name, 2842 "aMCfast_obs_"+str(obs)+"_starting_grid.root"), '--optimise']+ gdir) 2843 elif self.run_card["iappl"] == 2: 2844 unc2_inv=pow(cross/error,2) 2845 unc2_inv_ngrids=pow(cross/error,2)*ngrids 2846 misc.call([applcomb,'-o', pjoin(self.me_dir,"Events", 2847 self.run_name,"aMCfast_obs_"+str(obs)+".root"),'-s', 2848 str(unc2_inv),'--weight',str(unc2_inv)]+ gdir) 2849 for job in all_jobs: 2850 os.remove(pjoin(job,"grid_obs_"+str(obs)+"_in.root")) 2851 else: 2852 raise aMCatNLOError('iappl parameter can only be 0, 1 or 2') 2853 # after combining, delete the original grids 2854 for ggdir in gdir: 2855 os.remove(ggdir)
2856 2857
2858 - def applgrid_distribute(self,options,mode,p_dirs):
2859 """Distributes the APPLgrids ready to be filled by a second run of the code""" 2860 # if no appl_start_grid argument given, guess it from the time stamps 2861 # of the starting grid files 2862 if not('appl_start_grid' in list(options.keys()) and options['appl_start_grid']): 2863 gfiles = misc.glob(pjoin('*', 'aMCfast_obs_0_starting_grid.root'), 2864 pjoin(self.me_dir,'Events')) 2865 2866 time_stamps={} 2867 for root_file in gfiles: 2868 time_stamps[root_file]=os.path.getmtime(root_file) 2869 options['appl_start_grid']= \ 2870 max(six.iterkeys(time_stamps), key=(lambda key: 2871 time_stamps[key])).split('/')[-2] 2872 logger.info('No --appl_start_grid option given. '+\ 2873 'Guessing that start grid from run "%s" should be used.' \ 2874 % options['appl_start_grid']) 2875 2876 if 'appl_start_grid' in list(options.keys()) and options['appl_start_grid']: 2877 self.appl_start_grid = options['appl_start_grid'] 2878 start_grid_dir=pjoin(self.me_dir, 'Events', self.appl_start_grid) 2879 # check that this dir exists and at least one grid file is there 2880 if not os.path.exists(pjoin(start_grid_dir, 2881 'aMCfast_obs_0_starting_grid.root')): 2882 raise self.InvalidCmd('APPLgrid file not found: %s' % \ 2883 pjoin(start_grid_dir,'aMCfast_obs_0_starting_grid.root')) 2884 else: 2885 all_grids=[pjoin(start_grid_dir,name) for name in os.listdir( \ 2886 start_grid_dir) if name.endswith("_starting_grid.root")] 2887 nobs =len(all_grids) 2888 gstring=" ".join(all_grids) 2889 if not hasattr(self, 'appl_start_grid') or not self.appl_start_grid: 2890 raise self.InvalidCmd('No APPLgrid name currently defined.'+ 2891 'Please provide this information.') 2892 #copy the grid to all relevant directories 2893 for pdir in p_dirs: 2894 g_dirs = [file for file in os.listdir(pjoin(self.me_dir, 2895 "SubProcesses",pdir)) if file.startswith(mode+'_G') and 2896 os.path.isdir(pjoin(self.me_dir,"SubProcesses",pdir, file))] 2897 for g_dir in g_dirs: 2898 for grid in all_grids: 2899 obs=grid.split('_')[-3] 2900 files.cp(grid,pjoin(self.me_dir,"SubProcesses",pdir,g_dir, 2901 'grid_obs_'+obs+'_in.root'))
2902 2903 2904 2905
2906 - def collect_log_files(self, jobs, integration_step):
2907 """collect the log files and put them in a single, html-friendly file 2908 inside the Events/run_.../ directory""" 2909 log_file = pjoin(self.me_dir, 'Events', self.run_name, 2910 'alllogs_%d.html' % integration_step) 2911 outfile = open(log_file, 'w') 2912 2913 content = '' 2914 content += '<HTML><BODY>\n<font face="courier" size=2>' 2915 for job in jobs: 2916 # put an anchor 2917 log=pjoin(job['dirname'],'log_MINT%s.txt' % integration_step) 2918 content += '<a name=%s></a>\n' % (os.path.dirname(log).replace( 2919 pjoin(self.me_dir,'SubProcesses'),'')) 2920 # and put some nice header 2921 content += '<font color="red">\n' 2922 content += '<br>LOG file for integration channel %s, %s <br>' % \ 2923 (os.path.dirname(log).replace(pjoin(self.me_dir, 2924 'SubProcesses'), ''), 2925 integration_step) 2926 content += '</font>\n' 2927 #then just flush the content of the small log inside the big log 2928 #the PRE tag prints everything verbatim 2929 with open(log) as l: 2930 content += '<PRE>\n' + l.read() + '\n</PRE>' 2931 content +='<br>\n' 2932 outfile.write(content) 2933 content='' 2934 2935 outfile.write('</font>\n</BODY></HTML>\n') 2936 outfile.close()
2937 2938
2939 - def finalise_run_FO(self,folder_name,jobs):
2940 """Combine the plots and put the res*.txt files in the Events/run.../ folder.""" 2941 # Copy the res_*.txt files to the Events/run* folder 2942 res_files = misc.glob('res_*.txt', pjoin(self.me_dir, 'SubProcesses')) 2943 for res_file in res_files: 2944 files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name)) 2945 # Collect the plots and put them in the Events/run* folder 2946 self.combine_plots_FO(folder_name,jobs) 2947 # If doing the applgrid-stuff, also combine those grids 2948 # and put those in the Events/run* folder 2949 if self.run_card['iappl'] != 0: 2950 cross=self.cross_sect_dict['xsect'] 2951 error=self.cross_sect_dict['errt'] 2952 self.applgrid_combine(cross,error,jobs)
2953 2954
2955 - def setup_cluster_or_multicore(self):
2956 """setup the number of cores for multicore, and the cluster-type for cluster runs""" 2957 if self.cluster_mode == 1: 2958 cluster_name = self.options['cluster_type'] 2959 try: 2960 self.cluster = cluster.from_name[cluster_name](**self.options) 2961 except KeyError: 2962 # Check if a plugin define this type of cluster 2963 # check for PLUGIN format 2964 cluster_class = misc.from_plugin_import(self.plugin_path, 2965 'new_cluster', cluster_name, 2966 info = 'cluster handling will be done with PLUGIN: %{plug}s' ) 2967 if cluster_class: 2968 self.cluster = cluster_class(**self.options) 2969 2970 if self.cluster_mode == 2: 2971 try: 2972 import multiprocessing 2973 if not self.nb_core: 2974 try: 2975 self.nb_core = int(self.options['nb_core']) 2976 except TypeError: 2977 self.nb_core = multiprocessing.cpu_count() 2978 logger.info('Using %d cores' % self.nb_core) 2979 except ImportError: 2980 self.nb_core = 1 2981 logger.warning('Impossible to detect the number of cores => Using One.\n'+ 2982 'Use set nb_core X in order to set this number and be able to'+ 2983 'run in multicore.') 2984 2985 self.cluster = cluster.MultiCore(**self.options)
2986 2987
2988 - def clean_previous_results(self,options,p_dirs,folder_name):
2989 """Clean previous results. 2990 o. If doing only the reweighting step, do not delete anything and return directlty. 2991 o. Always remove all the G*_* files (from split event generation). 2992 o. Remove the G* (or born_G* or all_G*) only when NOT doing only_generation or reweight_only.""" 2993 if options['reweightonly']: 2994 return 2995 if not options['only_generation']: 2996 self.update_status('Cleaning previous results', level=None) 2997 for dir in p_dirs: 2998 #find old folders to be removed 2999 for obj in folder_name: 3000 # list all the G* (or all_G* or born_G*) directories 3001 to_rm = [file for file in \ 3002 os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \ 3003 if file.startswith(obj[:-1]) and \ 3004 (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \ 3005 os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))] 3006 # list all the G*_* directories (from split event generation) 3007 to_always_rm = [file for file in \ 3008 os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \ 3009 if file.startswith(obj[:-1]) and 3010 '_' in file and not '_G' in file and \ 3011 (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \ 3012 os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))] 3013 3014 if not options['only_generation']: 3015 to_always_rm.extend(to_rm) 3016 if os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz')): 3017 to_always_rm.append(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz')) 3018 files.rm([pjoin(self.me_dir, 'SubProcesses', dir, d) for d in to_always_rm]) 3019 return
3020 3021
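# Illustrative sketch (not part of the original source): the G*_* selection
# above keeps names that start with the channel prefix and contain an
# underscore, while names containing '_G' (e.g. 'all_G1', 'born_G2') belong to
# other channel types and must not match. A stand-alone version, with the
# hypothetical helper split_generation_dirs, applied to sample names:
def split_generation_dirs(names, obj='G*'):
    """Return the G*_* style names (split event generation leftovers)."""
    prefix = obj[:-1]  # 'G*' -> 'G'
    return [n for n in names if n.startswith(prefix) and '_' in n and '_G' not in n]

# e.g. split_generation_dirs(['G1', 'G1_2', 'G1_3', 'all_G1', 'born_G2'])
# returns ['G1_2', 'G1_3']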
3022 - def print_summary(self, options, step, mode, scale_pdf_info=[], done=True):
3023 """print a summary of the results contained in self.cross_sect_dict. 3024 step corresponds to the mintMC step, if =2 (i.e. after event generation) 3025 some additional infos are printed""" 3026 # find process name 3027 proc_card_lines = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read().split('\n') 3028 process = '' 3029 for line in proc_card_lines: 3030 if line.startswith('generate') or line.startswith('add process'): 3031 process = process+(line.replace('generate ', '')).replace('add process ','')+' ; ' 3032 lpp = {0:'l', 1:'p', -1:'pbar', 2:'elastic photon from p', 3:'elastic photon from e'} 3033 if self.ninitial == 1: 3034 proc_info = '\n Process %s' % process[:-3] 3035 else: 3036 proc_info = '\n Process %s\n Run at %s-%s collider (%s + %s GeV)' % \ 3037 (process[:-3], lpp[self.run_card['lpp1']], lpp[self.run_card['lpp2']], 3038 self.run_card['ebeam1'], self.run_card['ebeam2']) 3039 3040 if self.ninitial == 1: 3041 self.cross_sect_dict['unit']='GeV' 3042 self.cross_sect_dict['xsec_string']='(Partial) decay width' 3043 self.cross_sect_dict['axsec_string']='(Partial) abs(decay width)' 3044 else: 3045 self.cross_sect_dict['unit']='pb' 3046 self.cross_sect_dict['xsec_string']='Total cross section' 3047 self.cross_sect_dict['axsec_string']='Total abs(cross section)' 3048 if self.run_card['event_norm'].lower()=='bias': 3049 self.cross_sect_dict['xsec_string']+=', incl. bias (DO NOT USE)' 3050 3051 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']: 3052 status = ['Determining the number of unweighted events per channel', 3053 'Updating the number of unweighted events per channel', 3054 'Summary:'] 3055 computed='(computed from LHE events)' 3056 elif mode in ['NLO', 'LO']: 3057 status = ['Results after grid setup:','Current results:', 3058 'Final results and run summary:'] 3059 computed='(computed from histogram information)' 3060 3061 if step != 2 and mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']: 3062 message = status[step] + '\n\n Intermediate results:' + \ 3063 ('\n Random seed: %(randinit)d' + \ 3064 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' + \ 3065 '\n %(axsec_string)s: %(xseca)8.3e +- %(erra)6.1e %(unit)s \n') \ 3066 % self.cross_sect_dict 3067 elif mode in ['NLO','LO'] and not done: 3068 if step == 0: 3069 message = '\n ' + status[0] + \ 3070 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \ 3071 self.cross_sect_dict 3072 else: 3073 message = '\n ' + status[1] + \ 3074 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \ 3075 self.cross_sect_dict 3076 3077 else: 3078 message = '\n --------------------------------------------------------------' 3079 message = message + \ 3080 '\n ' + status[2] + proc_info 3081 if mode not in ['LO', 'NLO']: 3082 message = message + \ 3083 '\n Number of events generated: %s' % self.run_card['nevents'] 3084 message = message + \ 3085 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \ 3086 self.cross_sect_dict 3087 message = message + \ 3088 '\n --------------------------------------------------------------' 3089 if scale_pdf_info and (self.run_card['nevents']>=10000 or mode in ['NLO', 'LO']): 3090 if scale_pdf_info[0]: 3091 # scale uncertainties 3092 message = message + '\n Scale variation %s:' % computed 3093 for s in scale_pdf_info[0]: 3094 if s['unc']: 3095 if self.run_card['ickkw'] != -1: 3096 message = message + \ 3097 ('\n Dynamical_scale_choice %(label)i (envelope of %(size)s values): '\ 3098 '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % s 3099 else: 3100 
message = message + \ 3101 ('\n Soft and hard scale dependence (added in quadrature): '\ 3102 '\n %(cen)8.3e pb +%(max_q)0.1f%% -%(min_q)0.1f%%') % s 3103 3104 else: 3105 message = message + \ 3106 ('\n Dynamical_scale_choice %(label)i: '\ 3107 '\n %(cen)8.3e pb') % s 3108 3109 if scale_pdf_info[1]: 3110 message = message + '\n PDF variation %s:' % computed 3111 for p in scale_pdf_info[1]: 3112 if p['unc']=='none': 3113 message = message + \ 3114 ('\n %(name)s (central value only): '\ 3115 '\n %(cen)8.3e pb') % p 3116 3117 elif p['unc']=='unknown': 3118 message = message + \ 3119 ('\n %(name)s (%(size)s members; combination method unknown): '\ 3120 '\n %(cen)8.3e pb') % p 3121 else: 3122 message = message + \ 3123 ('\n %(name)s (%(size)s members; using %(unc)s method): '\ 3124 '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % p 3125 # pdf uncertainties 3126 message = message + \ 3127 '\n --------------------------------------------------------------' 3128 3129 3130 if (mode in ['NLO', 'LO'] and not done) or \ 3131 (mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO'] and step!=2): 3132 logger.info(message+'\n') 3133 return 3134 3135 # Some advanced general statistics are shown in the debug message at the 3136 # end of the run 3137 # Make sure it never stops a run 3138 # Gather some basic statistics for the run and extracted from the log files. 3139 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']: 3140 log_GV_files = misc.glob(pjoin('P*','G*','log_MINT*.txt'), 3141 pjoin(self.me_dir, 'SubProcesses')) 3142 all_log_files = log_GV_files 3143 elif mode == 'NLO': 3144 log_GV_files = misc.glob(pjoin('P*','all_G*','log_MINT*.txt'), 3145 pjoin(self.me_dir, 'SubProcesses')) 3146 all_log_files = log_GV_files 3147 3148 elif mode == 'LO': 3149 log_GV_files = '' 3150 all_log_files = misc.glob(pjoin('P*','born_G*','log_MINT*.txt'), 3151 pjoin(self.me_dir, 'SubProcesses')) 3152 else: 3153 raise aMCatNLOError('Running mode %s not supported.'%mode) 3154 3155 try: 3156 message, debug_msg = \ 3157 self.compile_advanced_stats(log_GV_files, all_log_files, message) 3158 except Exception as e: 3159 debug_msg = 'Advanced statistics collection failed with error "%s"\n'%str(e) 3160 err_string = StringIO.StringIO() 3161 traceback.print_exc(limit=4, file=err_string) 3162 debug_msg += 'Please report this backtrace to a MadGraph developer:\n%s'\ 3163 %err_string.getvalue() 3164 3165 logger.debug(debug_msg+'\n') 3166 logger.info(message+'\n') 3167 3168 # Now copy relevant information in the Events/Run_<xxx> directory 3169 evt_path = pjoin(self.me_dir, 'Events', self.run_name) 3170 open(pjoin(evt_path, 'summary.txt'),'w').write(message+'\n') 3171 open(pjoin(evt_path, '.full_summary.txt'), 3172 'w').write(message+'\n\n'+debug_msg+'\n') 3173 3174 self.archive_files(evt_path,mode)
3175
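# Illustrative sketch (not part of the original source): print_summary builds
# its messages with '%(key)s'-style formatting driven by cross_sect_dict. The
# same pattern on made-up numbers:
cross_sect_dict = {'randinit': 42, 'unit': 'pb',
                   'xsec_string': 'Total cross section',
                   'xsect': 1.234e+02, 'errt': 5.6e-01}
summary = ('\n      Random seed: %(randinit)d'
           '\n      %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s') % cross_sect_dict
print(summary)
# ->   Random seed: 42
# ->   Total cross section: 1.234e+02 +- 5.6e-01 pb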
3176 - def archive_files(self, evt_path, mode):
3177 """ Copies in the Events/Run_<xxx> directory relevant files characterizing 3178 the run.""" 3179 3180 files_to_arxiv = [pjoin('Cards','param_card.dat'), 3181 pjoin('Cards','MadLoopParams.dat'), 3182 pjoin('Cards','FKS_params.dat'), 3183 pjoin('Cards','run_card.dat'), 3184 pjoin('Subprocesses','setscales.f'), 3185 pjoin('Subprocesses','cuts.f')] 3186 3187 if mode in ['NLO', 'LO']: 3188 files_to_arxiv.append(pjoin('Cards','FO_analyse_card.dat')) 3189 3190 if not os.path.exists(pjoin(evt_path,'RunMaterial')): 3191 os.mkdir(pjoin(evt_path,'RunMaterial')) 3192 3193 for path in files_to_arxiv: 3194 if os.path.isfile(pjoin(self.me_dir,path)): 3195 files.cp(pjoin(self.me_dir,path),pjoin(evt_path,'RunMaterial')) 3196 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'],cwd=evt_path) 3197 shutil.rmtree(pjoin(evt_path,'RunMaterial'))
3198
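# Illustrative sketch (not part of the original source): archive_files shells
# out to 'tar -czpf'. The same packing can be done with the tarfile module
# already imported by this file; a stand-alone equivalent, with the
# hypothetical helper pack_run_material and assuming evt_path contains a
# 'RunMaterial' directory:
import os
import shutil
import tarfile

def pack_run_material(evt_path):
    """Create RunMaterial.tar.gz from evt_path/RunMaterial and remove the dir."""
    archive = os.path.join(evt_path, 'RunMaterial.tar.gz')
    with tarfile.open(archive, 'w:gz') as tar:
        tar.add(os.path.join(evt_path, 'RunMaterial'), arcname='RunMaterial')
    shutil.rmtree(os.path.join(evt_path, 'RunMaterial'))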
3199 - def compile_advanced_stats(self,log_GV_files,all_log_files,message):
3200 """ This functions goes through the log files given in arguments and 3201 compiles statistics about MadLoop stability, virtual integration 3202 optimization and detection of potential error messages into a nice 3203 debug message to printed at the end of the run """ 3204 3205 def safe_float(str_float): 3206 try: 3207 return float(str_float) 3208 except ValueError: 3209 logger.debug('Could not convert the following float during'+ 3210 ' advanced statistics printout: %s'%str(str_float)) 3211 return -1.0
3212 3213 3214 # > UPS is a dictionary of tuples with this format {channel:[nPS,nUPS]} 3215 # > Errors is a list of tuples with this format (log_file,nErrors) 3216 stats = {'UPS':{}, 'Errors':[], 'virt_stats':{}, 'timings':{}} 3217 mint_search = re.compile(r"MINT(?P<ID>\d*).txt") 3218 3219 # ================================== 3220 # == MadLoop stability statistics == 3221 # ================================== 3222 3223 # Recuperate the fraction of unstable PS points found in the runs for 3224 # the virtuals 3225 UPS_stat_finder = re.compile( 3226 r"Satistics from MadLoop:.*"+\ 3227 r"Total points tried\:\s+(?P<ntot>\d+).*"+\ 3228 r"Stability unknown\:\s+(?P<nsun>\d+).*"+\ 3229 r"Stable PS point\:\s+(?P<nsps>\d+).*"+\ 3230 r"Unstable PS point \(and rescued\)\:\s+(?P<nups>\d+).*"+\ 3231 r"Exceptional PS point \(unstable and not rescued\)\:\s+(?P<neps>\d+).*"+\ 3232 r"Double precision used\:\s+(?P<nddp>\d+).*"+\ 3233 r"Quadruple precision used\:\s+(?P<nqdp>\d+).*"+\ 3234 r"Initialization phase\-space points\:\s+(?P<nini>\d+).*"+\ 3235 r"Unknown return code \(100\)\:\s+(?P<n100>\d+).*"+\ 3236 r"Unknown return code \(10\)\:\s+(?P<n10>\d+).*",re.DOTALL) 3237 3238 unit_code_meaning = { 0 : 'Not identified (CTModeRun != -1)', 3239 1 : 'CutTools (double precision)', 3240 2 : 'PJFry++', 3241 3 : 'IREGI', 3242 4 : 'Golem95', 3243 5 : 'Samurai', 3244 6 : 'Ninja (double precision)', 3245 7 : 'COLLIER', 3246 8 : 'Ninja (quadruple precision)', 3247 9 : 'CutTools (quadruple precision)'} 3248 RetUnit_finder =re.compile( 3249 r"#Unit\s*(?P<unit>\d+)\s*=\s*(?P<n_occurences>\d+)") 3250 #Unit 3251 3252 for gv_log in log_GV_files: 3253 channel_name = '/'.join(gv_log.split('/')[-5:-1]) 3254 log=open(gv_log,'r').read() 3255 UPS_stats = re.search(UPS_stat_finder,log) 3256 for retunit_stats in re.finditer(RetUnit_finder, log): 3257 if channel_name not in list(stats['UPS'].keys()): 3258 stats['UPS'][channel_name] = [0]*10+[[0]*10] 3259 stats['UPS'][channel_name][10][int(retunit_stats.group('unit'))] \ 3260 += int(retunit_stats.group('n_occurences')) 3261 if not UPS_stats is None: 3262 try: 3263 stats['UPS'][channel_name][0] += int(UPS_stats.group('ntot')) 3264 stats['UPS'][channel_name][1] += int(UPS_stats.group('nsun')) 3265 stats['UPS'][channel_name][2] += int(UPS_stats.group('nsps')) 3266 stats['UPS'][channel_name][3] += int(UPS_stats.group('nups')) 3267 stats['UPS'][channel_name][4] += int(UPS_stats.group('neps')) 3268 stats['UPS'][channel_name][5] += int(UPS_stats.group('nddp')) 3269 stats['UPS'][channel_name][6] += int(UPS_stats.group('nqdp')) 3270 stats['UPS'][channel_name][7] += int(UPS_stats.group('nini')) 3271 stats['UPS'][channel_name][8] += int(UPS_stats.group('n100')) 3272 stats['UPS'][channel_name][9] += int(UPS_stats.group('n10')) 3273 except KeyError: 3274 stats['UPS'][channel_name] = [int(UPS_stats.group('ntot')), 3275 int(UPS_stats.group('nsun')),int(UPS_stats.group('nsps')), 3276 int(UPS_stats.group('nups')),int(UPS_stats.group('neps')), 3277 int(UPS_stats.group('nddp')),int(UPS_stats.group('nqdp')), 3278 int(UPS_stats.group('nini')),int(UPS_stats.group('n100')), 3279 int(UPS_stats.group('n10')),[0]*10] 3280 debug_msg = "" 3281 if len(list(stats['UPS'].keys()))>0: 3282 nTotPS = sum([chan[0] for chan in stats['UPS'].values()],0) 3283 nTotsun = sum([chan[1] for chan in stats['UPS'].values()],0) 3284 nTotsps = sum([chan[2] for chan in stats['UPS'].values()],0) 3285 nTotups = sum([chan[3] for chan in stats['UPS'].values()],0) 3286 nToteps = sum([chan[4] for chan in stats['UPS'].values()],0) 3287 
nTotddp = sum([chan[5] for chan in stats['UPS'].values()],0) 3288 nTotqdp = sum([chan[6] for chan in stats['UPS'].values()],0) 3289 nTotini = sum([chan[7] for chan in stats['UPS'].values()],0) 3290 nTot100 = sum([chan[8] for chan in stats['UPS'].values()],0) 3291 nTot10 = sum([chan[9] for chan in stats['UPS'].values()],0) 3292 nTot1 = [sum([chan[10][i] for chan in stats['UPS'].values()],0) \ 3293 for i in range(10)] 3294 UPSfracs = [(chan[0] , 0.0 if chan[1][0]==0 else \ 3295 safe_float(chan[1][4]*100)/chan[1][0]) for chan in stats['UPS'].items()] 3296 maxUPS = max(UPSfracs, key = lambda w: w[1]) 3297 3298 tmpStr = "" 3299 tmpStr += '\n Number of loop ME evaluations (by MadLoop): %d'%nTotPS 3300 tmpStr += '\n Stability unknown: %d'%nTotsun 3301 tmpStr += '\n Stable PS point: %d'%nTotsps 3302 tmpStr += '\n Unstable PS point (and rescued): %d'%nTotups 3303 tmpStr += '\n Unstable PS point (and not rescued): %d'%nToteps 3304 tmpStr += '\n Only double precision used: %d'%nTotddp 3305 tmpStr += '\n Quadruple precision used: %d'%nTotqdp 3306 tmpStr += '\n Initialization phase-space points: %d'%nTotini 3307 tmpStr += '\n Reduction methods used:' 3308 red_methods = [(unit_code_meaning[i],nTot1[i]) for i in \ 3309 unit_code_meaning.keys() if nTot1[i]>0] 3310 for method, n in sorted(red_methods, key= lambda l: l[1], reverse=True): 3311 tmpStr += '\n > %s%s%s'%(method,' '*(33-len(method)),n) 3312 if nTot100 != 0: 3313 debug_msg += '\n Unknown return code (100): %d'%nTot100 3314 if nTot10 != 0: 3315 debug_msg += '\n Unknown return code (10): %d'%nTot10 3316 nUnknownUnit = sum(nTot1[u] for u in range(10) if u \ 3317 not in list(unit_code_meaning.keys())) 3318 if nUnknownUnit != 0: 3319 debug_msg += '\n Unknown return code (1): %d'\ 3320 %nUnknownUnit 3321 3322 if maxUPS[1]>0.001: 3323 message += tmpStr 3324 message += '\n Total number of unstable PS point detected:'+\ 3325 ' %d (%4.2f%%)'%(nToteps,safe_float(100*nToteps)/nTotPS) 3326 message += '\n Maximum fraction of UPS points in '+\ 3327 'channel %s (%4.2f%%)'%maxUPS 3328 message += '\n Please report this to the authors while '+\ 3329 'providing the file' 3330 message += '\n %s'%str(pjoin(os.path.dirname(self.me_dir), 3331 maxUPS[0],'UPS.log')) 3332 else: 3333 debug_msg += tmpStr 3334 3335 3336 # ==================================================== 3337 # == aMC@NLO virtual integration optimization stats == 3338 # ==================================================== 3339 3340 virt_tricks_finder = re.compile( 3341 r"accumulated results Virtual ratio\s*=\s*-?(?P<v_ratio>[\d\+-Eed\.]*)"+\ 3342 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_ratio_err>[\d\+-Eed\.]*)\s*\%\)\s*\n"+\ 3343 r"accumulated results ABS virtual\s*=\s*-?(?P<v_abs_contr>[\d\+-Eed\.]*)"+\ 3344 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_abs_contr_err>[\d\+-Eed\.]*)\s*\%\)") 3345 3346 virt_frac_finder = re.compile(r"update virtual fraction to\s*:\s*"+\ 3347 "-?(?P<v_frac>[\d\+-Eed\.]*)\s*-?(?P<v_average>[\d\+-Eed\.]*)") 3348 3349 channel_contr_finder = re.compile(r"Final result \[ABS\]\s*:\s*-?(?P<v_contr>[\d\+-Eed\.]*)") 3350 3351 channel_contr_list = {} 3352 for gv_log in log_GV_files: 3353 logfile=open(gv_log,'r') 3354 log = logfile.read() 3355 logfile.close() 3356 channel_name = '/'.join(gv_log.split('/')[-3:-1]) 3357 vf_stats = None 3358 for vf_stats in re.finditer(virt_frac_finder, log): 3359 pass 3360 if not vf_stats is None: 3361 v_frac = safe_float(vf_stats.group('v_frac')) 3362 v_average = safe_float(vf_stats.group('v_average')) 3363 try: 3364 if v_frac < 
stats['virt_stats']['v_frac_min'][0]: 3365 stats['virt_stats']['v_frac_min']=(v_frac,channel_name) 3366 if v_frac > stats['virt_stats']['v_frac_max'][0]: 3367 stats['virt_stats']['v_frac_max']=(v_frac,channel_name) 3368 stats['virt_stats']['v_frac_avg'][0] += v_frac 3369 stats['virt_stats']['v_frac_avg'][1] += 1 3370 except KeyError: 3371 stats['virt_stats']['v_frac_min']=[v_frac,channel_name] 3372 stats['virt_stats']['v_frac_max']=[v_frac,channel_name] 3373 stats['virt_stats']['v_frac_avg']=[v_frac,1] 3374 3375 3376 ccontr_stats = None 3377 for ccontr_stats in re.finditer(channel_contr_finder, log): 3378 pass 3379 if not ccontr_stats is None: 3380 contrib = safe_float(ccontr_stats.group('v_contr')) 3381 try: 3382 if contrib>channel_contr_list[channel_name]: 3383 channel_contr_list[channel_name]=contrib 3384 except KeyError: 3385 channel_contr_list[channel_name]=contrib 3386 3387 3388 # Now build the list of relevant virt log files to look for the maxima 3389 # of virt fractions and such. 3390 average_contrib = 0.0 3391 for value in channel_contr_list.values(): 3392 average_contrib += value 3393 if len(list(channel_contr_list.values())) !=0: 3394 average_contrib = average_contrib / len(list(channel_contr_list.values())) 3395 3396 relevant_log_GV_files = [] 3397 excluded_channels = set([]) 3398 all_channels = set([]) 3399 for log_file in log_GV_files: 3400 channel_name = '/'.join(log_file.split('/')[-3:-1]) 3401 all_channels.add(channel_name) 3402 try: 3403 if channel_contr_list[channel_name] > (0.1*average_contrib): 3404 relevant_log_GV_files.append(log_file) 3405 else: 3406 excluded_channels.add(channel_name) 3407 except KeyError: 3408 relevant_log_GV_files.append(log_file) 3409 3410 # Now we want to use the latest occurrence of accumulated result in the log file 3411 for gv_log in relevant_log_GV_files: 3412 logfile=open(gv_log,'r') 3413 log = logfile.read() 3414 logfile.close() 3415 channel_name = '/'.join(gv_log.split('/')[-3:-1]) 3416 3417 vt_stats = None 3418 for vt_stats in re.finditer(virt_tricks_finder, log): 3419 pass 3420 if not vt_stats is None: 3421 vt_stats_group = vt_stats.groupdict() 3422 v_ratio = safe_float(vt_stats.group('v_ratio')) 3423 v_ratio_err = safe_float(vt_stats.group('v_ratio_err')) 3424 v_contr = safe_float(vt_stats.group('v_abs_contr')) 3425 v_contr_err = safe_float(vt_stats.group('v_abs_contr_err')) 3426 try: 3427 if v_ratio < stats['virt_stats']['v_ratio_min'][0]: 3428 stats['virt_stats']['v_ratio_min']=(v_ratio,channel_name) 3429 if v_ratio > stats['virt_stats']['v_ratio_max'][0]: 3430 stats['virt_stats']['v_ratio_max']=(v_ratio,channel_name) 3431 if v_ratio_err < stats['virt_stats']['v_ratio_err_min'][0]: 3432 stats['virt_stats']['v_ratio_err_min']=(v_ratio_err,channel_name) 3433 if v_ratio_err > stats['virt_stats']['v_ratio_err_max'][0]: 3434 stats['virt_stats']['v_ratio_err_max']=(v_ratio_err,channel_name) 3435 if v_contr < stats['virt_stats']['v_contr_min'][0]: 3436 stats['virt_stats']['v_contr_min']=(v_contr,channel_name) 3437 if v_contr > stats['virt_stats']['v_contr_max'][0]: 3438 stats['virt_stats']['v_contr_max']=(v_contr,channel_name) 3439 if v_contr_err < stats['virt_stats']['v_contr_err_min'][0]: 3440 stats['virt_stats']['v_contr_err_min']=(v_contr_err,channel_name) 3441 if v_contr_err > stats['virt_stats']['v_contr_err_max'][0]: 3442 stats['virt_stats']['v_contr_err_max']=(v_contr_err,channel_name) 3443 except KeyError: 3444 stats['virt_stats']['v_ratio_min']=[v_ratio,channel_name] 3445 stats['virt_stats']['v_ratio_max']=[v_ratio,channel_name] 3446
stats['virt_stats']['v_ratio_err_min']=[v_ratio_err,channel_name] 3447 stats['virt_stats']['v_ratio_err_max']=[v_ratio_err,channel_name] 3448 stats['virt_stats']['v_contr_min']=[v_contr,channel_name] 3449 stats['virt_stats']['v_contr_max']=[v_contr,channel_name] 3450 stats['virt_stats']['v_contr_err_min']=[v_contr_err,channel_name] 3451 stats['virt_stats']['v_contr_err_max']=[v_contr_err,channel_name] 3452 3453 vf_stats = None 3454 for vf_stats in re.finditer(virt_frac_finder, log): 3455 pass 3456 if not vf_stats is None: 3457 v_frac = safe_float(vf_stats.group('v_frac')) 3458 v_average = safe_float(vf_stats.group('v_average')) 3459 try: 3460 if v_average < stats['virt_stats']['v_average_min'][0]: 3461 stats['virt_stats']['v_average_min']=(v_average,channel_name) 3462 if v_average > stats['virt_stats']['v_average_max'][0]: 3463 stats['virt_stats']['v_average_max']=(v_average,channel_name) 3464 stats['virt_stats']['v_average_avg'][0] += v_average 3465 stats['virt_stats']['v_average_avg'][1] += 1 3466 except KeyError: 3467 stats['virt_stats']['v_average_min']=[v_average,channel_name] 3468 stats['virt_stats']['v_average_max']=[v_average,channel_name] 3469 stats['virt_stats']['v_average_avg']=[v_average,1] 3470 3471 try: 3472 debug_msg += '\n\n Statistics on virtual integration optimization : ' 3473 3474 debug_msg += '\n Maximum virt fraction computed %.3f (%s)'\ 3475 %tuple(stats['virt_stats']['v_frac_max']) 3476 debug_msg += '\n Minimum virt fraction computed %.3f (%s)'\ 3477 %tuple(stats['virt_stats']['v_frac_min']) 3478 debug_msg += '\n Average virt fraction computed %.3f'\ 3479 %safe_float(stats['virt_stats']['v_frac_avg'][0]/safe_float(stats['virt_stats']['v_frac_avg'][1])) 3480 debug_msg += '\n Stats below exclude negligible channels (%d excluded out of %d)'%\ 3481 (len(excluded_channels),len(all_channels)) 3482 debug_msg += '\n Maximum virt ratio used %.2f (%s)'\ 3483 %tuple(stats['virt_stats']['v_average_max']) 3484 debug_msg += '\n Maximum virt ratio found from grids %.2f (%s)'\ 3485 %tuple(stats['virt_stats']['v_ratio_max']) 3486 tmpStr = '\n Max. MC err. on virt ratio from grids %.1f %% (%s)'\ 3487 %tuple(stats['virt_stats']['v_ratio_err_max']) 3488 debug_msg += tmpStr 3489 # After all it was decided that it is better not to alarm the user unecessarily 3490 # with such printout of the statistics. 3491 # if stats['virt_stats']['v_ratio_err_max'][0]>100.0 or \ 3492 # stats['virt_stats']['v_ratio_err_max'][0]>100.0: 3493 # message += "\n Suspiciously large MC error in :" 3494 # if stats['virt_stats']['v_ratio_err_max'][0]>100.0: 3495 # message += tmpStr 3496 3497 tmpStr = '\n Maximum MC error on abs virt %.1f %% (%s)'\ 3498 %tuple(stats['virt_stats']['v_contr_err_max']) 3499 debug_msg += tmpStr 3500 # if stats['virt_stats']['v_contr_err_max'][0]>100.0: 3501 # message += tmpStr 3502 3503 3504 except KeyError: 3505 debug_msg += '\n Could not find statistics on the integration optimization. 
' 3506 3507 # ======================================= 3508 # == aMC@NLO timing profile statistics == 3509 # ======================================= 3510 3511 timing_stat_finder = re.compile(r"\s*Time spent in\s*(?P<name>\w*)\s*:\s*"+\ 3512 "(?P<time>[\d\+-Eed\.]*)\s*") 3513 3514 for logf in log_GV_files: 3515 logfile=open(logf,'r') 3516 log = logfile.read() 3517 logfile.close() 3518 channel_name = '/'.join(logf.split('/')[-3:-1]) 3519 mint = re.search(mint_search,logf) 3520 if not mint is None: 3521 channel_name = channel_name+' [step %s]'%mint.group('ID') 3522 3523 for time_stats in re.finditer(timing_stat_finder, log): 3524 try: 3525 stats['timings'][time_stats.group('name')][channel_name]+=\ 3526 safe_float(time_stats.group('time')) 3527 except KeyError: 3528 if time_stats.group('name') not in list(stats['timings'].keys()): 3529 stats['timings'][time_stats.group('name')] = {} 3530 stats['timings'][time_stats.group('name')][channel_name]=\ 3531 safe_float(time_stats.group('time')) 3532 3533 # useful inline function 3534 Tstr = lambda secs: str(datetime.timedelta(seconds=int(secs))) 3535 try: 3536 totTimeList = [(time, chan) for chan, time in \ 3537 stats['timings']['Total'].items()] 3538 except KeyError: 3539 totTimeList = [] 3540 3541 totTimeList.sort() 3542 if len(totTimeList)>0: 3543 debug_msg += '\n\n Inclusive timing profile :' 3544 debug_msg += '\n Overall slowest channel %s (%s)'%\ 3545 (Tstr(totTimeList[-1][0]),totTimeList[-1][1]) 3546 debug_msg += '\n Average channel running time %s'%\ 3547 Tstr(sum([el[0] for el in totTimeList])/len(totTimeList)) 3548 debug_msg += '\n Aggregated total running time %s'%\ 3549 Tstr(sum([el[0] for el in totTimeList])) 3550 else: 3551 debug_msg += '\n\n Inclusive timing profile not available.' 3552 3553 sorted_keys = sorted(list(stats['timings'].keys()), key= lambda stat: \ 3554 sum(stats['timings'][stat].values()), reverse=True) 3555 for name in sorted_keys: 3556 if name=='Total': 3557 continue 3558 if sum(stats['timings'][name].values())<=0.0: 3559 debug_msg += '\n Zero time record for %s.'%name 3560 continue 3561 try: 3562 TimeList = [((100.0*time/stats['timings']['Total'][chan]), 3563 chan) for chan, time in stats['timings'][name].items()] 3564 except (KeyError, ZeroDivisionError): 3565 debug_msg += '\n\n Timing profile for %s unavailable.'%name 3566 continue 3567 TimeList.sort() 3568 debug_msg += '\n Timing profile for <%s> :'%name 3569 try: 3570 debug_msg += '\n Overall fraction of time %.3f %%'%\ 3571 safe_float((100.0*(sum(stats['timings'][name].values())/ 3572 sum(stats['timings']['Total'].values())))) 3573 except (KeyError, ZeroDivisionError): 3574 debug_msg += '\n Overall fraction of time unavailable.' 3575 debug_msg += '\n Largest fraction of time %.3f %% (%s)'%\ 3576 (TimeList[-1][0],TimeList[-1][1]) 3577 debug_msg += '\n Smallest fraction of time %.3f %% (%s)'%\ 3578 (TimeList[0][0],TimeList[0][1]) 3579 3580 # ============================== 3581 # == log file error detection == 3582 # ============================== 3583 3584 # Find the number of potential errors found in all log files 3585 # This re is a simple match on a case-insensitive 'error' but there is 3586 # also some veto added for excluding the sentence 3587 # "See Section 6 of paper for error calculation." 3588 # which appears in the header of lhapdf in the logs.
3589 err_finder = re.compile(\ 3590 r"(?<!of\spaper\sfor\s)\bERROR\b(?!\scalculation\.)",re.IGNORECASE) 3591 for log in all_log_files: 3592 logfile=open(log,'r') 3593 nErrors = len(re.findall(err_finder, logfile.read())) 3594 logfile.close() 3595 if nErrors != 0: 3596 stats['Errors'].append((str(log),nErrors)) 3597 3598 nErrors = sum([err[1] for err in stats['Errors']],0) 3599 if nErrors != 0: 3600 debug_msg += '\n WARNING:: A total of %d error%s ha%s been '\ 3601 %(nErrors,'s' if nErrors>1 else '','ve' if nErrors>1 else 's')+\ 3602 'found in the following log file%s:'%('s' if \ 3603 len(stats['Errors'])>1 else '') 3604 for error in stats['Errors'][:3]: 3605 log_name = '/'.join(error[0].split('/')[-5:]) 3606 debug_msg += '\n > %d error%s in %s'%\ 3607 (error[1],'s' if error[1]>1 else '',log_name) 3608 if len(stats['Errors'])>3: 3609 nRemainingErrors = sum([err[1] for err in stats['Errors']][3:],0) 3610 nRemainingLogs = len(stats['Errors'])-3 3611 debug_msg += '\n And another %d error%s in %d other log file%s'%\ 3612 (nRemainingErrors, 's' if nRemainingErrors>1 else '', 3613 nRemainingLogs, 's ' if nRemainingLogs>1 else '') 3614 3615 return message, debug_msg 3616 3617
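# Illustrative sketch (not part of the original source): the err_finder regex
# above counts case-insensitive 'error' tokens while vetoing the LHAPDF header
# sentence via a lookbehind/lookahead pair. A quick self-contained check:
import re

err_finder = re.compile(
    r"(?<!of\spaper\sfor\s)\bERROR\b(?!\scalculation\.)", re.IGNORECASE)

sample_log = ("See Section 6 of paper for error calculation.\n"
              "ERROR: could not open grid file\n"
              "another error happened here\n")
print(len(re.findall(err_finder, sample_log)))  # -> 2 (the LHAPDF line is vetoed)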
3618 - def reweight_and_collect_events(self, options, mode, nevents, event_norm):
3619 """this function calls the reweighting routines and creates the event file in the 3620 Event dir. Return the name of the event file created 3621 """ 3622 scale_pdf_info=[] 3623 if any(self.run_card['reweight_scale']) or any(self.run_card['reweight_PDF']) or \ 3624 len(self.run_card['dynamical_scale_choice']) > 1 or len(self.run_card['lhaid']) > 1\ 3625 or self.run_card['store_rwgt_info']: 3626 scale_pdf_info = self.run_reweight(options['reweightonly']) 3627 self.update_status('Collecting events', level='parton', update_results=True) 3628 misc.compile(['collect_events'], 3629 cwd=pjoin(self.me_dir, 'SubProcesses'), nocompile=options['nocompile']) 3630 p = misc.Popen(['./collect_events'], cwd=pjoin(self.me_dir, 'SubProcesses'), 3631 stdin=subprocess.PIPE, 3632 stdout=open(pjoin(self.me_dir, 'collect_events.log'), 'w')) 3633 if event_norm.lower() == 'sum': 3634 p.communicate(input = '1\n'.encode()) 3635 elif event_norm.lower() == 'unity': 3636 p.communicate(input = '3\n'.encode()) 3637 elif event_norm.lower() == 'bias': 3638 p.communicate(input = '0\n'.encode()) 3639 else: 3640 p.communicate(input = '2\n'.encode()) 3641 3642 #get filename from collect events 3643 filename = open(pjoin(self.me_dir, 'collect_events.log')).read().split()[-1] 3644 3645 if not os.path.exists(pjoin(self.me_dir, 'SubProcesses', filename)): 3646 raise aMCatNLOError('An error occurred during event generation. ' + \ 3647 'The event file has not been created. Check collect_events.log') 3648 evt_file = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz') 3649 misc.gzip(pjoin(self.me_dir, 'SubProcesses', filename), stdout=evt_file) 3650 if not options['reweightonly']: 3651 self.print_summary(options, 2, mode, scale_pdf_info) 3652 res_files = misc.glob('res*.txt', pjoin(self.me_dir, 'SubProcesses')) 3653 for res_file in res_files: 3654 files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name)) 3655 3656 logger.info('The %s file has been generated.\n' % (evt_file)) 3657 self.results.add_detail('nb_event', nevents) 3658 self.update_status('Events generated', level='parton', update_results=True) 3659 return evt_file[:-3]
3660 3661
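# Illustrative sketch (not part of the original source): the event_norm switch
# above just writes a one-digit menu choice to collect_events' stdin. The
# mapping, plus the communicate() pattern demonstrated against /bin/cat as a
# hypothetical stand-in for the Fortran executable (POSIX assumed); the else
# branch of the original ('2') covers everything else, e.g. 'average':
import subprocess

EVENT_NORM_CHOICE = {'bias': '0', 'sum': '1', 'average': '2', 'unity': '3'}

def choice_for(event_norm):
    """Return the menu digit fed to collect_events ('2' is the default)."""
    return EVENT_NORM_CHOICE.get(event_norm.lower(), '2')

p = subprocess.Popen(['cat'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
out, _ = p.communicate(input=(choice_for('sum') + '\n').encode())
print(out)  # -> b'1\n'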
3662 - def run_mcatnlo(self, evt_file, options):
3663 """runs mcatnlo on the generated event file, to produce showered-events 3664 """ 3665 logger.info('Preparing MCatNLO run') 3666 try: 3667 misc.gunzip(evt_file) 3668 except Exception: 3669 pass 3670 3671 self.banner = banner_mod.Banner(evt_file) 3672 shower = self.banner.get_detail('run_card', 'parton_shower').upper() 3673 3674 #check that the number of split event files divides the number of 3675 # events, otherwise set it to 1 3676 if int(self.banner.get_detail('run_card', 'nevents') / \ 3677 self.shower_card['nsplit_jobs']) * self.shower_card['nsplit_jobs'] \ 3678 != self.banner.get_detail('run_card', 'nevents'): 3679 logger.warning(\ 3680 'nsplit_jobs in the shower card is not a divisor of the number of events.\n' + \ 3681 'Setting it to 1.') 3682 self.shower_card['nsplit_jobs'] = 1 3683 3684 # don't split jobs if the user asks to shower only a part of the events 3685 if self.shower_card['nevents'] > 0 and \ 3686 self.shower_card['nevents'] < self.banner.get_detail('run_card', 'nevents') and \ 3687 self.shower_card['nsplit_jobs'] != 1: 3688 logger.warning(\ 3689 'Only a part of the events will be showered.\n' + \ 3690 'Setting nsplit_jobs in the shower_card to 1.') 3691 self.shower_card['nsplit_jobs'] = 1 3692 3693 self.banner_to_mcatnlo(evt_file) 3694 3695 # if fastjet has to be linked (in extralibs) then 3696 # add lib /include dirs for fastjet if fastjet-config is present on the 3697 # system, otherwise add fjcore to the files to combine 3698 if 'fastjet' in self.shower_card['extralibs']: 3699 #first, check that stdc++ is also linked 3700 if not 'stdc++' in self.shower_card['extralibs']: 3701 logger.warning('Linking FastJet: adding stdc++ to EXTRALIBS') 3702 self.shower_card['extralibs'] += ' stdc++' 3703 # then check if options[fastjet] corresponds to a valid fj installation 3704 try: 3705 #this is for a complete fj installation 3706 p = subprocess.Popen([self.options['fastjet'], '--prefix'], \ 3707 stdout=subprocess.PIPE, stderr=subprocess.PIPE) 3708 output, error = p.communicate() 3709 #remove the line break from output (last character) 3710 output = output.decode()[:-1] 3711 # add lib/include paths 3712 if not pjoin(output, 'lib') in self.shower_card['extrapaths']: 3713 logger.warning('Linking FastJet: updating EXTRAPATHS') 3714 self.shower_card['extrapaths'] += ' ' + pjoin(output, 'lib') 3715 if not pjoin(output, 'include') in self.shower_card['includepaths']: 3716 logger.warning('Linking FastJet: updating INCLUDEPATHS') 3717 self.shower_card['includepaths'] += ' ' + pjoin(output, 'include') 3718 # to be changed in the fortran wrapper 3719 include_line = '#include "fastjet/ClusterSequence.hh"//INCLUDE_FJ' 3720 namespace_line = 'namespace fj = fastjet;//NAMESPACE_FJ' 3721 except Exception: 3722 logger.warning('Linking FastJet: using fjcore') 3723 # this is for FJcore, so no FJ library has to be linked 3724 self.shower_card['extralibs'] = self.shower_card['extralibs'].replace('fastjet', '') 3725 if not 'fjcore.o' in self.shower_card['analyse']: 3726 self.shower_card['analyse'] += ' fjcore.o' 3727 # to be changed in the fortran wrapper 3728 include_line = '#include "fjcore.hh"//INCLUDE_FJ' 3729 namespace_line = 'namespace fj = fjcore;//NAMESPACE_FJ' 3730 # change the fortran wrapper with the correct namespaces/include 3731 fjwrapper_lines = open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc')).read().split('\n') 3732 for line in fjwrapper_lines: 3733 if '//INCLUDE_FJ' in line: 3734 fjwrapper_lines[fjwrapper_lines.index(line)] = include_line 3735 if 
'//NAMESPACE_FJ' in line: 3736 fjwrapper_lines[fjwrapper_lines.index(line)] = namespace_line 3737 with open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc'), 'w') as fsock: 3738 fsock.write('\n'.join(fjwrapper_lines) + '\n') 3739 3740 extrapaths = self.shower_card['extrapaths'].split() 3741 3742 # check that the path needed by HW++ and PY8 are set if one uses these shower 3743 if shower in ['HERWIGPP', 'PYTHIA8']: 3744 path_dict = {'HERWIGPP': ['hepmc_path', 3745 'thepeg_path', 3746 'hwpp_path'], 3747 'PYTHIA8': ['pythia8_path']} 3748 3749 if not all([self.options[ppath] and os.path.exists(self.options[ppath]) for ppath in path_dict[shower]]): 3750 raise aMCatNLOError('Some paths are missing or invalid in the configuration file.\n' + \ 3751 ('Please make sure you have set these variables: %s' % ', '.join(path_dict[shower]))) 3752 3753 if shower == 'HERWIGPP': 3754 extrapaths.append(pjoin(self.options['hepmc_path'], 'lib')) 3755 self.shower_card['extrapaths'] += ' %s' % pjoin(self.options['hepmc_path'], 'lib') 3756 3757 # add the HEPMC path of the pythia8 installation 3758 if shower == 'PYTHIA8': 3759 hepmc = subprocess.Popen([pjoin(self.options['pythia8_path'], 'bin', 'pythia8-config'), '--hepmc2'], 3760 stdout = subprocess.PIPE).stdout.read().decode().strip() 3761 #this gives all the flags, i.e. 3762 #-I/Path/to/HepMC/include -L/Path/to/HepMC/lib -lHepMC 3763 # we just need the path to the HepMC libraries 3764 extrapaths.append(hepmc.split()[1].replace('-L', '')) 3765 3766 if shower == 'PYTHIA8' and not os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')): 3767 extrapaths.append(pjoin(self.options['pythia8_path'], 'lib')) 3768 3769 # set the PATH for the dynamic libraries 3770 if sys.platform == 'darwin': 3771 ld_library_path = 'DYLD_LIBRARY_PATH' 3772 else: 3773 ld_library_path = 'LD_LIBRARY_PATH' 3774 if ld_library_path in list(os.environ.keys()): 3775 paths = os.environ[ld_library_path] 3776 else: 3777 paths = '' 3778 paths += ':' + ':'.join(extrapaths) 3779 os.putenv(ld_library_path, paths) 3780 3781 shower_card_path = pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat') 3782 self.shower_card.write_card(shower, shower_card_path) 3783 3784 # overwrite if shower_card_set.dat exists in MCatNLO 3785 if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat')): 3786 files.mv(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat'), 3787 pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat')) 3788 3789 mcatnlo_log = pjoin(self.me_dir, 'mcatnlo.log') 3790 self.update_status('Compiling MCatNLO for %s...' 
% shower, level='shower') 3791 3792 3793 # libdl may be needded for pythia 82xx 3794 #if shower == 'PYTHIA8' and not \ 3795 # os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')) and \ 3796 # 'dl' not in self.shower_card['extralibs'].split(): 3797 # # 'dl' has to be linked with the extralibs 3798 # self.shower_card['extralibs'] += ' dl' 3799 # logger.warning("'dl' was added to extralibs from the shower_card.dat.\n" + \ 3800 # "It is needed for the correct running of PY8.2xx.\n" + \ 3801 # "If this library cannot be found on your system, a crash will occur.") 3802 3803 misc.call(['./MCatNLO_MadFKS.inputs'], stdout=open(mcatnlo_log, 'w'), 3804 stderr=open(mcatnlo_log, 'w'), 3805 cwd=pjoin(self.me_dir, 'MCatNLO'), 3806 close_fds=True) 3807 3808 exe = 'MCATNLO_%s_EXE' % shower 3809 if not os.path.exists(pjoin(self.me_dir, 'MCatNLO', exe)) and \ 3810 not os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe')): 3811 print(open(mcatnlo_log).read()) 3812 raise aMCatNLOError('Compilation failed, check %s for details' % mcatnlo_log) 3813 logger.info(' ... done') 3814 3815 # create an empty dir where to run 3816 count = 1 3817 while os.path.isdir(pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \ 3818 (shower, count))): 3819 count += 1 3820 rundir = pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \ 3821 (shower, count)) 3822 os.mkdir(rundir) 3823 files.cp(shower_card_path, rundir) 3824 3825 #look for the event files (don't resplit if one asks for the 3826 # same number of event files as in the previous run) 3827 event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name)) 3828 if max(len(event_files), 1) != self.shower_card['nsplit_jobs']: 3829 logger.info('Cleaning old files and splitting the event file...') 3830 #clean the old files 3831 files.rm([f for f in event_files if 'events.lhe' not in f]) 3832 if self.shower_card['nsplit_jobs'] > 1: 3833 misc.compile(['split_events'], cwd = pjoin(self.me_dir, 'Utilities'), nocompile=options['nocompile']) 3834 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'split_events')], 3835 stdin=subprocess.PIPE, 3836 stdout=open(pjoin(self.me_dir, 'Events', self.run_name, 'split_events.log'), 'w'), 3837 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 3838 p.communicate(input = ('events.lhe\n%d\n' % self.shower_card['nsplit_jobs']).encode()) 3839 logger.info('Splitting done.') 3840 event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name)) 3841 3842 event_files.sort() 3843 3844 self.update_status('Showering events...', level='shower') 3845 logger.info('(Running in %s)' % rundir) 3846 if shower != 'PYTHIA8': 3847 files.mv(pjoin(self.me_dir, 'MCatNLO', exe), rundir) 3848 files.mv(pjoin(self.me_dir, 'MCatNLO', 'MCATNLO_%s_input' % shower), rundir) 3849 else: 3850 # special treatment for pythia8 3851 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.cmd'), rundir) 3852 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe'), rundir) 3853 if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')): # this is PY8.1xxx 3854 files.ln(pjoin(self.options['pythia8_path'], 'examples', 'config.sh'), rundir) 3855 files.ln(pjoin(self.options['pythia8_path'], 'xmldoc'), rundir) 3856 else: # this is PY8.2xxx 3857 files.ln(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'), rundir) 3858 #link the hwpp exe in the rundir 3859 if shower == 'HERWIGPP': 3860 try: 3861 if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++')): 3862 files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++'), rundir) 3863 if 
os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig')): 3864 files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig'), rundir) 3865 except Exception: 3866 raise aMCatNLOError('The Herwig++ path set in the configuration file is not valid.') 3867 3868 if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so')): 3869 files.cp(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so'), rundir) 3870 3871 files.ln(evt_file, rundir, 'events.lhe') 3872 for i, f in enumerate(event_files): 3873 files.ln(f, rundir,'events_%d.lhe' % (i + 1)) 3874 3875 if not self.shower_card['analyse']: 3876 # an hep/hepmc file as output 3877 out_id = 'HEP' 3878 else: 3879 # one or more .top file(s) as output 3880 if "HwU" in self.shower_card['analyse']: 3881 out_id = 'HWU' 3882 else: 3883 out_id = 'TOP' 3884 3885 # write the executable 3886 with open(pjoin(rundir, 'shower.sh'), 'w') as fsock: 3887 # set the PATH for the dynamic libraries 3888 if sys.platform == 'darwin': 3889 ld_library_path = 'DYLD_LIBRARY_PATH' 3890 else: 3891 ld_library_path = 'LD_LIBRARY_PATH' 3892 fsock.write(open(pjoin(self.me_dir, 'MCatNLO', 'shower_template.sh')).read() \ 3893 % {'ld_library_path': ld_library_path, 3894 'extralibs': ':'.join(extrapaths)}) 3895 subprocess.call(['chmod', '+x', pjoin(rundir, 'shower.sh')]) 3896 3897 if event_files: 3898 arg_list = [[shower, out_id, self.run_name, '%d' % (i + 1)] \ 3899 for i in range(len(event_files))] 3900 else: 3901 arg_list = [[shower, out_id, self.run_name]] 3902 3903 self.run_all({rundir: 'shower.sh'}, arg_list, 'shower') 3904 self.njobs = 1 3905 self.wait_for_complete('shower') 3906 3907 # now collect the results 3908 message = '' 3909 warning = '' 3910 to_gzip = [evt_file] 3911 if out_id == 'HEP': 3912 #copy the showered stdhep/hepmc file back in events 3913 if shower in ['PYTHIA8', 'HERWIGPP']: 3914 hep_format = 'HEPMC' 3915 ext = 'hepmc' 3916 else: 3917 hep_format = 'StdHEP' 3918 ext = 'hep' 3919 3920 hep_file = '%s_%s_0.%s.gz' % \ 3921 (pjoin(os.path.dirname(evt_file), 'events'), shower, ext) 3922 count = 0 3923 3924 # find the first available name for the output: 3925 # check existing results with or without event splitting 3926 while os.path.exists(hep_file) or \ 3927 os.path.exists(hep_file.replace('.%s.gz' % ext, '__1.%s.gz' % ext)) : 3928 count +=1 3929 hep_file = '%s_%s_%d.%s.gz' % \ 3930 (pjoin(os.path.dirname(evt_file), 'events'), shower, count, ext) 3931 3932 try: 3933 if self.shower_card['nsplit_jobs'] == 1: 3934 files.mv(os.path.join(rundir, 'events.%s.gz' % ext), hep_file) 3935 message = ('The file %s has been generated. 
\nIt contains showered' + \ 3936 ' and hadronized events in the %s format obtained' + \ 3937 ' showering the parton-level event file %s.gz with %s') % \ 3938 (hep_file, hep_format, evt_file, shower) 3939 else: 3940 hep_list = [] 3941 for i in range(self.shower_card['nsplit_jobs']): 3942 hep_list.append(hep_file.replace('.%s.gz' % ext, '__%d.%s.gz' % (i + 1, ext))) 3943 files.mv(os.path.join(rundir, 'events_%d.%s.gz' % (i + 1, ext)), hep_list[-1]) 3944 message = ('The following files have been generated:\n %s\nThey contain showered' + \ 3945 ' and hadronized events in the %s format obtained' + \ 3946 ' showering the (split) parton-level event file %s.gz with %s') % \ 3947 ('\n '.join(hep_list), hep_format, evt_file, shower) 3948 3949 except (OSError, IOError): 3950 raise aMCatNLOError('No file has been generated, an error occurred.'+\ 3951 ' More information in %s' % pjoin(os.getcwd(), 'amcatnlo_run.log')) 3952 3953 # run the plot creation in a secure way 3954 if hep_format == 'StdHEP': 3955 try: 3956 self.do_plot('%s -f' % self.run_name) 3957 except Exception as error: 3958 logger.info("Failed to create the plot. Continuing...") 3959 pass 3960 3961 elif out_id == 'TOP' or out_id == 'HWU': 3962 #copy the topdrawer or HwU file(s) back in events 3963 if out_id=='TOP': 3964 ext='top' 3965 elif out_id=='HWU': 3966 ext='HwU' 3967 topfiles = [] 3968 top_tars = [tarfile.TarFile(f) for f in misc.glob('histfile*.tar', rundir)] 3969 for top_tar in top_tars: 3970 topfiles.extend(top_tar.getnames()) 3971 3972 # safety check 3973 if len(top_tars) != self.shower_card['nsplit_jobs']: 3974 raise aMCatNLOError('%d job(s) expected, %d file(s) found' % \ 3975 (self.shower_card['nsplit_jobs'], len(top_tars))) 3976 3977 # find the first available name for the output: 3978 # check existing results with or without event splitting 3979 filename = 'plot_%s_%d_' % (shower, 1) 3980 count = 1 3981 while os.path.exists(pjoin(self.me_dir, 'Events', 3982 self.run_name, '%s0.%s' % (filename,ext))) or \ 3983 os.path.exists(pjoin(self.me_dir, 'Events', 3984 self.run_name, '%s0__1.%s' % (filename,ext))): 3985 count += 1 3986 filename = 'plot_%s_%d_' % (shower, count) 3987 3988 if out_id=='TOP': 3989 hist_format='TopDrawer format' 3990 elif out_id=='HWU': 3991 hist_format='HwU and GnuPlot formats' 3992 3993 if not topfiles: 3994 # if no topfiles are found just warn the user 3995 warning = 'No .top file has been generated.
For the results of your ' +\ 3996 'run, please check inside %s' % rundir 3997 elif self.shower_card['nsplit_jobs'] == 1: 3998 # only one job for the shower 3999 top_tars[0].extractall(path = rundir) 4000 plotfiles = [] 4001 for i, file in enumerate(topfiles): 4002 if out_id=='TOP': 4003 plotfile = pjoin(self.me_dir, 'Events', self.run_name, 4004 '%s%d.top' % (filename, i)) 4005 files.mv(pjoin(rundir, file), plotfile) 4006 elif out_id=='HWU': 4007 out=pjoin(self.me_dir,'Events', 4008 self.run_name,'%s%d'% (filename,i)) 4009 histos=[{'dirname':pjoin(rundir,file)}] 4010 self.combine_plots_HwU(histos,out) 4011 try: 4012 misc.call(['gnuplot','%s%d.gnuplot' % (filename,i)],\ 4013 stdout=os.open(os.devnull, os.O_RDWR),\ 4014 stderr=os.open(os.devnull, os.O_RDWR),\ 4015 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 4016 except Exception: 4017 pass 4018 plotfile=pjoin(self.me_dir,'Events',self.run_name, 4019 '%s%d.HwU'% (filename,i)) 4020 plotfiles.append(plotfile) 4021 4022 ffiles = 'files' 4023 have = 'have' 4024 if len(plotfiles) == 1: 4025 ffiles = 'file' 4026 have = 'has' 4027 4028 message = ('The %s %s %s been generated, with histograms in the' + \ 4029 ' %s, obtained by showering the parton-level' + \ 4030 ' file %s.gz with %s.') % (ffiles, ', '.join(plotfiles), have, \ 4031 hist_format, evt_file, shower) 4032 else: 4033 # many jobs for the shower have been run 4034 topfiles_set = set(topfiles) 4035 plotfiles = [] 4036 for j, top_tar in enumerate(top_tars): 4037 top_tar.extractall(path = rundir) 4038 for i, file in enumerate(topfiles_set): 4039 plotfile = pjoin(self.me_dir, 'Events', self.run_name, 4040 '%s%d__%d.%s' % (filename, i, j + 1,ext)) 4041 files.mv(pjoin(rundir, file), plotfile) 4042 plotfiles.append(plotfile) 4043 4044 # check if the user asked to combine the .top into a single file 4045 if self.shower_card['combine_td']: 4046 misc.compile(['sum_plots'], cwd = pjoin(self.me_dir, 'Utilities')) 4047 4048 if self.banner.get('run_card', 'event_norm').lower() == 'sum': 4049 norm = 1. 
4050 else: 4051 norm = 1./float(self.shower_card['nsplit_jobs']) 4052 4053 plotfiles2 = [] 4054 for i, file in enumerate(topfiles_set): 4055 filelist = ['%s%d__%d.%s' % (filename, i, j + 1,ext) \ 4056 for j in range(self.shower_card['nsplit_jobs'])] 4057 if out_id=='TOP': 4058 infile="%d\n%s\n%s\n" % \ 4059 (self.shower_card['nsplit_jobs'], 4060 '\n'.join(filelist), 4061 '\n'.join([str(norm)] * self.shower_card['nsplit_jobs'])) 4062 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'sum_plots')], 4063 stdin=subprocess.PIPE, 4064 stdout=os.open(os.devnull, os.O_RDWR), 4065 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 4066 p.communicate(input = infile.encode()) 4067 files.mv(pjoin(self.me_dir, 'Events', self.run_name, 'sum.top'), 4068 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.top' % (filename, i))) 4069 elif out_id=='HWU': 4070 out=pjoin(self.me_dir,'Events', 4071 self.run_name,'%s%d'% (filename,i)) 4072 histos=[] 4073 norms=[] 4074 for plotfile in plotfiles: 4075 histos.append({'dirname':plotfile}) 4076 norms.append(norm) 4077 self.combine_plots_HwU(histos,out,normalisation=norms) 4078 try: 4079 misc.call(['gnuplot','%s%d.gnuplot' % (filename, i)],\ 4080 stdout=os.open(os.devnull, os.O_RDWR),\ 4081 stderr=os.open(os.devnull, os.O_RDWR),\ 4082 cwd=pjoin(self.me_dir, 'Events',self.run_name)) 4083 except Exception: 4084 pass 4085 4086 plotfiles2.append(pjoin(self.me_dir, 'Events', self.run_name, '%s%d.%s' % (filename, i,ext))) 4087 tar = tarfile.open( 4088 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.tar.gz' % (filename, i)), 'w:gz') 4089 for f in filelist: 4090 tar.add(pjoin(self.me_dir, 'Events', self.run_name, f), arcname=f) 4091 files.rm([pjoin(self.me_dir, 'Events', self.run_name, f) for f in filelist]) 4092 4093 tar.close() 4094 4095 ffiles = 'files' 4096 have = 'have' 4097 if len(plotfiles2) == 1: 4098 ffiles = 'file' 4099 have = 'has' 4100 4101 message = ('The %s %s %s been generated, with histograms in the' + \ 4102 ' %s, obtained by showering the parton-level' + \ 4103 ' file %s.gz with %s.\n' + \ 4104 'The files from the different shower ' + \ 4105 'jobs (before combining them) can be found inside %s.') % \ 4106 (ffiles, ', '.join(plotfiles2), have, hist_format,\ 4107 evt_file, shower, 4108 ', '.join([f.replace('%s' % ext, 'tar.gz') for f in plotfiles2])) 4109 4110 else: 4111 message = ('The following files have been generated:\n %s\n' + \ 4112 'They contain histograms in the' + \ 4113 ' %s, obtained by showering the parton-level' + \ 4114 ' file %s.gz with %s.') % ('\n '.join(plotfiles), \ 4115 hist_format, evt_file, shower) 4116 4117 # Now arxiv the shower card used if RunMaterial is present 4118 run_dir_path = pjoin(rundir, self.run_name) 4119 if os.path.exists(pjoin(run_dir_path,'RunMaterial.tar.gz')): 4120 misc.call(['tar','-xzpf','RunMaterial.tar.gz'],cwd=run_dir_path) 4121 files.cp(pjoin(self.me_dir,'Cards','shower_card.dat'), 4122 pjoin(run_dir_path,'RunMaterial','shower_card_for_%s_%d.dat'\ 4123 %(shower, count))) 4124 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'], 4125 cwd=run_dir_path) 4126 shutil.rmtree(pjoin(run_dir_path,'RunMaterial')) 4127 4128 if self.run_card['ickkw'] >0 : 4129 if self.run_card['ickkw'] != 3 or shower != 'PYTHIA8': 4130 logger.warning("Merged cross-section not retrieved by MadGraph. 
Please check the parton-shower log to get the correct cross-section after merging") 4131 else: 4132 pythia_log = misc.BackRead(pjoin(rundir, "mcatnlo_run.log") ) 4133 4134 pythiare = re.compile("\s*Les Houches User Process\(es\)\s+9999\s*\|\s*(?P<generated>\d+)\s+(?P<tried>\d+)\s+(?P<accepted>\d+)\s*\|\s*(?P<xsec>[\d\.DeE\-+]+)\s+(?P<xerr>[\d\.DeE\-+]+)\s*\|") 4135 # | Les Houches User Process(es) 9999 | 10000 10000 7115 | 1.120e-04 0.000e+00 | 4136 4137 for line in pythia_log: 4138 info = pythiare.search(line) 4139 if not info: 4140 continue 4141 try: 4142 # Pythia cross section in mb, we want pb 4143 sigma_m = float(info.group('xsec').replace('D','E')) *1e9 4144 sigma_err = float(info.group('xerr').replace('D','E')) *1e9 4145 Nacc = int(info.group('accepted')) 4146 #Ntry = int(info.group('accepted')) 4147 except: 4148 logger.warning("Merged cross-section not retrieved by MadGraph. Please check the parton-shower log to get the correct cross-section after merging") 4149 break 4150 4151 self.results.add_detail('cross_pythia', sigma_m) 4152 self.results.add_detail('nb_event_pythia', Nacc) 4153 self.results.add_detail('error_pythia', sigma_err) 4154 self.results.add_detail('shower_dir', os.path.basename(rundir)) 4155 logger.info("\nFxFx Cross-Section:\n"+\ 4156 "======================\n"+\ 4157 " %f pb.\n" 4158 " Number of events after merging: %s\n", sigma_m, Nacc, '$MG:BOLD') 4159 break 4160 else: 4161 logger.warning("Merged cross-section not retrieved by MadGraph. Please check the parton-shower log to get the correct cross-section after merging") 4162 4163 4164 4165 4166 4167 4168 # end of the run, gzip files and print out the message/warning 4169 for f in to_gzip: 4170 misc.gzip(f) 4171 if message: 4172 logger.info(message) 4173 if warning: 4174 logger.warning(warning) 4175 4176 self.update_status('Run complete', level='shower', update_results=True)
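# Illustrative sketch (not part of the original source): the FxFx branch above
# greps the Pythia8 log for the 'Les Houches User Process(es)' line and
# converts the quoted cross section from mb to pb. The same regex applied to
# the sample line quoted in the comment above:
import re

pythiare = re.compile(r"\s*Les Houches User Process\(es\)\s+9999\s*\|\s*(?P<generated>\d+)\s+(?P<tried>\d+)\s+(?P<accepted>\d+)\s*\|\s*(?P<xsec>[\d\.DeE\-+]+)\s+(?P<xerr>[\d\.DeE\-+]+)\s*\|")

line = " | Les Houches User Process(es)   9999 |  10000 10000 7115 |  1.120e-04  0.000e+00 |"
info = pythiare.search(line)
sigma_m = float(info.group('xsec').replace('D', 'E')) * 1e9  # mb -> pb
print(info.group('accepted'), '%g pb' % sigma_m)  # -> 7115 112000 pb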
4177 4178 ############################################################################
4179 - def set_run_name(self, name, tag=None, level='parton', reload_card=False,**opts):
4180 """define the run name, the run_tag, the banner and the results.""" 4181 4182 # when are we force to change the tag new_run:previous run requiring changes 4183 upgrade_tag = {'parton': ['parton','delphes','shower','madanalysis5_hadron'], 4184 'shower': ['shower','delphes','madanalysis5_hadron'], 4185 'delphes':['delphes'], 4186 'madanalysis5_hadron':['madanalysis5_hadron'], 4187 'plot':[]} 4188 4189 if name == self.run_name: 4190 if reload_card: 4191 run_card = pjoin(self.me_dir, 'Cards','run_card.dat') 4192 self.run_card = banner_mod.RunCardNLO(run_card) 4193 4194 #check if we need to change the tag 4195 if tag: 4196 self.run_card['run_tag'] = tag 4197 self.run_tag = tag 4198 self.results.add_run(self.run_name, self.run_card) 4199 else: 4200 for tag in upgrade_tag[level]: 4201 if getattr(self.results[self.run_name][-1], tag): 4202 tag = self.get_available_tag() 4203 self.run_card['run_tag'] = tag 4204 self.run_tag = tag 4205 self.results.add_run(self.run_name, self.run_card) 4206 break 4207 return # Nothing to do anymore 4208 4209 # save/clean previous run 4210 if self.run_name: 4211 self.store_result() 4212 # store new name 4213 self.run_name = name 4214 4215 # Read run_card 4216 run_card = pjoin(self.me_dir, 'Cards','run_card.dat') 4217 self.run_card = banner_mod.RunCardNLO(run_card) 4218 4219 new_tag = False 4220 # First call for this run -> set the banner 4221 self.banner = banner_mod.recover_banner(self.results, level, self.run_name, tag) 4222 if 'mgruncard' in self.banner: 4223 self.run_card = self.banner.charge_card('run_card') 4224 if tag: 4225 self.run_card['run_tag'] = tag 4226 new_tag = True 4227 elif not self.run_name in self.results and level =='parton': 4228 pass # No results yet, so current tag is fine 4229 elif not self.run_name in self.results: 4230 #This is only for case when you want to trick the interface 4231 logger.warning('Trying to run data on unknown run.') 4232 self.results.add_run(name, self.run_card) 4233 self.results.update('add run %s' % name, 'all', makehtml=True) 4234 else: 4235 for tag in upgrade_tag[level]: 4236 4237 if getattr(self.results[self.run_name][-1], tag): 4238 # LEVEL is already define in the last tag -> need to switch tag 4239 tag = self.get_available_tag() 4240 self.run_card['run_tag'] = tag 4241 new_tag = True 4242 break 4243 if not new_tag: 4244 # We can add the results to the current run 4245 tag = self.results[self.run_name][-1]['tag'] 4246 self.run_card['run_tag'] = tag # ensure that run_tag is correct 4247 4248 4249 if name in self.results and not new_tag: 4250 self.results.def_current(self.run_name) 4251 else: 4252 self.results.add_run(self.run_name, self.run_card) 4253 4254 self.run_tag = self.run_card['run_tag'] 4255 4256 # Return the tag of the previous run having the required data for this 4257 # tag/run to working wel. 4258 if level == 'parton': 4259 return 4260 elif level == 'pythia': 4261 return self.results[self.run_name][0]['tag'] 4262 else: 4263 for i in range(-1,-len(self.results[self.run_name])-1,-1): 4264 tagRun = self.results[self.run_name][i] 4265 if tagRun.pythia: 4266 return tagRun['tag']
4267 4268
4269 - def store_result(self):
4270 """ tar the pythia results. This is done when we are quite sure that 4271 the pythia output will not be use anymore """ 4272 4273 if not self.run_name: 4274 return 4275 4276 self.results.save() 4277 4278 if not self.to_store: 4279 return 4280 4281 if 'event' in self.to_store: 4282 if os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')): 4283 if not os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz')): 4284 self.update_status('gzipping output file: events.lhe', level='parton', error=True) 4285 misc.gzip(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')) 4286 else: 4287 os.remove(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')) 4288 if os.path.exists(pjoin(self.me_dir,'Events','reweight.lhe')): 4289 os.remove(pjoin(self.me_dir,'Events', 'reweight.lhe')) 4290 4291 4292 tag = self.run_card['run_tag'] 4293 4294 self.to_store = []
4295 4296 4297 ############################################################################
4298 - def get_Gdir(self, Pdir=None):
4299 """get the list of Gdirectory if not yet saved.""" 4300 4301 if hasattr(self, "Gdirs"): 4302 if self.me_dir in self.Gdirs: 4303 if Pdir is None: 4304 return sum(self.Gdirs.values()) 4305 else: 4306 return self.Gdirs[Pdir] 4307 4308 Pdirs = self.get_Pdir() 4309 Gdirs = {self.me_dir:[]} 4310 for P in Pdirs: 4311 Gdirs[P] = [pjoin(P,G) for G in os.listdir(P) if G.startswith('G') and 4312 os.path.isdir(pjoin(P,G))] 4313 4314 self.Gdirs = Gdirs 4315 return self.getGdir(Pdir)
4316 4317
4318 - def get_init_dict(self, evt_file):
4319 """reads the info in the init block and returns them in a dictionary""" 4320 ev_file = open(evt_file) 4321 init = "" 4322 found = False 4323 while True: 4324 line = ev_file.readline() 4325 if "<init>" in line: 4326 found = True 4327 elif found and not line.startswith('#'): 4328 init += line 4329 if "</init>" in line or "<event>" in line: 4330 break 4331 ev_file.close() 4332 4333 # IDBMUP(1),IDBMUP(2),EBMUP(1),EBMUP(2), PDFGUP(1),PDFGUP(2), 4334 # PDFSUP(1),PDFSUP(2),IDWTUP,NPRUP 4335 # these are not included (so far) in the init_dict 4336 # XSECUP(1),XERRUP(1),XMAXUP(1),LPRUP(1) 4337 4338 init_dict = {} 4339 init_dict['idbmup1'] = int(init.split()[0]) 4340 init_dict['idbmup2'] = int(init.split()[1]) 4341 init_dict['ebmup1'] = float(init.split()[2]) 4342 init_dict['ebmup2'] = float(init.split()[3]) 4343 init_dict['pdfgup1'] = int(init.split()[4]) 4344 init_dict['pdfgup2'] = int(init.split()[5]) 4345 init_dict['pdfsup1'] = int(init.split()[6]) 4346 init_dict['pdfsup2'] = int(init.split()[7]) 4347 init_dict['idwtup'] = int(init.split()[8]) 4348 init_dict['nprup'] = int(init.split()[9]) 4349 4350 return init_dict
4351 4352
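# Illustrative sketch (not part of the original source): get_init_dict reads
# the ten numbers of the first <init> line (beam IDs, energies, PDF group/set
# IDs, weighting mode, number of processes). A stand-alone parse of a small
# made-up block:
sample_init = """<init>
2212 2212 6500.0 6500.0 0 0 247000 247000 -4 1
1.234e+02 5.6e-01 1.0 1
</init>"""

tokens = sample_init.split('\n')[1].split()
init_dict = {'idbmup1': int(tokens[0]), 'idbmup2': int(tokens[1]),
             'ebmup1': float(tokens[2]), 'ebmup2': float(tokens[3]),
             'pdfgup1': int(tokens[4]), 'pdfgup2': int(tokens[5]),
             'pdfsup1': int(tokens[6]), 'pdfsup2': int(tokens[7]),
             'idwtup': int(tokens[8]), 'nprup': int(tokens[9])}
print(init_dict['pdfsup1'], init_dict['idwtup'])  # -> 247000 -4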
4353 - def banner_to_mcatnlo(self, evt_file):
4354 """creates the mcatnlo input script using the values set in the header of the event_file. 4355 It also checks if the lhapdf library is used""" 4356 4357 shower = self.banner.get('run_card', 'parton_shower').upper() 4358 pdlabel = self.banner.get('run_card', 'pdlabel') 4359 itry = 0 4360 nevents = self.shower_card['nevents'] 4361 init_dict = self.get_init_dict(evt_file) 4362 4363 if nevents < 0 or \ 4364 nevents > self.banner.get_detail('run_card', 'nevents'): 4365 nevents = self.banner.get_detail('run_card', 'nevents') 4366 4367 nevents = nevents / self.shower_card['nsplit_jobs'] 4368 4369 mcmass_dict = {} 4370 for line in [l for l in self.banner['montecarlomasses'].split('\n') if l]: 4371 pdg = int(line.split()[0]) 4372 mass = float(line.split()[1]) 4373 mcmass_dict[pdg] = mass 4374 4375 content = 'EVPREFIX=%s\n' % pjoin(os.path.split(evt_file)[1]) 4376 content += 'NEVENTS=%d\n' % nevents 4377 content += 'NEVENTS_TOT=%d\n' % (self.banner.get_detail('run_card', 'nevents') /\ 4378 self.shower_card['nsplit_jobs']) 4379 content += 'MCMODE=%s\n' % shower 4380 content += 'PDLABEL=%s\n' % pdlabel 4381 4382 try: 4383 aewm1 = self.banner.get_detail('param_card', 'sminputs', 1).value 4384 raise KeyError 4385 except KeyError: 4386 mod = self.get_model() 4387 if not hasattr(mod, 'parameter_dict'): 4388 from models import model_reader 4389 mod = model_reader.ModelReader(mod) 4390 mod.set_parameters_and_couplings(self.banner.param_card) 4391 aewm1 = 0 4392 for key in ['aEWM1', 'AEWM1', 'aEWm1', 'aewm1']: 4393 if key in mod['parameter_dict']: 4394 aewm1 = mod['parameter_dict'][key] 4395 break 4396 elif 'mdl_%s' % key in mod['parameter_dict']: 4397 aewm1 = mod['parameter_dict']['mod_%s' % key] 4398 break 4399 else: 4400 for key in ['aEW', 'AEW', 'aEw', 'aew']: 4401 if key in mod['parameter_dict']: 4402 aewm1 = 1./mod['parameter_dict'][key] 4403 break 4404 elif 'mdl_%s' % key in mod['parameter_dict']: 4405 aewm1 = 1./mod['parameter_dict']['mod_%s' % key] 4406 break 4407 4408 content += 'ALPHAEW=%s\n' % aewm1 4409 #content += 'PDFSET=%s\n' % self.banner.get_detail('run_card', 'lhaid') 4410 #content += 'PDFSET=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']]) 4411 content += 'TMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 6).value 4412 content += 'TWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 6).value 4413 content += 'ZMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 23).value 4414 content += 'ZWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 23).value 4415 content += 'WMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 24).value 4416 content += 'WWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 24).value 4417 try: 4418 content += 'HGGMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 25).value 4419 content += 'HGGWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 25).value 4420 except KeyError: 4421 content += 'HGGMASS=120.\n' 4422 content += 'HGGWIDTH=0.00575308848\n' 4423 content += 'beammom1=%s\n' % self.banner.get_detail('run_card', 'ebeam1') 4424 content += 'beammom2=%s\n' % self.banner.get_detail('run_card', 'ebeam2') 4425 content += 'BEAM1=%s\n' % self.banner.get_detail('run_card', 'lpp1') 4426 content += 'BEAM2=%s\n' % self.banner.get_detail('run_card', 'lpp2') 4427 content += 'DMASS=%s\n' % mcmass_dict[1] 4428 content += 'UMASS=%s\n' % mcmass_dict[2] 4429 content += 'SMASS=%s\n' % mcmass_dict[3] 4430 content += 'CMASS=%s\n' % mcmass_dict[4] 4431 content += 'BMASS=%s\n' % mcmass_dict[5] 4432 try: 4433 
content += 'EMASS=%s\n' % mcmass_dict[11] 4434 content += 'MUMASS=%s\n' % mcmass_dict[13] 4435 content += 'TAUMASS=%s\n' % mcmass_dict[15] 4436 except KeyError: 4437 # this is for backward compatibility 4438 mcmass_lines = [l for l in \ 4439 open(pjoin(self.me_dir, 'SubProcesses', 'MCmasses_%s.inc' % shower.upper()) 4440 ).read().split('\n') if l] 4441 new_mcmass_dict = {} 4442 for l in mcmass_lines: 4443 key, val = l.split('=') 4444 new_mcmass_dict[key.strip()] = val.replace('d', 'e').strip() 4445 content += 'EMASS=%s\n' % new_mcmass_dict['mcmass(11)'] 4446 content += 'MUMASS=%s\n' % new_mcmass_dict['mcmass(13)'] 4447 content += 'TAUMASS=%s\n' % new_mcmass_dict['mcmass(15)'] 4448 4449 content += 'GMASS=%s\n' % mcmass_dict[21] 4450 content += 'EVENT_NORM=%s\n' % self.banner.get_detail('run_card', 'event_norm').lower() 4451 # check if need to link lhapdf 4452 if int(self.shower_card['pdfcode']) > 1 or \ 4453 (pdlabel=='lhapdf' and int(self.shower_card['pdfcode'])==1) or \ 4454 shower=='HERWIGPP' : 4455 # Use LHAPDF (should be correctly installed, because 4456 # either events were already generated with them, or the 4457 # user explicitly gives an LHAPDF number in the 4458 # shower_card). 4459 self.link_lhapdf(pjoin(self.me_dir, 'lib')) 4460 lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'], 4461 stdout = subprocess.PIPE).stdout.read().decode().strip() 4462 content += 'LHAPDFPATH=%s\n' % lhapdfpath 4463 pdfsetsdir = self.get_lhapdf_pdfsetsdir() 4464 if self.shower_card['pdfcode']==0: 4465 lhaid_list = '' 4466 content += '' 4467 elif self.shower_card['pdfcode']==1: 4468 lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])] 4469 content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']]) 4470 else: 4471 lhaid_list = [abs(int(self.shower_card['pdfcode']))] 4472 content += 'PDFCODE=%s\n' % self.shower_card['pdfcode'] 4473 self.copy_lhapdf_set(lhaid_list, pdfsetsdir) 4474 elif int(self.shower_card['pdfcode'])==1 or \ 4475 int(self.shower_card['pdfcode'])==-1 and True: 4476 # Try to use LHAPDF because user wants to use the same PDF 4477 # as was used for the event generation. However, for the 4478 # event generation, LHAPDF was not used, so non-trivial to 4479 # see if if LHAPDF is available with the corresponding PDF 4480 # set. If not found, give a warning and use build-in PDF 4481 # set instead. 4482 try: 4483 lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'], 4484 stdout = subprocess.PIPE).stdout.read().decode().strip() 4485 self.link_lhapdf(pjoin(self.me_dir, 'lib')) 4486 content += 'LHAPDFPATH=%s\n' % lhapdfpath 4487 pdfsetsdir = self.get_lhapdf_pdfsetsdir() 4488 lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])] 4489 content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']]) 4490 self.copy_lhapdf_set(lhaid_list, pdfsetsdir) 4491 except Exception: 4492 logger.warning('Trying to shower events using the same PDF in the shower as used in the generation'+\ 4493 ' of the events using LHAPDF. However, no valid LHAPDF installation found with the'+\ 4494 ' needed PDF set. Will use default internal PDF for the shower instead. 
To use the'+\ 4495 ' same set as was used in the event generation install LHAPDF and set the path using'+\ 4496 ' "set /path_to_lhapdf/bin/lhapdf-config" from the MadGraph5_aMC@NLO python shell') 4497 content += 'LHAPDFPATH=\n' 4498 content += 'PDFCODE=0\n' 4499 else: 4500 content += 'LHAPDFPATH=\n' 4501 content += 'PDFCODE=0\n' 4502 4503 content += 'ICKKW=%s\n' % self.banner.get_detail('run_card', 'ickkw') 4504 content += 'PTJCUT=%s\n' % self.banner.get_detail('run_card', 'ptj') 4505 # add the pythia8/hwpp path(s) 4506 if self.options['pythia8_path']: 4507 content+='PY8PATH=%s\n' % self.options['pythia8_path'] 4508 if self.options['hwpp_path']: 4509 content+='HWPPPATH=%s\n' % self.options['hwpp_path'] 4510 if self.options['thepeg_path'] and self.options['thepeg_path'] != self.options['hwpp_path']: 4511 content+='THEPEGPATH=%s\n' % self.options['thepeg_path'] 4512 if self.options['hepmc_path'] and self.options['hepmc_path'] != self.options['hwpp_path']: 4513 content+='HEPMCPATH=%s\n' % self.options['hepmc_path'] 4514 4515 output = open(pjoin(self.me_dir, 'MCatNLO', 'banner.dat'), 'w') 4516 output.write(content) 4517 output.close() 4518 return shower
4519 4520
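The script written above is a flat list of KEY=value assignments (EVPREFIX,
NEVENTS, MCMODE, PDFCODE, ...), one per line, apparently meant to be read by
the MCatNLO run scripts. As a minimal sketch, assuming only the layout
produced by banner_to_mcatnlo, such a file can be read back into a dictionary
like this (the function name is ours, not part of the module):

def read_mcatnlo_input(path):
    """Parse a KEY=value input script, e.g. MCatNLO/banner.dat, into a dict."""
    values = {}
    with open(path) as stream:
        for raw in stream:
            line = raw.strip()
            if line and '=' in line:
                key, _, value = line.partition('=')
                values[key.strip()] = value.strip()
    return values
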
    def run_reweight(self, only):
        """runs the reweight_xsec_events executables on each sub-event file generated
        to compute on-the-fly scale and/or PDF uncertainties"""
        logger.info(' Doing reweight')

        nev_unw = pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted')
        # if only doing reweight, copy back the nevents_unweighted file
        if only:
            if os.path.exists(nev_unw + '.orig'):
                files.cp(nev_unw + '.orig', nev_unw)
            else:
                raise aMCatNLOError('Cannot find event file information')

        #read the nevents_unweighted file to get the list of event files
        file = open(nev_unw)
        lines = file.read().split('\n')
        file.close()
        # make a copy of the original nevents_unweighted file
        files.cp(nev_unw, nev_unw + '.orig')
        # loop over lines (all but the last one, which is empty) and check that the
        # number of events is not 0
        evt_files = [line.split()[0] for line in lines[:-1] if line.split()[1] != '0']
        evt_wghts = [float(line.split()[3]) for line in lines[:-1] if line.split()[1] != '0']
        if self.run_card['event_norm'].lower()=='bias' and self.run_card['nevents'] != 0:
            evt_wghts[:]=[1./float(self.run_card['nevents']) for wgt in evt_wghts]
        #prepare the job_dict
        job_dict = {}
        exe = 'reweight_xsec_events.local'
        for i, evt_file in enumerate(evt_files):
            path, evt = os.path.split(evt_file)
            files.ln(pjoin(self.me_dir, 'SubProcesses', exe), \
                     pjoin(self.me_dir, 'SubProcesses', path))
            job_dict[path] = [exe]

        self.run_all(job_dict, [[evt, '1']], 'Running reweight')

        #check that the new event files are complete
        for evt_file in evt_files:
            last_line = subprocess.Popen(['tail', '-n1', '%s.rwgt' % \
                    pjoin(self.me_dir, 'SubProcesses', evt_file)], \
                    stdout = subprocess.PIPE).stdout.read().decode().strip()
            if last_line != "</LesHouchesEvents>":
                raise aMCatNLOError('An error occurred during reweight. Check the' + \
                        '\'reweight_xsec_events.output\' files inside the ' + \
                        '\'SubProcesses/P*/G*/\' directories for details')

        #update file name in nevents_unweighted
        newfile = open(nev_unw, 'w')
        for line in lines:
            if line:
                newfile.write(line.replace(line.split()[0], line.split()[0] + '.rwgt') + '\n')
        newfile.close()
        return self.pdf_scale_from_reweighting(evt_files, evt_wghts)

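run_reweight above relies on a specific layout of SubProcesses/nevents_unweighted:
one event file per line, with the file path in column 0, the number of events in
column 1 and a per-file weight in column 3. A standalone sketch of that parsing,
under the same assumption about the format (the helper name is ours):

def parse_nevents_unweighted(path):
    """Return (event_file, nevents, weight) tuples for non-empty channels."""
    entries = []
    with open(path) as stream:
        for line in stream:
            fields = line.split()
            if len(fields) < 4 or fields[1] == '0':
                continue  # skip the trailing empty line and zero-event channels
            entries.append((fields[0], int(fields[1]), float(fields[3])))
    return entries
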
    def pdf_scale_from_reweighting(self, evt_files, evt_wghts):
        """This function takes the files with the scale and pdf values
        written by the reweight_xsec_events.f code
        (P*/G*/scale_pdf_dependence.dat) and computes the overall
        scale and PDF uncertainty (the latter is computed using the
        Hessian method (if lhaid<90000) or Gaussian (if lhaid>90000))
        and returns it in percent. The expected format of the file
        is: n_scales xsec_scale_central xsec_scale1 ... n_pdf
        xsec_pdf0 xsec_pdf1 ...."""

        scales=[]
        pdfs=[]
        for i,evt_file in enumerate(evt_files):
            path, evt=os.path.split(evt_file)
            with open(pjoin(self.me_dir, 'SubProcesses', path, 'scale_pdf_dependence.dat'),'r') as f:
                data_line=f.readline()
                if "scale variations:" in data_line:
                    for j,scale in enumerate(self.run_card['dynamical_scale_choice']):
                        data_line = f.readline().split()
                        scales_this = [float(val)*evt_wghts[i] for val in f.readline().replace("D", "E").split()]
                        try:
                            scales[j] = [a + b for a, b in zip(scales[j], scales_this)]
                        except IndexError:
                            scales+=[scales_this]
                    data_line=f.readline()
                if "pdf variations:" in data_line:
                    for j,pdf in enumerate(self.run_card['lhaid']):
                        data_line = f.readline().split()
                        pdfs_this = [float(val)*evt_wghts[i] for val in f.readline().replace("D", "E").split()]
                        try:
                            pdfs[j] = [a + b for a, b in zip(pdfs[j], pdfs_this)]
                        except IndexError:
                            pdfs+=[pdfs_this]

        # get the scale uncertainty in percent
        scale_info=[]
        for j,scale in enumerate(scales):
            s_cen=scale[0]
            if s_cen != 0.0 and self.run_card['reweight_scale'][j]:
                # max and min of the full envelope
                s_max=(max(scale)/s_cen-1)*100
                s_min=(1-min(scale)/s_cen)*100
                # ren and fac scale dependence added in quadrature
                ren_var=[]
                fac_var=[]
                for i in range(len(self.run_card['rw_rscale'])):
                    ren_var.append(scale[i]-s_cen) # central fac scale
                for i in range(len(self.run_card['rw_fscale'])):
                    fac_var.append(scale[i*len(self.run_card['rw_rscale'])]-s_cen) # central ren scale
                s_max_q=((s_cen+math.sqrt(math.pow(max(ren_var),2)+math.pow(max(fac_var),2)))/s_cen-1)*100
                s_min_q=(1-(s_cen-math.sqrt(math.pow(min(ren_var),2)+math.pow(min(fac_var),2)))/s_cen)*100
                s_size=len(scale)
            else:
                s_max=0.0
                s_min=0.0
                s_max_q=0.0
                s_min_q=0.0
                s_size=len(scale)
            scale_info.append({'cen':s_cen, 'min':s_min, 'max':s_max, \
                               'min_q':s_min_q, 'max_q':s_max_q, 'size':s_size, \
                               'label':self.run_card['dynamical_scale_choice'][j], \
                               'unc':self.run_card['reweight_scale'][j]})

        # check if we can use LHAPDF to compute the PDF uncertainty
        if any(self.run_card['reweight_pdf']):
            lhapdf = misc.import_python_lhapdf(self.options['lhapdf'])
            if lhapdf:
                use_lhapdf = True
            else:
                logger.warning("Failed to access python version of LHAPDF: "\
                               "cannot compute PDF uncertainty from the "\
                               "weights in the events. The weights in the LHE " \
                               "event files will still cover all PDF set members, "\
                               "but there will be no PDF uncertainty printed in the run summary. \n "\
                               "If the python interface to LHAPDF is available on your system, try "\
                               "adding its location to the PYTHONPATH environment variable and the "\
                               "LHAPDF library location to LD_LIBRARY_PATH (linux) or DYLD_LIBRARY_PATH (mac os x).")
                use_lhapdf=False

        # turn off lhapdf printing any messages
        if any(self.run_card['reweight_pdf']) and use_lhapdf: lhapdf.setVerbosity(0)

        pdf_info=[]
        for j,pdfset in enumerate(pdfs):
            p_cen=pdfset[0]
            if p_cen != 0.0 and self.run_card['reweight_pdf'][j]:
                if use_lhapdf:
                    pdfsetname=self.run_card['lhapdfsetname'][j]
                    try:
                        p=lhapdf.getPDFSet(pdfsetname)
                        ep=p.uncertainty(pdfset,-1)
                        p_cen=ep.central
                        p_min=abs(ep.errminus/p_cen)*100
                        p_max=abs(ep.errplus/p_cen)*100
                        p_type=p.errorType
                        p_size=p.size
                        p_conf=p.errorConfLevel
                    except:
                        logger.warning("Could not access LHAPDF to compute uncertainties for %s" % pdfsetname)
                        p_min=0.0
                        p_max=0.0
                        p_type='unknown'
                        p_conf='unknown'
                        p_size=len(pdfset)
                else:
                    p_min=0.0
                    p_max=0.0
                    p_type='unknown'
                    p_conf='unknown'
                    p_size=len(pdfset)
                    pdfsetname=self.run_card['lhaid'][j]
            else:
                p_min=0.0
                p_max=0.0
                p_type='none'
                p_conf='unknown'
                p_size=len(pdfset)
                pdfsetname=self.run_card['lhaid'][j]
            pdf_info.append({'cen':p_cen, 'min':p_min, 'max':p_max, \
                             'unc':p_type, 'name':pdfsetname, 'size':p_size, \
                             'label':self.run_card['lhaid'][j], 'conf':p_conf})

        scale_pdf_info=[scale_info,pdf_info]
        return scale_pdf_info

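In isolation, the envelope part of the scale uncertainty computed above reduces
to the following, where xsecs is the list of (weighted, summed) cross sections
for one dynamical-scale choice and xsecs[0] is the central value; the numbers
in the usage line are made up:

def scale_envelope_percent(xsecs):
    """Upper/lower envelope of the scale variations, in percent of the central value."""
    central = xsecs[0]
    plus = (max(xsecs) / central - 1.) * 100.
    minus = (1. - min(xsecs) / central) * 100.
    return plus, minus

# scale_envelope_percent([1.0, 1.12, 0.91]) -> (+12%, -9%)
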
    def wait_for_complete(self, run_type):
        """this function waits for the jobs on the cluster to complete their run."""
        starttime = time.time()
        #logger.info(' Waiting for submitted jobs to complete')
        update_status = lambda i, r, f: self.update_status((i, r, f, run_type),
                starttime=starttime, level='parton', update_results=True)
        try:
            self.cluster.wait(self.me_dir, update_status)
        except:
            self.cluster.remove()
            raise

    def run_all(self, job_dict, arg_list, run_type='monitor', split_jobs = False):
        """runs the jobs in job_dict (organized as folder: [job_list]), with the
        argument sets in arg_list"""
        self.ijob = 0
        if run_type != 'shower':
            self.njobs = sum(len(jobs) for jobs in job_dict.values()) * len(arg_list)
            for args in arg_list:
                for Pdir, jobs in job_dict.items():
                    for job in jobs:
                        self.run_exe(job, args, run_type, cwd=pjoin(self.me_dir, 'SubProcesses', Pdir) )
                if self.cluster_mode == 2:
                    time.sleep(1) # security to allow all jobs to be launched
        else:
            self.njobs = len(arg_list)
            for args in arg_list:
                [(cwd, exe)] = list(job_dict.items())
                self.run_exe(exe, args, run_type, cwd)

        self.wait_for_complete(run_type)

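For a non-shower run, the job bookkeeping above is simply the product of the
number of executables over all folders and the number of argument sets. A
self-contained illustration (folder and job names are hypothetical):

job_dict = {'P0_gg_ttx': ['ajob1'], 'P1_qq_ttx': ['ajob1', 'ajob2']}
arg_list = [['0', 'F', '0'], ['0', 'B', '0']]
njobs = sum(len(jobs) for jobs in job_dict.values()) * len(arg_list)
assert njobs == 6  # every argument set is submitted for every (folder, job) pair
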
    def check_event_files(self, jobs):
        """check the integrity of the event files after splitting, and resubmit
        those which are not nicely terminated"""
        jobs_to_resubmit = []
        for job in jobs:
            last_line = ''
            try:
                last_line = subprocess.Popen(
                        ['tail', '-n1', pjoin(job['dirname'], 'events.lhe')], \
                        stdout = subprocess.PIPE).stdout.read().decode().strip()
            except IOError:
                pass
            if last_line != "</LesHouchesEvents>":
                jobs_to_resubmit.append(job)
        self.njobs = 0
        if jobs_to_resubmit:
            run_type = 'Resubmitting broken jobs'
            logger.info('Some event files are broken, corresponding jobs will be resubmitted.')
            for job in jobs_to_resubmit:
                logger.debug('Resubmitting ' + job['dirname'] + '\n')
            self.run_all_jobs(jobs_to_resubmit,2,fixed_order=False)

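check_event_files shells out to 'tail -n1' to test each file for the closing
LHE tag. The same check can be done portably in pure Python; a sketch that
only reads the last few kilobytes of the file (the function name is ours):

import os

def ends_with_tag(path, tag="</LesHouchesEvents>", tail_bytes=4096):
    """Return True if the last non-blank line of 'path' equals 'tag'."""
    with open(path, 'rb') as stream:
        stream.seek(0, os.SEEK_END)
        stream.seek(max(0, stream.tell() - tail_bytes))
        lines = [l for l in stream.read().decode(errors='replace').splitlines()
                 if l.strip()]
    return bool(lines) and lines[-1].strip() == tag
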
    def find_jobs_to_split(self, pdir, job, arg):
        """looks into the nevents_unweighted_splitted file to check how many
        split jobs are needed for this (pdir, job). arg is F, B or V"""
        # find the number of the integration channel
        splittings = []
        ajob = open(pjoin(self.me_dir, 'SubProcesses', pdir, job)).read()
        pattern = re.compile(r'for i in (\d+) ; do')
        match = re.search(pattern, ajob)
        channel = match.groups()[0]
        # then open the nevents_unweighted_splitted file and look for the
        # number of splittings to be done
        nevents_file = open(pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted_splitted')).read()
        # This skips the channels with zero events, because they are
        # not of the form GFXX_YY, but simply GFXX
        pattern = re.compile(r"%s_(\d+)/events.lhe" % \
                             pjoin(pdir, 'G%s%s' % (arg,channel)))
        matches = re.findall(pattern, nevents_file)
        for m in matches:
            splittings.append(m)
        return splittings

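The second regular expression above collects the split indices XX from entries
of the form <pdir>/G<arg><channel>_XX/events.lhe, while entries without the
_XX suffix (zero-event channels) do not match. A small demonstration with
made-up folder names:

import re

nevents_file = ("P0_gg_ttx/GF1_1/events.lhe  500  0.  1.\n"
                "P0_gg_ttx/GF1_2/events.lhe  500  0.  1.\n"
                "P0_gg_ttx/GF2/events.lhe      0  0.  1.\n")
pattern = re.compile(r"%s_(\d+)/events.lhe" % "P0_gg_ttx/GF1")
assert re.findall(pattern, nevents_file) == ['1', '2']
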
    def run_exe(self, exe, args, run_type, cwd=None):
        """this basic function launches exe locally or on the cluster, with args
        as arguments."""
        # first test that exe exists:
        execpath = None
        if cwd and os.path.exists(pjoin(cwd, exe)):
            execpath = pjoin(cwd, exe)
        elif not cwd and os.path.exists(exe):
            execpath = exe
        else:
            raise aMCatNLOError('Cannot find executable %s in %s' \
                % (exe, os.getcwd()))
        # check that the executable has exec permissions
        if self.cluster_mode == 1 and not os.access(execpath, os.X_OK):
            subprocess.call(['chmod', '+x', exe], cwd=cwd)
        # finally run it
        if self.cluster_mode == 0:
            #this is for the serial run
            misc.call(['./'+exe] + args, cwd=cwd)
            self.ijob += 1
            self.update_status((max([self.njobs - self.ijob - 1, 0]),
                                min([1, self.njobs - self.ijob]),
                                self.ijob, run_type), level='parton')

        #this is for the cluster/multicore run
        elif 'reweight' in exe:
            # a reweight run
            # Find the correct PDF input file
            input_files, output_files = [], []
            pdfinput = self.get_pdf_input_filename()
            if os.path.exists(pdfinput):
                input_files.append(pdfinput)
            input_files.append(pjoin(os.path.dirname(exe), os.path.pardir, 'reweight_xsec_events'))
            input_files.append(pjoin(cwd, os.path.pardir, 'leshouche_info.dat'))
            input_files.append(args[0])
            output_files.append('%s.rwgt' % os.path.basename(args[0]))
            output_files.append('reweight_xsec_events.output')
            output_files.append('scale_pdf_dependence.dat')

            return self.cluster.submit2(exe, args, cwd=cwd,
                             input_files=input_files, output_files=output_files,
                             required_output=output_files)

        elif 'ajob' in exe:
            # the 'standard' amcatnlo job
            # check if args is a list of strings
            if type(args[0]) == str:
                input_files, output_files, required_output, args = self.getIO_ajob(exe,cwd,args)
                #submitting
                self.cluster.submit2(exe, args, cwd=cwd,
                             input_files=input_files, output_files=output_files,
                             required_output=required_output)

#                # keep track of folders and arguments for splitted evt gen
#                subfolder=output_files[-1].split('/')[0]
#                if len(args) == 4 and '_' in subfolder:
#                    self.split_folders[pjoin(cwd,subfolder)] = [exe] + args

        elif 'shower' in exe:
            # a shower job
            # args are [shower, output(HEP or TOP), run_name]
            # cwd is the shower rundir, where the executables are found
            input_files, output_files = [], []
            shower = args[0]
            # the input files
            if shower == 'PYTHIA8':
                input_files.append(pjoin(cwd, 'Pythia8.exe'))
                input_files.append(pjoin(cwd, 'Pythia8.cmd'))
                if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
                    input_files.append(pjoin(cwd, 'config.sh'))
                    input_files.append(pjoin(self.options['pythia8_path'], 'xmldoc'))
                else:
                    input_files.append(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'))
            else:
                input_files.append(pjoin(cwd, 'MCATNLO_%s_EXE' % shower))
                input_files.append(pjoin(cwd, 'MCATNLO_%s_input' % shower))
                if shower == 'HERWIGPP':
                    if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++')):
                        input_files.append(pjoin(cwd, 'Herwig++'))
                    if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig')):
                        input_files.append(pjoin(cwd, 'Herwig'))
                    input_files.append(pjoin(cwd, 'HepMCFortran.so'))
            if len(args) == 3:
                if os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')):
                    input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz'))
                elif os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')):
                    input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe'))
                else:
                    raise aMCatNLOError('Event file not present in %s' % \
                                        pjoin(self.me_dir, 'Events', self.run_name))
            else:
                input_files.append(pjoin(cwd, 'events_%s.lhe' % args[3]))
            # the output files
            if len(args) == 3:
                output_files.append('mcatnlo_run.log')
            else:
                output_files.append('mcatnlo_run_%s.log' % args[3])
            if args[1] == 'HEP':
                if len(args) == 3:
                    fname = 'events'
                else:
                    fname = 'events_%s' % args[3]
                if shower in ['PYTHIA8', 'HERWIGPP']:
                    output_files.append(fname + '.hepmc.gz')
                else:
                    output_files.append(fname + '.hep.gz')
            elif args[1] == 'TOP' or args[1] == 'HWU':
                if len(args) == 3:
                    fname = 'histfile'
                else:
                    fname = 'histfile_%s' % args[3]
                output_files.append(fname + '.tar')
            else:
                raise aMCatNLOError('Not a valid output argument for shower job : %s' % args[1])
            #submitting
            self.cluster.submit2(exe, args, cwd=cwd,
                             input_files=input_files, output_files=output_files)

        else:
            return self.cluster.submit(exe, args, cwd=cwd)

    def getIO_ajob(self, exe, cwd, args):
        # use the local disk if possible => need to understand what the
        # input/output files are

        output_files = []
        required_output = []
        input_files = [pjoin(self.me_dir, 'SubProcesses', 'randinit'),
                       pjoin(cwd, 'symfact.dat'),
                       pjoin(cwd, 'iproc.dat'),
                       pjoin(cwd, 'initial_states_map.dat'),
                       pjoin(cwd, 'configs_and_props_info.dat'),
                       pjoin(cwd, 'leshouche_info.dat'),
                       pjoin(cwd, 'FKS_params.dat')]

        # For the GoSam interface, we must copy the SLHA card as well
        if os.path.exists(pjoin(self.me_dir,'OLP_virtuals','gosam.rc')):
            input_files.append(pjoin(self.me_dir, 'Cards', 'param_card.dat'))

        if os.path.exists(pjoin(cwd,'nevents.tar')):
            input_files.append(pjoin(cwd,'nevents.tar'))

        if os.path.exists(pjoin(self.me_dir,'SubProcesses','OLE_order.olc')):
            input_files.append(pjoin(cwd, 'OLE_order.olc'))

        # File for the loop (might not be present if MadLoop is not used)
        if os.path.exists(pjoin(cwd,'MadLoop5_resources.tar.gz')) and \
                            cluster.need_transfer(self.options):
            input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz'))
        elif os.path.exists(pjoin(cwd,'MadLoop5_resources')) and \
                            cluster.need_transfer(self.options):
            tf=tarfile.open(pjoin(cwd,'MadLoop5_resources.tar.gz'),'w:gz',
                            dereference=True)
            tf.add(pjoin(cwd,'MadLoop5_resources'),arcname='MadLoop5_resources')
            tf.close()
            input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz'))

        if args[1] == 'born' or args[1] == 'all':
            # MADEVENT MINT FO MODE
            input_files.append(pjoin(cwd, 'madevent_mintFO'))
            if args[2] == '0':
                current = '%s_G%s' % (args[1],args[0])
            else:
                current = '%s_G%s_%s' % (args[1],args[0],args[2])
            if os.path.exists(pjoin(cwd,current)):
                input_files.append(pjoin(cwd, current))
            output_files.append(current)

            required_output.append('%s/results.dat' % current)
            required_output.append('%s/res_%s.dat' % (current,args[3]))
            required_output.append('%s/log_MINT%s.txt' % (current,args[3]))
            required_output.append('%s/mint_grids' % current)
            required_output.append('%s/grid.MC_integer' % current)
            if args[3] != '0':
                required_output.append('%s/scale_pdf_dependence.dat' % current)

        elif args[1] == 'F' or args[1] == 'B':
            # MINTMC MODE
            input_files.append(pjoin(cwd, 'madevent_mintMC'))

            if args[2] == '0':
                current = 'G%s%s' % (args[1],args[0])
            else:
                current = 'G%s%s_%s' % (args[1],args[0],args[2])
            if os.path.exists(pjoin(cwd,current)):
                input_files.append(pjoin(cwd, current))
            output_files.append(current)
            if args[2] > '0':
                # this is for the split event generation
                output_files.append('G%s%s_%s' % (args[1], args[0], args[2]))
                required_output.append('G%s%s_%s/log_MINT%s.txt' % (args[1],args[0],args[2],args[3]))

            else:
                required_output.append('%s/log_MINT%s.txt' % (current,args[3]))
                if args[3] in ['0','1']:
                    required_output.append('%s/results.dat' % current)
                if args[3] == '1':
                    output_files.append('%s/results.dat' % current)

        else:
            raise aMCatNLOError('not valid arguments: %s' % (', '.join(args)))

        #Find the correct PDF input file
        pdfinput = self.get_pdf_input_filename()
        if os.path.exists(pdfinput):
            input_files.append(pdfinput)
        return input_files, output_files, required_output, args

    def compile(self, mode, options):
        """compiles aMC@NLO to compute either NLO or NLO matched to shower, as
        specified in mode"""

        os.mkdir(pjoin(self.me_dir, 'Events', self.run_name))

        self.banner.write(pjoin(self.me_dir, 'Events', self.run_name,
                          '%s_%s_banner.txt' % (self.run_name, self.run_tag)))

        self.get_characteristics(pjoin(self.me_dir,
                                        'SubProcesses', 'proc_characteristics'))

        #define a bunch of log files
        amcatnlo_log = pjoin(self.me_dir, 'compile_amcatnlo.log')
        madloop_log = pjoin(self.me_dir, 'compile_madloop.log')
        reweight_log = pjoin(self.me_dir, 'compile_reweight.log')
        test_log = pjoin(self.me_dir, 'test.log')

        # environment variables to be included in make_opts
        self.make_opts_var = {}
        if self.proc_characteristics['has_loops'] and \
                          not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')):
            self.make_opts_var['madloop'] = 'true'

        self.update_status('Compiling the code', level=None, update_results=True)

        libdir = pjoin(self.me_dir, 'lib')
        sourcedir = pjoin(self.me_dir, 'Source')

        #clean files
        files.rm([amcatnlo_log, madloop_log, reweight_log, test_log])
        #define which executable/tests to compile
        if '+' in mode:
            mode = mode.split('+')[0]
        if mode in ['NLO', 'LO']:
            exe = 'madevent_mintFO'
            tests = ['test_ME']
            self.analyse_card.write_card(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'))
        elif mode in ['aMC@NLO', 'aMC@LO','noshower','noshowerLO']:
            exe = 'madevent_mintMC'
            tests = ['test_ME', 'test_MC']
            # write an analyse_opts with a dummy analysis so that compilation goes through
            with open(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'),'w') as fsock:
                fsock.write('FO_ANALYSE=analysis_dummy.o dbook.o open_output_files_dummy.o HwU_dummy.o\n')

        #directory where to compile exe
        p_dirs = [d for d in \
                open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d]
        # create param_card.inc and run_card.inc
        self.do_treatcards('', amcatnlo=True, mode=mode)
        # if the --nocompile option is specified, check here that all exes exist.
        # If they do, return
        if all([os.path.exists(pjoin(self.me_dir, 'SubProcesses', p_dir, exe)) \
                for p_dir in p_dirs]) and options['nocompile']:
            return

        # rm links to lhapdflib/ PDFsets if they exist
        if os.path.exists(pjoin(libdir, 'PDFsets')):
            files.rm(pjoin(libdir, 'PDFsets'))

        # read the run_card to find if lhapdf is used or not
        if self.run_card['pdlabel'] == 'lhapdf' and \
                (self.banner.get_detail('run_card', 'lpp1') != 0 or \
                 self.banner.get_detail('run_card', 'lpp2') != 0):

            self.link_lhapdf(libdir, [pjoin('SubProcesses', p) for p in p_dirs])
            pdfsetsdir = self.get_lhapdf_pdfsetsdir()
            lhaid_list = self.run_card['lhaid']
            self.copy_lhapdf_set(lhaid_list, pdfsetsdir)

        else:
            if self.run_card['lpp1'] == 1 == self.run_card['lpp2']:
                logger.info('Using built-in libraries for PDFs')

            self.make_opts_var['lhapdf'] = ""

        # read the run_card to find if applgrid is used or not
        if self.run_card['iappl'] != 0:
            self.make_opts_var['applgrid'] = 'True'
            # check versions of applgrid and amcfast
            for code in ['applgrid','amcfast']:
                try:
                    p = subprocess.Popen([self.options[code], '--version'], \
                                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                except OSError:
                    raise aMCatNLOError(('No valid %s installation found. \n' + \
                       'Please set the path to %s-config by using \n' + \
                       'MG5_aMC> set <absolute-path-to-%s>/bin/%s-config \n') % (code,code,code,code))
                else:
                    output, _ = p.communicate()
                    output = output.decode()
                    if code == 'applgrid' and output < '1.4.63':
                        raise aMCatNLOError('Version of APPLgrid is too old. Use 1.4.63 or later.'\
                                             +' You are using %s' % output)
                    if code == 'amcfast' and output < '1.1.1':
                        raise aMCatNLOError('Version of aMCfast is too old. Use 1.1.1 or later.'\
                                             +' You are using %s' % output)

            # set up the Source/make_opts with the correct applgrid-config file
            appllibs="  APPLLIBS=$(shell %s --ldflags) $(shell %s --ldcflags) \n" \
                             % (self.options['amcfast'],self.options['applgrid'])
            text=open(pjoin(self.me_dir,'Source','make_opts'),'r').readlines()
            text_out=[]
            for line in text:
                if line.strip().startswith('APPLLIBS=$'):
                    line=appllibs
                text_out.append(line)
            with open(pjoin(self.me_dir,'Source','make_opts'),'w') as fsock:
                fsock.writelines(text_out)
        else:
            self.make_opts_var['applgrid'] = ""

        if 'fastjet' in list(self.options.keys()) and self.options['fastjet']:
            self.make_opts_var['fastjet_config'] = self.options['fastjet']

        # add the make_opts_var to make_opts
        self.update_make_opts()

        # make Source
        self.update_status('Compiling source...', level=None)
        misc.compile(['clean4pdf'], cwd = sourcedir)
        misc.compile(cwd = sourcedir)
        if os.path.exists(pjoin(libdir, 'libdhelas.a')) \
          and os.path.exists(pjoin(libdir, 'libgeneric.a')) \
          and os.path.exists(pjoin(libdir, 'libmodel.a')) \
          and os.path.exists(pjoin(libdir, 'libpdf.a')):
            logger.info(' ...done, continuing with P* directories')
        else:
            raise aMCatNLOError('Compilation failed')

        # make StdHEP (only necessary with MG option output_dependencies='internal')
        MCatNLO_libdir = pjoin(self.me_dir, 'MCatNLO', 'lib')
        if not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libstdhep.a'))) or \
           not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libFmcfio.a'))):
            if os.path.exists(pjoin(sourcedir,'StdHEP')):
                logger.info('Compiling StdHEP (can take a couple of minutes) ...')
                try:
                    misc.compile(['StdHEP'], cwd = sourcedir)
                except Exception as error:
                    logger.debug(str(error))
                    logger.warning("StdHEP failed to compile. This prevents running NLO+PS with PY6 and Herwig6")
                    logger.info("details on the compilation error are available if the code is run with the --debug flag")
                else:
                    logger.info(' ...done.')
            else:
                logger.warning('Could not compile StdHEP because its'+\
                               ' source directory could not be found in the SOURCE folder.\n'+\
                               " Check the MG5_aMC option 'output_dependencies'.\n"+\
                               " This will prevent the use of the HERWIG6/Pythia6 showers.")


        # make CutTools (only necessary with MG option output_dependencies='internal')
        if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \
           not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))):
            if os.path.exists(pjoin(sourcedir,'CutTools')):
                logger.info('Compiling CutTools (can take a couple of minutes) ...')
                misc.compile(['CutTools','-j1'], cwd = sourcedir, nb_core=1)
                logger.info(' ...done.')
            else:
                raise aMCatNLOError('Could not compile CutTools because its'+\
                                    ' source directory could not be found in the SOURCE folder.\n'+\
                                    " Check the MG5_aMC option 'output_dependencies'.")
        if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \
           not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))):
            raise aMCatNLOError('CutTools compilation failed.')

        # Verify compatibility between the current compiler and the one which was
        # used when last compiling CutTools (if specified).
        compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin(
                                  libdir, 'libcts.a')))),'compiler_version.log')
        if os.path.exists(compiler_log_path):
            compiler_version_used = open(compiler_log_path,'r').read()
            if not str(misc.get_gfortran_version(misc.detect_current_compiler(\
                       pjoin(sourcedir,'make_opts')))) in compiler_version_used:
                if os.path.exists(pjoin(sourcedir,'CutTools')):
                    logger.info('CutTools was compiled with a different fortran'+\
                                ' compiler. Re-compiling it now...')
                    misc.compile(['cleanCT'], cwd = sourcedir)
                    misc.compile(['CutTools','-j1'], cwd = sourcedir, nb_core=1)
                    logger.info(' ...done.')
                else:
                    raise aMCatNLOError("CutTools installation in %s"\
                                        %os.path.realpath(pjoin(libdir, 'libcts.a'))+\
                                        " seems to have been compiled with a different compiler than"+\
                                        " the one specified in MG5_aMC. Please recompile CutTools.")

        # make IREGI (only necessary with MG option output_dependencies='internal')
        if not os.path.exists(os.path.realpath(pjoin(libdir, 'libiregi.a'))) \
           and os.path.exists(pjoin(sourcedir,'IREGI')):
            logger.info('Compiling IREGI (can take a couple of minutes) ...')
            misc.compile(['IREGI'], cwd = sourcedir)
            logger.info(' ...done.')

        if os.path.exists(pjoin(libdir, 'libiregi.a')):
            # Verify compatibility between the current compiler and the one which was
            # used when last compiling IREGI (if specified).
            compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin(
                                      libdir, 'libiregi.a')))),'compiler_version.log')
            if os.path.exists(compiler_log_path):
                compiler_version_used = open(compiler_log_path,'r').read()
                if not str(misc.get_gfortran_version(misc.detect_current_compiler(\
                           pjoin(sourcedir,'make_opts')))) in compiler_version_used:
                    if os.path.exists(pjoin(sourcedir,'IREGI')):
                        logger.info('IREGI was compiled with a different fortran'+\
                                    ' compiler. Re-compiling it now...')
                        misc.compile(['cleanIR'], cwd = sourcedir)
                        misc.compile(['IREGI'], cwd = sourcedir)
                        logger.info(' ...done.')
                    else:
                        raise aMCatNLOError("IREGI installation in %s"\
                                            %os.path.realpath(pjoin(libdir, 'libiregi.a'))+\
                                            " seems to have been compiled with a different compiler than"+\
                                            " the one specified in MG5_aMC. Please recompile IREGI.")

        # check if MadLoop virtuals have been generated
        if self.proc_characteristics['has_loops'] and \
                          not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')):
            if mode in ['NLO', 'aMC@NLO', 'noshower']:
                tests.append('check_poles')

        # make and run tests (if asked for), gensym and make madevent in each dir
        self.update_status('Compiling directories...', level=None)

        for test in tests:
            self.write_test_input(test)

        try:
            import multiprocessing
            if not self.nb_core:
                try:
                    self.nb_core = int(self.options['nb_core'])
                except TypeError:
                    self.nb_core = multiprocessing.cpu_count()
        except ImportError:
            self.nb_core = 1

        compile_options = copy.copy(self.options)
        compile_options['nb_core'] = self.nb_core
        compile_cluster = cluster.MultiCore(**compile_options)
        logger.info('Compiling on %d cores' % self.nb_core)

        update_status = lambda i, r, f: self.donothing(i,r,f)
        for p_dir in p_dirs:
            compile_cluster.submit(prog = compile_dir,
                                   argument = [self.me_dir, p_dir, mode, options,
                                               tests, exe, self.options['run_mode']])
        try:
            compile_cluster.wait(self.me_dir, update_status)
        except Exception as error:
            logger.warning("Failed to compile the SubProcesses")
            if __debug__:
                raise
            compile_cluster.remove()
            self.do_quit('')

        logger.info('Checking test output:')
        for p_dir in p_dirs:
            logger.info(p_dir)
            for test in tests:
                logger.info(' Result for %s:' % test)

                this_dir = pjoin(self.me_dir, 'SubProcesses', p_dir)
                #check that none of the tests failed
                self.check_tests(test, this_dir)

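One caveat in the version checks above: the applgrid/amcfast versions are
compared as plain strings, which mis-orders multi-digit components (as a
string, '1.10.0' sorts before '1.4.63'). A sketch of a numeric comparison
that avoids this, should it ever matter here:

def version_tuple(text):
    """'1.4.63' -> (1, 4, 63); non-numeric components are dropped."""
    return tuple(int(part) for part in text.strip().split('.') if part.isdigit())

assert '1.10.0' < '1.4.63'                                # string order: wrong
assert version_tuple('1.10.0') > version_tuple('1.4.63')  # numeric order: right
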
    def donothing(*args):
        pass

    def check_tests(self, test, dir):
        """just call the correct parser for the test log.
        Skip check_poles for LOonly folders"""
        if test in ['test_ME', 'test_MC']:
            return self.parse_test_mx_log(pjoin(dir, '%s.log' % test))
        elif test == 'check_poles' and not os.path.exists(pjoin(dir,'parton_lum_0.f')):
            return self.parse_check_poles_log(pjoin(dir, '%s.log' % test))

    def parse_test_mx_log(self, log):
        """reads and parses the test_ME/MC.log file"""
        content = open(log).read()
        if 'FAILED' in content:
            logger.info('Output of the failing test:\n'+content[:-1],'$MG:BOLD')
            raise aMCatNLOError('Some tests failed, run cannot continue.\n' + \
                'Please check that the widths of final state particles (e.g. top) have been' + \
                ' set to 0 in the param_card.dat.')
        else:
            lines = [l for l in content.split('\n') if 'PASSED' in l]
            logger.info(' Passed.')
            logger.debug('\n'+'\n'.join(lines))

    def parse_check_poles_log(self, log):
        """reads and parses the check_poles.log file"""
        content = open(log).read()
        npass = 0
        nfail = 0
        for line in content.split('\n'):
            if 'PASSED' in line:
                npass += 1
                tolerance = float(line.split()[1])
            if 'FAILED' in line:
                nfail += 1
                tolerance = float(line.split()[1])

        if nfail + npass == 0:
            logger.warning('0 points have been tried')
            return

        if float(nfail)/float(nfail+npass) > 0.1:
            raise aMCatNLOError('Poles do not cancel, run cannot continue')
        else:
            logger.info(' Poles successfully cancel for %d points over %d (tolerance=%2.1e)' \
                 %(npass, nfail+npass, tolerance))

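The acceptance criterion used above, in isolation: a run is aborted as soon as
more than 10% of the tested phase-space points fail the pole-cancellation
check (the helper name is ours):

def poles_ok(npass, nfail):
    total = npass + nfail
    return total > 0 and float(nfail) / total <= 0.1

assert poles_ok(19, 1)      # 5% failures: poles considered to cancel
assert not poles_ok(8, 2)   # 20% failures: run cannot continue
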
    def write_test_input(self, test):
        """write the input files to run test_ME/MC or check_poles"""
        if test in ['test_ME', 'test_MC']:
            content = "-2 -2\n" #generate randomly energy/angle
            content+= "100 100\n" #run 100 points for soft and collinear tests
            content+= "0\n" #all FKS configs
            content+= '\n'.join(["-1"] * 50) #random diagram (=first diagram)
        elif test == 'check_poles':
            content = '20 \n -1\n'

        file = open(pjoin(self.me_dir, '%s_input.txt' % test), 'w')
        if test == 'test_MC':
            shower = self.run_card['parton_shower']
            header = "1 \n %s\n 1 -0.1\n-1 -0.1\n" % shower
            file.write(header + content)
        elif test == 'test_ME':
            header = "2 \n"
            file.write(header + content)
        else:
            file.write(content)
        file.close()


    action_switcher = AskRunNLO
    ############################################################################
    def ask_run_configuration(self, mode, options, switch={}):
        """Ask the question when launching generate_events/multi_run"""

        if 'parton' not in options:
            options['parton'] = False
        if 'reweightonly' not in options:
            options['reweightonly'] = False

        if mode == 'auto':
            mode = None
        if not mode and (options['parton'] or options['reweightonly']):
            mode = 'noshower'

        passing_cmd = []
        for key,value in switch.items():
            passing_cmd.append('%s=%s' % (key,value))

        if 'do_reweight' in options and options['do_reweight']:
            passing_cmd.append('reweight=ON')
        if 'do_madspin' in options and options['do_madspin']:
            passing_cmd.append('madspin=ON')

        force = self.force
        if mode == 'onlyshower':
            passing_cmd.append('onlyshower')
            force = True
        elif mode:
            passing_cmd.append(mode)

        switch, cmd_switch = self.ask('', '0', [], ask_class = self.action_switcher,
                                      mode=mode, force=(force or mode),
                                      first_cmd=passing_cmd,
                                      return_instance=True)

        if 'mode' in switch:
            mode = switch['mode']

        #assign the mode depending on the switch
        if not mode or mode == 'auto':
            if switch['order'] == 'LO':
                if switch['runshower']:
                    mode = 'aMC@LO'
                elif switch['fixed_order'] == 'ON':
                    mode = 'LO'
                else:
                    mode = 'noshowerLO'
            elif switch['order'] == 'NLO':
                if switch['runshower']:
                    mode = 'aMC@NLO'
                elif switch['fixed_order'] == 'ON':
                    mode = 'NLO'
                else:
                    mode = 'noshower'
        logger.info('will run in mode: %s' % mode)

        if mode == 'noshower':
            if switch['shower'] == 'OFF':
                logger.warning("""You have chosen not to run a parton shower.
    NLO events without showering are NOT physical.
    Please, shower the LesHouches events before using them for physics analyses.
    You have to choose NOW which parton shower you WILL use and specify it in the run_card.""")
            else:
                logger.info("""Your parton-shower choice is not available for running.
    The events will be generated for the associated parton shower.
    Remember that NLO events without showering are NOT physical.""", '$MG:BOLD')


        # specify the cards which are needed for this run.
        cards = ['param_card.dat', 'run_card.dat']
        ignore = []
        if mode in ['LO', 'NLO']:
            options['parton'] = True
            ignore = ['shower_card.dat', 'madspin_card.dat']
            cards.append('FO_analyse_card.dat')
        else:
            if switch['madspin'] != 'OFF':
                cards.append('madspin_card.dat')
            if switch['reweight'] != 'OFF':
                cards.append('reweight_card.dat')
            if switch['madanalysis'] in ['HADRON', 'ON']:
                cards.append('madanalysis5_hadron_card.dat')
        if 'aMC@' in mode:
            cards.append('shower_card.dat')
        if mode == 'onlyshower':
            cards = ['shower_card.dat']
        if options['reweightonly']:
            cards = ['run_card.dat']

        self.keep_cards(cards, ignore)

        if mode =='onlyshower':
            cards = ['shower_card.dat']


        # automatically switch to the keep_wgt option
        first_cmd = cmd_switch.get_cardcmd()

        if not options['force'] and not self.force:
            self.ask_edit_cards(cards, plot=False, first_cmd=first_cmd)

        self.banner = banner_mod.Banner()

        # store the cards in the banner
        for card in cards:
            self.banner.add(pjoin(self.me_dir, 'Cards', card))
        # and the run settings
        run_settings = '\n'.join(['%s = %s' % (k, v) for (k, v) in switch.items()])
        self.banner.add_text('run_settings', run_settings)

        if not mode =='onlyshower':
            self.run_card = self.banner.charge_card('run_card')
            self.run_tag = self.run_card['run_tag']
            #this is if the user did not provide a name for the current run
            if not hasattr(self, 'run_name') or not self.run_name:
                self.run_name = self.find_available_run_name(self.me_dir)
                #add a tag in the run_name to distinguish the run type
                if self.run_name.startswith('run_'):
                    if mode in ['LO','aMC@LO','noshowerLO']:
                        self.run_name += '_LO'
            self.set_run_name(self.run_name, self.run_tag, 'parton')
            if self.run_card['ickkw'] == 3 and mode in ['LO', 'aMC@LO', 'noshowerLO']:
                raise self.InvalidCmd("""FxFx merging (ickkw=3) not allowed at LO""")
            elif self.run_card['ickkw'] == 3 and mode in ['aMC@NLO', 'noshower'] and self.run_card['parton_shower'].upper() != 'PYTHIA8':
                logger.warning("""You are running with FxFx merging enabled. To be able to merge
    samples of various multiplicities without double counting, you
    have to remove some events after showering 'by hand'. Please
    read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")
                if self.run_card['parton_shower'].upper() == 'PYTHIA6Q':
                    raise self.InvalidCmd("""FxFx merging does not work with Q-squared ordered showers.""")
                elif self.run_card['parton_shower'].upper() != 'HERWIG6' and self.run_card['parton_shower'].upper() != 'PYTHIA8' and self.run_card['parton_shower'].upper() != 'HERWIGPP':
                    question="FxFx merging not tested for %s shower. Do you want to continue?\n" % self.run_card['parton_shower'] + \
                        "Type \'n\' to stop or \'y\' to continue"
                    answers = ['n','y']
                    answer = self.ask(question, 'n', answers)
                    if answer == 'n':
                        error = '''Stop operation'''
                        self.ask_run_configuration(mode, options)
                        # raise aMCatNLOError(error)
            elif self.run_card['ickkw'] == -1 and mode in ['aMC@NLO', 'noshower']:
                # NNLL+NLO jet-veto only possible for LO event generation or fNLO runs.
                raise self.InvalidCmd("""NNLL+NLO jet veto runs (ickkw=-1) only possible for fNLO or LO.""")
        if 'aMC@' in mode or mode == 'onlyshower':
            self.shower_card = self.banner.charge_card('shower_card')

        elif mode in ['LO', 'NLO']:
            analyse_card_path = pjoin(self.me_dir, 'Cards','FO_analyse_card.dat')
            self.analyse_card = self.banner.charge_card('FO_analyse_card')

        return mode

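The switch-to-mode mapping at the top of ask_run_configuration can be
summarized by this free-standing sketch (same branching, detached from the
interactive switch dictionary):

def resolve_mode(order, runshower, fixed_order):
    """order: 'LO' or 'NLO'; runshower: bool; fixed_order: 'ON' or 'OFF'."""
    if runshower:
        return 'aMC@LO' if order == 'LO' else 'aMC@NLO'
    if fixed_order == 'ON':
        return order
    return 'noshowerLO' if order == 'LO' else 'noshower'

assert resolve_mode('NLO', False, 'ON') == 'NLO'
assert resolve_mode('LO', True, 'OFF') == 'aMC@LO'
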

#===============================================================================
# aMCatNLOCmd
#===============================================================================
class aMCatNLOCmdShell(aMCatNLOCmd, cmd.CmdShell):
    """The command line processor of MadGraph"""

_compile_usage = "compile [MODE] [options]\n" + \
                "-- compiles aMC@NLO \n" + \
                "   MODE can be either FO, for fixed-order computations, \n" + \
                "   or MC for matching with parton-shower monte-carlos. \n" + \
                "   (if omitted, it is set to MC)\n"
_compile_parser = misc.OptionParser(usage=_compile_usage)
_compile_parser.add_option("-f", "--force", default=False, action='store_true',
                           help="Use the cards present in the directory for the launch, without editing them")

_launch_usage = "launch [MODE] [options]\n" + \
                "-- execute aMC@NLO \n" + \
                "   MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
                "   If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
                "   computation of the total cross section and the filling of parton-level histograms \n" + \
                "   specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
                "   If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
                "   event file is generated which will be showered with the MonteCarlo specified \n" + \
                "   in the run_card.dat\n"

_launch_parser = misc.OptionParser(usage=_launch_usage)
_launch_parser.add_option("-f", "--force", default=False, action='store_true',
                          help="Use the cards present in the directory for the launch, without editing them")
_launch_parser.add_option("-c", "--cluster", default=False, action='store_true',
                          help="Submit the jobs on the cluster")
_launch_parser.add_option("-m", "--multicore", default=False, action='store_true',
                          help="Submit the jobs on multicore mode")
_launch_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                          help="Skip compilation. Ignored if no executable is found")
_launch_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
                          help="Skip integration and event generation, just run reweight on the" + \
                               " latest generated event files (see list in SubProcesses/nevents_unweighted)")
_launch_parser.add_option("-p", "--parton", default=False, action='store_true',
                          help="Stop the run after the parton level file generation (you need " + \
                               "to shower the file in order to get physical results)")
_launch_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                          help="Skip grid set up, just generate events starting from " + \
                               "the last available results")
_launch_parser.add_option("-n", "--name", default=False, dest='run_name',
                          help="Provide a name to the run")
_launch_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
                          help="For use with APPLgrid only: start from existing grids")
_launch_parser.add_option("-R", "--reweight", default=False, dest='do_reweight', action='store_true',
                          help="Run the reweight module (reweighting by different model parameters)")
_launch_parser.add_option("-M", "--madspin", default=False, dest='do_madspin', action='store_true',
                          help="Run the madspin package")



_generate_events_usage = "generate_events [MODE] [options]\n" + \
                "-- execute aMC@NLO \n" + \
                "   MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
                "   If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
                "   computation of the total cross section and the filling of parton-level histograms \n" + \
                "   specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
                "   If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
                "   event file is generated which will be showered with the MonteCarlo specified \n" + \
                "   in the run_card.dat\n"

_generate_events_parser = misc.OptionParser(usage=_generate_events_usage)
_generate_events_parser.add_option("-f", "--force", default=False, action='store_true',
                          help="Use the cards present in the directory for generate_events, without editing them")
_generate_events_parser.add_option("-c", "--cluster", default=False, action='store_true',
                          help="Submit the jobs on the cluster")
_generate_events_parser.add_option("-m", "--multicore", default=False, action='store_true',
                          help="Submit the jobs on multicore mode")
_generate_events_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                          help="Skip compilation. Ignored if no executable is found")
_generate_events_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
                          help="Skip integration and event generation, just run reweight on the" + \
                               " latest generated event files (see list in SubProcesses/nevents_unweighted)")
_generate_events_parser.add_option("-p", "--parton", default=False, action='store_true',
                          help="Stop the run after the parton level file generation (you need " + \
                               "to shower the file in order to get physical results)")
_generate_events_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                          help="Skip grid set up, just generate events starting from " + \
                               "the last available results")
_generate_events_parser.add_option("-n", "--name", default=False, dest='run_name',
                          help="Provide a name to the run")



_calculate_xsect_usage = "calculate_xsect [ORDER] [options]\n" + \
                "-- calculate cross section up to ORDER.\n" + \
                "   ORDER can be either LO or NLO (if omitted, it is set to NLO). \n"

_calculate_xsect_parser = misc.OptionParser(usage=_calculate_xsect_usage)
_calculate_xsect_parser.add_option("-f", "--force", default=False, action='store_true',
                          help="Use the cards present in the directory for the launch, without editing them")
_calculate_xsect_parser.add_option("-c", "--cluster", default=False, action='store_true',
                          help="Submit the jobs on the cluster")
_calculate_xsect_parser.add_option("-m", "--multicore", default=False, action='store_true',
                          help="Submit the jobs on multicore mode")
_calculate_xsect_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                          help="Skip compilation. Ignored if no executable is found")
_calculate_xsect_parser.add_option("-n", "--name", default=False, dest='run_name',
                          help="Provide a name to the run")
_calculate_xsect_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
                          help="For use with APPLgrid only: start from existing grids")
_calculate_xsect_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                          help="Skip grid set up, just generate events starting from " + \
                               "the last available results")

_shower_usage = 'shower run_name [options]\n' + \
                '-- do shower/hadronization on parton-level file generated for run run_name\n' + \
                '   all the information (e.g. number of events, MonteCarlo, ...)\n' + \
                '   is directly read from the header of the event file\n'
_shower_parser = misc.OptionParser(usage=_shower_usage)
_shower_parser.add_option("-f", "--force", default=False, action='store_true',
                          help="Use the shower_card present in the directory for the launch, without editing")

if '__main__' == __name__:
    # Launch the interface without checking whether another instance is already running.
    # This can ONLY run a single command !!
    import sys
    if sys.version_info[1] < 7:
        sys.exit('MadGraph/MadEvent 5 works only with python 2.7 or python 3.7 and later.\n'+\
                 'Please upgrade your version of python or specify a compatible version')

    import os
    import optparse
    # Get the directory of the script real path (bin)
    # and add it to the current PYTHONPATH
    root_path = os.path.dirname(os.path.dirname(os.path.realpath( __file__ )))
    sys.path.insert(0, root_path)
    class MyOptParser(optparse.OptionParser):
        class InvalidOption(Exception): pass
        def error(self, msg=''):
            raise MyOptParser.InvalidOption(msg)

    # Write out nice usage message if called with -h or --help
    usage = "usage: %prog [options] [FILE] "
    parser = MyOptParser(usage=usage)
    parser.add_option("-l", "--logging", default='INFO',
                      help="logging level (DEBUG|INFO|WARNING|ERROR|CRITICAL) [%default]")
    parser.add_option("","--web", action="store_true", default=False, dest='web', \
                      help='force the code to be in secure mode')
    parser.add_option("","--debug", action="store_true", default=False, dest='debug', \
                      help='force to launch in debug mode')
    parser_error = ''
    done = False

    for i in range(len(sys.argv)-1):
        try:
            (options, args) = parser.parse_args(sys.argv[1:len(sys.argv)-i])
            done = True
        except MyOptParser.InvalidOption as error:
            pass
        else:
            args += sys.argv[len(sys.argv)-i:]
        if done:
            break

    if not done:
        # raise correct error:
        try:
            (options, args) = parser.parse_args()
        except MyOptParser.InvalidOption as error:
            print(error)
            sys.exit(2)

    if len(args) == 0:
        args = ''

    import subprocess
    import logging
    import logging.config
    # Set logging level according to the logging level given by options
    #logging.basicConfig(level=vars(logging)[options.logging])
    import internal.coloring_logging
    try:
        if __debug__ and options.logging == 'INFO':
            options.logging = 'DEBUG'
        if options.logging.isdigit():
            level = int(options.logging)
        else:
            level = eval('logging.' + options.logging)
        print(os.path.join(root_path, 'internal', 'me5_logging.conf'))
        logging.config.fileConfig(os.path.join(root_path, 'internal', 'me5_logging.conf'))
        logging.root.setLevel(level)
        logging.getLogger('madgraph').setLevel(level)
    except:
        pass

    # Call the cmd interface main loop
    try:
        if args:
            # a single command is provided
            if '--web' in args:
                i = args.index('--web')
                args.pop(i)
                cmd_line = aMCatNLOCmd(me_dir=os.path.dirname(root_path), force_run=True)
            else:
                cmd_line = aMCatNLOCmdShell(me_dir=os.path.dirname(root_path), force_run=True)

            if not hasattr(cmd_line, 'do_%s' % args[0]):
                if parser_error:
                    print(parser_error)
                    print('and %s cannot be interpreted as a valid command.' % args[0])
                else:
                    print('ERROR: %s not a valid command. Please retry' % args[0])
            else:
                cmd_line.use_rawinput = False
                cmd_line.run_cmd(' '.join(args))
                cmd_line.run_cmd('quit')

    except KeyboardInterrupt:
        print('quit on KeyboardInterrupt')
        pass