Package madgraph :: Package iolibs :: Module export_fks
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.iolibs.export_fks

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Methods and classes to export matrix elements to fks format.""" 
  16   
  17  from __future__ import absolute_import 
  18  from __future__ import print_function 
  19  from __future__ import division 
  20  import glob 
  21  import logging 
  22  import os 
  23  import re 
  24  import shutil 
  25  import subprocess 
  26  import string 
  27  import copy 
  28  import platform 
  29   
  30  import madgraph.core.color_algebra as color 
  31  import madgraph.core.helas_objects as helas_objects 
  32  import madgraph.core.base_objects as base_objects 
  33  import madgraph.fks.fks_helas_objects as fks_helas_objects 
  34  import madgraph.fks.fks_base as fks 
  35  import madgraph.fks.fks_common as fks_common 
  36  import madgraph.iolibs.drawing_eps as draw 
  37  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  38  import madgraph.iolibs.files as files 
  39  import madgraph.various.misc as misc 
  40  import madgraph.iolibs.file_writers as writers 
  41  import madgraph.iolibs.template_files as template_files 
  42  import madgraph.iolibs.ufo_expression_parsers as parsers 
  43  import madgraph.iolibs.export_v4 as export_v4 
  44  import madgraph.loop.loop_exporters as loop_exporters 
  45  import madgraph.various.q_polynomial as q_polynomial 
  46  import madgraph.various.banner as banner_mod 
  47   
  48  import aloha.create_aloha as create_aloha 
  49   
  50  import models.write_param_card as write_param_card 
  51  import models.check_param_card as check_param_card 
  52  from madgraph import MadGraph5Error, MG5DIR, InvalidCmd 
  53  from madgraph.iolibs.files import cp, ln, mv 
  54  from six.moves import range 
  55   
  56  pjoin = os.path.join 
  57   
  58  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  59  logger = logging.getLogger('madgraph.export_fks') 
  60   
  61   
def make_jpeg_async(args):
    """Generate jpeg diagrams for a single P* subprocess directory.

    Designed to be mapped over directories (e.g. from a multiprocessing
    pool), hence the single packed tuple:
        args[0] (Pdir)     -- name of the P* directory to process
        args[1] (old_pos)  -- directory from which dir_path is reachable
        args[2] (dir_path) -- path of the process output directory

    Runs bin/internal/gen_jpeg-pl inside Pdir with its stdout discarded.
    """
    Pdir, old_pos, dir_path = args

    devnull = os.open(os.devnull, os.O_RDWR)
    os.chdir(Pdir)
    try:
        subprocess.call(
            [os.path.join(old_pos, dir_path, 'bin', 'internal', 'gen_jpeg-pl')],
            stdout=devnull)
    finally:
        # The original leaked the devnull file descriptor and left the
        # process inside Pdir if the call raised; always restore both.
        os.close(devnull)
        os.chdir(os.path.pardir)
73 74 75 #================================================================================= 76 # Class for used of the (non-optimized) Loop process 77 #=================================================================================
78 -class ProcessExporterFortranFKS(loop_exporters.LoopProcessExporterFortranSA):
79 """Class to take care of exporting a set of matrix elements to 80 Fortran (v4) format.""" 81 82 #=============================================================================== 83 # copy the Template in a new directory. 84 #===============================================================================
    def copy_fkstemplate(self):
        """Create the output directory as a copy of the MadEvent NLO
        Template and prepare it for an FKS run: clean it, link CutTools,
        write the TIR makefiles, copy the MadLoop cards and helper python
        files, and reset the MC-over-helicities setting.

        For now it is close to copy_v4template, specialized for NLO.
        """

        mgme_dir = self.mgme_dir
        dir_path = self.dir_path
        clean = self.opt['clean']

        # First copy the full template tree if dir_path doesn't exist.
        if not os.path.isdir(dir_path):
            if not mgme_dir:
                raise MadGraph5Error("No valid MG_ME path given for MG4 run directory creation.")
            logger.info('initialize a new directory: %s' % \
                        os.path.basename(dir_path))
            shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True)
            # misc.copytree since dir_path already exists
            misc.copytree(pjoin(self.mgme_dir, 'Template', 'Common'), dir_path)
            # Keep a pristine copy of the plot card for later resets.
            for card in ['plot_card']:
                if os.path.isfile(pjoin(self.dir_path, 'Cards', card + '.dat')):
                    try:
                        shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'),
                                    pjoin(self.dir_path, 'Cards', card + '_default.dat'))
                    except IOError:
                        logger.warning("Failed to move " + card + ".dat to default")
        elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')):
            # Directory exists but carries no version stamp: stamp it now.
            if not mgme_dir:
                raise MadGraph5Error("No valid MG_ME path given for MG4 run directory creation.")
            try:
                shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path)
            except IOError:
                # Fall back to the version reported by the package info.
                MG5_version = misc.get_pkg_info()
                open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \
                    "5." + MG5_version['version'])

        # Ensure that the Template is clean.
        if clean:
            logger.info('remove old information in %s' % os.path.basename(dir_path))
            if 'MADGRAPH_BASE' in os.environ:
                # Running from a web installation: use the web cleaning mode.
                subprocess.call([os.path.join('bin', 'internal', 'clean_template'),
                                 '--web'], cwd=dir_path)
            else:
                try:
                    subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \
                                    cwd=dir_path)
                except Exception as why:
                    raise MadGraph5Error('Failed to clean correctly %s: \n %s' \
                                         % (os.path.basename(dir_path), why))
            # Write version info
            MG_version = misc.get_pkg_info()
            open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write(
                MG_version['version'])

        # We must link the CutTools to the Library folder of the active Template
        self.link_CutTools(dir_path)

        # Regenerate the TIR makefiles from their .inc templates (the .inc
        # versions are removed so only the generated files remain).
        link_tir_libs = []
        tir_libs = []
        os.remove(os.path.join(self.dir_path, 'SubProcesses', 'makefile_loop.inc'))
        dirpath = os.path.join(self.dir_path, 'SubProcesses')
        filename = pjoin(self.dir_path, 'SubProcesses', 'makefile_loop')
        calls = self.write_makefile_TIR(writers.MakefileWriter(filename),
                                        link_tir_libs, tir_libs)
        os.remove(os.path.join(self.dir_path, 'Source', 'make_opts.inc'))
        filename = pjoin(self.dir_path, 'Source', 'make_opts')
        calls = self.write_make_opts(writers.MakefileWriter(filename),
                                     link_tir_libs, tir_libs)

        # Duplicate FO_analyse_card and shower_card as pristine defaults.
        for card in ['FO_analyse_card', 'shower_card']:
            try:
                shutil.copy(pjoin(self.dir_path, 'Cards',
                                  card + '.dat'),
                            pjoin(self.dir_path, 'Cards',
                                  card + '_default.dat'))
            except IOError:
                logger.warning("Failed to copy " + card + ".dat to default")

        cwd = os.getcwd()
        dirpath = os.path.join(self.dir_path, 'SubProcesses')
        try:
            os.chdir(dirpath)
        except os.error:
            logger.error('Could not cd to directory %s' % dirpath)
            return 0

        # We add here the user-friendly MadLoop option setter.
        cpfiles = ["SubProcesses/MadLoopParamReader.f",
                   "Cards/MadLoopParams.dat",
                   "SubProcesses/MadLoopParams.inc"]

        for file in cpfiles:
            shutil.copy(os.path.join(self.loop_dir, 'StandAlone/', file),
                        os.path.join(self.dir_path, file))

        shutil.copy(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat'),
                    pjoin(self.dir_path, 'Cards', 'MadLoopParams_default.dat'))

        if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')):
            self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path,
                                                              'Cards', 'MadLoopParams.dat'))
            # write the output file
            self.MadLoopparam.write(pjoin(self.dir_path, "SubProcesses",
                                          "MadLoopParams.dat"))

        # We need minimal editing of MadLoopCommons.f
        MadLoopCommon = open(os.path.join(self.loop_dir, 'StandAlone',
                                          "SubProcesses", "MadLoopCommons.inc")).read()
        writer = writers.FortranWriter(os.path.join(self.dir_path,
                                                    "SubProcesses", "MadLoopCommons.f"))
        # COLLIER is never available in the non-optimized exporter.
        writer.writelines(MadLoopCommon % {
            'print_banner_commands': self.MadLoop_banner},
            context={'collier_available': False})
        writer.close()

        # Write the cts_mpc.h and cts_mprec.h files imported from CutTools
        self.write_mp_files(writers.FortranWriter('cts_mprec.h'), \
                            writers.FortranWriter('cts_mpc.h'))

        # Finally make sure to turn off MC over Hel for the default mode:
        # reset #NHelForMCoverHels to -1 inside FKS_params.dat.
        FKS_card_path = pjoin(self.dir_path, 'Cards', 'FKS_params.dat')
        FKS_card_file = open(FKS_card_path, 'r')
        FKS_card = FKS_card_file.read()
        FKS_card_file.close()
        FKS_card = re.sub(r"#NHelForMCoverHels\n-?\d+",
                          "#NHelForMCoverHels\n-1", FKS_card)
        FKS_card_file = open(FKS_card_path, 'w')
        FKS_card_file.write(FKS_card)
        FKS_card_file.close()

        # Return to original PWD
        os.chdir(cwd)
        # Copy the different python files in the Template
        self.copy_python_files()

        # We need to create the correct open_data for the pdf
        self.write_pdf_opendata()
226 227 # I put it here not in optimized one, because I want to use the same makefile_loop.inc 228 # Also, we overload this function (i.e. it is already defined in 229 # LoopProcessExporterFortranSA) because the path of the template makefile 230 # is different.
231 - def write_makefile_TIR(self, writer, link_tir_libs,tir_libs,tir_include=[]):
232 """ Create the file makefile_loop which links to the TIR libraries.""" 233 234 file = open(os.path.join(self.mgme_dir,'Template','NLO', 235 'SubProcesses','makefile_loop.inc')).read() 236 replace_dict={} 237 replace_dict['link_tir_libs']=' '.join(link_tir_libs) 238 replace_dict['tir_libs']=' '.join(tir_libs) 239 replace_dict['dotf']='%.f' 240 replace_dict['doto']='%.o' 241 replace_dict['tir_include']=' '.join(tir_include) 242 file=file%replace_dict 243 if writer: 244 writer.writelines(file) 245 else: 246 return file
247 248 # I put it here not in optimized one, because I want to use the same make_opts.inc
249 - def write_make_opts(self, writer, link_tir_libs,tir_libs):
250 """ Create the file make_opts which links to the TIR libraries.""" 251 file = open(os.path.join(self.mgme_dir,'Template','NLO', 252 'Source','make_opts.inc')).read() 253 replace_dict={} 254 replace_dict['link_tir_libs']=' '.join(link_tir_libs) 255 replace_dict['tir_libs']=' '.join(tir_libs) 256 replace_dict['dotf']='%.f' 257 replace_dict['doto']='%.o' 258 file=file%replace_dict 259 if writer: 260 writer.writelines(file) 261 else: 262 return file
263 264 #=========================================================================== 265 # copy_python_files 266 #===========================================================================
267 - def copy_python_files(self):
268 """copy python files required for the Template""" 269 270 files_to_copy = [ \ 271 pjoin('interface','amcatnlo_run_interface.py'), 272 pjoin('interface','extended_cmd.py'), 273 pjoin('interface','common_run_interface.py'), 274 pjoin('interface','coloring_logging.py'), 275 pjoin('various','misc.py'), 276 pjoin('various','shower_card.py'), 277 pjoin('various','FO_analyse_card.py'), 278 pjoin('various','histograms.py'), 279 pjoin('various','banner.py'), 280 pjoin('various','cluster.py'), 281 pjoin('various','systematics.py'), 282 pjoin('various','lhe_parser.py'), 283 pjoin('madevent','sum_html.py'), 284 pjoin('madevent','gen_crossxhtml.py'), 285 pjoin('iolibs','files.py'), 286 pjoin('iolibs','save_load_object.py'), 287 pjoin('iolibs','file_writers.py'), 288 pjoin('..','models','check_param_card.py'), 289 pjoin('__init__.py') 290 ] 291 cp(_file_path+'/interface/.mg5_logging.conf', 292 self.dir_path+'/bin/internal/me5_logging.conf') 293 294 for cp_file in files_to_copy: 295 cp(pjoin(_file_path,cp_file), 296 pjoin(self.dir_path,'bin','internal',os.path.basename(cp_file)))
297
298 - def convert_model(self, model, wanted_lorentz = [], 299 wanted_couplings = []):
300 301 super(ProcessExporterFortranFKS,self).convert_model(model, 302 wanted_lorentz, wanted_couplings) 303 304 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 305 try: 306 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 307 except OSError as error: 308 pass 309 model_path = model.get('modelpath') 310 shutil.copytree(model_path, 311 pjoin(self.dir_path,'bin','internal','ufomodel'), 312 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 313 if hasattr(model, 'restrict_card'): 314 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 315 'restrict_default.dat') 316 if isinstance(model.restrict_card, check_param_card.ParamCard): 317 model.restrict_card.write(out_path) 318 else: 319 files.cp(model.restrict_card, out_path)
320 321 322 323 #=========================================================================== 324 # write_maxparticles_file 325 #===========================================================================
326 - def write_maxparticles_file(self, writer, maxparticles):
327 """Write the maxparticles.inc file for MadEvent""" 328 329 lines = "integer max_particles, max_branch\n" 330 lines += "parameter (max_particles=%d) \n" % maxparticles 331 lines += "parameter (max_branch=max_particles-1)" 332 333 # Write the file 334 writer.writelines(lines) 335 336 return True
337 338 339 #=========================================================================== 340 # write_maxconfigs_file 341 #===========================================================================
342 - def write_maxconfigs_file(self, writer, maxconfigs):
343 """Write the maxconfigs.inc file for MadEvent""" 344 345 lines = "integer lmaxconfigs\n" 346 lines += "parameter (lmaxconfigs=%d)" % maxconfigs 347 348 # Write the file 349 writer.writelines(lines) 350 351 return True
352 353 354 #=============================================================================== 355 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 356 #===============================================================================
357 - def write_procdef_mg5(self, file_pos, modelname, process_str):
358 """ write an equivalent of the MG4 proc_card in order that all the Madevent 359 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 360 361 proc_card_template = template_files.mg4_proc_card.mg4_template 362 process_template = template_files.mg4_proc_card.process_template 363 process_text = '' 364 coupling = '' 365 new_process_content = [] 366 367 # First find the coupling and suppress the coupling from process_str 368 #But first ensure that coupling are define whithout spaces: 369 process_str = process_str.replace(' =', '=') 370 process_str = process_str.replace('= ', '=') 371 process_str = process_str.replace(',',' , ') 372 #now loop on the element and treat all the coupling 373 for info in process_str.split(): 374 if '=' in info: 375 coupling += info + '\n' 376 else: 377 new_process_content.append(info) 378 # Recombine the process_str (which is the input process_str without coupling 379 #info) 380 process_str = ' '.join(new_process_content) 381 382 #format the SubProcess 383 process_text += process_template.substitute({'process': process_str, \ 384 'coupling': coupling}) 385 386 text = proc_card_template.substitute({'process': process_text, 387 'model': modelname, 388 'multiparticle':''}) 389 ff = open(file_pos, 'w') 390 ff.write(text) 391 ff.close()
392 393 394 #=============================================================================== 395 # write a initial states map, useful for the fast PDF NLO interface 396 #===============================================================================
397 - def write_init_map(self, file_pos, initial_states):
398 """ Write an initial state process map. Each possible PDF 399 combination gets an unique identifier.""" 400 401 text='' 402 for i,e in enumerate(initial_states): 403 text=text+str(i+1)+' '+str(len(e)) 404 for t in e: 405 if len(t) ==1: 406 t.append(0) 407 text=text+' ' 408 try: 409 for p in t: 410 if p == None : p = 0 411 text=text+' '+str(p) 412 except TypeError: 413 text=text+' '+str(t) 414 text=text+'\n' 415 416 ff = open(file_pos, 'w') 417 ff.write(text) 418 ff.close()
419
420 - def get_ME_identifier(self, matrix_element, *args, **opts):
421 """ A function returning a string uniquely identifying the matrix 422 element given in argument so that it can be used as a prefix to all 423 MadLoop5 subroutines and common blocks related to it. This allows 424 to compile several processes into one library as requested by the 425 BLHA (Binoth LesHouches Accord) guidelines. The MadFKS design 426 necessitates that there is no process prefix.""" 427 428 return ''
429 430 #=============================================================================== 431 # write_coef_specs 432 #===============================================================================
433 - def write_coef_specs_file(self, virt_me_list):
434 """writes the coef_specs.inc in the DHELAS folder. Should not be called in the 435 non-optimized mode""" 436 raise fks_common.FKSProcessError()("write_coef_specs should be called only in the loop-optimized mode")
437 438 439 #=============================================================================== 440 # generate_directories_fks 441 #===============================================================================
442 - def generate_directories_fks(self, matrix_element, fortran_model, me_number, 443 me_ntot, path=os.getcwd(),OLP='MadLoop'):
444 """Generate the Pxxxxx_i directories for a subprocess in MadFKS, 445 including the necessary matrix.f and various helper files""" 446 proc = matrix_element.born_matrix_element['processes'][0] 447 448 if not self.model: 449 self.model = matrix_element.get('processes')[0].get('model') 450 451 cwd = os.getcwd() 452 try: 453 os.chdir(path) 454 except OSError as error: 455 error_msg = "The directory %s should exist in order to be able " % path + \ 456 "to \"export\" in it. If you see this error message by " + \ 457 "typing the command \"export\" please consider to use " + \ 458 "instead the command \"output\". " 459 raise MadGraph5Error(error_msg) 460 461 calls = 0 462 463 self.fksdirs = [] 464 #first make and cd the direcrory corresponding to the born process: 465 borndir = "P%s" % \ 466 (matrix_element.get('processes')[0].shell_string()) 467 os.mkdir(borndir) 468 os.chdir(borndir) 469 logger.info('Writing files in %s (%d / %d)' % (borndir, me_number + 1, me_ntot)) 470 471 ## write the files corresponding to the born process in the P* directory 472 self.generate_born_fks_files(matrix_element, 473 fortran_model, me_number, path) 474 475 # With NJET you want to generate the order file per subprocess and most 476 # likely also generate it for each subproc. 
477 if OLP=='NJET': 478 filename = 'OLE_order.lh' 479 self.write_lh_order(filename, [matrix_element.born_matrix_element.get('processes')[0]], OLP) 480 481 if matrix_element.virt_matrix_element: 482 calls += self.generate_virt_directory( \ 483 matrix_element.virt_matrix_element, \ 484 fortran_model, \ 485 os.path.join(path, borndir)) 486 487 #write the infortions for the different real emission processes 488 489 self.write_real_matrix_elements(matrix_element, fortran_model) 490 491 self.write_pdf_calls(matrix_element, fortran_model) 492 493 filename = 'nFKSconfigs.inc' 494 self.write_nfksconfigs_file(writers.FortranWriter(filename), 495 matrix_element, 496 fortran_model) 497 498 filename = 'iproc.dat' 499 self.write_iproc_file(writers.FortranWriter(filename), 500 me_number) 501 502 filename = 'fks_info.inc' 503 self.write_fks_info_file(writers.FortranWriter(filename), 504 matrix_element, 505 fortran_model) 506 507 filename = 'leshouche_info.dat' 508 nfksconfs,maxproc,maxflow,nexternal=\ 509 self.write_leshouche_info_file(filename,matrix_element) 510 511 # if no corrections are generated ([LOonly] mode), get 512 # these variables from the born 513 if nfksconfs == maxproc == maxflow == 0: 514 nfksconfs = 1 515 (dummylines, maxproc, maxflow) = self.get_leshouche_lines( 516 matrix_element.born_matrix_element, 1) 517 518 filename = 'leshouche_decl.inc' 519 self.write_leshouche_info_declarations( 520 writers.FortranWriter(filename), 521 nfksconfs,maxproc,maxflow,nexternal, 522 fortran_model) 523 filename = 'genps.inc' 524 ngraphs = matrix_element.born_matrix_element.get_number_of_amplitudes() 525 ncolor = max(1,len(matrix_element.born_matrix_element.get('color_basis'))) 526 self.write_genps(writers.FortranWriter(filename),maxproc,ngraphs,\ 527 ncolor,maxflow,fortran_model) 528 529 filename = 'configs_and_props_info.dat' 530 nconfigs,max_leg_number=self.write_configs_and_props_info_file( 531 filename, 532 matrix_element) 533 534 filename = 'configs_and_props_decl.inc' 535 
self.write_configs_and_props_info_declarations( 536 writers.FortranWriter(filename), 537 nconfigs,max_leg_number,nfksconfs, 538 fortran_model) 539 540 filename = 'real_from_born_configs.inc' 541 self.write_real_from_born_configs( 542 writers.FortranWriter(filename), 543 matrix_element, 544 fortran_model) 545 546 filename = 'ngraphs.inc' 547 self.write_ngraphs_file(writers.FortranWriter(filename), 548 nconfigs) 549 550 #write the wrappers 551 filename = 'real_me_chooser.f' 552 self.write_real_me_wrapper(writers.FortranWriter(filename), 553 matrix_element, 554 fortran_model) 555 556 filename = 'parton_lum_chooser.f' 557 self.write_pdf_wrapper(writers.FortranWriter(filename), 558 matrix_element, 559 fortran_model) 560 561 filename = 'get_color.f' 562 self.write_colors_file(writers.FortranWriter(filename), 563 matrix_element) 564 565 filename = 'nexternal.inc' 566 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 567 self.write_nexternal_file(writers.FortranWriter(filename), 568 nexternal, ninitial) 569 self.proc_characteristic['ninitial'] = ninitial 570 self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal) 571 572 filename = 'pmass.inc' 573 try: 574 self.write_pmass_file(writers.FortranWriter(filename), 575 matrix_element.real_processes[0].matrix_element) 576 except IndexError: 577 self.write_pmass_file(writers.FortranWriter(filename), 578 matrix_element.born_matrix_element) 579 580 #draw the diagrams 581 self.draw_feynman_diagrams(matrix_element) 582 583 linkfiles = ['BinothLHADummy.f', 584 'check_poles.f', 585 'MCmasses_HERWIG6.inc', 586 'MCmasses_HERWIGPP.inc', 587 'MCmasses_PYTHIA6Q.inc', 588 'MCmasses_PYTHIA6PT.inc', 589 'MCmasses_PYTHIA8.inc', 590 'add_write_info.f', 591 'coupl.inc', 592 'cuts.f', 593 'FKS_params.dat', 594 'initial_states_map.dat', 595 'OLE_order.olc', 596 'FKSParams.inc', 597 'FKSParamReader.f', 598 'cuts.inc', 599 'unlops.inc', 600 'pythia_unlops.f', 601 'driver_mintMC.f', 602 
'driver_mintFO.f', 603 'appl_interface.cc', 604 'appl_interface_dummy.f', 605 'appl_common.inc', 606 'reweight_appl.inc', 607 'fastjetfortran_madfks_core.cc', 608 'fastjetfortran_madfks_full.cc', 609 'fjcore.cc', 610 'fastjet_wrapper.f', 611 'fjcore.hh', 612 'fks_Sij.f', 613 'fks_powers.inc', 614 'fks_singular.f', 615 'veto_xsec.f', 616 'veto_xsec.inc', 617 'weight_lines.f', 618 'fks_inc_chooser.f', 619 'leshouche_inc_chooser.f', 620 'configs_and_props_inc_chooser.f', 621 'genps_fks.f', 622 'boostwdir2.f', 623 'madfks_mcatnlo.inc', 624 'open_output_files.f', 625 'open_output_files_dummy.f', 626 'HwU_dummy.f', 627 'madfks_plot.f', 628 'analysis_dummy.f', 629 'analysis_lhe.f', 630 'mint-integrator2.f', 631 'MC_integer.f', 632 'mint.inc', 633 'montecarlocounter.f', 634 'q_es.inc', 635 'recluster.cc', 636 'Boosts.h', 637 'reweight_xsec.f', 638 'reweight_xsec_events.f', 639 'reweight_xsec_events_pdf_dummy.f', 640 'iproc_map.f', 641 'run.inc', 642 'run_card.inc', 643 'setcuts.f', 644 'setscales.f', 645 'test_soft_col_limits.f', 646 'symmetry_fks_v3.f', 647 'vegas2.for', 648 'write_ajob.f', 649 'handling_lhe_events.f', 650 'write_event.f', 651 'fill_MC_mshell.f', 652 'maxparticles.inc', 653 'message.inc', 654 'initcluster.f', 655 'cluster.inc', 656 'cluster.f', 657 'reweight.f', 658 'randinit', 659 'sudakov.inc', 660 'maxconfigs.inc', 661 'timing_variables.inc'] 662 663 for file in linkfiles: 664 ln('../' + file , '.') 665 os.system("ln -s ../../Cards/param_card.dat .") 666 667 #copy the makefile 668 os.system("ln -s ../makefile_fks_dir ./makefile") 669 if matrix_element.virt_matrix_element: 670 os.system("ln -s ../BinothLHA.f ./BinothLHA.f") 671 elif OLP!='MadLoop': 672 os.system("ln -s ../BinothLHA_OLP.f ./BinothLHA.f") 673 else: 674 os.system("ln -s ../BinothLHA_user.f ./BinothLHA.f") 675 676 # Return to SubProcesses dir 677 os.chdir(os.path.pardir) 678 # Add subprocess to subproc.mg 679 filename = 'subproc.mg' 680 files.append_to_file(filename, 681 self.write_subproc, 
682 borndir) 683 684 os.chdir(cwd) 685 # Generate info page 686 gen_infohtml.make_info_html_nlo(self.dir_path) 687 688 689 return calls
690 691 #=========================================================================== 692 # create the run_card 693 #===========================================================================
694 - def create_run_card(self, processes, history):
695 """ """ 696 697 run_card = banner_mod.RunCardNLO() 698 699 run_card.create_default_for_process(self.proc_characteristic, 700 history, 701 processes) 702 703 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat')) 704 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))
705 706
707 - def pass_information_from_cmd(self, cmd):
708 """pass information from the command interface to the exporter. 709 Please do not modify any object of the interface from the exporter. 710 """ 711 self.proc_defs = cmd._curr_proc_defs 712 if hasattr(cmd,'born_processes'): 713 self.born_processes = cmd.born_processes 714 else: 715 self.born_processes = [] 716 return
717
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize FKS directory by creating jpeg diagrams, html
        pages, proc_card_mg5.dat and madevent.tar.gz and create the MA5
        card if necessary.

        matrix_elements: exported matrix elements (run card, maxconfigs /
            maxparticles and the MA5 process list are derived from them).
        history: command history, written as Cards/proc_card_mg5.dat.
        mg5options: interface options dict (compilers, lhapdf path,
            output_dependencies, complex_mass_scheme, ...).
        flaglist: list of flags; 'nojpeg' disables jpeg generation.
        """

        # NOTE(review): this fd is opened twice in this method and never
        # closed — confirm whether the leak is acceptable here.
        devnull = os.open(os.devnull, os.O_RDWR)
        # Check that the configured lhapdf is a runnable executable; on
        # failure only warn, since the built-in PDFs still work.
        try:
            res = misc.call([mg5options['lhapdf'], '--version'], \
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        except Exception:
            res = 1
        if res != 0:
            logger.info('The value for lhapdf in the current configuration does not ' + \
                        'correspond to a valid executable.\nPlease set it correctly either in ' + \
                        'input/mg5_configuration or with "set lhapdf /path/to/lhapdf-config" ' + \
                        'and regenrate the process. \nTo avoid regeneration, edit the ' + \
                        ('%s/Cards/amcatnlo_configuration.txt file.\n' % self.dir_path ) + \
                        'Note that you can still compile and run aMC@NLO with the built-in PDFs\n')

        compiler_dict = {'fortran': mg5options['fortran_compiler'],
                         'cpp': mg5options['cpp_compiler'],
                         'f2py': mg5options['f2py_compiler']}

        if 'nojpeg' in flaglist:
            makejpg = False
        else:
            makejpg = True
        output_dependencies = mg5options['output_dependencies']

        self.proc_characteristic['grouped_matrix'] = False
        self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme']

        self.create_proc_charac()

        self.create_run_card(matrix_elements.get_processes(), history)

        # modelname = self.model.get('name')
        # if modelname == 'mssm' or modelname.startswith('mssm-'):
        #     param_card = os.path.join(self.dir_path, 'Cards','param_card.dat')
        #     mg5_param = os.path.join(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat')
        #     check_param_card.convert_to_mg5card(param_card, mg5_param)
        #     check_param_card.check_valid_param_card(mg5_param)

        # write the model functions get_mass/width_from_id
        filename = os.path.join(self.dir_path,'Source','MODEL','get_mass_width_fcts.f')
        makeinc = os.path.join(self.dir_path,'Source','MODEL','makeinc.inc')
        self.write_get_mass_width_file(writers.FortranWriter(filename), makeinc, self.model)

        # Write maxconfigs.inc based on max of ME's/subprocess groups
        filename = os.path.join(self.dir_path,'Source','maxconfigs.inc')
        self.write_maxconfigs_file(writers.FortranWriter(filename),
                                   matrix_elements.get_max_configs())

        # Write maxparticles.inc based on max of ME's/subprocess groups
        filename = os.path.join(self.dir_path,'Source','maxparticles.inc')
        self.write_maxparticles_file(writers.FortranWriter(filename),
                                     matrix_elements.get_max_particles())

        # Touch "done" file
        os.system('touch %s/done' % os.path.join(self.dir_path,'SubProcesses'))

        # Check for compiler
        fcompiler_chosen = self.set_fortran_compiler(compiler_dict)
        ccompiler_chosen = self.set_cpp_compiler(compiler_dict['cpp'])

        old_pos = os.getcwd()
        os.chdir(os.path.join(self.dir_path, 'SubProcesses'))
        P_dir_list = [proc for proc in os.listdir('.') if os.path.isdir(proc) and \
                      proc[0] == 'P']

        devnull = os.open(os.devnull, os.O_RDWR)
        # Convert the poscript in jpg files (if authorize)
        if makejpg:
            logger.info("Generate jpeg diagrams")
            for Pdir in P_dir_list:
                os.chdir(Pdir)
                subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')],
                                stdout = devnull)
                os.chdir(os.path.pardir)

        logger.info("Generate web pages")
        # Create the WebPage using perl script
        subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \
                        stdout = devnull)

        os.chdir(os.path.pardir)

        # obj = gen_infohtml.make_info_html(self.dir_path)
        # [mv(name, './HTML/') for name in os.listdir('.') if \
        #                     (name.endswith('.html') or name.endswith('.jpg')) and \
        #                     name != 'index.html']
        # if online:
        #     nb_channel = obj.rep_rule['nb_gen_diag']
        #     open(os.path.join('./Online'),'w').write(str(nb_channel))

        # Write command history as proc_card_mg5
        if os.path.isdir('Cards'):
            output_file = os.path.join('Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        # Duplicate run_card and FO_analyse_card
        for card in ['run_card', 'FO_analyse_card', 'shower_card']:
            try:
                shutil.copy(pjoin(self.dir_path, 'Cards',
                                  card + '.dat'),
                            pjoin(self.dir_path, 'Cards',
                                  card + '_default.dat'))
            except IOError:
                logger.warning("Failed to copy " + card + ".dat to default")

        subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
                        stdout = devnull)

        # Run "make" to generate madevent.tar.gz file
        if os.path.exists(pjoin('SubProcesses', 'subproc.mg')):
            if os.path.exists('amcatnlo.tar.gz'):
                os.remove('amcatnlo.tar.gz')
            subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'make_amcatnlo_tar')],
                            stdout = devnull)

        subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
                        stdout = devnull)

        # return to the initial dir
        os.chdir(old_pos)

        # Setup stdHep
        # Find the correct fortran compiler
        base_compiler = ['FC=g77','FC=gfortran']

        StdHep_path = pjoin(MG5DIR, 'vendor', 'StdHEP')
        if output_dependencies == 'external':
            # check if stdhep has to be compiled (only the first time);
            # a previous failed attempt leaves a 'fail' marker that skips it.
            if (not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libstdhep.a')) or \
                not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libFmcfio.a'))) and \
               not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP','fail')):
                if 'FC' not in os.environ or not os.environ['FC']:
                    # Inject the chosen fortran compiler into StdHEP's make_opts.
                    path = os.path.join(StdHep_path, 'src', 'make_opts')
                    text = open(path).read()
                    for base in base_compiler:
                        text = text.replace(base,'FC=%s' % fcompiler_chosen)
                    open(path, 'w').writelines(text)
                logger.info('Compiling StdHEP. This has to be done only once.')
                try:
                    misc.compile(cwd = pjoin(MG5DIR, 'vendor', 'StdHEP'))
                except Exception as error:
                    logger.debug(str(error))
                    logger.warning("StdHep failed to compiled. This forbids to run NLO+PS with PY6 and Herwig6")
                    logger.info("details on the compilation error are available on %s", pjoin(MG5DIR, 'vendor', 'StdHEP','fail'))
                    logger.info("if you want to retry the compilation automatically, you have to remove that file first")
                    with open(pjoin(MG5DIR, 'vendor', 'StdHEP','fail'),'w') as fsock:
                        fsock.write(str(error))
                else:
                    logger.info('Done.')
            if os.path.exists(pjoin(StdHep_path, 'lib', 'libstdhep.a')):
                # then link the libraries in the exported dir
                files.ln(pjoin(StdHep_path, 'lib', 'libstdhep.a'), \
                         pjoin(self.dir_path, 'MCatNLO', 'lib'))
                files.ln(pjoin(StdHep_path, 'lib', 'libFmcfio.a'), \
                         pjoin(self.dir_path, 'MCatNLO', 'lib'))

        elif output_dependencies == 'internal':
            StdHEP_internal_path = pjoin(self.dir_path,'Source','StdHEP')
            shutil.copytree(StdHep_path, StdHEP_internal_path, symlinks=True)
            # Create the links to the lib folder
            linkfiles = ['libstdhep.a', 'libFmcfio.a']
            for file in linkfiles:
                ln(pjoin(os.path.pardir,os.path.pardir,'Source','StdHEP','lib',file),
                   os.path.join(self.dir_path, 'MCatNLO', 'lib'))
            if 'FC' not in os.environ or not os.environ['FC']:
                path = pjoin(StdHEP_internal_path, 'src', 'make_opts')
                text = open(path).read()
                for base in base_compiler:
                    text = text.replace(base,'FC=%s' % fcompiler_chosen)
                open(path, 'w').writelines(text)
            # To avoid compiler version conflicts, we force a clean here
            misc.compile(['clean'], cwd = StdHEP_internal_path)

        elif output_dependencies == 'environment_paths':
            # Here the user chose to define the dependencies path in one of
            # his environmental paths
            libStdHep = misc.which_lib('libstdhep.a')
            libFmcfio = misc.which_lib('libFmcfio.a')
            if not libStdHep is None and not libFmcfio is None:
                logger.info('MG5_aMC is using StdHep installation found at %s.'%\
                            os.path.dirname(libStdHep))
                ln(pjoin(libStdHep),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True)
                ln(pjoin(libFmcfio),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True)
            else:
                raise InvalidCmd("Could not find the location of the files"+\
                                 " libstdhep.a and libFmcfio.a in you environment paths.")

        else:
            raise MadGraph5Error('output_dependencies option %s not recognized'\
                                 %output_dependencies)

        # Create the default MadAnalysis5 cards
        if 'madanalysis5_path' in self.opt and not \
           self.opt['madanalysis5_path'] is None and not self.proc_defs is None:
            # Collect the processes MA5 should tailor the card to.
            processes = sum([me.get('processes') if not isinstance(me, str) else [] \
                             for me in matrix_elements.get('matrix_elements')],[])

            # Try getting the processes from the generation info directly if no ME are
            # available (as it is the case for parallel generation)
            if len(processes)==0:
                processes = self.born_processes
            if len(processes)==0:
                logger.warning(
"""MG5aMC could not provide to Madanalysis5 the list of processes generated.
As a result, the default card will not be tailored to the process generated.
This typically happens when using the 'low_mem_multicore_nlo_generation' NLO generation mode.""")
            # For now, simply assign all processes to each proc_defs.
            # That shouldn't really affect the default analysis card created by MA5
            self.create_default_madanalysis5_cards(
                history, self.proc_defs, [processes,]*len(self.proc_defs),
                self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'),
                levels =['hadron'])
939
    def write_real_from_born_configs(self, writer, matrix_element, fortran_model):
        """Writes the real_from_born_configs.inc file that contains
        the mapping to go for a given born configuration (that is used
        e.g. in the multi-channel phase-space integration to the
        corresponding real-emission diagram, i.e. the real emission
        diagram in which the combined ij is split in i_fks and
        j_fks.

        writer: FortranWriter for real_from_born_configs.inc
        matrix_element: FKS helas object with born + real-emission processes
        fortran_model: unused here; kept for interface uniformity
        """
        # 'lines' collects the Fortran data statements, 'lines2' the
        # declarations; the declarations are written first (see last line).
        lines=[]
        lines2=[]
        max_links=0
        born_me=matrix_element.born_matrix_element
        for iFKS, conf in enumerate(matrix_element.get_fks_info_list()):
            # Fortran arrays are 1-based
            iFKS=iFKS+1
            links=conf['fks_info']['rb_links']
            max_links=max(max_links,len(links))
            for i,diags in enumerate(links):
                if not i == diags['born_conf']:
                    # diagnostic dump to stdout before aborting: born configs
                    # are required to appear in canonical (0,1,2,...) order
                    print(links)
                    raise MadGraph5Error("born_conf should be canonically ordered")
            # real-emission config numbers, shifted to 1-based Fortran indices
            real_configs=', '.join(['%d' % int(diags['real_conf']+1) for diags in links])
            lines.append("data (real_from_born_conf(irfbc,%d),irfbc=1,%d) /%s/" \
                             % (iFKS,len(links),real_configs))

        # this is for 'LOonly' processes; in this case, a fake configuration
        # with all the born diagrams is written
        if not matrix_element.get_fks_info_list():
            # compute (again) the number of configurations at the born
            base_diagrams = born_me.get('base_amplitude').get('diagrams')
            minvert = min([max([len(vert.get('legs')) for vert in \
                                diag.get('vertices')]) for diag in base_diagrams])

            for idiag, diag in enumerate(base_diagrams):
                if any([len(vert.get('legs')) > minvert for vert in
                        diag.get('vertices')]):
                    # Only 3-vertices allowed in configs.inc
                    continue
                max_links = max_links + 1

            # identity mapping: config i of the single fake FKS process
            # points to born config i
            real_configs=', '.join(['%d' % i for i in range(1, max_links+1)])
            lines.append("data (real_from_born_conf(irfbc,%d),irfbc=1,%d) /%s/" \
                             % (1,max_links,real_configs))

        lines2.append("integer irfbc")
        # at least one FKS configuration is always declared, even for LOonly
        lines2.append("integer real_from_born_conf(%d,%d)" \
                          % (max_links, max(len(matrix_element.get_fks_info_list()),1)))
        # Write the file
        writer.writelines(lines2+lines)
987 988 989 #=============================================================================== 990 # write_get_mass_width_file 991 #=============================================================================== 992 #test written
993 - def write_get_mass_width_file(self, writer, makeinc, model):
994 """Write the get_mass_width_file.f file for MG4. 995 Also update the makeinc.inc file 996 """ 997 mass_particles = [p for p in model['particles'] if p['mass'].lower() != 'zero'] 998 width_particles = [p for p in model['particles'] if p['width'].lower() != 'zero'] 999 1000 iflines_mass = '' 1001 iflines_width = '' 1002 1003 for i, part in enumerate(mass_particles): 1004 if i == 0: 1005 ifstring = 'if' 1006 else: 1007 ifstring = 'else if' 1008 if part['self_antipart']: 1009 iflines_mass += '%s (id.eq.%d) then\n' % \ 1010 (ifstring, part.get_pdg_code()) 1011 else: 1012 iflines_mass += '%s (id.eq.%d.or.id.eq.%d) then\n' % \ 1013 (ifstring, part.get_pdg_code(), part.get_anti_pdg_code()) 1014 iflines_mass += 'get_mass_from_id=abs(%s)\n' % part.get('mass') 1015 1016 for i, part in enumerate(width_particles): 1017 if i == 0: 1018 ifstring = 'if' 1019 else: 1020 ifstring = 'else if' 1021 if part['self_antipart']: 1022 iflines_width += '%s (id.eq.%d) then\n' % \ 1023 (ifstring, part.get_pdg_code()) 1024 else: 1025 iflines_width += '%s (id.eq.%d.or.id.eq.%d) then\n' % \ 1026 (ifstring, part.get_pdg_code(), part.get_anti_pdg_code()) 1027 iflines_width += 'get_width_from_id=abs(%s)\n' % part.get('width') 1028 1029 # Make sure it compiles with an if-statement if the above lists are empty 1030 if len(mass_particles)==0: 1031 iflines_mass = 'if (.True.) then\n' 1032 1033 if len(width_particles)==0: 1034 iflines_width = 'if (.True.) then\n' 1035 1036 replace_dict = {'iflines_mass' : iflines_mass, 1037 'iflines_width' : iflines_width} 1038 1039 file = open(os.path.join(_file_path, \ 1040 'iolibs/template_files/get_mass_width_fcts.inc')).read() 1041 file = file % replace_dict 1042 1043 # Write the file 1044 writer.writelines(file) 1045 1046 # update the makeinc 1047 makeinc_content = open(makeinc).read() 1048 makeinc_content = makeinc_content.replace('MODEL = ', 'MODEL = get_mass_width_fcts.o ') 1049 open(makeinc, 'w').write(makeinc_content) 1050 1051 return
1052 1053
1054 - def write_configs_and_props_info_declarations(self, writer, max_iconfig, max_leg_number, nfksconfs, fortran_model):
1055 """writes the declarations for the variables relevant for configs_and_props 1056 """ 1057 lines = [] 1058 lines.append("integer ifr,lmaxconfigs_used,max_branch_used") 1059 lines.append("parameter (lmaxconfigs_used=%4d)" % max_iconfig) 1060 lines.append("parameter (max_branch_used =%4d)" % -max_leg_number) 1061 lines.append("integer mapconfig_d(%3d,0:lmaxconfigs_used)" % nfksconfs) 1062 lines.append("integer iforest_d(%3d,2,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 1063 lines.append("integer sprop_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 1064 lines.append("integer tprid_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 1065 lines.append("double precision pmass_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 1066 lines.append("double precision pwidth_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 1067 lines.append("integer pow_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 1068 1069 writer.writelines(lines)
1070 1071
    def write_configs_and_props_info_file(self, filename, matrix_element):
        """writes the configs_and_props_info.inc file that contains
        all the (real-emission) configurations (IFOREST) as well as
        the masses and widths of intermediate particles.

        filename: path of the output file
        matrix_element: FKS helas object with born + real-emission processes

        Returns (max_iconfig, max_leg_number): the largest config index used
        and the most negative internal-leg label, needed by
        write_configs_and_props_info_declarations to size the arrays.
        """
        # One-letter record tags (first column of each output line) and the
        # Fortran array each of them fills:
        lines = []
        lines.append("# C -> MAPCONFIG_D")
        lines.append("# F/D -> IFOREST_D")
        lines.append("# S -> SPROP_D")
        lines.append("# T -> TPRID_D")
        lines.append("# M -> PMASS_D/PWIDTH_D")
        lines.append("# P -> POW_D")
        # 'lines' holds the topology records, 'lines2' the propagator
        # (props.inc-like) records; both end up in the same file.
        lines2 = []
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        max_iconfig=0
        max_leg_number=0

        ########################################################
        # this is for standard processes with [(real=)XXX]
        ########################################################
        for iFKS, conf in enumerate(matrix_element.get_fks_info_list()):
            # Fortran indices are 1-based
            iFKS=iFKS+1
            iconfig = 0
            s_and_t_channels = []
            mapconfigs = []
            fks_matrix_element=matrix_element.real_processes[conf['n_me'] - 1].matrix_element
            base_diagrams = fks_matrix_element.get('base_amplitude').get('diagrams')
            model = fks_matrix_element.get('base_amplitude').get('process').get('model')
            # smallest "maximal vertex size" over the diagrams: diagrams with
            # bigger vertices are skipped below
            minvert = min([max([len(vert.get('legs')) for vert in \
                                diag.get('vertices')]) for diag in base_diagrams])

            lines.append("# ")
            lines.append("# nFKSprocess %d" % iFKS)
            for idiag, diag in enumerate(base_diagrams):
                if any([len(vert.get('legs')) > minvert for vert in
                        diag.get('vertices')]):
                    # Only 3-vertices allowed in configs.inc
                    continue
                iconfig = iconfig + 1
                helas_diag = fks_matrix_element.get('diagrams')[idiag]
                mapconfigs.append(helas_diag.get('number'))
                lines.append("# Diagram %d for nFKSprocess %d" % \
                             (helas_diag.get('number'),iFKS))
                # Correspondance between the config and the amplitudes
                lines.append("C %4d %4d %4d " % (iFKS,iconfig,
                                                 helas_diag.get('number')))

                # Need to reorganize the topology so that we start with all
                # final state external particles and work our way inwards
                schannels, tchannels = helas_diag.get('amplitudes')[0].\
                    get_s_and_t_channels(ninitial, model, 990)

                s_and_t_channels.append([schannels, tchannels])

                # Write out propagators for s-channel and t-channel vertices
                allchannels = schannels
                if len(tchannels) > 1:
                    # Write out tchannels only if there are any non-trivial ones
                    allchannels = schannels + tchannels

                for vert in allchannels:
                    daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                    last_leg = vert.get('legs')[-1]
                    lines.append("F %4d %4d %4d %4d" % \
                                 (iFKS,last_leg.get('number'), iconfig, len(daughters)))
                    for d in daughters:
                        lines.append("D %4d" % d)
                    if vert in schannels:
                        lines.append("S %4d %4d %4d %10d" % \
                                     (iFKS,last_leg.get('number'), iconfig,
                                      last_leg.get('id')))
                    elif vert in tchannels[:-1]:
                        lines.append("T %4d %4d %4d %10d" % \
                                     (iFKS,last_leg.get('number'), iconfig,
                                      abs(last_leg.get('id'))))

                    # update what the array sizes (mapconfig,iforest,etc) will be
                    # (internal legs have negative numbers, hence min())
                    max_leg_number = min(max_leg_number,last_leg.get('number'))
                    max_iconfig = max(max_iconfig,iconfig)

            # Write out number of configs
            lines.append("# Number of configs for nFKSprocess %d" % iFKS)
            lines.append("C %4d %4d %4d" % (iFKS,0,iconfig))

            # write the props.inc information
            lines2.append("# ")
            particle_dict = fks_matrix_element.get('processes')[0].get('model').\
                            get('particle_dict')

            for iconf, configs in enumerate(s_and_t_channels):
                for vertex in configs[0] + configs[1][:-1]:
                    leg = vertex.get('legs')[-1]
                    if leg.get('id') not in particle_dict:
                        # Fake propagator used in multiparticle vertices
                        pow_part = 0
                    else:
                        particle = particle_dict[leg.get('id')]
                        # propagator power: 2 for bosons, 1 for fermions
                        pow_part = 1 + int(particle.is_boson())

                    lines2.append("M %4d %4d %4d %10d " % \
                                  (iFKS,leg.get('number'), iconf + 1, leg.get('id')))
                    lines2.append("P %4d %4d %4d %4d " % \
                                  (iFKS,leg.get('number'), iconf + 1, pow_part))

        ########################################################
        # this is for [LOonly=XXX]
        ########################################################
        if not matrix_element.get_fks_info_list():
            born_me = matrix_element.born_matrix_element
            # as usual, in this case we assume just one FKS configuration
            # exists with diagrams corresponding to born ones X the ij -> i,j
            # splitting. Here j is chosen to be the last colored particle in
            # the particle list
            bornproc = born_me.get('processes')[0]
            colors = [l.get('color') for l in bornproc.get('legs')]

            fks_i = len(colors)
            # use the last colored particle if it exists, or
            # just the last
            # NOTE(review): if no leg has color != 1, fks_j stays 1 and
            # fks_j_id is never assigned; it is only read when
            # fks_j > ninitial, so that case cannot be reached then.
            fks_j=1
            for cpos, col in enumerate(colors):
                if col != 1:
                    fks_j = cpos+1
                    fks_j_id = [l.get('id') for l in bornproc.get('legs')][cpos]

            # for the moment, if j is initial-state, we do nothing
            if fks_j > ninitial:
                iFKS=1
                iconfig = 0
                s_and_t_channels = []
                mapconfigs = []
                base_diagrams = born_me.get('base_amplitude').get('diagrams')
                model = born_me.get('base_amplitude').get('process').get('model')
                minvert = min([max([len(vert.get('legs')) for vert in \
                                    diag.get('vertices')]) for diag in base_diagrams])

                lines.append("# ")
                lines.append("# nFKSprocess %d" % iFKS)
                for idiag, diag in enumerate(base_diagrams):
                    if any([len(vert.get('legs')) > minvert for vert in
                            diag.get('vertices')]):
                        # Only 3-vertices allowed in configs.inc
                        continue
                    iconfig = iconfig + 1
                    helas_diag = born_me.get('diagrams')[idiag]
                    mapconfigs.append(helas_diag.get('number'))
                    lines.append("# Diagram %d for nFKSprocess %d" % \
                                 (helas_diag.get('number'),iFKS))
                    # Correspondance between the config and the amplitudes
                    lines.append("C %4d %4d %4d " % (iFKS,iconfig,
                                                     helas_diag.get('number')))

                    # Need to reorganize the topology so that we start with all
                    # final state external particles and work our way inwards
                    schannels, tchannels = helas_diag.get('amplitudes')[0].\
                        get_s_and_t_channels(ninitial, model, 990)

                    s_and_t_channels.append([schannels, tchannels])

                    #the first thing to write is the splitting ij -> i,j
                    lines.append("F %4d %4d %4d %4d" % \
                                 (iFKS,-1,iconfig,2))
                    #(iFKS,last_leg.get('number'), iconfig, len(daughters)))
                    lines.append("D %4d" % nexternal)
                    lines.append("D %4d" % fks_j)
                    lines.append("S %4d %4d %4d %10d" % \
                                 (iFKS,-1, iconfig,fks_j_id))
                    # now we continue with all the other vertices of the diagrams;
                    # we need to shift the 'last_leg' by 1 and replace leg fks_j with -1

                    # Write out propagators for s-channel and t-channel vertices
                    allchannels = schannels
                    if len(tchannels) > 1:
                        # Write out tchannels only if there are any non-trivial ones
                        allchannels = schannels + tchannels

                    for vert in allchannels:
                        daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                        last_leg = vert.get('legs')[-1]
                        lines.append("F %4d %4d %4d %4d" % \
                                     (iFKS,last_leg.get('number')-1, iconfig, len(daughters)))

                        # legs with negative number in daughters have to be shifted by -1
                        for i_dau in range(len(daughters)):
                            if daughters[i_dau] < 0:
                                daughters[i_dau] += -1
                        # finally relable fks with -1 if it appears in daughters
                        if fks_j in daughters:
                            daughters[daughters.index(fks_j)] = -1
                        for d in daughters:
                            lines.append("D %4d" % d)
                        if vert in schannels:
                            lines.append("S %4d %4d %4d %10d" % \
                                         (iFKS,last_leg.get('number')-1, iconfig,
                                          last_leg.get('id')))
                        elif vert in tchannels[:-1]:
                            lines.append("T %4d %4d %4d %10d" % \
                                         (iFKS,last_leg.get('number')-1, iconfig,
                                          abs(last_leg.get('id'))))

                        # update what the array sizes (mapconfig,iforest,etc) will be
                        max_leg_number = min(max_leg_number,last_leg.get('number')-1)
                        max_iconfig = max(max_iconfig,iconfig)

                # Write out number of configs
                lines.append("# Number of configs for nFKSprocess %d" % iFKS)
                lines.append("C %4d %4d %4d" % (iFKS,0,iconfig))

                # write the props.inc information
                lines2.append("# ")
                particle_dict = born_me.get('processes')[0].get('model').\
                                get('particle_dict')

                for iconf, configs in enumerate(s_and_t_channels):
                    # the fake ij -> i,j propagator first
                    lines2.append("M %4d %4d %4d %10d " % \
                                  (iFKS,-1, iconf + 1, fks_j_id))
                    pow_part = 1 + int(particle_dict[fks_j_id].is_boson())
                    lines2.append("P %4d %4d %4d %4d " % \
                                  (iFKS,-1, iconf + 1, pow_part))
                    for vertex in configs[0] + configs[1][:-1]:
                        leg = vertex.get('legs')[-1]
                        if leg.get('id') not in particle_dict:
                            # Fake propagator used in multiparticle vertices
                            pow_part = 0
                        else:
                            particle = particle_dict[leg.get('id')]
                            pow_part = 1 + int(particle.is_boson())

                        lines2.append("M %4d %4d %4d %10d " % \
                                      (iFKS,leg.get('number')-1, iconf + 1, leg.get('id')))
                        lines2.append("P %4d %4d %4d %4d " % \
                                      (iFKS,leg.get('number')-1, iconf + 1, pow_part))

        # Write the file
        open(filename,'w').write('\n'.join(lines+lines2))

        return max_iconfig, max_leg_number
1311 1312
1313 - def write_leshouche_info_declarations(self, writer, nfksconfs, 1314 maxproc, maxflow, nexternal, fortran_model):
1315 """writes the declarations for the variables relevant for leshouche_info 1316 """ 1317 lines = [] 1318 lines.append('integer maxproc_used, maxflow_used') 1319 lines.append('parameter (maxproc_used = %d)' % maxproc) 1320 lines.append('parameter (maxflow_used = %d)' % maxflow) 1321 lines.append('integer idup_d(%d,%d,maxproc_used)' % (nfksconfs, nexternal)) 1322 lines.append('integer mothup_d(%d,%d,%d,maxproc_used)' % (nfksconfs, 2, nexternal)) 1323 lines.append('integer icolup_d(%d,%d,%d,maxflow_used)' % (nfksconfs, 2, nexternal)) 1324 lines.append('integer niprocs_d(%d)' % (nfksconfs)) 1325 1326 writer.writelines(lines)
1327 1328
1329 - def write_genps(self, writer, maxproc,ngraphs,ncolor,maxflow, fortran_model):
1330 """writes the genps.inc file 1331 """ 1332 lines = [] 1333 lines.append("include 'maxparticles.inc'") 1334 lines.append("include 'maxconfigs.inc'") 1335 lines.append("integer maxproc,ngraphs,ncolor,maxflow") 1336 lines.append("parameter (maxproc=%d,ngraphs=%d,ncolor=%d,maxflow=%d)" % \ 1337 (maxproc,ngraphs,ncolor,maxflow)) 1338 writer.writelines(lines)
1339 1340
1341 - def write_leshouche_info_file(self, filename, matrix_element):
1342 """writes the leshouche_info.inc file which contains 1343 the LHA informations for all the real emission processes 1344 """ 1345 lines = [] 1346 lines.append("# I -> IDUP_D") 1347 lines.append("# M -> MOTHUP_D") 1348 lines.append("# C -> ICOLUP_D") 1349 nfksconfs = len(matrix_element.get_fks_info_list()) 1350 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1351 1352 maxproc = 0 1353 maxflow = 0 1354 for i, conf in enumerate(matrix_element.get_fks_info_list()): 1355 # for i, real in enumerate(matrix_element.real_processes): 1356 (newlines, nprocs, nflows) = self.get_leshouche_lines( 1357 matrix_element.real_processes[conf['n_me'] - 1].matrix_element, i + 1) 1358 lines.extend(newlines) 1359 maxproc = max(maxproc, nprocs) 1360 maxflow = max(maxflow, nflows) 1361 1362 # this is for LOonly 1363 if not matrix_element.get_fks_info_list(): 1364 (newlines, nprocs, nflows) = self.get_leshouche_lines_dummy(matrix_element.born_matrix_element, 1) 1365 lines.extend(newlines) 1366 1367 # Write the file 1368 open(filename,'w').write('\n'.join(lines)) 1369 1370 return nfksconfs, maxproc, maxflow, nexternal
1371 1372
1373 - def write_pdf_wrapper(self, writer, matrix_element, fortran_model):
1374 """writes the wrapper which allows to chose among the different real matrix elements""" 1375 1376 file = \ 1377 """double precision function dlum() 1378 implicit none 1379 integer nfksprocess 1380 common/c_nfksprocess/nfksprocess 1381 """ 1382 if matrix_element.real_processes: 1383 for n, info in enumerate(matrix_element.get_fks_info_list()): 1384 file += \ 1385 """if (nfksprocess.eq.%(n)d) then 1386 call dlum_%(n_me)d(dlum) 1387 else""" % {'n': n + 1, 'n_me' : info['n_me']} 1388 file += \ 1389 """ 1390 write(*,*) 'ERROR: invalid n in dlum :', nfksprocess 1391 stop 1392 endif 1393 return 1394 end 1395 """ 1396 else: 1397 file+= \ 1398 """call dlum_0(dlum) 1399 return 1400 end 1401 """ 1402 1403 # Write the file 1404 writer.writelines(file) 1405 return 0
1406 1407
1408 - def write_real_me_wrapper(self, writer, matrix_element, fortran_model):
1409 """writes the wrapper which allows to chose among the different real matrix elements""" 1410 1411 file = \ 1412 """subroutine smatrix_real(p, wgt) 1413 implicit none 1414 include 'nexternal.inc' 1415 double precision p(0:3, nexternal) 1416 double precision wgt 1417 integer nfksprocess 1418 common/c_nfksprocess/nfksprocess 1419 """ 1420 for n, info in enumerate(matrix_element.get_fks_info_list()): 1421 file += \ 1422 """if (nfksprocess.eq.%(n)d) then 1423 call smatrix_%(n_me)d(p, wgt) 1424 else""" % {'n': n + 1, 'n_me' : info['n_me']} 1425 1426 if matrix_element.real_processes: 1427 file += \ 1428 """ 1429 write(*,*) 'ERROR: invalid n in real_matrix :', nfksprocess 1430 stop 1431 endif 1432 return 1433 end 1434 """ 1435 else: 1436 file += \ 1437 """ 1438 wgt=0d0 1439 return 1440 end 1441 """ 1442 # Write the file 1443 writer.writelines(file) 1444 return 0
1445 1446
1447 - def draw_feynman_diagrams(self, matrix_element):
1448 """Create the ps files containing the feynman diagrams for the born process, 1449 as well as for all the real emission processes""" 1450 1451 filename = 'born.ps' 1452 plot = draw.MultiEpsDiagramDrawer(matrix_element.born_matrix_element.\ 1453 get('base_amplitude').get('diagrams'), 1454 filename, 1455 model=matrix_element.born_matrix_element.\ 1456 get('processes')[0].get('model'), 1457 amplitude=True, diagram_type='born') 1458 plot.draw() 1459 1460 for n, fksreal in enumerate(matrix_element.real_processes): 1461 filename = 'matrix_%d.ps' % (n + 1) 1462 plot = draw.MultiEpsDiagramDrawer(fksreal.matrix_element.\ 1463 get('base_amplitude').get('diagrams'), 1464 filename, 1465 model=fksreal.matrix_element.\ 1466 get('processes')[0].get('model'), 1467 amplitude=True, diagram_type='real') 1468 plot.draw()
1469 1470
1471 - def write_real_matrix_elements(self, matrix_element, fortran_model):
1472 """writes the matrix_i.f files which contain the real matrix elements""" 1473 1474 1475 1476 for n, fksreal in enumerate(matrix_element.real_processes): 1477 filename = 'matrix_%d.f' % (n + 1) 1478 self.write_matrix_element_fks(writers.FortranWriter(filename), 1479 fksreal.matrix_element, n + 1, 1480 fortran_model)
1481
1482 - def write_pdf_calls(self, matrix_element, fortran_model):
1483 """writes the parton_lum_i.f files which contain the real matrix elements. 1484 If no real emission existst, write the one for the born""" 1485 1486 if matrix_element.real_processes: 1487 for n, fksreal in enumerate(matrix_element.real_processes): 1488 filename = 'parton_lum_%d.f' % (n + 1) 1489 self.write_pdf_file(writers.FortranWriter(filename), 1490 fksreal.matrix_element, n + 1, 1491 fortran_model) 1492 else: 1493 filename = 'parton_lum_0.f' 1494 self.write_pdf_file(writers.FortranWriter(filename), 1495 matrix_element.born_matrix_element, 0, 1496 fortran_model)
1497 1498
    def generate_born_fks_files(self, matrix_element, fortran_model, me_number, path):
        """generates the files needed for the born amplitude in the P* directory, which will
        be needed by the P* directories.

        All files are written in the current working directory (the caller is
        expected to have chdir'd into the target P* directory first).

        matrix_element: FKS helas object (born + real-emission information)
        fortran_model: helas call writer used by the write_* helpers
        me_number: matrix-element index (unused in the visible body)
        path: target path (unused in the visible body)
        """
        # NOTE(review): pathdir is never used below — presumably leftover.
        pathdir = os.getcwd()

        filename = 'born.f'
        calls_born, ncolor_born = \
            self.write_born_fks(writers.FortranWriter(filename),\
                                matrix_element,
                                fortran_model)

        filename = 'born_hel.f'
        self.write_born_hel(writers.FortranWriter(filename),\
                            matrix_element,
                            fortran_model)

        # born configurations; the s/t channels are reused by several of the
        # files written below
        filename = 'born_conf.inc'
        nconfigs, mapconfigs, s_and_t_channels = \
            self.write_configs_file(
                writers.FortranWriter(filename),
                matrix_element.born_matrix_element,
                fortran_model)

        filename = 'born_props.inc'
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element.born_matrix_element,
                              fortran_model,
                              s_and_t_channels)

        filename = 'born_decayBW.inc'
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        filename = 'born_leshouche.inc'
        nflows = self.write_leshouche_file(writers.FortranWriter(filename),
                                           matrix_element.born_matrix_element,
                                           fortran_model)

        filename = 'born_nhel.inc'
        self.write_born_nhel_file(writers.FortranWriter(filename),
                                  matrix_element.born_matrix_element, nflows,
                                  fortran_model,
                                  ncolor_born)

        filename = 'born_ngraphs.inc'
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                matrix_element.born_matrix_element.get_number_of_amplitudes())

        filename = 'ncombs.inc'
        self.write_ncombs_file(writers.FortranWriter(filename),
                               matrix_element.born_matrix_element,
                               fortran_model)

        filename = 'born_maxamps.inc'
        maxamps = len(matrix_element.get('diagrams'))
        maxflows = ncolor_born
        # NOTE(review): the loop variable 'me' below is unused and the body
        # always reads matrix_element.get('processes') — looks like it was
        # meant to iterate over matrix elements; TODO confirm intent.
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(matrix_element.get('processes')) for me in \
                                     matrix_element.born_matrix_element]),1)

        filename = 'config_subproc_map.inc'
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           s_and_t_channels)

        filename = 'coloramps.inc'
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  mapconfigs,
                                  matrix_element.born_matrix_element,
                                  fortran_model)

        #write the sborn_sf.f and the b_sf_files
        # sborn_sf_dum.f gets an empty list of color links (dummy version)
        filename = ['sborn_sf.f', 'sborn_sf_dum.f']
        for i, links in enumerate([matrix_element.color_links, []]):
            self.write_sborn_sf(writers.FortranWriter(filename[i]),
                                links,
                                fortran_model)
        # one b_sf_XXX.f file per color link; remember their names for the
        # makefile generation
        self.color_link_files = []
        for i in range(len(matrix_element.color_links)):
            filename = 'b_sf_%3.3d.f' % (i + 1)
            self.color_link_files.append(filename)
            self.write_b_sf_fks(writers.FortranWriter(filename),
                                matrix_element, i,
                                fortran_model)
1585 1586
    def generate_virtuals_from_OLP(self,process_list,export_path, OLP):
        """Generates the library for computing the loop matrix elements
        necessary for this process using the OLP specified.

        process_list: the loop processes to ask the OLP for
        export_path: root of the exported process directory
        OLP: name of the One-Loop Provider (currently only 'GoSam' gets
             provider-specific treatment here)
        """

        # Start by writing the BLHA order file
        virtual_path = pjoin(export_path,'OLP_virtuals')
        if not os.path.exists(virtual_path):
            os.makedirs(virtual_path)
        filename = os.path.join(virtual_path,'OLE_order.lh')
        self.write_lh_order(filename, process_list, OLP)

        fail_msg='Generation of the virtuals with %s failed.\n'%OLP+\
            'Please check the virt_generation.log file in %s.'\
            %str(pjoin(virtual_path,'virt_generation.log'))

        # Perform some tasks specific to certain OLP's
        if OLP=='GoSam':
            # cp/ln are file helpers brought in at module level
            cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics',
                     'GoSam','makevirt'),pjoin(virtual_path,'makevirt'))
            cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics',
                     'GoSam','gosam.rc'),pjoin(virtual_path,'gosam.rc'))
            ln(pjoin(export_path,'Cards','param_card.dat'),virtual_path)

        # Now generate the process
        logger.info('Generating the loop matrix elements with %s...'%OLP)
        virt_generation_log = \
            open(pjoin(virtual_path,'virt_generation.log'), 'w')
        retcode = subprocess.call(['./makevirt'],cwd=virtual_path,
                                  stdout=virt_generation_log, stderr=virt_generation_log)
        virt_generation_log.close()
        # Check what extension is used for the share libraries on this system
        possible_other_extensions = ['so','dylib']
        shared_lib_ext='so'
        for ext in possible_other_extensions:
            if os.path.isfile(pjoin(virtual_path,'Virtuals','lib',
                                    'libgolem_olp.'+ext)):
                shared_lib_ext = ext

        # Now check that everything got correctly generated
        files_to_check = ['olp_module.mod',str(pjoin('lib',
                                                     'libgolem_olp.'+shared_lib_ext))]
        if retcode != 0 or any([not os.path.exists(pjoin(virtual_path,
                                                         'Virtuals',f)) for f in files_to_check]):
            raise fks_common.FKSProcessError(fail_msg)
        # link the library to the lib folder
        ln(pjoin(virtual_path,'Virtuals','lib','libgolem_olp.'+shared_lib_ext),
           pjoin(export_path,'lib'))

        # Specify in make_opts the right library necessitated by the OLP
        make_opts_content=open(pjoin(export_path,'Source','make_opts')).read()
        make_opts=open(pjoin(export_path,'Source','make_opts'),'w')
        if OLP=='GoSam':
            if platform.system().lower()=='darwin':
                # On mac the -rpath is not supported and the path of the dynamic
                # library is automatically wired in the executable
                make_opts_content=make_opts_content.replace('libOLP=',
                    'libOLP=-Wl,-lgolem_olp')
            else:
                # On other platforms the option , -rpath= path to libgolem.so is necessary
                # Using a relative path is not ideal because the file libgolem.so is not
                # copied on the worker nodes.
                # make_opts_content=make_opts_content.replace('libOLP=',
                #          'libOLP=-Wl,-rpath=../$(LIBDIR) -lgolem_olp')
                # Using the absolute path is working in the case where the disk of the
                # front end machine is mounted on all worker nodes as well.
                make_opts_content=make_opts_content.replace('libOLP=',
                    'libOLP=-Wl,-rpath='+str(pjoin(export_path,'lib'))+' -lgolem_olp')


        make_opts.write(make_opts_content)
        make_opts.close()

        # A priori this is generic to all OLP's

        # Parse the contract file returned and propagate the process label to
        # the include of the BinothLHA.f file
        proc_to_label = self.parse_contract_file(
            pjoin(virtual_path,'OLE_order.olc'))

        self.write_BinothLHA_inc(process_list,proc_to_label,\
            pjoin(export_path,'SubProcesses'))

        # Link the contract file to within the SubProcess directory
        ln(pjoin(virtual_path,'OLE_order.olc'),pjoin(export_path,'SubProcesses'))
1670
1671 - def write_BinothLHA_inc(self, processes, proc_to_label, SubProcPath):
1672 """ Write the file Binoth_proc.inc in each SubProcess directory so as 1673 to provide the right process_label to use in the OLP call to get the 1674 loop matrix element evaluation. The proc_to_label is the dictionary of 1675 the format of the one returned by the function parse_contract_file.""" 1676 1677 for proc in processes: 1678 name = "P%s"%proc.shell_string() 1679 proc_pdgs=(tuple([leg.get('id') for leg in proc.get('legs') if \ 1680 not leg.get('state')]), 1681 tuple([leg.get('id') for leg in proc.get('legs') if \ 1682 leg.get('state')])) 1683 incFile = open(pjoin(SubProcPath, name,'Binoth_proc.inc'),'w') 1684 try: 1685 incFile.write( 1686 """ INTEGER PROC_LABEL 1687 PARAMETER (PROC_LABEL=%d)"""%(proc_to_label[proc_pdgs])) 1688 except KeyError: 1689 raise fks_common.FKSProcessError('Could not found the target'+\ 1690 ' process %s > %s in '%(str(proc_pdgs[0]),str(proc_pdgs[1]))+\ 1691 ' the proc_to_label argument in write_BinothLHA_inc.') 1692 incFile.close()
1693
1694 - def parse_contract_file(self, contract_file_path):
1695 """ Parses the BLHA contract file, make sure all parameters could be 1696 understood by the OLP and return a mapping of the processes (characterized 1697 by the pdg's of the initial and final state particles) to their process 1698 label. The format of the mapping is {((in_pdgs),(out_pdgs)):proc_label}. 1699 """ 1700 1701 proc_def_to_label = {} 1702 1703 if not os.path.exists(contract_file_path): 1704 raise fks_common.FKSProcessError('Could not find the contract file'+\ 1705 ' OLE_order.olc in %s.'%str(contract_file_path)) 1706 1707 comment_re=re.compile(r"^\s*#") 1708 proc_def_re=re.compile( 1709 r"^(?P<in_pdgs>(\s*-?\d+\s*)+)->(?P<out_pdgs>(\s*-?\d+\s*)+)\|"+ 1710 r"\s*(?P<proc_class>\d+)\s*(?P<proc_label>\d+)\s*$") 1711 line_OK_re=re.compile(r"^.*\|\s*OK") 1712 for line in open(contract_file_path): 1713 # Ignore comments 1714 if not comment_re.match(line) is None: 1715 continue 1716 # Check if it is a proc definition line 1717 proc_def = proc_def_re.match(line) 1718 if not proc_def is None: 1719 if int(proc_def.group('proc_class'))!=1: 1720 raise fks_common.FKSProcessError( 1721 'aMCatNLO can only handle loop processes generated by the OLP which have only '+\ 1722 ' process class attribute. Found %s instead in: \n%s'\ 1723 %(proc_def.group('proc_class'),line)) 1724 in_pdgs=tuple([int(in_pdg) for in_pdg in \ 1725 proc_def.group('in_pdgs').split()]) 1726 out_pdgs=tuple([int(out_pdg) for out_pdg in \ 1727 proc_def.group('out_pdgs').split()]) 1728 proc_def_to_label[(in_pdgs,out_pdgs)]=\ 1729 int(proc_def.group('proc_label')) 1730 continue 1731 # For the other types of line, just make sure they end with | OK 1732 if line_OK_re.match(line) is None: 1733 raise fks_common.FKSProcessError( 1734 'The OLP could not process the following line: \n%s'%line) 1735 1736 return proc_def_to_label
1737 1738
def generate_virt_directory(self, loop_matrix_element, fortran_model, dir_name):
    """writes the V**** directory inside the P**** directories specified in
    dir_name.

    Creates the MadLoop5_resources and V* directories, chdirs into the
    latter, writes the loop and born matrix-element files plus the various
    include files, draws the diagrams, and sets up the symlinks needed by
    the build system.  Returns the number of helas calls written (0 on
    failure to enter the target directory).
    """

    # Remember the current directory: this method chdirs around and must
    # restore the working directory before returning.
    cwd = os.getcwd()

    matrix_element = loop_matrix_element

    # Create the MadLoop5_resources directory if not already existing
    dirpath = os.path.join(dir_name, 'MadLoop5_resources')
    try:
        os.mkdir(dirpath)
    except os.error as error:
        # Directory probably exists already; just warn and carry on.
        logger.warning(error.strerror + " " + dirpath)

    # Create the directory PN_xx_xxxxx in the specified path
    name = "V%s" % matrix_element.get('processes')[0].shell_string()
    dirpath = os.path.join(dir_name, name)

    try:
        os.mkdir(dirpath)
    except os.error as error:
        logger.warning(error.strerror + " " + dirpath)

    try:
        os.chdir(dirpath)
    except os.error:
        logger.error('Could not cd to directory %s' % dirpath)
        return 0

    logger.info('Creating files in directory %s' % name)

    # Extract number of external particles
    (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

    # NOTE(review): this return value is immediately overwritten by the
    # write_bornmatrix call below -- confirm that discarding it is intended.
    calls=self.write_loop_matrix_element_v4(None,matrix_element,fortran_model)
    # The born matrix element, if needed
    filename = 'born_matrix.f'
    calls = self.write_bornmatrix(
        writers.FortranWriter(filename),
        matrix_element,
        fortran_model)

    filename = 'nexternal.inc'
    self.write_nexternal_file(writers.FortranWriter(filename),
                         nexternal, ninitial)

    filename = 'pmass.inc'
    self.write_pmass_file(writers.FortranWriter(filename),
                     matrix_element)

    filename = 'ngraphs.inc'
    self.write_ngraphs_file(writers.FortranWriter(filename),
                    len(matrix_element.get_all_amplitudes()))

    # Draw (at most 1000) loop diagrams to PostScript.
    filename = "loop_matrix.ps"
    plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList(
        matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]),
        filename,
        model=matrix_element.get('processes')[0].get('model'),
        amplitude='')
    logger.info("Drawing loop Feynman diagrams for " + \
        matrix_element.get('processes')[0].nice_string(print_weighted=False))
    plot.draw()

    filename = "born_matrix.ps"
    plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
        get('born_diagrams'),filename,model=matrix_element.get('processes')[0].\
        get('model'),amplitude='')
    logger.info("Generating born Feynman diagrams for " + \
        matrix_element.get('processes')[0].nice_string(print_weighted=False))
    plot.draw()

    # We also need to write the overall maximum quantities for this group
    # of processes in 'global_specs.inc'. In aMCatNLO, there is always
    # only one process, so this is trivial
    self.write_global_specs(matrix_element, output_path=pjoin(dirpath,'global_specs.inc'))
    open('unique_id.inc','w').write(
""" integer UNIQUE_ID
parameter(UNIQUE_ID=1)""")

    linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc',
                 'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f',
                 'MadLoopCommons.f','MadLoopParams.inc']

    # We should move to MadLoop5_resources directory from the SubProcesses
    # ('ln' and 'pjoin' are module-level helpers defined elsewhere in
    # this file's import section -- presumably misc.ln / os.path.join.)
    ln(pjoin(os.path.pardir,os.path.pardir,'MadLoopParams.dat'),
       pjoin('..','MadLoop5_resources'))

    for file in linkfiles:
        ln('../../%s' % file)

    os.system("ln -s ../../makefile_loop makefile")

    linkfiles = ['mpmodule.mod']

    for file in linkfiles:
        ln('../../../lib/%s' % file)

    linkfiles = ['coef_specs.inc']

    for file in linkfiles:
        ln('../../../Source/DHELAS/%s' % file)

    # Return to original PWD
    os.chdir(cwd)

    if not calls:
        calls = 0
    return calls
1849
def get_qed_qcd_orders_from_weighted(self, nexternal, weighted):
    """Reconstruct the separate QED and QCD coupling orders from the
    total WEIGHTED order and the number of external particles.

    The two tree-level relations used are:
        nexternal - 2 = QED + QCD    (number of vertices)
        weighted      = 2*QED + QCD  (WEIGHTED counts each QED vertex twice)
    Solving the system gives the returned (QED, QCD) pair.
    """
    qed_order = weighted - (nexternal - 2)
    qcd_order = weighted - 2 * qed_order
    return qed_order, qcd_order
1858 1859 1860 1861 #=============================================================================== 1862 # write_lh_order 1863 #=============================================================================== 1864 #test written
def write_lh_order(self, filename, process_list, OLP='MadLoop'):
    """Creates the OLE_order.lh file. This function should be edited according
    to the OLP which is used. For now it is generic.

    filename     -- path of the OLE_order.lh file to write
    process_list -- non-empty list of processes; the coupling orders are
                    assumed to be common to all of them
    OLP          -- name of the one-loop provider (currently unused here)
    Raises fks_common.FKSProcessError when process_list is empty.
    """

    if len(process_list)==0:
        raise fks_common.FKSProcessError('No matrix elements provided to '+\
            'the function write_lh_order.')
    # (an unreachable `return` that followed the raise has been removed)

    # We assume the orders to be common to all Subprocesses
    orders = process_list[0].get('orders')
    if 'QED' in list(orders.keys()) and 'QCD' in list(orders.keys()):
        QED=orders['QED']
        QCD=orders['QCD']
    elif 'QED' in list(orders.keys()):
        QED=orders['QED']
        QCD=0
    elif 'QCD' in list(orders.keys()):
        QED=0
        QCD=orders['QCD']
    else:
        # Neither order given explicitly: derive both from WEIGHTED.
        QED, QCD = self.get_qed_qcd_orders_from_weighted(\
                len(process_list[0].get('legs')),
                orders['WEIGHTED'])

    replace_dict = {}
    replace_dict['mesq'] = 'CHaveraged'
    replace_dict['corr'] = ' '.join(process_list[0].\
                                    get('perturbation_couplings'))
    replace_dict['irreg'] = 'CDR'
    replace_dict['aspow'] = QCD
    replace_dict['aepow'] = QED
    replace_dict['modelfile'] = './param_card.dat'
    replace_dict['params'] = 'alpha_s'
    # One "in_pdgs -> out_pdgs" line per process.
    proc_lines=[]
    for proc in process_list:
        proc_lines.append('%s -> %s' % \
                (' '.join(str(l['id']) for l in proc['legs'] if not l['state']),
                 ' '.join(str(l['id']) for l in proc['legs'] if l['state'])))
    replace_dict['pdgs'] = '\n'.join(proc_lines)
    replace_dict['symfin'] = 'Yes'
    content = \
"#OLE_order written by MadGraph5_aMC@NLO\n\
\n\
MatrixElementSquareType %(mesq)s\n\
CorrectionType %(corr)s\n\
IRregularisation %(irreg)s\n\
AlphasPower %(aspow)d\n\
AlphaPower %(aepow)d\n\
NJetSymmetrizeFinal %(symfin)s\n\
ModelFile %(modelfile)s\n\
Parameters %(params)s\n\
\n\
# process\n\
%(pdgs)s\n\
" % replace_dict

    # Bug fix: the original did `file.close` without parentheses, so the
    # handle was never actually closed; a context manager guarantees it.
    with open(filename, 'w') as out_file:
        out_file.write(content)
    return
1928 1929 1930 #=============================================================================== 1931 # write_born_fks 1932 #=============================================================================== 1933 # test written
def write_born_fks(self, writer, fksborn, fortran_model):
    """Export a matrix element to a born.f file in MadFKS format.

    writer        -- a writers.FortranWriter open on born.f
    fksborn       -- FKS helas object; its born_matrix_element is exported
    fortran_model -- helas call writer used to generate the HELAS calls
    Returns (number of non-comment helas calls, ncolor), or 0 when the
    matrix element has no processes or diagrams.
    """

    matrix_element = fksborn.born_matrix_element

    # Nothing to export for an empty matrix element.
    if not matrix_element.get('processes') or \
       not matrix_element.get('diagrams'):
        return 0

    if not isinstance(writer, writers.FortranWriter):
        raise writers.FortranWriter.FortranWriterError(\
            "writer not FortranWriter")

    # Add information relevant for FxFx matching:
    # Maximum QCD power in all the contributions
    max_qcd_order = 0
    for diag in matrix_element.get('diagrams'):
        orders = diag.calculate_orders()
        if 'QCD' in orders:
            max_qcd_order = max(max_qcd_order,orders['QCD'])
    # Count, for each process, the massless colored final-state particles
    # (mass string 'ZERO', color representation > 1) and keep the maximum.
    max_n_light_final_partons = max(len([1 for id in proc.get_final_ids()
        if proc.get('model').get_particle(id).get('mass')=='ZERO' and
        proc.get('model').get_particle(id).get('color')>1])
                                for proc in matrix_element.get('processes'))
    # Maximum number of final state light jets to be matched
    self.proc_characteristic['max_n_matched_jets'] = max(
        self.proc_characteristic['max_n_matched_jets'],
        min(max_qcd_order,max_n_light_final_partons))


    # Set lowercase/uppercase Fortran code
    writers.FortranWriter.downcase = False

    # Dictionary of substitutions for the born_fks.inc template below.
    replace_dict = {}

    # Extract version number and date from VERSION file
    info_lines = self.get_mg5_info_lines()
    replace_dict['info_lines'] = info_lines

    # Extract process info lines
    process_lines = self.get_process_info_lines(matrix_element)
    replace_dict['process_lines'] = process_lines


    # Extract ncomb
    ncomb = matrix_element.get_helicity_combinations()
    replace_dict['ncomb'] = ncomb

    # Extract helicity lines
    helicity_lines = self.get_helicity_lines(matrix_element)
    replace_dict['helicity_lines'] = helicity_lines

    # Extract IC line
    ic_line = self.get_ic_line(matrix_element)
    replace_dict['ic_line'] = ic_line

    # Extract overall denominator
    # Averaging initial state color, spin, and identical FS particles
    #den_factor_line = get_den_factor_line(matrix_element)

    # Extract ngraphs
    ngraphs = matrix_element.get_number_of_amplitudes()
    replace_dict['ngraphs'] = ngraphs

    # Extract nwavefuncs
    nwavefuncs = matrix_element.get_number_of_wavefunctions()
    replace_dict['nwavefuncs'] = nwavefuncs

    # Extract ncolor
    ncolor = max(1, len(matrix_element.get('color_basis')))
    replace_dict['ncolor'] = ncolor

    # Extract color data lines
    color_data_lines = self.get_color_data_lines(matrix_element)
    replace_dict['color_data_lines'] = "\n".join(color_data_lines)

    # Extract helas calls
    helas_calls = fortran_model.get_matrix_element_calls(\
                matrix_element)
    replace_dict['helas_calls'] = "\n".join(helas_calls)

    # Extract amp2 lines
    amp2_lines = self.get_amp2_lines(matrix_element)
    replace_dict['amp2_lines'] = '\n'.join(amp2_lines)

    # Extract JAMP lines
    jamp_lines, nb_tmp_jamp = self.get_JAMP_lines(matrix_element)
    replace_dict['jamp_lines'] = '\n'.join(jamp_lines)
    replace_dict['nb_temp_jamp'] = nb_tmp_jamp


    # Set the size of Wavefunction
    # Spin values 4/5 in the model require the larger wavefunction array;
    # with no model available, be conservative and use 20.
    if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]):
        replace_dict['wavefunctionsize'] = 20
    else:
        replace_dict['wavefunctionsize'] = 8

    # Extract glu_ij_lines
    ij_lines = self.get_ij_lines(fksborn)
    replace_dict['ij_lines'] = '\n'.join(ij_lines)

    # Extract den_factor_lines
    den_factor_lines = self.get_den_factor_lines(fksborn)
    replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines)

    # Extract the number of FKS process
    # (always >= 1: a fake configuration is used for LOonly runs)
    replace_dict['nconfs'] = max(len(fksborn.get_fks_info_list()),1)

    file = open(os.path.join(_file_path, \
                  'iolibs/template_files/born_fks.inc')).read()
    file = file % replace_dict

    # Write the file
    writer.writelines(file)

    return len([call for call in helas_calls if call.find('#') != 0]), ncolor
2050 2051
def write_born_hel(self, writer, fksborn, fortran_model):
    """Export a matrix element to a born_hel.f file in MadFKS format.

    Same template-substitution scheme as write_born_fks, but using the
    born_fks_hel.inc template and without the HELAS-call / FxFx sections.
    Returns None (or 0 for an empty matrix element).
    """

    matrix_element = fksborn.born_matrix_element

    # Nothing to export for an empty matrix element.
    if not matrix_element.get('processes') or \
       not matrix_element.get('diagrams'):
        return 0

    if not isinstance(writer, writers.FortranWriter):
        raise writers.FortranWriter.FortranWriterError(\
            "writer not FortranWriter")
    # Set lowercase/uppercase Fortran code
    writers.FortranWriter.downcase = False

    # Dictionary of substitutions for the born_fks_hel.inc template.
    replace_dict = {}

    # Extract version number and date from VERSION file
    info_lines = self.get_mg5_info_lines()
    replace_dict['info_lines'] = info_lines

    # Extract process info lines
    process_lines = self.get_process_info_lines(matrix_element)
    replace_dict['process_lines'] = process_lines


    # Extract ncomb
    ncomb = matrix_element.get_helicity_combinations()
    replace_dict['ncomb'] = ncomb

    # Extract helicity lines
    helicity_lines = self.get_helicity_lines(matrix_element)
    replace_dict['helicity_lines'] = helicity_lines

    # Extract IC line
    ic_line = self.get_ic_line(matrix_element)
    replace_dict['ic_line'] = ic_line

    # Extract overall denominator
    # Averaging initial state color, spin, and identical FS particles
    #den_factor_line = get_den_factor_line(matrix_element)

    # Extract ngraphs
    ngraphs = matrix_element.get_number_of_amplitudes()
    replace_dict['ngraphs'] = ngraphs

    # Extract nwavefuncs
    nwavefuncs = matrix_element.get_number_of_wavefunctions()
    replace_dict['nwavefuncs'] = nwavefuncs

    # Extract ncolor
    ncolor = max(1, len(matrix_element.get('color_basis')))
    replace_dict['ncolor'] = ncolor

    # Extract color data lines
    color_data_lines = self.get_color_data_lines(matrix_element)
    replace_dict['color_data_lines'] = "\n".join(color_data_lines)

    # Extract amp2 lines
    amp2_lines = self.get_amp2_lines(matrix_element)
    replace_dict['amp2_lines'] = '\n'.join(amp2_lines)

    # Extract JAMP lines
    jamp_lines, nb_tmp_jamp = self.get_JAMP_lines(matrix_element)
    replace_dict['jamp_lines'] = '\n'.join(jamp_lines)
    replace_dict['nb_temp_jamp'] = nb_tmp_jamp

    # Extract den_factor_lines
    den_factor_lines = self.get_den_factor_lines(fksborn)
    replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines)

    # Extract the number of FKS process
    # NOTE(review): unlike write_born_fks and write_nfksconfigs_file, this
    # does NOT apply max(..., 1); nconfs can be 0 for LOonly -- confirm
    # whether that is intended for the born_fks_hel.inc template.
    replace_dict['nconfs'] = len(fksborn.get_fks_info_list())

    file = open(os.path.join(_file_path, \
                  'iolibs/template_files/born_fks_hel.inc')).read()
    file = file % replace_dict

    # Write the file
    writer.writelines(file)

    return
2134 2135 2136 #=============================================================================== 2137 # write_born_sf_fks 2138 #=============================================================================== 2139 #test written
def write_sborn_sf(self, writer, color_links, fortran_model):
    """Creates the sborn_sf.f file, containing the calls to the different
    color linked borns.

    writer        -- object with a writelines(str) method (FortranWriter)
    color_links   -- list of color-link dicts; each carries a 'link' pair
                     (m, n) of linked parton indices
    fortran_model -- unused, kept for interface compatibility
    When color_links is empty a dummy subroutine setting wgt = 0d0 is
    written (no soft singularities in this subprocess directory).
    Cleanup: removed the unused locals replace_dict/ifkss/iborns/mms/nns.
    """

    nborns = len(color_links)
    iflines = "\n"

    # Header for the sborn_sf.f file
    file = """subroutine sborn_sf(p_born,m,n,wgt)
implicit none
include "nexternal.inc"
double precision p_born(0:3,nexternal-1),wgt
double complex wgt1(2)
integer m,n \n"""

    if nborns > 0:

        # One if/elseif branch per color link, dispatching on (m, n).
        for i, c_link in enumerate(color_links):
            iborn = i+1

            # 'if' for the first branch, 'elseif' for all later ones.
            iff = {True : 'if', False : 'elseif'}[i==0]

            m, n = c_link['link']

            if m != n:
                # Off-diagonal link: match either ordering of (m, n).
                iflines += \
"c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\
%(iff)s ((m.eq.%(m)d .and. n.eq.%(n)d).or.(m.eq.%(n)d .and. n.eq.%(m)d)) then \n\
call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \
                    %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn}
            else:
                # Diagonal link: only the exact (m, n) pair matches.
                iflines += \
"c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\
%(iff)s (m.eq.%(m)d .and. n.eq.%(n)d) then \n\
call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \
                    %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn}


        file += iflines + \
"""else
wgt = 0d0
endif

return
end"""
    elif nborns == 0:
        # write a dummy file
        file+="""
c This is a dummy function because
c this subdir has no soft singularities
wgt = 0d0

return
end"""
    # Write the end of the file

    writer.writelines(file)
2202 2203 2204 #=============================================================================== 2205 # write_b_sf_fks 2206 #=============================================================================== 2207 #test written
def write_b_sf_fks(self, writer, fksborn, i, fortran_model):
    """Create the b_sf_xxx.f file for the soft linked born in MadFKS format.

    writer        -- a writers.FortranWriter open on b_sf_<iborn>.f
    fksborn       -- FKS helas object providing the born ME and color_links
    i             -- zero-based index into fksborn.color_links; the file
                     is numbered iborn = i + 1
    fortran_model -- unused here, kept for interface uniformity
    Returns (0, ncolor1), or 0 for an empty matrix element.
    """

    # Shallow copy: the color basis is swapped below to generate the
    # JAMP2 lines without disturbing the original matrix element.
    matrix_element = copy.copy(fksborn.born_matrix_element)

    if not matrix_element.get('processes') or \
       not matrix_element.get('diagrams'):
        return 0

    if not isinstance(writer, writers.FortranWriter):
        raise writers.FortranWriter.FortranWriterError(\
            "writer not FortranWriter")
    # Set lowercase/uppercase Fortran code
    writers.FortranWriter.downcase = False

    iborn = i + 1
    link = fksborn.color_links[i]

    replace_dict = {}

    replace_dict['iborn'] = iborn

    # Extract version number and date from VERSION file
    info_lines = self.get_mg5_info_lines()
    replace_dict['info_lines'] = info_lines

    # Extract process info lines, recording the two linked (spectator)
    # parton indices as a comment.
    process_lines = self.get_process_info_lines(matrix_element)
    replace_dict['process_lines'] = process_lines + \
        "\nc spectators: %d %d \n" % tuple(link['link'])

    # Extract ncomb
    ncomb = matrix_element.get_helicity_combinations()
    replace_dict['ncomb'] = ncomb

    # Extract helicity lines
    helicity_lines = self.get_helicity_lines(matrix_element)
    replace_dict['helicity_lines'] = helicity_lines

    # Extract IC line
    ic_line = self.get_ic_line(matrix_element)
    replace_dict['ic_line'] = ic_line

    # Extract den_factor_lines
    den_factor_lines = self.get_den_factor_lines(fksborn)
    replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines)

    # Extract ngraphs
    ngraphs = matrix_element.get_number_of_amplitudes()
    replace_dict['ngraphs'] = ngraphs

    # Extract nwavefuncs
    nwavefuncs = matrix_element.get_number_of_wavefunctions()
    replace_dict['nwavefuncs'] = nwavefuncs

    # Extract ncolor: sizes of the original and of the linked color basis.
    ncolor1 = max(1, len(link['orig_basis']))
    replace_dict['ncolor1'] = ncolor1
    ncolor2 = max(1, len(link['link_basis']))
    replace_dict['ncolor2'] = ncolor2

    # Extract color data lines
    color_data_lines = self.get_color_data_lines_from_color_matrix(\
                            link['link_matrix'])
    replace_dict['color_data_lines'] = "\n".join(color_data_lines)

    # Extract amp2 lines
    amp2_lines = self.get_amp2_lines(matrix_element)
    replace_dict['amp2_lines'] = '\n'.join(amp2_lines)

    # Extract JAMP lines (JAMP1 in the original color basis)
    jamp_lines, nb_tmp_jamp = self.get_JAMP_lines(matrix_element, JAMP_format="JAMP1(%s)")
    replace_dict['jamp1_lines'] = '\n'.join(jamp_lines)
    replace_dict['nb_temp_jamp'] = nb_tmp_jamp


    # Swap in the linked color basis to produce the JAMP2 lines.
    matrix_element.set('color_basis', link['link_basis'] )
    jamp_lines, nb_tmp_jamp = self.get_JAMP_lines(matrix_element, JAMP_format="JAMP2(%s)")
    replace_dict['jamp2_lines'] = '\n'.join(jamp_lines)
    replace_dict['nb_temp_jamp'] = max(nb_tmp_jamp, replace_dict['nb_temp_jamp'])


    # Extract the number of FKS process
    replace_dict['nconfs'] = len(fksborn.get_fks_info_list())

    file = open(os.path.join(_file_path, \
                  'iolibs/template_files/b_sf_xxx_fks.inc')).read()
    file = file % replace_dict

    # Write the file
    writer.writelines(file)

    return 0 , ncolor1
2301 2302 2303 #=============================================================================== 2304 # write_born_nhel_file 2305 #=============================================================================== 2306 #test written
def write_born_nhel_file(self, writer, matrix_element, nflows, fortran_model, ncolor):
    """Write the born_nhel.inc include file (MG4 format), declaring the
    maximum number of Born helicity combinations (max_bhel) and of Born
    colour flows (max_bcol).  Always returns True."""

    n_hel_comb = matrix_element.get_helicity_combinations()
    content = " integer max_bhel, max_bcol \n" \
              + "parameter (max_bhel=%d)\nparameter(max_bcol=%d)" \
              % (n_hel_comb, nflows)

    # Write the file
    writer.writelines(content)

    return True
2319 2320 #=============================================================================== 2321 # write_fks_info_file 2322 #===============================================================================
def write_nfksconfigs_file(self, writer, fksborn, fortran_model):
    """Writes the content of nFKSconfigs.inc, which just gives the
    total FKS dirs as a parameter.
    nFKSconfigs is always >=1 (a fake configuration is used for LOonly)."""
    # At least one configuration must always be declared.
    n_configs = max(len(fksborn.get_fks_info_list()), 1)
    writer.writelines(
""" INTEGER FKS_CONFIGS
PARAMETER (FKS_CONFIGS=%(nconfs)d)

""" % {'nconfs': n_configs})
2336 2337 2338 #=============================================================================== 2339 # write_fks_info_file 2340 #===============================================================================
def write_fks_info_file(self, writer, fksborn, fortran_model): #test_written
    """Writes the content of fks_info.inc, which lists the informations on the
    possible splittings of the born ME.
    nconfs is always >=1 (use a fake configuration for LOonly).
    The fake configuration use an 'antigluon' (id -21, color=8) as i_fks and
    the last colored particle as j_fks.

    writer  -- a writers.FortranWriter open on fks_info.inc
    fksborn -- FKS helas object providing the info list, the real
               processes and the born matrix element
    Returns True.
    """

    replace_dict = {}
    fks_info_list = fksborn.get_fks_info_list()
    replace_dict['nconfs'] = max(len(fks_info_list), 1)

    # this is for processes with 'real' or 'all' as NLO mode
    if len(fks_info_list) > 0:
        # Comma-separated i_fks / j_fks indices, one entry per config.
        fks_i_values = ', '.join(['%d' % info['fks_info']['i'] \
                                  for info in fks_info_list])
        fks_j_values = ', '.join(['%d' % info['fks_info']['j'] \
                                  for info in fks_info_list])

        # Fortran DATA statements: color reps, PDG ids, charges and the
        # allowed j-for-given-i lists, one per FKS configuration.
        col_lines = []
        pdg_lines = []
        charge_lines = []
        fks_j_from_i_lines = []
        for i, info in enumerate(fks_info_list):
            col_lines.append( \
                'DATA (PARTICLE_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \
                % (i + 1, ', '.join('%d' % col for col in fksborn.real_processes[info['n_me']-1].colors) ))
            pdg_lines.append( \
                'DATA (PDG_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \
                % (i + 1, ', '.join('%d' % pdg for pdg in info['pdgs'])))
            charge_lines.append(\
                'DATA (PARTICLE_CHARGE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /'\
                % (i + 1, ', '.join('%19.15fd0' % charg\
                                    for charg in fksborn.real_processes[info['n_me']-1].charges) ))
            fks_j_from_i_lines.extend(self.get_fks_j_from_i_lines(fksborn.real_processes[info['n_me']-1],\
                                                                  i + 1))
    else:
        # this is for 'LOonly', generate a fake FKS configuration with
        # - i_fks = nexternal, pdg type = -21 and color =8
        # - j_fks = the last colored particle
        bornproc = fksborn.born_matrix_element.get('processes')[0]
        pdgs = [l.get('id') for l in bornproc.get('legs')] + [-21]
        colors = [l.get('color') for l in bornproc.get('legs')] + [8]
        charges = [0.] * len(colors)

        fks_i = len(colors)
        # use the last colored particle if it exists, or
        # just the last
        fks_j=1
        for cpos, col in enumerate(colors[:-1]):
            if col != 1:
                fks_j = cpos+1

        fks_i_values = str(fks_i)
        fks_j_values = str(fks_j)
        col_lines = ['DATA (PARTICLE_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \
                     % ', '.join([str(col) for col in colors])]
        pdg_lines = ['DATA (PDG_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \
                     % ', '.join([str(pdg) for pdg in pdgs])]
        charge_lines = ['DATA (PARTICLE_CHARGE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \
                        % ', '.join('%19.15fd0' % charg for charg in charges)]
        fks_j_from_i_lines = ['DATA (FKS_J_FROM_I_D(1, %d, JPOS), JPOS = 0, 1) / 1, %d /' \
                              % (fks_i, fks_j)]


    replace_dict['fks_i_line'] = "data fks_i_D / %s /" % fks_i_values
    replace_dict['fks_j_line'] = "data fks_j_D / %s /" % fks_j_values
    replace_dict['col_lines'] = '\n'.join(col_lines)
    replace_dict['pdg_lines'] = '\n'.join(pdg_lines)
    replace_dict['charge_lines'] = '\n'.join(charge_lines)
    replace_dict['fks_j_from_i_lines'] = '\n'.join(fks_j_from_i_lines)

    content = \
""" INTEGER IPOS, JPOS
INTEGER FKS_I_D(%(nconfs)d), FKS_J_D(%(nconfs)d)
INTEGER FKS_J_FROM_I_D(%(nconfs)d, NEXTERNAL, 0:NEXTERNAL)
INTEGER PARTICLE_TYPE_D(%(nconfs)d, NEXTERNAL), PDG_TYPE_D(%(nconfs)d, NEXTERNAL)
REAL*8 PARTICLE_CHARGE_D(%(nconfs)d, NEXTERNAL)

%(fks_i_line)s
%(fks_j_line)s

%(fks_j_from_i_lines)s

C
C Particle type:
C octet = 8, triplet = 3, singlet = 1
%(col_lines)s

C
C Particle type according to PDG:
C
%(pdg_lines)s

C
C Particle charge:
C charge is set 0. with QCD corrections, which is irrelevant
%(charge_lines)s
""" % replace_dict
    if not isinstance(writer, writers.FortranWriter):
        raise writers.FortranWriter.FortranWriterError(\
            "writer not FortranWriter")
    # Set lowercase/uppercase Fortran code
    writers.FortranWriter.downcase = False

    writer.writelines(content)

    return True
2448 2449 2450 #=============================================================================== 2451 # write_matrix_element_fks 2452 #=============================================================================== 2453 #test written
def write_matrix_element_fks(self, writer, matrix_element, n, fortran_model):
    """Export a matrix element to a matrix.f file in MG4 madevent format.

    writer         -- a writers.FortranWriter open on the real-emission
                      matrix.f file
    matrix_element -- the helas matrix element to export
    n              -- index of this real-emission ME (template key N_me)
    fortran_model  -- helas call writer used to generate the HELAS calls
    Returns (number of non-comment helas calls, ncolor), or (0, 0) for
    an empty matrix element.
    """

    if not matrix_element.get('processes') or \
       not matrix_element.get('diagrams'):
        return 0,0

    if not isinstance(writer, writers.FortranWriter):
        raise writers.FortranWriter.FortranWriterError(\
            "writer not FortranWriter")
    # Set lowercase/uppercase Fortran code
    writers.FortranWriter.downcase = False

    # Dictionary of substitutions for the realmatrix_fks.inc template.
    replace_dict = {}
    replace_dict['N_me'] = n

    # Extract version number and date from VERSION file
    info_lines = self.get_mg5_info_lines()
    replace_dict['info_lines'] = info_lines

    # Extract process info lines
    process_lines = self.get_process_info_lines(matrix_element)
    replace_dict['process_lines'] = process_lines

    # Extract ncomb
    ncomb = matrix_element.get_helicity_combinations()
    replace_dict['ncomb'] = ncomb

    # Extract helicity lines
    helicity_lines = self.get_helicity_lines(matrix_element)
    replace_dict['helicity_lines'] = helicity_lines

    # Extract IC line
    ic_line = self.get_ic_line(matrix_element)
    replace_dict['ic_line'] = ic_line

    # Extract overall denominator
    # Averaging initial state color, spin, and identical FS particles
    den_factor_line = self.get_den_factor_line(matrix_element)
    replace_dict['den_factor_line'] = den_factor_line

    # Extract ngraphs
    ngraphs = matrix_element.get_number_of_amplitudes()
    replace_dict['ngraphs'] = ngraphs

    # Extract ncolor
    ncolor = max(1, len(matrix_element.get('color_basis')))
    replace_dict['ncolor'] = ncolor

    # Extract color data lines
    color_data_lines = self.get_color_data_lines(matrix_element)
    replace_dict['color_data_lines'] = "\n".join(color_data_lines)

    # Extract helas calls
    helas_calls = fortran_model.get_matrix_element_calls(\
                matrix_element)
    replace_dict['helas_calls'] = "\n".join(helas_calls)

    # Extract nwavefuncs (important to place after get_matrix_element_calls
    # so that 'me_id' is set)
    nwavefuncs = matrix_element.get_number_of_wavefunctions()
    replace_dict['nwavefuncs'] = nwavefuncs

    # Extract amp2 lines
    amp2_lines = self.get_amp2_lines(matrix_element)
    replace_dict['amp2_lines'] = '\n'.join(amp2_lines)

    # Set the size of Wavefunction
    # Spin values 4/5 in the model require the larger wavefunction array;
    # with no model available, be conservative and use 20.
    if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]):
        replace_dict['wavefunctionsize'] = 20
    else:
        replace_dict['wavefunctionsize'] = 8

    # Extract JAMP lines
    jamp_lines, nb_tmp_jamp = self.get_JAMP_lines(matrix_element)
    replace_dict['jamp_lines'] = '\n'.join(jamp_lines)
    replace_dict['nb_temp_jamp'] = nb_tmp_jamp

    realfile = open(os.path.join(_file_path, \
                      'iolibs/template_files/realmatrix_fks.inc')).read()

    realfile = realfile % replace_dict

    # Write the file
    writer.writelines(realfile)

    return len([call for call in helas_calls if call.find('#') != 0]), ncolor
2541 2542 2543 #=============================================================================== 2544 # write_pdf_file 2545 #===============================================================================
def write_pdf_file(self, writer, matrix_element, n, fortran_model):
    #test written
    """Write the auto_dsig.f file for MadFKS, which contains
    pdf call information.

    writer         -- a writer with a writelines method (FortranWriter)
    matrix_element -- the helas matrix element whose pdf calls are written
    n              -- index of this matrix element (template key N_me)
    fortran_model  -- unused here, kept for interface uniformity
    Returns None (0 for an empty matrix element).
    Raises FortranWriterError unless ninitial is 1 or 2.
    """

    if not matrix_element.get('processes') or \
       not matrix_element.get('diagrams'):
        return 0

    nexternal, ninitial = matrix_element.get_nexternal_ninitial()

    # Decay (1 initial) or scattering (2 initial) only.
    if ninitial < 1 or ninitial > 2:
        raise writers.FortranWriter.FortranWriterError("""Need ninitial = 1 or 2 to write auto_dsig file""")

    # Dictionary of substitutions for the parton_lum_n_fks.inc template.
    replace_dict = {}

    replace_dict['N_me'] = n

    # Extract version number and date from VERSION file
    info_lines = self.get_mg5_info_lines()
    replace_dict['info_lines'] = info_lines

    # Extract process info lines
    process_lines = self.get_process_info_lines(matrix_element)
    replace_dict['process_lines'] = process_lines

    # pdf lines for the direct configuration (mirror flag False)
    pdf_vars, pdf_data, pdf_lines = \
        self.get_pdf_lines_mir(matrix_element, ninitial, False, False)
    replace_dict['pdf_vars'] = pdf_vars
    replace_dict['pdf_data'] = pdf_data
    replace_dict['pdf_lines'] = pdf_lines

    # pdf lines for the mirrored configuration.
    # NOTE(review): only pdf_lines_mirr is used in the template here;
    # pdf_vars_mirr and pdf_data_mirr are discarded -- presumably the
    # direct-configuration declarations cover both; confirm.
    pdf_vars_mirr, pdf_data_mirr, pdf_lines_mirr = \
        self.get_pdf_lines_mir(matrix_element, ninitial, False, True)
    replace_dict['pdf_lines_mirr'] = pdf_lines_mirr

    file = open(os.path.join(_file_path, \
                  'iolibs/template_files/parton_lum_n_fks.inc')).read()
    file = file % replace_dict

    # Write the file
    writer.writelines(file)
2588 2589 2590 2591 #=============================================================================== 2592 # write_coloramps_file 2593 #=============================================================================== 2594 #test written
def write_coloramps_file(self, writer, mapconfigs, matrix_element, fortran_model):
    """Write the coloramps.inc include file for MadEvent, declaring the
    icolamp logical array (colour flows x configurations) followed by the
    data statements produced by get_icolamp_lines.  Returns True."""

    # The array is at least 1 flow wide even with an empty color basis.
    n_flows = max(len(list(matrix_element.get('color_basis').keys())), 1)
    lines = ["logical icolamp(%d,%d,1)" % (n_flows, len(mapconfigs))]
    lines.extend(self.get_icolamp_lines(mapconfigs, matrix_element, 1))

    # Write the file
    writer.writelines(lines)

    return True
2609 2610 2611 #=============================================================================== 2612 # write_leshouche_file 2613 #=============================================================================== 2614 #test written
def write_leshouche_file(self, writer, matrix_element, fortran_model):
    """Write the leshouche.inc file for MG4.

    Emits the Les Houches event-record DATA statements: IDUP (pdg ids)
    and MOTHUP (mother indices) for every process, and ICOLUP (colour
    flow assignments) for the first subprocess only.
    Returns the number of colour flows written.
    """

    # Extract number of external particles
    (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

    lines = []
    for iproc, proc in enumerate(matrix_element.get('processes')):
        legs = proc.get_legs_with_decays()
        # PDG ids of all external legs for this (sub)process.
        lines.append("DATA (IDUP(i,%d),i=1,%d)/%s/" % \
                     (iproc + 1, nexternal,
                      ",".join([str(l.get('id')) for l in legs])))
        # Mothers: initial-state legs have none (0); final-state legs
        # point at initial-state leg i.
        for i in [1, 2]:
            lines.append("DATA (MOTHUP(%d,i,%3r),i=1,%2r)/%s/" % \
                         (i, iproc + 1, nexternal,
                          ",".join([ "%3r" % 0 ] * ninitial + \
                                   [ "%3r" % i ] * (nexternal - ninitial))))

        # Here goes the color connections corresponding to the JAMPs
        # Only one output, for the first subproc!
        if iproc == 0:
            # If no color basis, just output trivial color flow
            if not matrix_element.get('color_basis'):
                for i in [1, 2]:
                    lines.append("DATA (ICOLUP(%d,i, 1),i=1,%2r)/%s/" % \
                                 (i, nexternal,
                                  ",".join([ "%3r" % 0 ] * nexternal)))
                color_flow_list = []

            else:
                # First build a color representation dictionnary
                # (sign flips the representation for initial-state legs)
                repr_dict = {}
                for l in legs:
                    repr_dict[l.get('number')] = \
                        proc.get('model').get_particle(l.get('id')).get_color()\
                        * (-1)**(1+l.get('state'))
                # Get the list of color flows
                color_flow_list = \
                    matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
                                                                               ninitial)
                # And output them properly
                for cf_i, color_flow_dict in enumerate(color_flow_list):
                    for i in [0, 1]:
                        lines.append("DATA (ICOLUP(%d,i,%3r),i=1,%2r)/%s/" % \
                                     (i + 1, cf_i + 1, nexternal,
                                      ",".join(["%3r" % color_flow_dict[l.get('number')][i] \
                                                for l in legs])))

    # Write the file
    writer.writelines(lines)

    return len(color_flow_list)
2667 2668 2669 #=============================================================================== 2670 # write_configs_file 2671 #=============================================================================== 2672 #test_written
    def write_configs_file(self, writer, matrix_element, fortran_model):
        """Write the configs.inc file for MadEvent.

        Walks the base (Born-level) diagrams, keeps only those built purely
        from the minimal vertex multiplicity (3-point vertices), and writes
        the phase-space configuration arrays: mapconfig (config -> amplitude
        number), iforest (daughter legs of each propagator), sprop/tprid
        (s-channel ids / t-channel |ids|).

        Returns (iconfig, mapconfigs, s_and_t_channels) where iconfig is the
        number of configurations written.
        """

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
        lines = []

        iconfig = 0

        s_and_t_channels = []
        mapconfigs = []

        model = matrix_element.get('processes')[0].get('model')
#        new_pdg = model.get_first_non_pdg()

        base_diagrams = matrix_element.get('base_amplitude').get('diagrams')
        # NOTE: 'model' is rebound here to the base amplitude's model; the
        # assignment above is effectively shadowed.
        model = matrix_element.get('base_amplitude').get('process').get('model')
        # Smallest maximal vertex size over all diagrams; only diagrams that
        # never exceed it are written out
        minvert = min([max([len(vert.get('legs')) for vert in \
                            diag.get('vertices')]) for diag in base_diagrams])

        for idiag, diag in enumerate(base_diagrams):
            if any([len(vert.get('legs')) > minvert for vert in
                    diag.get('vertices')]):
                # Only 3-vertices allowed in configs.inc
                continue
            iconfig = iconfig + 1
            helas_diag = matrix_element.get('diagrams')[idiag]
            mapconfigs.append(helas_diag.get('number'))
            lines.append("# Diagram %d, Amplitude %d" % \
                         (helas_diag.get('number'),helas_diag.get('amplitudes')[0]['number']))
            # Correspondance between the config and the amplitudes
            lines.append("data mapconfig(%4d)/%4d/" % (iconfig,
                                                       helas_diag.get('amplitudes')[0]['number']))

            # Need to reorganize the topology so that we start with all
            # final state external particles and work our way inwards
            # (990 is the starting number for internal propagator labels)
            schannels, tchannels = helas_diag.get('amplitudes')[0].\
                                         get_s_and_t_channels(ninitial, model, 990)

            s_and_t_channels.append([schannels, tchannels])

            # Write out propagators for s-channel and t-channel vertices
            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            for vert in allchannels:
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                lines.append("data (iforest(i,%3d,%4d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), iconfig, len(daughters),
                              ",".join(["%3d" % d for d in daughters])))
                if vert in schannels:
                    lines.append("data sprop(%4d,%4d)/%8d/" % \
                                 (last_leg.get('number'), iconfig,
                                  last_leg.get('id')))
                elif vert in tchannels[:-1]:
                    # t-channel ids are written unsigned; the last t-channel
                    # vertex is deliberately skipped
                    lines.append("data tprid(%4d,%4d)/%8d/" % \
                                 (last_leg.get('number'), iconfig,
                                  abs(last_leg.get('id'))))

        # Write out number of configs
        lines.append("# Number of configs")
        lines.append("data mapconfig(0)/%4d/" % iconfig)

        # Write the file
        writer.writelines(lines)

        return iconfig, mapconfigs, s_and_t_channels
2743 2744 2745 #=============================================================================== 2746 # write_decayBW_file 2747 #=============================================================================== 2748 #test written
2749 - def write_decayBW_file(self, writer, s_and_t_channels):
2750 """Write the decayBW.inc file for MadEvent""" 2751 2752 lines = [] 2753 2754 booldict = {False: ".false.", True: ".false."} 2755 ####Changed by MZ 2011-11-23!!!! 2756 2757 for iconf, config in enumerate(s_and_t_channels): 2758 schannels = config[0] 2759 for vertex in schannels: 2760 # For the resulting leg, pick out whether it comes from 2761 # decay or not, as given by the from_group flag 2762 leg = vertex.get('legs')[-1] 2763 lines.append("data gForceBW(%d,%d)/%s/" % \ 2764 (leg.get('number'), iconf + 1, 2765 booldict[leg.get('from_group')])) 2766 2767 # Write the file 2768 writer.writelines(lines) 2769 2770 return True
2771 2772 2773 #=============================================================================== 2774 # write_dname_file 2775 #===============================================================================
2776 - def write_dname_file(self, writer, matrix_element, fortran_model):
2777 """Write the dname.mg file for MG4""" 2778 2779 line = "DIRNAME=P%s" % \ 2780 matrix_element.get('processes')[0].shell_string() 2781 2782 # Write the file 2783 writer.write(line + "\n") 2784 2785 return True
2786 2787 2788 #=============================================================================== 2789 # write_iproc_file 2790 #===============================================================================
2791 - def write_iproc_file(self, writer, me_number):
2792 """Write the iproc.dat file for MG4""" 2793 2794 line = "%d" % (me_number + 1) 2795 2796 # Write the file 2797 for line_to_write in writer.write_line(line): 2798 writer.write(line_to_write) 2799 return True
2800 2801 2802 #=============================================================================== 2803 # Helper functions 2804 #=============================================================================== 2805 2806 2807 #=============================================================================== 2808 # get_fks_j_from_i_lines 2809 #=============================================================================== 2810
    def get_fks_j_from_i_lines(self, me, i = 0): #test written
        """generate the lines for fks.inc describing initializating the
        fks_j_from_i array

        Each DATA statement stores, for FKS parton ii of matrix element i,
        first the number of allowed j partners and then the partners
        themselves (index 0 holds the count).
        """
        lines = []
        if not me.isfinite:
            for ii, js in me.fks_j_from_i.items():
                if js:
                    lines.append('DATA (FKS_J_FROM_I_D(%d, %d, JPOS), JPOS = 0, %d) / %d, %s /' \
                                 % (i, ii, len(js), len(js), ', '.join(["%d" % j for j in js])))
        else:
            # Finite (no-subtraction) case: a single dummy entry.
            # NOTE(review): this format string has one index fewer than the
            # branch above and hard-codes (2, 1, 1, '1') — looks intentional
            # for the dummy case, but confirm against the fks.inc template.
            lines.append('DATA (FKS_J_FROM_I_D(%d, JPOS), JPOS = 0, %d) / %d, %s /' \
                         % (2, 1, 1, '1'))
        lines.append('')

        return lines


#===============================================================================
# get_leshouche_lines
#===============================================================================
2831 - def get_leshouche_lines(self, matrix_element, ime):
2832 #test written 2833 """Write the leshouche.inc file for MG4""" 2834 2835 # Extract number of external particles 2836 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2837 2838 lines = [] 2839 for iproc, proc in enumerate(matrix_element.get('processes')): 2840 legs = proc.get_legs_with_decays() 2841 lines.append("I %4d %4d %s" % \ 2842 (ime, iproc + 1, 2843 " ".join([str(l.get('id')) for l in legs]))) 2844 for i in [1, 2]: 2845 lines.append("M %4d %4d %4d %s" % \ 2846 (ime, i, iproc + 1, 2847 " ".join([ "%3d" % 0 ] * ninitial + \ 2848 [ "%3d" % i ] * (nexternal - ninitial)))) 2849 2850 # Here goes the color connections corresponding to the JAMPs 2851 # Only one output, for the first subproc! 2852 if iproc == 0: 2853 # If no color basis, just output trivial color flow 2854 if not matrix_element.get('color_basis'): 2855 for i in [1, 2]: 2856 lines.append("C %4d %4d 1 %s" % \ 2857 (ime, i, 2858 " ".join([ "%3d" % 0 ] * nexternal))) 2859 color_flow_list = [] 2860 nflow = 1 2861 2862 else: 2863 # First build a color representation dictionnary 2864 repr_dict = {} 2865 for l in legs: 2866 repr_dict[l.get('number')] = \ 2867 proc.get('model').get_particle(l.get('id')).get_color()\ 2868 * (-1)**(1+l.get('state')) 2869 # Get the list of color flows 2870 color_flow_list = \ 2871 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 2872 ninitial) 2873 # And output them properly 2874 for cf_i, color_flow_dict in enumerate(color_flow_list): 2875 for i in [0, 1]: 2876 lines.append("C %4d %4d %4d %s" % \ 2877 (ime, i + 1, cf_i + 1, 2878 " ".join(["%3d" % color_flow_dict[l.get('number')][i] \ 2879 for l in legs]))) 2880 2881 nflow = len(color_flow_list) 2882 2883 nproc = len(matrix_element.get('processes')) 2884 2885 return lines, nproc, nflow
2886 2887
    def get_leshouche_lines_dummy(self, matrix_element, ime):
        #test written
        """As get_leshouche_lines, but for 'fake' real emission processes (LOonly
        In this case, write born color structure times ij -> i,j splitting)

        matrix_element here is the BORN matrix element; an extra fake gluon
        leg (id -21) is appended and attached, in color space, to the last
        colored born leg (j_fks).
        """

        bornproc = matrix_element.get('processes')[0]
        colors = [l.get('color') for l in bornproc.get('legs')]

        # NOTE(review): fks_i is assigned but never used below.
        fks_i = len(colors)
        # use the last colored particle if it exists, or
        # just the last
        fks_j=1
        for cpos, col in enumerate(colors):
            if col != 1:
                fks_j = cpos+1

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
        nexternal+=1 # remember, in this case matrix_element is born

        lines = []
        for iproc, proc in enumerate(matrix_element.get('processes')):
            # add the fake extra leg
            legs = proc.get_legs_with_decays() + \
                    [fks_common.FKSLeg({'id': -21,
                                        'number': nexternal,
                                        'state': True,
                                        'fks': 'i',
                                        'color': 8,
                                        'charge': 0.,
                                        'massless': True,
                                        'spin': 3,
                                        'is_part': True,
                                        'self_antipart': True})]

            lines.append("I %4d %4d %s" % \
                         (ime, iproc + 1,
                          " ".join([str(l.get('id')) for l in legs])))
            for i in [1, 2]:
                lines.append("M %4d %4d %4d %s" % \
                         (ime, i, iproc + 1,
                          " ".join([ "%3d" % 0 ] * ninitial + \
                                   [ "%3d" % i ] * (nexternal - ninitial))))

            # Here goes the color connections corresponding to the JAMPs
            # Only one output, for the first subproc!
            if iproc == 0:
                # If no color basis, just output trivial color flow
                if not matrix_element.get('color_basis'):
                    for i in [1, 2]:
                        lines.append("C %4d %4d 1 %s" % \
                                 (ime, i,
                                  " ".join([ "%3d" % 0 ] * nexternal)))
                    color_flow_list = []
                    nflow = 1

                else:
                    # in this case the last particle (-21) has two color indices
                    # and it has to be emitted by j_fks
                    # First build a color representation dictionnary
                    # (the fake leg legs[-1] is deliberately excluded)
                    repr_dict = {}
                    for l in legs[:-1]:
                        repr_dict[l.get('number')] = \
                            proc.get('model').get_particle(l.get('id')).get_color()\
                            * (-1)**(1+l.get('state'))
                    # Get the list of color flows
                    color_flow_list = \
                        matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
                                                                                   ninitial)
                    # And output them properly
                    for cf_i, color_flow_dict in enumerate(color_flow_list):
                        # we have to add the extra leg (-21), linked to the j_fks leg
                        # first, find the maximum color label
                        maxicol = max(sum(list(color_flow_dict.values()), []))
                        #then, replace the color labels
                        # 'anti' is True when j_fks carries only an
                        # anti-color index (its color slot is 0)
                        if color_flow_dict[fks_j][0] == 0:
                            anti = True
                            icol_j = color_flow_dict[fks_j][1]
                        else:
                            anti = False
                            icol_j = color_flow_dict[fks_j][0]

                        # Insert the fake gluon between j_fks and the rest of
                        # the flow using a fresh color label maxicol + 1.
                        # NOTE(review): the new entry is a tuple while the
                        # existing entries are mutated by item assignment —
                        # presumably lists; confirm color_flow_decomposition's
                        # return type.
                        if anti:
                            color_flow_dict[nexternal] = (maxicol + 1, color_flow_dict[fks_j][1])
                            color_flow_dict[fks_j][1] = maxicol + 1
                        else:
                            color_flow_dict[nexternal] = (color_flow_dict[fks_j][0], maxicol + 1)
                            color_flow_dict[fks_j][0] = maxicol + 1

                        for i in [0, 1]:
                            lines.append("C %4d %4d %4d %s" % \
                                 (ime, i + 1, cf_i + 1,
                                  " ".join(["%3d" % color_flow_dict[l.get('number')][i] \
                                            for l in legs])))

                    nflow = len(color_flow_list)

        nproc = len(matrix_element.get('processes'))

        return lines, nproc, nflow
2989 2990 2991 #=============================================================================== 2992 # get_den_factor_lines 2993 #===============================================================================
2994 - def get_den_factor_lines(self, fks_born):
2995 """returns the lines with the information on the denominator keeping care 2996 of the identical particle factors in the various real emissions""" 2997 2998 lines = [] 2999 info_list = fks_born.get_fks_info_list() 3000 if info_list: 3001 # if the reals have been generated, fill with the corresponding average factor 3002 lines.append('INTEGER IDEN_VALUES(%d)' % len(info_list)) 3003 lines.append('DATA IDEN_VALUES /' + \ 3004 ', '.join(['%d' % ( 3005 fks_born.born_matrix_element.get_denominator_factor() ) \ 3006 for info in info_list]) + '/') 3007 else: 3008 # otherwise use the born 3009 lines.append('INTEGER IDEN_VALUES(1)') 3010 lines.append('DATA IDEN_VALUES / %d /' \ 3011 % fks_born.born_matrix_element.get_denominator_factor()) 3012 3013 return lines
3014 3015 3016 #=============================================================================== 3017 # get_ij_lines 3018 #===============================================================================
3019 - def get_ij_lines(self, fks_born):
3020 """returns the lines with the information on the particle number of the born 3021 that splits""" 3022 info_list = fks_born.get_fks_info_list() 3023 lines = [] 3024 if info_list: 3025 # if the reals have been generated, fill with the corresponding value of ij if 3026 # ij is massless, or with 0 if ij is massive (no collinear singularity) 3027 ij_list = [info['fks_info']['ij']if \ 3028 fks_born.born_matrix_element['processes'][0]['legs'][info['fks_info']['ij']-1]['massless'] \ 3029 else 0 for info in info_list] 3030 lines.append('INTEGER IJ_VALUES(%d)' % len(info_list)) 3031 lines.append('DATA IJ_VALUES /' + ', '.join(['%d' % ij for ij in ij_list]) + '/') 3032 else: 3033 #otherwise just put the first leg 3034 lines.append('INTEGER IJ_VALUES(1)') 3035 lines.append('DATA IJ_VALUES / 1 /') 3036 3037 return lines
3038 3039
    def get_pdf_lines_mir(self, matrix_element, ninitial, subproc_group = False,\
                          mirror = False): #test written
        """Generate the PDF lines for the auto_dsig.f file.

        Returns the triple (pdf_definition_lines, pdf_data_lines, pdf_lines):
        Fortran declarations of per-flavour PDF variables, their DATA
        initialisation to 1d0, and the code computing them via PDG2PDF and
        combining them into PD(IPROC).  With mirror=True the beam indices
        are swapped (1<->2); with subproc_group=True beam indices go through
        the IB() mapping.
        """

        processes = matrix_element.get('processes')
        model = processes[0].get('model')

        pdf_definition_lines = ""
        pdf_data_lines = ""
        pdf_lines = ""

        if ninitial == 1:
            # Decay process: no PDF convolution, every subprocess gets
            # unit weight
            pdf_lines = "PD(0) = 0d0\nIPROC = 0\n"
            for i, proc in enumerate(processes):
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC) = 1d0\n"
                pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n"
        else:
            # Pick out all initial state particles for the two beams
            initial_states = [sorted(list(set([p.get_initial_pdg(1) for \
                                               p in processes]))),
                              sorted(list(set([p.get_initial_pdg(2) for \
                                               p in processes])))]

            # Prepare all variable names (sanitised so they are legal
            # Fortran identifiers)
            pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \
                              sum(initial_states,[])])
            for key,val in pdf_codes.items():
                pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m')

            # Set conversion from PDG code to number used in PDF calls
            pdgtopdf = {21: 0, 22: 7}
            # Fill in missing entries of pdgtopdf
            for pdg in sum(initial_states,[]):
                if not pdg in pdgtopdf and not pdg in list(pdgtopdf.values()):
                    pdgtopdf[pdg] = pdg
                elif pdg not in pdgtopdf and pdg in list(pdgtopdf.values()):
                    # If any particle has pdg code 7, we need to use something else
                    pdgtopdf[pdg] = 6000000 + pdg

            # Get PDF variable declarations for all initial states
            for i in [0,1]:
                pdf_definition_lines += "DOUBLE PRECISION " + \
                                       ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                                 for pdg in \
                                                 initial_states[i]]) + \
                                                 "\n"

            # Get PDF data lines for all initial states
            for i in [0,1]:
                pdf_data_lines += "DATA " + \
                                       ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                                 for pdg in initial_states[i]]) + \
                                                 "/%d*1D0/" % len(initial_states[i]) + \
                                                 "\n"

            # Get PDF values for the different initial states
            for i, init_states in enumerate(initial_states):
                # mirror swaps which physical beam each set of flavours
                # is convoluted with
                if not mirror:
                    ibeam = i + 1
                else:
                    ibeam = 2 - i
                if subproc_group:
                    pdf_lines = pdf_lines + \
                           "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \
                                 % (ibeam, ibeam)
                else:
                    pdf_lines = pdf_lines + \
                           "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \
                                 % (ibeam, ibeam)

                for initial_state in init_states:
                    if initial_state in list(pdf_codes.keys()):
                        if subproc_group:
                            if abs(pdgtopdf[initial_state]) <= 7:
                                # quark/gluon/photon: call PDG2PDF.
                                # NOTE(review): XBK uses IB(ibeam) but Q2FACT
                                # uses the bare beam index — confirm this is
                                # intended for grouped subprocesses.
                                pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP," + \
                                         "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \
                                         (pdf_codes[initial_state],
                                          i + 1, ibeam, pdgtopdf[initial_state],
                                          ibeam, ibeam)
                            else:
                                # setting other partons flavours outside quark, gluon, photon to be 0d0
                                pdf_lines = pdf_lines + \
                                        ("c settings other partons flavours outside quark, gluon, photon to 0d0\n" + \
                                         "%s%d=0d0\n") % \
                                         (pdf_codes[initial_state],i + 1)
                        else:
                            if abs(pdgtopdf[initial_state]) <= 7:
                                pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP," + \
                                         "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \
                                         (pdf_codes[initial_state],
                                          i + 1, ibeam, pdgtopdf[initial_state],
                                          ibeam, ibeam)
                            else:
                                # setting other partons flavours outside quark, gluon, photon to be 0d0
                                pdf_lines = pdf_lines + \
                                        ("c settings other partons flavours outside quark, gluon, photon to 0d0\n" + \
                                         "%s%d=0d0\n") % \
                                         (pdf_codes[initial_state],i + 1)

                pdf_lines = pdf_lines + "ENDIF\n"

            # Add up PDFs for the different initial state particles
            pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n"
            for proc in processes:
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC) = "
                for ibeam in [1, 2]:
                    initial_state = proc.get_initial_pdg(ibeam)
                    if initial_state in list(pdf_codes.keys()):
                        pdf_lines = pdf_lines + "%s%d*" % \
                                    (pdf_codes[initial_state], ibeam)
                    else:
                        pdf_lines = pdf_lines + "1d0*"
                # Remove last "*" from pdf_lines
                pdf_lines = pdf_lines[:-1] + "\n"

        # Remove last line break from pdf_lines
        return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]


#test written
3166 - def get_color_data_lines_from_color_matrix(self, color_matrix, n=6):
3167 """Return the color matrix definition lines for the given color_matrix. Split 3168 rows in chunks of size n.""" 3169 3170 if not color_matrix: 3171 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"] 3172 else: 3173 ret_list = [] 3174 my_cs = color.ColorString() 3175 for index, denominator in \ 3176 enumerate(color_matrix.get_line_denominators()): 3177 # First write the common denominator for this color matrix line 3178 #ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator)) 3179 # Then write the numerators for the matrix elements 3180 num_list = color_matrix.get_line_numerators(index, denominator) 3181 for k in range(0, len(num_list), n): 3182 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \ 3183 (index + 1, k + 1, min(k + n, len(num_list)), 3184 ','.join([("%.15e" % (int(i)/denominator)).replace('e','d') for i in num_list[k:k + n]]))) 3185 return ret_list
3186 3187 #=========================================================================== 3188 # write_maxamps_file 3189 #===========================================================================
3190 - def write_maxamps_file(self, writer, maxamps, maxflows, 3191 maxproc,maxsproc):
3192 """Write the maxamps.inc file for MG4.""" 3193 3194 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 3195 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 3196 (maxamps, maxflows) 3197 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 3198 (maxproc, maxsproc) 3199 3200 # Write the file 3201 writer.writelines(file) 3202 3203 return True
3204 3205 #=============================================================================== 3206 # write_ncombs_file 3207 #===============================================================================
3208 - def write_ncombs_file(self, writer, matrix_element, fortran_model):
3209 # #test written 3210 """Write the ncombs.inc file for MadEvent.""" 3211 3212 # Extract number of external particles 3213 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3214 3215 # ncomb (used for clustering) is 2^(nexternal) 3216 file = " integer n_max_cl\n" 3217 file = file + "parameter (n_max_cl=%d)" % (2 ** (nexternal+1)) 3218 3219 # Write the file 3220 writer.writelines(file) 3221 3222 return True
3223 3224 #=========================================================================== 3225 # write_config_subproc_map_file 3226 #===========================================================================
3227 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
3228 """Write a dummy config_subproc.inc file for MadEvent""" 3229 3230 lines = [] 3231 3232 for iconfig in range(len(s_and_t_channels)): 3233 lines.append("DATA CONFSUB(1,%d)/1/" % \ 3234 (iconfig + 1)) 3235 3236 # Write the file 3237 writer.writelines(lines) 3238 3239 return True
3240 3241 #=========================================================================== 3242 # write_colors_file 3243 #===========================================================================
    def write_colors_file(self, writer, matrix_element):
        """Write the get_color.f file for MadEvent, which returns color
        for all particles used in the matrix element.

        Collects the PDG ids of every wavefunction and external leg of the
        born and all real-emission matrix elements, then emits a Fortran
        function mapping each id to its color representation.
        """

        # Fall back to the born when there are no real-emission processes.
        # NOTE(review): the try branch binds a single matrix element (not a
        # list); the isinstance check below normalises it to a list.
        try:
            matrix_elements=matrix_element.real_processes[0].matrix_element
        except IndexError:
            matrix_elements=[matrix_element.born_matrix_element]

        if isinstance(matrix_elements, helas_objects.HelasMatrixElement):
            matrix_elements = [matrix_elements]

        model = matrix_elements[0].get('processes')[0].get('model')

        # We need the both particle and antiparticle wf_ids, since the identity
        # depends on the direction of the wf.
        # loop on the real emissions
        # (the nested sum(..., []) calls flatten the nested list
        # comprehensions into one flat list of pdg codes)
        wf_ids = set(sum([sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \
                                         for wf in d.get('wavefunctions')],[]) \
                                    for d in me.get('diagrams')],[]) \
                               for me in [real_proc.matrix_element]],[])\
                          for real_proc in matrix_element.real_processes],[]))
        # and also on the born
        wf_ids = wf_ids.union(set(sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \
                                            for wf in d.get('wavefunctions')],[]) \
                                       for d in matrix_element.born_matrix_element.get('diagrams')],[])))

        # loop on the real emissions
        leg_ids = set(sum([sum([sum([[l.get('id') for l in \
                                      p.get_legs_with_decays()] for p in \
                                     me.get('processes')], []) for me in \
                                [real_proc.matrix_element]], []) for real_proc in \
                           matrix_element.real_processes],[]))
        # and also on the born
        leg_ids = leg_ids.union(set(sum([[l.get('id') for l in \
                                          p.get_legs_with_decays()] for p in \
                                         matrix_element.born_matrix_element.get('processes')], [])))
        particle_ids = sorted(list(wf_ids.union(leg_ids)))

        # Build the Fortran if/else-if chain, one branch per pdg code
        lines = """function get_color(ipdg)
        implicit none
        integer get_color, ipdg

        if(ipdg.eq.%d)then
        get_color=%d
        return
        """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color())

        for part_id in particle_ids[1:]:
            lines += """else if(ipdg.eq.%d)then
            get_color=%d
            return
            """ % (part_id, model.get_particle(part_id).get_color())
        # Dummy particle for multiparticle vertices with pdg given by
        # first code not in the model
        lines += """else if(ipdg.eq.%d)then
c           This is dummy particle used in multiparticle vertices
            get_color=2
            return
            """ % model.get_first_non_pdg()
        lines += """else
            write(*,*)'Error: No color given for pdg ',ipdg
            get_color=0
            return
            endif
            end
            """

        # Write the file
        writer.writelines(lines)

        return True
3316 3317 #=============================================================================== 3318 # write_props_file 3319 #=============================================================================== 3320 #test_written
    def write_props_file(self, writer, matrix_element, fortran_model, s_and_t_channels):
        """Write the props.inc file for MadEvent. Needs input from
        write_configs_file. With respect to the parent routine, it has some
        more specific formats that allow the props.inc file to be read by the
        link program

        For every propagator of every configuration, writes pmass, pwidth
        and pow (propagator momentum power: 1 for fermions, 2 for bosons,
        0 for the fake multiparticle-vertex propagator).
        """

        lines = []

        particle_dict = matrix_element.get('processes')[0].get('model').\
                        get('particle_dict')

        for iconf, configs in enumerate(s_and_t_channels):
            # configs is [schannels, tchannels]; the last t-channel vertex
            # carries no propagator and is skipped
            for vertex in configs[0] + configs[1][:-1]:
                leg = vertex.get('legs')[-1]
                if leg.get('id') not in particle_dict:
                    # Fake propagator used in multiparticle vertices
                    mass = 'zero'
                    width = 'zero'
                    pow_part = 0
                else:
                    particle = particle_dict[leg.get('id')]
                    # Get mass (wrapped in abs() since mass parameters may
                    # be negative in some schemes)
                    if particle.get('mass').lower() == 'zero':
                        mass = particle.get('mass')
                    else:
                        mass = "abs(%s)" % particle.get('mass')
                    # Get width
                    if particle.get('width').lower() == 'zero':
                        width = particle.get('width')
                    else:
                        width = "abs(%s)" % particle.get('width')

                    pow_part = 1 + int(particle.is_boson())

                lines.append("pmass(%3d,%4d) = %s" % \
                             (leg.get('number'), iconf + 1, mass))
                lines.append("pwidth(%3d,%4d) = %s" % \
                             (leg.get('number'), iconf + 1, width))
                lines.append("pow(%3d,%4d) = %d" % \
                             (leg.get('number'), iconf + 1, pow_part))

        # Write the file
        writer.writelines(lines)

        return True
3366 3367 3368 #=========================================================================== 3369 # write_subproc 3370 #===========================================================================
3371 - def write_subproc(self, writer, subprocdir):
3372 """Append this subprocess to the subproc.mg file for MG4""" 3373 3374 # Write line to file 3375 writer.write(subprocdir + "\n") 3376 3377 return True
3378 3379 3380 3381 3382 3383 #================================================================================= 3384 # Class for using the optimized Loop process 3385 #=================================================================================
3386 -class ProcessOptimizedExporterFortranFKS(loop_exporters.LoopProcessOptimizedExporterFortranSA,\ 3387 ProcessExporterFortranFKS):
3388 """Class to take care of exporting a set of matrix elements to 3389 Fortran (v4) format.""" 3390 3391 jamp_optim = True 3392
3393 - def finalize(self, *args, **opts):
3395 #export_v4.ProcessExporterFortranSA.finalize(self, *args, **opts) 3396 3397 #=============================================================================== 3398 # copy the Template in a new directory. 3399 #===============================================================================
    def copy_fkstemplate(self):
        """create the directory run_name as a copy of the MadEvent
        Template, and clean the directory
        For now it is just the same as copy_v4template, but it will be modified

        In addition to copying/cleaning the NLO Template, this links
        CutTools and the available TIR (tensor integral reduction)
        libraries, regenerates makefile_loop and make_opts, copies the
        MadLoop parameter files, and links the multiple-precision model
        include files into SubProcesses.
        """
        mgme_dir = self.mgme_dir
        dir_path = self.dir_path
        clean =self.opt['clean']

        #First copy the full template tree if dir_path doesn't exit
        if not os.path.isdir(dir_path):
            if not mgme_dir:
                raise MadGraph5Error("No valid MG_ME path given for MG4 run directory creation.")
            logger.info('initialize a new directory: %s' % \
                        os.path.basename(dir_path))
            shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True)
            # misc.copytree since dir_path already exists
            misc.copytree(pjoin(self.mgme_dir, 'Template', 'Common'),
                               dir_path)
            # Copy plot_card (keeping a *_default.dat backup)
            for card in ['plot_card']:
                if os.path.isfile(pjoin(self.dir_path, 'Cards', card + '.dat')):
                    try:
                        shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'),
                                    pjoin(self.dir_path, 'Cards', card + '_default.dat'))
                    except IOError:
                        logger.warning("Failed to copy " + card + ".dat to default")

        elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')):
            if not mgme_dir:
                raise MadGraph5Error("No valid MG_ME path given for MG4 run directory creation.")
            try:
                shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path)
            except IOError:
                MG5_version = misc.get_pkg_info()
                open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \
                    "5." + MG5_version['version'])

        #Ensure that the Template is clean
        if clean:
            logger.info('remove old information in %s' % os.path.basename(dir_path))
            if 'MADGRAPH_BASE' in os.environ:
                subprocess.call([os.path.join('bin', 'internal', 'clean_template'),
                                 '--web'], cwd=dir_path)
            else:
                try:
                    subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \
                                                                       cwd=dir_path)
                except Exception as why:
                    raise MadGraph5Error('Failed to clean correctly %s: \n %s' \
                                                % (os.path.basename(dir_path), why))
            #Write version info
            MG_version = misc.get_pkg_info()
            open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write(
                                                              MG_version['version'])

        # We must link the CutTools to the Library folder of the active Template
        self.link_CutTools(dir_path)
        # We must link the TIR to the Library folder of the active Template
        link_tir_libs=[]
        tir_libs=[]
        tir_include=[]
        for tir in self.all_tir:
            tir_dir="%s_dir"%tir
            libpath=getattr(self,tir_dir)
            libpath = self.link_TIR(os.path.join(self.dir_path, 'lib'),
                                       libpath,"lib%s.a"%tir,tir_name=tir)
            setattr(self,tir_dir,libpath)
            if libpath != "":
                if tir in ['pjfry','ninja','golem', 'samurai','collier']:
                    # We should link dynamically when possible, so we use the original
                    # location of these libraries.
                    link_tir_libs.append('-L%s/ -l%s'%(libpath,tir))
                    tir_libs.append('%s/lib%s.$(libext)'%(libpath,tir))
                    # For Ninja, we must also link against OneLoop.
                    if tir in ['ninja']:
                        if not any(os.path.isfile(pjoin(libpath,'libavh_olo.%s'%ext))
                                   for ext in ['a','dylib','so']):
                            raise MadGraph5Error(
"The OneLOop library 'libavh_olo.(a|dylib|so)' could no be found in path '%s'. Please place a symlink to it there."%libpath)
                        link_tir_libs.append('-L%s/ -l%s'%(libpath,'avh_olo'))
                        tir_libs.append('%s/lib%s.$(libext)'%(libpath,'avh_olo'))
                    # We must add the corresponding includes for these TIR
                    if tir in ['golem','samurai','ninja','collier']:
                        trg_path = pjoin(os.path.dirname(libpath),'include')
                        if os.path.isdir(trg_path):
                            to_include = misc.find_includes_path(trg_path,
                                                        self.include_names[tir])
                        else:
                            to_include = None
                        # Special possible location for collier
                        if to_include is None and tir=='collier':
                            to_include = misc.find_includes_path(
                                   pjoin(libpath,'modules'),self.include_names[tir])
                        if to_include is None:
                            logger.error(
'Could not find the include directory for %s, looking in %s.\n' % (tir ,str(trg_path))+
'Generation carries on but you will need to edit the include path by hand in the makefiles.')
                            to_include = '<Not_found_define_it_yourself>'
                        tir_include.append('-I %s'%to_include)
                else:
                    # Statically-linked TIR: the library was copied into
                    # the process lib directory
                    link_tir_libs.append('-l%s'%tir)
                    tir_libs.append('$(LIBDIR)lib%s.$(libext)'%tir)

        # Regenerate makefile_loop (in SubProcesses) and make_opts (in
        # Source) with the TIR link/include flags collected above
        os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc'))
        cwd = os.getcwd()
        dirpath = os.path.join(self.dir_path, 'SubProcesses')
        try:
            os.chdir(dirpath)
        except os.error:
            logger.error('Could not cd to directory %s' % dirpath)
            return 0
        filename = 'makefile_loop'
        calls = self.write_makefile_TIR(writers.MakefileWriter(filename),
                                        link_tir_libs,tir_libs,tir_include=tir_include)
        os.remove(os.path.join(self.dir_path,'Source','make_opts.inc'))
        dirpath = os.path.join(self.dir_path, 'Source')
        try:
            os.chdir(dirpath)
        except os.error:
            logger.error('Could not cd to directory %s' % dirpath)
            return 0
        filename = 'make_opts'
        calls = self.write_make_opts(writers.MakefileWriter(filename),
                                     link_tir_libs,tir_libs)
        # Return to original PWD
        os.chdir(cwd)

        cwd = os.getcwd()
        dirpath = os.path.join(self.dir_path, 'SubProcesses')
        try:
            os.chdir(dirpath)
        except os.error:
            logger.error('Could not cd to directory %s' % dirpath)
            return 0

        # We add here the user-friendly MadLoop option setter.
        cpfiles= ["SubProcesses/MadLoopParamReader.f",
                  "Cards/MadLoopParams.dat",
                  "SubProcesses/MadLoopParams.inc"]

        for file in cpfiles:
            shutil.copy(os.path.join(self.loop_dir,'StandAlone/', file),
                        os.path.join(self.dir_path, file))

        shutil.copy(pjoin(self.dir_path, 'Cards','MadLoopParams.dat'),
                    pjoin(self.dir_path, 'Cards','MadLoopParams_default.dat'))

        if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')):
            self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path,
                                                  'Cards', 'MadLoopParams.dat'))
            # write the output file
            self.MadLoopparam.write(pjoin(self.dir_path,"SubProcesses",
                                                           "MadLoopParams.dat"))

        # We need minimal editing of MadLoopCommons.f
        MadLoopCommon = open(os.path.join(self.loop_dir,'StandAlone',
                            "SubProcesses","MadLoopCommons.inc")).read()
        writer = writers.FortranWriter(os.path.join(self.dir_path,
                                             "SubProcesses","MadLoopCommons.f"))
        writer.writelines(MadLoopCommon%{
                 'print_banner_commands':self.MadLoop_banner},
                 context={'collier_available':self.tir_available_dict['collier']})
        writer.close()

        # link the files from the MODEL
        model_path = self.dir_path + '/Source/MODEL/'
        # Note that for the [real=] mode, these files are not present
        if os.path.isfile(os.path.join(model_path,'mp_coupl.inc')):
            ln(model_path + '/mp_coupl.inc', self.dir_path + '/SubProcesses')
        if os.path.isfile(os.path.join(model_path,'mp_coupl_same_name.inc')):
            ln(model_path + '/mp_coupl_same_name.inc', \
                              self.dir_path + '/SubProcesses')

        # Write the cts_mpc.h and cts_mprec.h files imported from CutTools
        # (written in the current directory, i.e. SubProcesses)
        self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\
                            writers.FortranWriter('cts_mpc.h'),)

        self.copy_python_files()


        # We need to create the correct open_data for the pdf
        self.write_pdf_opendata()


        # Return to original PWD
        os.chdir(cwd)
3589
    def generate_virt_directory(self, loop_matrix_element, fortran_model, dir_name):
        """Write the V**** subdirectory (one-loop/virtual matrix element) inside
        the P**** directory given by dir_name.

        Parameters
        ----------
        loop_matrix_element : the loop HelasMatrixElement to export.
        fortran_model : the Fortran model writer used to emit HELAS calls.
        dir_name : path of the parent P**** directory in which the V****
            directory (and a shared MadLoop5_resources directory) are created.

        Returns
        -------
        int : the number of HELAS calls written for the loop matrix element
            (0 on failure to enter the target directory or when no calls
            were produced).

        NOTE(review): this method chdir()s into the V**** directory and all the
        relative paths below ('../..', etc.) depend on that; it restores the
        original working directory at the end, but not on every early return.
        """

        # Remember the caller's working directory so it can be restored at the end.
        cwd = os.getcwd()

        matrix_element = loop_matrix_element

        # Create the MadLoop5_resources directory if not already existing.
        # An already-existing directory is not fatal: just warn and reuse it.
        dirpath = os.path.join(dir_name, 'MadLoop5_resources')
        try:
            os.mkdir(dirpath)
        except os.error as error:
            logger.warning(error.strerror + " " + dirpath)

        # Create the directory V<shell_string> in the specified path,
        # named after the (single) process of this matrix element.
        name = "V%s" % matrix_element.get('processes')[0].shell_string()
        dirpath = os.path.join(dir_name, name)

        try:
            os.mkdir(dirpath)
        except os.error as error:
            logger.warning(error.strerror + " " + dirpath)

        # From here on every relative path is interpreted inside the V**** dir.
        try:
            os.chdir(dirpath)
        except os.error:
            logger.error('Could not cd to directory %s' % dirpath)
            return 0

        logger.info('Creating files in directory %s' % name)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Emit the loop matrix element Fortran code; 'calls' counts the HELAS
        # calls generated (writer is None: files are written in the cwd).
        calls = self.write_loop_matrix_element_v4(None, matrix_element, fortran_model)

        # We need a link to coefs.inc from DHELAS
        # ('ln' is the project symlink helper; abspath=False keeps it relative).
        ln(pjoin(self.dir_path, 'Source', 'DHELAS', 'coef_specs.inc'),
           abspath=False, cwd=None)

        # The born matrix element, if needed
        filename = 'born_matrix.f'
        calls = self.write_bornmatrix(
            writers.FortranWriter(filename),
            matrix_element,
            fortran_model)

        # Include file with the number of external/initial-state particles.
        filename = 'nexternal.inc'
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        # Include file with the external-particle masses.
        filename = 'pmass.inc'
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        # Include file with the total number of amplitudes/diagrams.
        filename = 'ngraphs.inc'
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(matrix_element.get_all_amplitudes()))

        # Loop-diagram drawing from the HELAS objects is not implemented yet,
        # so write a placeholder comment, then draw from the base amplitude
        # instead (capped at the first 1000 loop diagrams).
        filename = "loop_matrix.ps"
        writers.FortranWriter(filename).writelines("""C Post-helas generation loop-drawing is not ready yet.""")
        plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList(
            matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]),
            filename,
            model=matrix_element.get('processes')[0].get('model'),
            amplitude='')
        logger.info("Drawing loop Feynman diagrams for " + \
                    matrix_element.get('processes')[0].nice_string(\
                        print_weighted=False))
        plot.draw()

        # Same for the Born diagrams (no cap needed here).
        filename = "born_matrix.ps"
        plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                          get('born_diagrams'),
                                          filename,
                                          model=matrix_element.get('processes')[0].\
                                          get('model'),
                                          amplitude='')
        logger.info("Generating born Feynman diagrams for " + \
                    matrix_element.get('processes')[0].nice_string(\
                        print_weighted=False))
        plot.draw()

        # We also need to write the overall maximum quantities for this group
        # of processes in 'global_specs.inc'. In aMCatNLO, there is always
        # only one process, so this is trivial
        self.write_global_specs(matrix_element, output_path=pjoin(dirpath,'global_specs.inc'))

        # Each V directory gets a (here trivial) unique MadLoop identifier.
        open('unique_id.inc','w').write(
"""      integer UNIQUE_ID
      parameter(UNIQUE_ID=1)""")

        # Symlink shared sources/includes from the SubProcesses level
        # (two levels up from this V**** directory).
        linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc',
                     'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f',
                     'MadLoopParams.inc','MadLoopCommons.f']

        for file in linkfiles:
            ln('../../%s' % file)

        # The loop makefile is shared; link it under the local name 'makefile'.
        os.system("ln -s ../../makefile_loop makefile")

        # We should move to MadLoop5_resources directory from the SubProcesses
        ln(pjoin(os.path.pardir,os.path.pardir,'MadLoopParams.dat'),
           pjoin('..','MadLoop5_resources'))

        # Link the quadruple-precision module from the lib directory.
        linkfiles = ['mpmodule.mod']

        for file in linkfiles:
            ln('../../../lib/%s' % file)

        # Link the loop-coefficient size specification from DHELAS.
        linkfiles = ['coef_specs.inc']

        for file in linkfiles:
            ln('../../../Source/DHELAS/%s' % file)

        # Return to original PWD
        os.chdir(cwd)

        # Normalize a falsy call count (e.g. None) to 0 for the caller.
        if not calls:
            calls = 0
        return calls
3712 3713 3714 #=============================================================================== 3715 # write_coef_specs 3716 #===============================================================================
3717 - def write_coef_specs_file(self, max_loop_vertex_ranks):
3718 """ writes the coef_specs.inc in the DHELAS folder. Should not be called in the 3719 non-optimized mode""" 3720 filename = os.path.join(self.dir_path, 'Source', 'DHELAS', 'coef_specs.inc') 3721 3722 replace_dict = {} 3723 replace_dict['max_lwf_size'] = 4 3724 replace_dict['vertex_max_coefs'] = max(\ 3725 [q_polynomial.get_number_of_coefs_for_rank(n) 3726 for n in max_loop_vertex_ranks]) 3727 IncWriter=writers.FortranWriter(filename,'w') 3728 IncWriter.writelines("""INTEGER MAXLWFSIZE 3729 PARAMETER (MAXLWFSIZE=%(max_lwf_size)d) 3730 INTEGER VERTEXMAXCOEFS 3731 PARAMETER (VERTEXMAXCOEFS=%(vertex_max_coefs)d)"""\ 3732 % replace_dict) 3733 IncWriter.close()
3734