Package madgraph :: Package iolibs :: Module export_fks
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.iolibs.export_fks

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Methods and classes to export matrix elements to fks format.""" 
  16   
  17  from __future__ import absolute_import 
  18  from __future__ import print_function 
  19  from __future__ import division 
  20  import glob 
  21  import logging 
  22  import os 
  23  import re 
  24  import shutil 
  25  import subprocess 
  26  import string 
  27  import copy 
  28  import platform 
  29   
  30  import madgraph.core.color_algebra as color 
  31  import madgraph.core.helas_objects as helas_objects 
  32  import madgraph.core.base_objects as base_objects 
  33  import madgraph.fks.fks_helas_objects as fks_helas_objects 
  34  import madgraph.fks.fks_base as fks 
  35  import madgraph.fks.fks_common as fks_common 
  36  import madgraph.iolibs.drawing_eps as draw 
  37  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  38  import madgraph.iolibs.files as files 
  39  import madgraph.various.misc as misc 
  40  import madgraph.iolibs.file_writers as writers 
  41  import madgraph.iolibs.template_files as template_files 
  42  import madgraph.iolibs.ufo_expression_parsers as parsers 
  43  import madgraph.iolibs.export_v4 as export_v4 
  44  import madgraph.loop.loop_exporters as loop_exporters 
  45  import madgraph.various.q_polynomial as q_polynomial 
  46  import madgraph.various.banner as banner_mod 
  47   
  48  import aloha.create_aloha as create_aloha 
  49   
  50  import models.write_param_card as write_param_card 
  51  import models.check_param_card as check_param_card 
  52  from madgraph import MadGraph5Error, MG5DIR, InvalidCmd 
  53  from madgraph.iolibs.files import cp, ln, mv 
  54  from six.moves import range 
  55  from six.moves import zip 
  56   
  57  pjoin = os.path.join 
  58   
  59  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  60  logger = logging.getLogger('madgraph.export_fks') 
  61   
  62   
def make_jpeg_async(args):
    """Convert the postscript diagrams of one P* subprocess directory into
    jpeg files by running the ``gen_jpeg-pl`` helper script.

    Takes a single packed argument so it can be mapped over a
    multiprocessing pool.

    args: sequence of (Pdir, old_pos, dir_path) where
        Pdir     -- the P* subprocess directory the script must run in
        old_pos  -- base directory from which dir_path is resolved
        dir_path -- the process output directory (relative to old_pos)
    """
    Pdir = args[0]
    old_pos = args[1]
    dir_path = args[2]

    # Silence the helper script's output.
    devnull = os.open(os.devnull, os.O_RDWR)
    try:
        # Run with Pdir as the subprocess working directory instead of
        # os.chdir'ing into it: chdir is process-global and therefore racy
        # when several workers run concurrently in the same process.
        subprocess.call(
            [os.path.join(old_pos, dir_path, 'bin', 'internal', 'gen_jpeg-pl')],
            stdout=devnull, cwd=Pdir)
    finally:
        # The original implementation leaked this raw file descriptor.
        os.close(devnull)
74 75 76 #================================================================================= 77 # Class for used of the (non-optimized) Loop process 78 #=================================================================================
79 -class ProcessExporterFortranFKS(loop_exporters.LoopProcessExporterFortranSA):
80 """Class to take care of exporting a set of matrix elements to 81 Fortran (v4) format.""" 82 83 #=============================================================================== 84 # copy the Template in a new directory. 85 #===============================================================================
    def copy_fkstemplate(self):
        """create the directory run_name as a copy of the MadEvent
        Template, and clean the directory
        For now it is just the same as copy_v4template, but it will be modified

        Side effects: creates/cleans self.dir_path, writes version files,
        links CutTools, writes makefile_loop/make_opts, copies MadLoop
        cards, rewrites FKS_params.dat, and copies the python helpers.
        Returns 0 if it cannot cd into SubProcesses, otherwise None.
        """

        mgme_dir = self.mgme_dir
        dir_path = self.dir_path
        clean =self.opt['clean']

        #First copy the full template tree if dir_path doesn't exit
        if not os.path.isdir(dir_path):
            if not mgme_dir:
                raise MadGraph5Error("No valid MG_ME path given for MG4 run directory creation.")
            logger.info('initialize a new directory: %s' % \
                        os.path.basename(dir_path))
            shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True)
            # misc.copytree since dir_path already exists
            misc.copytree(pjoin(self.mgme_dir, 'Template', 'Common'), dir_path)
            # Copy plot_card
            for card in ['plot_card']:
                if os.path.isfile(pjoin(self.dir_path, 'Cards', card + '.dat')):
                    try:
                        shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'),
                                    pjoin(self.dir_path, 'Cards', card + '_default.dat'))
                    except IOError:
                        logger.warning("Failed to move " + card + ".dat to default")

        elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')):
            if not mgme_dir:
                raise MadGraph5Error("No valid MG_ME path given for MG4 run directory creation.")
            try:
                shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path)
            except IOError:
                # No MGMEVersion.txt shipped: synthesise one from the
                # package information instead.
                MG5_version = misc.get_pkg_info()
                open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \
                    "5." + MG5_version['version'])

        #Ensure that the Template is clean
        if clean:
            logger.info('remove old information in %s' % os.path.basename(dir_path))
            if 'MADGRAPH_BASE' in os.environ:
                # Web-server installation: use the --web cleaning mode.
                subprocess.call([os.path.join('bin', 'internal', 'clean_template'),
                                 '--web'], cwd=dir_path)
            else:
                try:
                    subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \
                                    cwd=dir_path)
                except Exception as why:
                    raise MadGraph5Error('Failed to clean correctly %s: \n %s' \
                                         % (os.path.basename(dir_path), why))
            #Write version info
            MG_version = misc.get_pkg_info()
            open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write(
                MG_version['version'])

        # We must link the CutTools to the Library folder of the active Template
        self.link_CutTools(dir_path)

        # Regenerate makefile_loop and make_opts from their .inc templates
        # (no TIR libraries are linked in this non-optimized exporter).
        link_tir_libs = []
        tir_libs = []
        os.remove(os.path.join(self.dir_path, 'SubProcesses', 'makefile_loop.inc'))
        dirpath = os.path.join(self.dir_path, 'SubProcesses')
        filename = pjoin(self.dir_path, 'SubProcesses', 'makefile_loop')
        calls = self.write_makefile_TIR(writers.MakefileWriter(filename),
                                        link_tir_libs, tir_libs)
        os.remove(os.path.join(self.dir_path, 'Source', 'make_opts.inc'))
        filename = pjoin(self.dir_path, 'Source', 'make_opts')
        calls = self.write_make_opts(writers.MakefileWriter(filename),
                                     link_tir_libs, tir_libs)

        # Duplicate run_card and FO_analyse_card
        for card in ['FO_analyse_card', 'shower_card']:
            try:
                shutil.copy(pjoin(self.dir_path, 'Cards',
                                  card + '.dat'),
                            pjoin(self.dir_path, 'Cards',
                                  card + '_default.dat'))
            except IOError:
                logger.warning("Failed to copy " + card + ".dat to default")

        cwd = os.getcwd()
        dirpath = os.path.join(self.dir_path, 'SubProcesses')
        try:
            os.chdir(dirpath)
        except os.error:
            logger.error('Could not cd to directory %s' % dirpath)
            return 0

        # We add here the user-friendly MadLoop option setter.
        cpfiles = ["SubProcesses/MadLoopParamReader.f",
                   "Cards/MadLoopParams.dat",
                   "SubProcesses/MadLoopParams.inc"]

        for file in cpfiles:
            shutil.copy(os.path.join(self.loop_dir, 'StandAlone/', file),
                        os.path.join(self.dir_path, file))

        shutil.copy(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat'),
                    pjoin(self.dir_path, 'Cards', 'MadLoopParams_default.dat'))

        if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')):
            self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path,
                                                  'Cards', 'MadLoopParams.dat'))
            # write the output file
            self.MadLoopparam.write(pjoin(self.dir_path, "SubProcesses",
                                          "MadLoopParams.dat"))

        # We need minimal editing of MadLoopCommons.f
        MadLoopCommon = open(os.path.join(self.loop_dir, 'StandAlone',
                             "SubProcesses", "MadLoopCommons.inc")).read()
        writer = writers.FortranWriter(os.path.join(self.dir_path,
                                       "SubProcesses", "MadLoopCommons.f"))
        # COLLIER is never available in the non-optimized mode.
        writer.writelines(MadLoopCommon % {
            'print_banner_commands': self.MadLoop_banner},
            context={'collier_available': False})
        writer.close()

        # Write the cts_mpc.h and cts_mprec.h files imported from CutTools
        # (written in the current directory, i.e. SubProcesses).
        self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\
                            writers.FortranWriter('cts_mpc.h'))

        # Finally make sure to turn off MC over Hel for the default mode.
        FKS_card_path = pjoin(self.dir_path, 'Cards', 'FKS_params.dat')
        FKS_card_file = open(FKS_card_path, 'r')
        FKS_card = FKS_card_file.read()
        FKS_card_file.close()
        FKS_card = re.sub(r"#NHelForMCoverHels\n-?\d+",
                          "#NHelForMCoverHels\n-1", FKS_card)
        FKS_card_file = open(FKS_card_path, 'w')
        FKS_card_file.write(FKS_card)
        FKS_card_file.close()

        # Return to original PWD
        os.chdir(cwd)
        # Copy the different python files in the Template
        self.copy_python_files()

        # We need to create the correct open_data for the pdf
        self.write_pdf_opendata()
227 228 # I put it here not in optimized one, because I want to use the same makefile_loop.inc 229 # Also, we overload this function (i.e. it is already defined in 230 # LoopProcessExporterFortranSA) because the path of the template makefile 231 # is different.
232 - def write_makefile_TIR(self, writer, link_tir_libs,tir_libs,tir_include=[]):
233 """ Create the file makefile_loop which links to the TIR libraries.""" 234 235 file = open(os.path.join(self.mgme_dir,'Template','NLO', 236 'SubProcesses','makefile_loop.inc')).read() 237 replace_dict={} 238 replace_dict['link_tir_libs']=' '.join(link_tir_libs) 239 replace_dict['tir_libs']=' '.join(tir_libs) 240 replace_dict['dotf']='%.f' 241 replace_dict['doto']='%.o' 242 replace_dict['tir_include']=' '.join(tir_include) 243 file=file%replace_dict 244 if writer: 245 writer.writelines(file) 246 else: 247 return file
248 249 # I put it here not in optimized one, because I want to use the same make_opts.inc
250 - def write_make_opts(self, writer, link_tir_libs,tir_libs):
251 """ Create the file make_opts which links to the TIR libraries.""" 252 file = open(os.path.join(self.mgme_dir,'Template','NLO', 253 'Source','make_opts.inc')).read() 254 replace_dict={} 255 replace_dict['link_tir_libs']=' '.join(link_tir_libs) 256 replace_dict['tir_libs']=' '.join(tir_libs) 257 replace_dict['dotf']='%.f' 258 replace_dict['doto']='%.o' 259 file=file%replace_dict 260 if writer: 261 writer.writelines(file) 262 else: 263 return file
264 265 #=========================================================================== 266 # copy_python_files 267 #===========================================================================
268 - def copy_python_files(self):
269 """copy python files required for the Template""" 270 271 files_to_copy = [ \ 272 pjoin('interface','amcatnlo_run_interface.py'), 273 pjoin('interface','extended_cmd.py'), 274 pjoin('interface','common_run_interface.py'), 275 pjoin('interface','coloring_logging.py'), 276 pjoin('various','misc.py'), 277 pjoin('various','shower_card.py'), 278 pjoin('various','FO_analyse_card.py'), 279 pjoin('various','histograms.py'), 280 pjoin('various','banner.py'), 281 pjoin('various','cluster.py'), 282 pjoin('various','systematics.py'), 283 pjoin('various','lhe_parser.py'), 284 pjoin('madevent','sum_html.py'), 285 pjoin('madevent','gen_crossxhtml.py'), 286 pjoin('iolibs','files.py'), 287 pjoin('iolibs','save_load_object.py'), 288 pjoin('iolibs','file_writers.py'), 289 pjoin('..','models','check_param_card.py'), 290 pjoin('__init__.py') 291 ] 292 cp(_file_path+'/interface/.mg5_logging.conf', 293 self.dir_path+'/bin/internal/me5_logging.conf') 294 295 for cp_file in files_to_copy: 296 cp(pjoin(_file_path,cp_file), 297 pjoin(self.dir_path,'bin','internal',os.path.basename(cp_file)))
298
299 - def convert_model(self, model, wanted_lorentz = [], 300 wanted_couplings = []):
301 302 super(ProcessExporterFortranFKS,self).convert_model(model, 303 wanted_lorentz, wanted_couplings) 304 305 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 306 try: 307 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 308 except OSError as error: 309 pass 310 model_path = model.get('modelpath') 311 shutil.copytree(model_path, 312 pjoin(self.dir_path,'bin','internal','ufomodel'), 313 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 314 if hasattr(model, 'restrict_card'): 315 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 316 'restrict_default.dat') 317 if isinstance(model.restrict_card, check_param_card.ParamCard): 318 model.restrict_card.write(out_path) 319 else: 320 files.cp(model.restrict_card, out_path)
321 322 323 324 #=========================================================================== 325 # write_maxparticles_file 326 #===========================================================================
327 - def write_maxparticles_file(self, writer, maxparticles):
328 """Write the maxparticles.inc file for MadEvent""" 329 330 lines = "integer max_particles, max_branch\n" 331 lines += "parameter (max_particles=%d) \n" % maxparticles 332 lines += "parameter (max_branch=max_particles-1)" 333 334 # Write the file 335 writer.writelines(lines) 336 337 return True
338 339 340 #=========================================================================== 341 # write_maxconfigs_file 342 #===========================================================================
343 - def write_maxconfigs_file(self, writer, maxconfigs):
344 """Write the maxconfigs.inc file for MadEvent""" 345 346 lines = "integer lmaxconfigs\n" 347 lines += "parameter (lmaxconfigs=%d)" % maxconfigs 348 349 # Write the file 350 writer.writelines(lines) 351 352 return True
353 354 355 #=============================================================================== 356 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 357 #===============================================================================
358 - def write_procdef_mg5(self, file_pos, modelname, process_str):
359 """ write an equivalent of the MG4 proc_card in order that all the Madevent 360 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 361 362 proc_card_template = template_files.mg4_proc_card.mg4_template 363 process_template = template_files.mg4_proc_card.process_template 364 process_text = '' 365 coupling = '' 366 new_process_content = [] 367 368 # First find the coupling and suppress the coupling from process_str 369 #But first ensure that coupling are define whithout spaces: 370 process_str = process_str.replace(' =', '=') 371 process_str = process_str.replace('= ', '=') 372 process_str = process_str.replace(',',' , ') 373 #now loop on the element and treat all the coupling 374 for info in process_str.split(): 375 if '=' in info: 376 coupling += info + '\n' 377 else: 378 new_process_content.append(info) 379 # Recombine the process_str (which is the input process_str without coupling 380 #info) 381 process_str = ' '.join(new_process_content) 382 383 #format the SubProcess 384 process_text += process_template.substitute({'process': process_str, \ 385 'coupling': coupling}) 386 387 text = proc_card_template.substitute({'process': process_text, 388 'model': modelname, 389 'multiparticle':''}) 390 ff = open(file_pos, 'w') 391 ff.write(text) 392 ff.close()
393 394 395 #=============================================================================== 396 # write a initial states map, useful for the fast PDF NLO interface 397 #===============================================================================
398 - def write_maxproc_files(self, nmaxpdf, subproc_path):
399 """write the c++ and fortran header files with the max number of pdf pairs 400 """ 401 # fortran 402 content = " integer mxpdflumi\n integer max_nproc\n parameter(mxpdflumi=%d,max_nproc=%d)\n" \ 403 % (nmaxpdf, nmaxpdf) 404 fout = open(pjoin(subproc_path, 'pineappl_maxproc.inc'), 'w') 405 fout.write(content) 406 fout.close() 407 408 # c++ 409 content = "#define __max_nproc__ %d" % (nmaxpdf) 410 fout = open(pjoin(subproc_path, 'pineappl_maxproc.h'), 'w') 411 fout.write(content) 412 fout.close()
413 414 415 416 #=============================================================================== 417 # write a initial states map, useful for the fast PDF NLO interface 418 #===============================================================================
419 - def write_init_map(self, file_pos, initial_states):
420 """ Write an initial state process map. Each possible PDF 421 combination gets an unique identifier.""" 422 423 text='' 424 i=0 425 for i,e in enumerate(initial_states): 426 text=text+str(i+1)+' '+str(len(e)) 427 for t in e: 428 if len(t) ==1: 429 t.append(0) 430 text=text+' ' 431 try: 432 for p in t: 433 if p == None : p = 0 434 text=text+' '+str(p) 435 except TypeError: 436 text=text+' '+str(t) 437 text=text+'\n' 438 439 ff = open(file_pos, 'w') 440 ff.write(text) 441 ff.close() 442 443 return i+1
444
445 - def get_ME_identifier(self, matrix_element, *args, **opts):
446 """ A function returning a string uniquely identifying the matrix 447 element given in argument so that it can be used as a prefix to all 448 MadLoop5 subroutines and common blocks related to it. This allows 449 to compile several processes into one library as requested by the 450 BLHA (Binoth LesHouches Accord) guidelines. The MadFKS design 451 necessitates that there is no process prefix.""" 452 453 return ''
454 455 #=============================================================================== 456 # write_coef_specs 457 #===============================================================================
458 - def write_coef_specs_file(self, virt_me_list):
459 """writes the coef_specs.inc in the DHELAS folder. Should not be called in the 460 non-optimized mode""" 461 raise fks_common.FKSProcessError()("write_coef_specs should be called only in the loop-optimized mode")
462 463 464 #=============================================================================== 465 # generate_directories_fks 466 #===============================================================================
    def generate_directories_fks(self, matrix_element, fortran_model, me_number,
                                 me_ntot, path=os.getcwd(), OLP='MadLoop'):
        """Generate the Pxxxxx_i directories for a subprocess in MadFKS,
        including the necessary matrix.f and various helper files

        matrix_element -- the FKS helas matrix element (born + reals + virt).
        fortran_model  -- the fortran helas-call writer.
        me_number      -- index of this ME (for logging / iproc.dat).
        me_ntot        -- total number of MEs (for logging).
        path           -- the SubProcesses directory to generate into.
                          NOTE(review): the default os.getcwd() is evaluated
                          once at class-definition time, not per call --
                          callers appear to always pass path explicitly.
        OLP            -- one-loop provider name ('MadLoop' by default).

        Returns the number of helas calls of the virtual ME (0 if none).
        """
        proc = matrix_element.born_me['processes'][0]

        if not self.model:
            self.model = matrix_element.get('processes')[0].get('model')

        # All files below are written relative to the P* directory, so we
        # chdir there and only return to cwd at the very end.
        cwd = os.getcwd()
        try:
            os.chdir(path)
        except OSError as error:
            error_msg = "The directory %s should exist in order to be able " % path + \
                        "to \"export\" in it. If you see this error message by " + \
                        "typing the command \"export\" please consider to use " + \
                        "instead the command \"output\". "
            raise MadGraph5Error(error_msg)

        calls = 0

        self.fksdirs = []
        #first make and cd the direcrory corresponding to the born process:
        borndir = "P%s" % \
            (matrix_element.born_me.get('processes')[0].shell_string())
        os.mkdir(borndir)
        os.chdir(borndir)
        logger.info('Writing files in %s (%d / %d)' % (borndir, me_number + 1, me_ntot))

        ## write the files corresponding to the born process in the P* directory
        self.generate_born_fks_files(matrix_element,
                                     fortran_model, me_number, path)

        # With NJET you want to generate the order file per subprocess and most
        # likely also generate it for each subproc.
        if OLP == 'NJET':
            filename = 'OLE_order.lh'
            self.write_lh_order(filename, [matrix_element.born_me.get('processes')[0]], OLP)

        if matrix_element.virt_matrix_element:
            calls += self.generate_virt_directory( \
                matrix_element.virt_matrix_element, \
                fortran_model, \
                os.path.join(path, borndir))

        #write the infortions for the different real emission processes
        sqsorders_list = \
            self.write_real_matrix_elements(matrix_element, fortran_model)

        filename = 'extra_cnt_wrapper.f'
        self.write_extra_cnt_wrapper(writers.FortranWriter(filename),
                                     matrix_element.extra_cnt_me_list,
                                     fortran_model)

        # One born_cnt_i.f per extra counterterm matrix element.
        for i, extra_cnt_me in enumerate(matrix_element.extra_cnt_me_list):
            replace_dict = {}

            den_factor_lines = self.get_den_factor_lines(matrix_element,
                                                         extra_cnt_me)
            replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines)

            ij_lines = self.get_ij_lines(matrix_element)
            replace_dict['ij_lines'] = '\n'.join(ij_lines)

            filename = 'born_cnt_%d.f' % (i+1)
            self.write_split_me_fks(writers.FortranWriter(filename),
                                    extra_cnt_me,
                                    fortran_model, 'cnt', '%d' % (i+1),
                                    replace_dict)

        self.write_pdf_calls(matrix_element, fortran_model)

        filename = 'nFKSconfigs.inc'
        self.write_nfksconfigs_file(writers.FortranWriter(filename),
                                    matrix_element,
                                    fortran_model)

        filename = 'iproc.dat'
        self.write_iproc_file(writers.FortranWriter(filename),
                              me_number)

        filename = 'fks_info.inc'
        # write_fks_info_list returns a set of the splitting types
        self.proc_characteristic['splitting_types'] = list(\
            set(self.proc_characteristic['splitting_types']).union(\
                self.write_fks_info_file(writers.FortranWriter(filename),
                                         matrix_element,
                                         fortran_model)))

        filename = 'leshouche_info.dat'
        nfksconfs, maxproc, maxflow, nexternal = \
            self.write_leshouche_info_file(filename, matrix_element)

        # if no corrections are generated ([LOonly] mode), get
        # these variables from the born
        if nfksconfs == maxproc == maxflow == 0:
            nfksconfs = 1
            (dummylines, maxproc, maxflow) = self.get_leshouche_lines(
                matrix_element.born_me, 1)

        filename = 'leshouche_decl.inc'
        self.write_leshouche_info_declarations(
            writers.FortranWriter(filename),
            nfksconfs, maxproc, maxflow, nexternal,
            fortran_model)

        filename = 'genps.inc'
        ngraphs = matrix_element.born_me.get_number_of_amplitudes()
        ncolor = max(1, len(matrix_element.born_me.get('color_basis')))
        self.write_genps(writers.FortranWriter(filename), maxproc, ngraphs,\
                         ncolor, maxflow, fortran_model)

        filename = 'configs_and_props_info.dat'
        nconfigs, max_leg_number = self.write_configs_and_props_info_file(
            filename,
            matrix_element)

        filename = 'configs_and_props_decl.inc'
        self.write_configs_and_props_info_declarations(
            writers.FortranWriter(filename),
            nconfigs, max_leg_number, nfksconfs,
            fortran_model)

        # For processes with only QCD splittings, write
        # the file with the mapping of born vs real diagrams
        # Otherwise, write a dummy file
        filename = 'real_from_born_configs.inc'
        if self.proc_characteristic['splitting_types'] == ['QCD']:
            self.write_real_from_born_configs(
                writers.FortranWriter(filename),
                matrix_element,
                fortran_model)
        else:
            self.write_real_from_born_configs_dummy(
                writers.FortranWriter(filename),
                matrix_element,
                fortran_model)

        filename = 'ngraphs.inc'
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                nconfigs)

        #write the wrappers for real ME's
        filename_me = 'real_me_chooser.f'
        filename_lum = 'parton_lum_chooser.f'
        self.write_real_wrappers(writers.FortranWriter(filename_me),
                                 writers.FortranWriter(filename_lum),
                                 matrix_element, sqsorders_list,
                                 fortran_model)

        filename = 'get_color.f'
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = 'nexternal.inc'
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = 'orders.inc'
        amp_split_orders, amp_split_size, amp_split_size_born = \
            self.write_orders_file(
                writers.FortranWriter(filename),
                matrix_element)

        filename = 'orders.h'
        self.write_orders_c_header_file(
            writers.CPPWriter(filename),
            amp_split_size, amp_split_size_born)

        filename = 'amp_split_orders.inc'
        self.write_amp_split_orders_file(
            writers.FortranWriter(filename),
            amp_split_orders)
        self.proc_characteristic['ninitial'] = ninitial
        self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal)

        filename = 'pmass.inc'
        # Take the masses from the first real-emission process; fall back to
        # the born when there are no reals ([LOonly] mode).
        try:
            self.write_pmass_file(writers.FortranWriter(filename),
                                  matrix_element.real_processes[0].matrix_element)
        except IndexError:
            self.write_pmass_file(writers.FortranWriter(filename),
                                  matrix_element.born_me)

        #draw the diagrams
        self.draw_feynman_diagrams(matrix_element)

        # Files shared between all P* directories, linked from SubProcesses.
        linkfiles = ['BinothLHADummy.f',
                     'check_poles.f',
                     'MCmasses_HERWIG6.inc',
                     'MCmasses_HERWIGPP.inc',
                     'MCmasses_PYTHIA6Q.inc',
                     'MCmasses_PYTHIA6PT.inc',
                     'MCmasses_PYTHIA8.inc',
                     'add_write_info.f',
                     'coupl.inc',
                     'cuts.f',
                     'FKS_params.dat',
                     'initial_states_map.dat',
                     'OLE_order.olc',
                     'FKSParams.f90',
                     'cuts.inc',
                     'unlops.inc',
                     'pythia_unlops.f',
                     'driver_mintMC.f',
                     'driver_mintFO.f',
                     'pineappl_interface.cc',
                     'pineappl_interface_dummy.f',
                     'pineappl_common.inc',
                     'reweight_pineappl.inc',
                     'fastjetfortran_madfks_core.cc',
                     'fastjetfortran_madfks_full.cc',
                     'fjcore.cc',
                     'fastjet_wrapper.f',
                     'fjcore.hh',
                     'fks_Sij.f',
                     'fks_powers.inc',
                     'fks_singular.f',
                     'splitorders_stuff.f',
                     'orderstags_glob.f',
                     'chooser_functions.f',
                     'veto_xsec.f',
                     'veto_xsec.inc',
                     'weight_lines.f',
                     'genps_fks.f',
                     'boostwdir2.f',
                     'madfks_mcatnlo.inc',
                     'open_output_files.f',
                     'open_output_files_dummy.f',
                     'HwU_dummy.f',
                     'madfks_plot.f',
                     'analysis_dummy.f',
                     'analysis_lhe.f',
                     'mint_module.f90',
                     'MC_integer.f',
                     'mint.inc',
                     'montecarlocounter.f',
                     'q_es.inc',
                     'recluster.cc',
                     'Boosts.h',
                     'reweight_xsec.f',
                     'reweight_xsec_events.f',
                     'reweight_xsec_events_pdf_dummy.f',
                     'iproc_map.f',
                     'run.inc',
                     'run_card.inc',
                     'setcuts.f',
                     'setscales.f',
                     'recmom.f',
                     'test_soft_col_limits.f',
                     'symmetry_fks_v3.f',
                     'vegas2.for',
                     'write_ajob.f',
                     'handling_lhe_events.f',
                     'write_event.f',
                     'fill_MC_mshell.f',
                     'maxparticles.inc',
                     'message.inc',
                     'initcluster.f',
                     'cluster.inc',
                     'cluster.f',
                     'reweight.f',
                     'randinit',
                     'sudakov.inc',
                     'maxconfigs.inc',
                     'pineappl_maxproc.inc',
                     'pineappl_maxproc.h',
                     'timing_variables.inc',
                     'polfit.f']

        for file in linkfiles:
            ln('../' + file, '.')
        os.system("ln -s ../../Cards/param_card.dat .")

        #copy the makefile
        os.system("ln -s ../makefile_fks_dir ./makefile")
        # Pick the BinothLHA flavour matching the one-loop provider in use.
        if matrix_element.virt_matrix_element:
            os.system("ln -s ../BinothLHA.f ./BinothLHA.f")
        elif OLP != 'MadLoop':
            os.system("ln -s ../BinothLHA_OLP.f ./BinothLHA.f")
        else:
            os.system("ln -s ../BinothLHA_user.f ./BinothLHA.f")

        # Return to SubProcesses dir
        os.chdir(os.path.pardir)

        # Add subprocess to subproc.mg
        filename = 'subproc.mg'
        files.append_to_file(filename,
                             self.write_subproc,
                             borndir)

        os.chdir(cwd)
        # Generate info page
        gen_infohtml.make_info_html_nlo(self.dir_path)

        return calls
763 764 #=========================================================================== 765 # create the run_card 766 #===========================================================================
767 - def create_run_card(self, processes, history):
768 """ """ 769 770 run_card = banner_mod.RunCardNLO() 771 772 run_card.create_default_for_process(self.proc_characteristic, 773 history, 774 processes) 775 776 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat')) 777 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))
778 779
780 - def pass_information_from_cmd(self, cmd):
781 """pass information from the command interface to the exporter. 782 Please do not modify any object of the interface from the exporter. 783 """ 784 self.proc_defs = cmd._curr_proc_defs 785 if hasattr(cmd,'born_processes'): 786 self.born_processes = cmd.born_processes 787 else: 788 self.born_processes = [] 789 return
790
791 - def finalize(self, matrix_elements, history, mg5options, flaglist):
792 """Finalize FKS directory by creating jpeg diagrams, html 793 pages,proc_card_mg5.dat and madevent.tar.gz and create the MA5 card if 794 necessary.""" 795 796 devnull = os.open(os.devnull, os.O_RDWR) 797 try: 798 res = misc.call([mg5options['lhapdf'], '--version'], \ 799 stdout=subprocess.PIPE, stderr=subprocess.PIPE) 800 except Exception: 801 res = 1 802 if res != 0: 803 logger.info('The value for lhapdf in the current configuration does not ' + \ 804 'correspond to a valid executable.\nPlease set it correctly either in ' + \ 805 'input/mg5_configuration or with "set lhapdf /path/to/lhapdf-config" ' + \ 806 'and regenrate the process. \nTo avoid regeneration, edit the ' + \ 807 ('%s/Cards/amcatnlo_configuration.txt file.\n' % self.dir_path ) + \ 808 'Note that you can still compile and run aMC@NLO with the built-in PDFs\n') 809 810 compiler_dict = {'fortran': mg5options['fortran_compiler'], 811 'cpp': mg5options['cpp_compiler'], 812 'f2py': mg5options['f2py_compiler']} 813 814 if 'nojpeg' in flaglist: 815 makejpg = False 816 else: 817 makejpg = True 818 output_dependencies = mg5options['output_dependencies'] 819 820 821 self.proc_characteristic['grouped_matrix'] = False 822 self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme'] 823 self.proc_characteristic['nlo_mixed_expansion'] = mg5options['nlo_mixed_expansion'] 824 # determine perturbation order 825 perturbation_order = [] 826 firstprocess = history.get('generate') 827 order = re.findall("\[(.*)\]", firstprocess) 828 if 'QED' in order[0]: 829 perturbation_order.append('QED') 830 if 'QCD' in order[0]: 831 perturbation_order.append('QCD') 832 self.proc_characteristic['perturbation_order'] = perturbation_order 833 834 self.create_proc_charac() 835 836 self.create_run_card(matrix_elements.get_processes(), history) 837 # modelname = self.model.get('name') 838 # if modelname == 'mssm' or modelname.startswith('mssm-'): 839 # param_card = os.path.join(self.dir_path, 
'Cards','param_card.dat') 840 # mg5_param = os.path.join(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 841 # check_param_card.convert_to_mg5card(param_card, mg5_param) 842 # check_param_card.check_valid_param_card(mg5_param) 843 844 # # write the model functions get_mass/width_from_id 845 filename = os.path.join(self.dir_path,'Source','MODEL','get_mass_width_fcts.f') 846 makeinc = os.path.join(self.dir_path,'Source','MODEL','makeinc.inc') 847 self.write_get_mass_width_file(writers.FortranWriter(filename), makeinc, self.model) 848 849 # # Write maxconfigs.inc based on max of ME's/subprocess groups 850 851 filename = os.path.join(self.dir_path,'Source','maxconfigs.inc') 852 self.write_maxconfigs_file(writers.FortranWriter(filename), 853 matrix_elements.get_max_configs()) 854 855 # # Write maxparticles.inc based on max of ME's/subprocess groups 856 filename = os.path.join(self.dir_path,'Source','maxparticles.inc') 857 self.write_maxparticles_file(writers.FortranWriter(filename), 858 matrix_elements.get_max_particles()) 859 860 # Touch "done" file 861 os.system('touch %s/done' % os.path.join(self.dir_path,'SubProcesses')) 862 863 # Check for compiler 864 fcompiler_chosen = self.set_fortran_compiler(compiler_dict) 865 ccompiler_chosen = self.set_cpp_compiler(compiler_dict['cpp']) 866 867 old_pos = os.getcwd() 868 os.chdir(os.path.join(self.dir_path, 'SubProcesses')) 869 P_dir_list = [proc for proc in os.listdir('.') if os.path.isdir(proc) and \ 870 proc[0] == 'P'] 871 872 devnull = os.open(os.devnull, os.O_RDWR) 873 # Convert the poscript in jpg files (if authorize) 874 if makejpg: 875 logger.info("Generate jpeg diagrams") 876 for Pdir in P_dir_list: 877 os.chdir(Pdir) 878 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 879 stdout = devnull) 880 os.chdir(os.path.pardir) 881 # 882 logger.info("Generate web pages") 883 # Create the WebPage using perl script 884 885 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 
'internal', 'gen_cardhtml-pl')], \ 886 stdout = devnull) 887 888 os.chdir(os.path.pardir) 889 # 890 # obj = gen_infohtml.make_info_html(self.dir_path) 891 # [mv(name, './HTML/') for name in os.listdir('.') if \ 892 # (name.endswith('.html') or name.endswith('.jpg')) and \ 893 # name != 'index.html'] 894 # if online: 895 # nb_channel = obj.rep_rule['nb_gen_diag'] 896 # open(os.path.join('./Online'),'w').write(str(nb_channel)) 897 898 # Write command history as proc_card_mg5 899 if os.path.isdir('Cards'): 900 output_file = os.path.join('Cards', 'proc_card_mg5.dat') 901 history.write(output_file) 902 903 # Duplicate run_card and FO_analyse_card 904 for card in ['run_card', 'FO_analyse_card', 'shower_card']: 905 try: 906 shutil.copy(pjoin(self.dir_path, 'Cards', 907 card + '.dat'), 908 pjoin(self.dir_path, 'Cards', 909 card + '_default.dat')) 910 except IOError: 911 logger.warning("Failed to copy " + card + ".dat to default") 912 913 914 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 915 stdout = devnull) 916 917 # Run "make" to generate madevent.tar.gz file 918 if os.path.exists(pjoin('SubProcesses', 'subproc.mg')): 919 if os.path.exists('amcatnlo.tar.gz'): 920 os.remove('amcatnlo.tar.gz') 921 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'make_amcatnlo_tar')], 922 stdout = devnull) 923 # 924 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 925 stdout = devnull) 926 927 #return to the initial dir 928 os.chdir(old_pos) 929 930 # Setup stdHep 931 # Find the correct fortran compiler 932 base_compiler= ['FC=g77','FC=gfortran'] 933 934 StdHep_path = pjoin(MG5DIR, 'vendor', 'StdHEP') 935 if output_dependencies == 'external': 936 # check if stdhep has to be compiled (only the first time) 937 if (not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libstdhep.a')) or \ 938 not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libFmcfio.a'))) and 
\ 939 not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP','fail')): 940 if 'FC' not in os.environ or not os.environ['FC']: 941 path = os.path.join(StdHep_path, 'src', 'make_opts') 942 text = open(path).read() 943 for base in base_compiler: 944 text = text.replace(base,'FC=%s' % fcompiler_chosen) 945 open(path, 'w').writelines(text) 946 logger.info('Compiling StdHEP. This has to be done only once.') 947 try: 948 misc.compile(cwd = pjoin(MG5DIR, 'vendor', 'StdHEP')) 949 except Exception as error: 950 logger.debug(str(error)) 951 logger.warning("StdHep failed to compiled. This forbids to run NLO+PS with PY6 and Herwig6") 952 logger.info("details on the compilation error are available on %s", pjoin(MG5DIR, 'vendor', 'StdHEP','fail')) 953 logger.info("if you want to retry the compilation automatically, you have to remove that file first") 954 with open(pjoin(MG5DIR, 'vendor', 'StdHEP','fail'),'w') as fsock: 955 fsock.write(str(error)) 956 else: 957 logger.info('Done.') 958 if os.path.exists(pjoin(StdHep_path, 'lib', 'libstdhep.a')): 959 #then link the libraries in the exported dir 960 files.ln(pjoin(StdHep_path, 'lib', 'libstdhep.a'), \ 961 pjoin(self.dir_path, 'MCatNLO', 'lib')) 962 files.ln(pjoin(StdHep_path, 'lib', 'libFmcfio.a'), \ 963 pjoin(self.dir_path, 'MCatNLO', 'lib')) 964 965 elif output_dependencies == 'internal': 966 StdHEP_internal_path = pjoin(self.dir_path,'Source','StdHEP') 967 shutil.copytree(StdHep_path,StdHEP_internal_path, symlinks=True) 968 # Create the links to the lib folder 969 linkfiles = ['libstdhep.a', 'libFmcfio.a'] 970 for file in linkfiles: 971 ln(pjoin(os.path.pardir,os.path.pardir,'Source','StdHEP','lib',file), 972 os.path.join(self.dir_path, 'MCatNLO', 'lib')) 973 if 'FC' not in os.environ or not os.environ['FC']: 974 path = pjoin(StdHEP_internal_path, 'src', 'make_opts') 975 text = open(path).read() 976 for base in base_compiler: 977 text = text.replace(base,'FC=%s' % fcompiler_chosen) 978 open(path, 'w').writelines(text) 979 # To 
avoid compiler version conflicts, we force a clean here 980 misc.compile(['clean'],cwd = StdHEP_internal_path) 981 982 elif output_dependencies == 'environment_paths': 983 # Here the user chose to define the dependencies path in one of 984 # his environmental paths 985 libStdHep = misc.which_lib('libstdhep.a') 986 libFmcfio = misc.which_lib('libFmcfio.a') 987 if not libStdHep is None and not libFmcfio is None: 988 logger.info('MG5_aMC is using StdHep installation found at %s.'%\ 989 os.path.dirname(libStdHep)) 990 ln(pjoin(libStdHep),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True) 991 ln(pjoin(libFmcfio),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True) 992 else: 993 raise InvalidCmd("Could not find the location of the files"+\ 994 " libstdhep.a and libFmcfio.a in you environment paths.") 995 996 else: 997 raise MadGraph5Error('output_dependencies option %s not recognized'\ 998 %output_dependencies) 999 1000 # Create the default MadAnalysis5 cards 1001 if 'madanalysis5_path' in self.opt and not \ 1002 self.opt['madanalysis5_path'] is None and not self.proc_defs is None: 1003 # When using 1004 processes = sum([me.get('processes') if not isinstance(me, str) else [] \ 1005 for me in matrix_elements.get('matrix_elements')],[]) 1006 1007 # Try getting the processes from the generation info directly if no ME are 1008 # available (as it is the case for parallel generation 1009 if len(processes)==0: 1010 processes = self.born_processes 1011 if len(processes)==0: 1012 logger.warning( 1013 """MG5aMC could not provide to Madanalysis5 the list of processes generated. 1014 As a result, the default card will not be tailored to the process generated. 1015 This typically happens when using the 'low_mem_multicore_nlo_generation' NLO generation mode.""") 1016 # For now, simply assign all processes to each proc_defs. 
1017 # That shouldn't really affect the default analysis card created by MA5 1018 self.create_default_madanalysis5_cards( 1019 history, self.proc_defs, [processes,]*len(self.proc_defs), 1020 self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'), 1021 levels =['hadron'])
1022 1023 1024 1025 1026
    def write_real_from_born_configs(self, writer, matrix_element, fortran_model):
        """Writes the real_from_born_configs.inc file that contains
        the mapping to go for a given born configuration (that is used
        e.g. in the multi-channel phase-space integration to the
        corresponding real-emission diagram, i.e. the real emission
        diagram in which the combined ij is split in i_fks and
        j_fks.

        writer: FortranWriter for real_from_born_configs.inc
        matrix_element: FKS helas matrix element (born + real-emission info)
        fortran_model: unused here, kept for interface uniformity
        """
        # 'lines' collects the DATA statements, 'lines2' the declarations;
        # declarations are written first (see the final writelines call).
        lines = []
        lines2 = []
        max_links = 0
        born_me = matrix_element.born_me
        for iFKS, conf in enumerate(matrix_element.get_fks_info_list()):
            # shift to Fortran 1-based indexing
            iFKS = iFKS+1
            links = conf['fks_info']['rb_links']
            max_links = max(max_links,len(links))
            for i,diags in enumerate(links):
                # sanity check: entry i must describe born configuration i
                if not i == diags['born_conf']:
                    print(links)
                    raise MadGraph5Error( "born_conf should be canonically ordered")
            # real-emission configs, shifted by one for Fortran conventions
            real_configs = ', '.join(['%d' % int(diags['real_conf']+1) for diags in links])
            lines.append("data (real_from_born_conf(irfbc,%d),irfbc=1,%d) /%s/" \
                             % (iFKS,len(links),real_configs))

        # this is for 'LOonly' processes; in this case, a fake configuration
        # with all the born diagrams is written
        if not matrix_element.get_fks_info_list():
            # compute (again) the number of configurations at the born
            base_diagrams = born_me.get('base_amplitude').get('diagrams')
            minvert = min([max([len(vert.get('legs')) for vert in \
                                diag.get('vertices')]) for diag in base_diagrams])

            for idiag, diag in enumerate(base_diagrams):
                if any([len(vert.get('legs')) > minvert for vert in
                        diag.get('vertices')]):
                    # Only 3-vertices allowed in configs.inc
                    continue
                max_links = max_links + 1

            # identity mapping: born configuration i -> configuration i
            real_configs=', '.join(['%d' % i for i in range(1, max_links+1)])
            lines.append("data (real_from_born_conf(irfbc,%d),irfbc=1,%d) /%s/" \
                             % (1,max_links,real_configs))

        # array is dimensioned for at least one FKS configuration
        lines2.append("integer irfbc")
        lines2.append("integer real_from_born_conf(%d,%d)" \
                % (max_links, max(len(matrix_element.get_fks_info_list()),1)))
        # Write the file
        writer.writelines(lines2+lines)
1075 - def write_real_from_born_configs_dummy(self, writer, matrix_element, fortran_model):
1076 """write a dummy file""" 1077 max_links = 10 1078 lines2 = [] 1079 lines2.append("integer irfbc") 1080 lines2.append("integer real_from_born_conf(%d,%d)" \ 1081 % (max_links,len(matrix_element.get_fks_info_list()))) 1082 # Write the file 1083 writer.writelines(lines2)
1084 1085
1086 - def write_amp_split_orders_file(self, writer, amp_split_orders):
1087 """ write the include file with the information of the coupling power for the 1088 differen entries in the amp_split array""" 1089 text = "integer iaso, amp_split_orders(%d, nsplitorders)\n" % len(amp_split_orders) 1090 1091 for i, amp_orders in enumerate(amp_split_orders): 1092 text+= "data (amp_split_orders(%d, iaso), iaso=1,nsplitorders) / %s /\n" % \ 1093 (i + 1, ', '.join(['%d' % o for o in amp_orders])) 1094 1095 writer.writelines(text)
1096 1097
1098 - def write_orders_c_header_file(self, writer, amp_split_size, amp_split_size_born):
1099 """writes the header file including the amp_split_size declaration for amcblast 1100 """ 1101 text = "#define __amp_split_size %d\n" % amp_split_size 1102 text+= "#define __amp_split_size_born %d" % amp_split_size_born 1103 1104 writer.writelines(text)
1105 1106 1107
    def write_orders_file(self, writer, matrix_element):
        """writes the include file with the informations about coupling orders.
        In particular this file should contain the constraints requested by the user
        for all the orders which are split

        writer: FortranWriter for the orders include file
        matrix_element: FKS helas matrix element (born + reals, possibly virtuals)
        Returns (amp_split_orders, amp_split_size, amp_split_size_born).
        """

        # user-requested squared-order constraints, read off the born process
        born_orders = {}
        for ordd, val in matrix_element.born_me['processes'][0]['born_sq_orders'].items():
            born_orders[ordd] = val

        nlo_orders = {}
        for ordd, val in matrix_element.born_me['processes'][0]['squared_orders'].items():
            nlo_orders[ordd] = val

        split_orders = \
           matrix_element.born_me['processes'][0]['split_orders']

        pert_orders = \
           matrix_element.born_me['processes'][0]['perturbation_couplings']

        max_born_orders = {}
        max_nlo_orders = {}

        model = matrix_element.born_me['processes'][0]['model']

        # first get the max_born_orders
        if list(born_orders.keys()) == ['WEIGHTED']:
            # if user has not specified born_orders, check the 'weighted' for each
            # of the split_orders contributions
            # NOTE(review): max_nlo_orders is left empty in this branch but is
            # used when building the 'data nlo_orders' line below — confirm
            # this branch is only reached when that line cannot be needed.
            wgt_ord_max = born_orders['WEIGHTED']
            squared_orders, amp_orders = matrix_element.born_me.get_split_orders_mapping()
            for sq_order in squared_orders:
                # put the numbers in sq_order in a dictionary, with as keys
                # the corresponding order name
                ord_dict = {}
                assert len(sq_order) == len(split_orders)
                for o, v in zip(split_orders, list(sq_order)):
                    ord_dict[o] = v

                # hierarchy-weighted total power of this order combination
                wgt = sum([v * model.get('order_hierarchy')[o] for \
                        o, v in ord_dict.items()])
                if wgt > wgt_ord_max:
                    continue

                # keep, per order, the maximum power among accepted combinations
                for o, v in ord_dict.items():
                    try:
                        max_born_orders[o] = max(max_born_orders[o], v)
                    except KeyError:
                        max_born_orders[o] = v

        else:
            for o in [oo for oo in split_orders if oo != 'WEIGHTED']:
                try:
                    max_born_orders[o] = born_orders[o]
                except KeyError:
                    # if the order is not in born_orders set it to 1000
                    max_born_orders[o] = 1000
                try:
                    max_nlo_orders[o] = nlo_orders[o]
                except KeyError:
                    # if the order is not in born_orders set it to 1000
                    max_nlo_orders[o] = 1000

        # keep track also of the position of QED, QCD in the order array
        # might be useful in the fortran code
        qcd_pos = -1
        qed_pos = -1
        if 'QCD' in split_orders:
            qcd_pos = split_orders.index('QCD') + 1
        if 'QED' in split_orders:
            qed_pos = split_orders.index('QED') + 1

        # determine the size of the array that keeps track
        # of the different split orders, and the position
        # of the different split order combinations in this array
        # to be written in orders_to_amp_split_pos.inc and
        # amp_split_pos_to_orders.inc

        # the number of squared orders of the born ME
        amp_split_orders = []
        squared_orders, amp_orders = matrix_element.born_me.get_split_orders_mapping()
        amp_split_size_born = len(squared_orders)
        amp_split_orders += squared_orders

        #then check the real emissions
        for realme in matrix_element.real_processes:
            squared_orders, amp_orders = realme.matrix_element.get_split_orders_mapping()
            for order in squared_orders:
                if not order in amp_split_orders:
                    amp_split_orders.append(order)

        # check also the virtual
        # may be needed for processes without real emissions, e.g. z > v v
        # Note that for a loop_matrix_element squared_orders has a different format
        # (see the description of the get_split_orders_mapping function in loop_helas_objects)
        try:
            squared_orders, amp_orders = matrix_element.virt_matrix_element.get_split_orders_mapping()
            squared_orders = [so[0] for so in squared_orders]
            for order in squared_orders:
                if not order in amp_split_orders:
                    amp_split_orders.append(order)
        except AttributeError:
            # no virtual matrix element attached to this process
            pass

        amp_split_size=len(amp_split_orders)

        # assemble the Fortran include file
        text = '! The orders to be integrated for the Born and at NLO\n'
        text += 'integer nsplitorders\n'
        text += 'parameter (nsplitorders=%d)\n' % len(split_orders)
        text += 'character*3 ordernames(nsplitorders)\n'
        text += 'data ordernames / %s /\n' % ', '.join(['"%3s"' % o for o in split_orders])
        text += 'integer born_orders(nsplitorders), nlo_orders(nsplitorders)\n'
        text += '! the order of the coupling orders is %s\n' % ', '.join(split_orders)
        text += 'data born_orders / %s /\n' % ', '.join([str(max_born_orders[o]) for o in split_orders])
        text += 'data nlo_orders / %s /\n' % ', '.join([str(max_nlo_orders[o]) for o in split_orders])
        text += '! The position of the QCD /QED orders in the array\n'
        text += 'integer qcd_pos, qed_pos\n'
        text += '! if = -1, then it is not in the split_orders\n'
        text += 'parameter (qcd_pos = %d)\n' % qcd_pos
        text += 'parameter (qed_pos = %d)\n' % qed_pos
        text += '! this is to keep track of the various \n'
        text += '! coupling combinations entering each ME\n'
        text += 'integer amp_split_size, amp_split_size_born\n'
        text += 'parameter (amp_split_size = %d)\n' % amp_split_size
        text += '! the first entries in the next line in amp_split are for the born \n'
        text += 'parameter (amp_split_size_born = %d)\n' % amp_split_size_born
        text += 'double precision amp_split(amp_split_size)\n'
        text += 'double complex amp_split_cnt(amp_split_size,2,nsplitorders)\n'
        text += 'common /to_amp_split/amp_split, amp_split_cnt\n'

        writer.writelines(text)

        return amp_split_orders, amp_split_size, amp_split_size_born
1240 1241 1242 #=============================================================================== 1243 # write_get_mass_width_file 1244 #=============================================================================== 1245 #test written
    def write_get_mass_width_file(self, writer, makeinc, model):
        """Write the get_mass_width_file.f file for MG4.
        Also update the makeinc.inc file

        writer: FortranWriter for get_mass_width_fcts.f
        makeinc: path of the makeinc.inc file to patch
        model: model object providing the particle list
        """
        # particles whose mass/width parameter is not the literal 'zero'
        mass_particles = [p for p in model['particles'] if p['mass'].lower() != 'zero']
        width_particles = [p for p in model['particles'] if p['width'].lower() != 'zero']

        iflines_mass = ''
        iflines_width = ''

        # build the if/else-if ladder returning the mass for each pdg code
        for i, part in enumerate(mass_particles):
            if i == 0:
                ifstring = 'if'
            else:
                ifstring = 'else if'
            if part['self_antipart']:
                iflines_mass += '%s (id.eq.%d) then\n' % \
                        (ifstring, part.get_pdg_code())
            else:
                # match both the particle and the antiparticle pdg code
                iflines_mass += '%s (id.eq.%d.or.id.eq.%d) then\n' % \
                        (ifstring, part.get_pdg_code(), part.get_anti_pdg_code())
            iflines_mass += 'get_mass_from_id=abs(%s)\n' % part.get('mass')

        if mass_particles:
            iflines_mass += 'else\n'
        else:
            # no massive particle at all: emit a trivially-true branch
            iflines_mass = 'if (.true.) then\n'

        # same ladder for the widths
        for i, part in enumerate(width_particles):
            if i == 0:
                ifstring = 'if'
            else:
                ifstring = 'else if'
            if part['self_antipart']:
                iflines_width += '%s (id.eq.%d) then\n' % \
                        (ifstring, part.get_pdg_code())
            else:
                iflines_width += '%s (id.eq.%d.or.id.eq.%d) then\n' % \
                        (ifstring, part.get_pdg_code(), part.get_anti_pdg_code())
            iflines_width += 'get_width_from_id=abs(%s)\n' % part.get('width')

        if width_particles:
            iflines_width += 'else\n'
        else:
            iflines_width = 'if (.true.) then\n'

        replace_dict = {'iflines_mass' : iflines_mass,
                        'iflines_width' : iflines_width}

        # fill the fortran template shipped with the code
        file = open(os.path.join(_file_path, \
                         'iolibs/template_files/get_mass_width_fcts.inc')).read()
        file = file % replace_dict

        # Write the file
        writer.writelines(file)

        # update the makeinc so the new object file gets built
        makeinc_content = open(makeinc).read()
        makeinc_content = makeinc_content.replace('MODEL = ', 'MODEL = get_mass_width_fcts.o ')
        open(makeinc, 'w').write(makeinc_content)

        return
1310 - def write_configs_and_props_info_declarations(self, writer, max_iconfig, max_leg_number, nfksconfs, fortran_model):
1311 """writes the declarations for the variables relevant for configs_and_props 1312 """ 1313 lines = [] 1314 lines.append("integer ifr,lmaxconfigs_used,max_branch_used") 1315 lines.append("parameter (lmaxconfigs_used=%4d)" % max_iconfig) 1316 lines.append("parameter (max_branch_used =%4d)" % -max_leg_number) 1317 lines.append("integer mapconfig_d(%3d,0:lmaxconfigs_used)" % nfksconfs) 1318 lines.append("integer iforest_d(%3d,2,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 1319 lines.append("integer sprop_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 1320 lines.append("integer tprid_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 1321 lines.append("double precision pmass_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 1322 lines.append("double precision pwidth_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 1323 lines.append("integer pow_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs) 1324 1325 writer.writelines(lines)
1326 1327
    def write_configs_and_props_info_file(self, filename, matrix_element):
        """writes the configs_and_props_info.inc file that cointains
        all the (real-emission) configurations (IFOREST) as well as
        the masses and widths of intermediate particles

        filename: output path (plain text file, parsed later by tag)
        matrix_element: FKS helas matrix element
        Returns (max_iconfig, max_leg_number) used to size the arrays.
        """
        # legend of the one-letter record tags written to the file
        lines = []
        lines.append("# C -> MAPCONFIG_D")
        lines.append("# F/D -> IFOREST_D")
        lines.append("# S -> SPROP_D")
        lines.append("# T -> TPRID_D")
        lines.append("# M -> PMASS_D/PWIDTH_D")
        lines.append("# P -> POW_D")
        lines2 = []
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # running extrema, returned so the caller can size the arrays;
        # max_leg_number is the most negative internal-leg number
        max_iconfig=0
        max_leg_number=0

        ########################################################
        # this is for standard processes with [(real=)XXX]
        ########################################################
        for iFKS, conf in enumerate(matrix_element.get_fks_info_list()):
            # Fortran 1-based FKS index
            iFKS=iFKS+1
            iconfig = 0
            s_and_t_channels = []
            mapconfigs = []
            fks_matrix_element=matrix_element.real_processes[conf['n_me'] - 1].matrix_element
            base_diagrams = fks_matrix_element.get('base_amplitude').get('diagrams')
            model = fks_matrix_element.get('base_amplitude').get('process').get('model')
            minvert = min([max([len(vert.get('legs')) for vert in \
                                diag.get('vertices')]) for diag in base_diagrams])

            lines.append("# ")
            lines.append("# nFKSprocess %d" % iFKS)
            for idiag, diag in enumerate(base_diagrams):
                if any([len(vert.get('legs')) > minvert for vert in
                        diag.get('vertices')]):
                    # Only 3-vertices allowed in configs.inc
                    continue
                iconfig = iconfig + 1
                helas_diag = fks_matrix_element.get('diagrams')[idiag]
                mapconfigs.append(helas_diag.get('number'))
                lines.append("# Diagram %d for nFKSprocess %d" % \
                             (helas_diag.get('number'),iFKS))
                # Correspondance between the config and the amplitudes
                lines.append("C %4d %4d %4d " % (iFKS,iconfig,
                                                 helas_diag.get('number')))

                # Need to reorganize the topology so that we start with all
                # final state external particles and work our way inwards
                schannels, tchannels = helas_diag.get('amplitudes')[0].\
                                            get_s_and_t_channels(ninitial, model, 990)

                s_and_t_channels.append([schannels, tchannels])

                # Write out propagators for s-channel and t-channel vertices
                allchannels = schannels
                if len(tchannels) > 1:
                    # Write out tchannels only if there are any non-trivial ones
                    allchannels = schannels + tchannels

                for vert in allchannels:
                    daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                    last_leg = vert.get('legs')[-1]
                    lines.append("F %4d %4d %4d %4d" % \
                                 (iFKS,last_leg.get('number'), iconfig, len(daughters)))
                    for d in daughters:
                        lines.append("D %4d" % d)
                    if vert in schannels:
                        lines.append("S %4d %4d %4d %10d" % \
                                     (iFKS,last_leg.get('number'), iconfig,
                                      last_leg.get('id')))
                    elif vert in tchannels[:-1]:
                        lines.append("T %4d %4d %4d %10d" % \
                                     (iFKS,last_leg.get('number'), iconfig,
                                      abs(last_leg.get('id'))))

                    # update what the array sizes (mapconfig,iforest,etc) will be
                    max_leg_number = min(max_leg_number,last_leg.get('number'))
                    max_iconfig = max(max_iconfig,iconfig)

            # Write out number of configs
            lines.append("# Number of configs for nFKSprocess %d" % iFKS)
            lines.append("C %4d %4d %4d" % (iFKS,0,iconfig))

            # write the props.inc information
            lines2.append("# ")
            particle_dict = fks_matrix_element.get('processes')[0].get('model').\
                            get('particle_dict')

            for iconf, configs in enumerate(s_and_t_channels):
                for vertex in configs[0] + configs[1][:-1]:
                    leg = vertex.get('legs')[-1]
                    if leg.get('id') not in particle_dict:
                        # Fake propagator used in multiparticle vertices
                        pow_part = 0
                    else:
                        particle = particle_dict[leg.get('id')]

                        pow_part = 1 + int(particle.is_boson())

                    lines2.append("M %4d %4d %4d %10d " % \
                                  (iFKS,leg.get('number'), iconf + 1, leg.get('id')))
                    lines2.append("P %4d %4d %4d %4d " % \
                                  (iFKS,leg.get('number'), iconf + 1, pow_part))

        ########################################################
        # this is for [LOonly=XXX]
        ########################################################
        if not matrix_element.get_fks_info_list():
            born_me = matrix_element.born_me
            # as usual, in this case we assume just one FKS configuration
            # exists with diagrams corresponding to born ones X the ij -> i,j
            # splitting. Here j is chosen to be the last colored particle in
            # the particle list
            bornproc = born_me.get('processes')[0]
            colors = [l.get('color') for l in bornproc.get('legs')]

            fks_i = len(colors)
            # use the last colored particle if it exists, or
            # just the last
            fks_j=1
            for cpos, col in enumerate(colors):
                if col != 1:
                    fks_j = cpos+1
                    fks_j_id = [l.get('id') for l in bornproc.get('legs')][cpos]

            # for the moment, if j is initial-state, we do nothing
            if fks_j > ninitial:
                iFKS=1
                iconfig = 0
                s_and_t_channels = []
                mapconfigs = []
                base_diagrams = born_me.get('base_amplitude').get('diagrams')
                model = born_me.get('base_amplitude').get('process').get('model')
                minvert = min([max([len(vert.get('legs')) for vert in \
                                    diag.get('vertices')]) for diag in base_diagrams])

                lines.append("# ")
                lines.append("# nFKSprocess %d" % iFKS)
                for idiag, diag in enumerate(base_diagrams):
                    if any([len(vert.get('legs')) > minvert for vert in
                            diag.get('vertices')]):
                        # Only 3-vertices allowed in configs.inc
                        continue
                    iconfig = iconfig + 1
                    helas_diag = born_me.get('diagrams')[idiag]
                    mapconfigs.append(helas_diag.get('number'))
                    lines.append("# Diagram %d for nFKSprocess %d" % \
                                 (helas_diag.get('number'),iFKS))
                    # Correspondance between the config and the amplitudes
                    lines.append("C %4d %4d %4d " % (iFKS,iconfig,
                                                     helas_diag.get('number')))

                    # Need to reorganize the topology so that we start with all
                    # final state external particles and work our way inwards
                    schannels, tchannels = helas_diag.get('amplitudes')[0].\
                                                get_s_and_t_channels(ninitial, model, 990)

                    s_and_t_channels.append([schannels, tchannels])

                    #the first thing to write is the splitting ij -> i,j
                    lines.append("F %4d %4d %4d %4d" % \
                                 (iFKS,-1,iconfig,2))
                    #(iFKS,last_leg.get('number'), iconfig, len(daughters)))
                    lines.append("D %4d" % nexternal)
                    lines.append("D %4d" % fks_j)
                    lines.append("S %4d %4d %4d %10d" % \
                                 (iFKS,-1, iconfig,fks_j_id))
                    # now we continue with all the other vertices of the diagrams;
                    # we need to shift the 'last_leg' by 1 and replace leg fks_j with -1

                    # Write out propagators for s-channel and t-channel vertices
                    allchannels = schannels
                    if len(tchannels) > 1:
                        # Write out tchannels only if there are any non-trivial ones
                        allchannels = schannels + tchannels

                    for vert in allchannels:
                        daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                        last_leg = vert.get('legs')[-1]
                        lines.append("F %4d %4d %4d %4d" % \
                                     (iFKS,last_leg.get('number')-1, iconfig, len(daughters)))

                        # legs with negative number in daughters have to be shifted by -1
                        for i_dau in range(len(daughters)):
                            if daughters[i_dau] < 0:
                                daughters[i_dau] += -1
                        # finally relable fks with -1 if it appears in daughters
                        if fks_j in daughters:
                            daughters[daughters.index(fks_j)] = -1
                        for d in daughters:
                            lines.append("D %4d" % d)
                        if vert in schannels:
                            lines.append("S %4d %4d %4d %10d" % \
                                         (iFKS,last_leg.get('number')-1, iconfig,
                                          last_leg.get('id')))
                        elif vert in tchannels[:-1]:
                            lines.append("T %4d %4d %4d %10d" % \
                                         (iFKS,last_leg.get('number')-1, iconfig,
                                          abs(last_leg.get('id'))))

                        # update what the array sizes (mapconfig,iforest,etc) will be
                        max_leg_number = min(max_leg_number,last_leg.get('number')-1)
                        max_iconfig = max(max_iconfig,iconfig)

                # Write out number of configs
                lines.append("# Number of configs for nFKSprocess %d" % iFKS)
                lines.append("C %4d %4d %4d" % (iFKS,0,iconfig))

                # write the props.inc information
                lines2.append("# ")
                particle_dict = born_me.get('processes')[0].get('model').\
                                get('particle_dict')

                for iconf, configs in enumerate(s_and_t_channels):
                    # the fake ij -> i,j propagator comes first
                    lines2.append("M %4d %4d %4d %10d " % \
                                  (iFKS,-1, iconf + 1, fks_j_id))
                    pow_part = 1 + int(particle_dict[fks_j_id].is_boson())
                    lines2.append("P %4d %4d %4d %4d " % \
                                  (iFKS,-1, iconf + 1, pow_part))
                    for vertex in configs[0] + configs[1][:-1]:
                        leg = vertex.get('legs')[-1]
                        if leg.get('id') not in particle_dict:
                            # Fake propagator used in multiparticle vertices
                            pow_part = 0
                        else:
                            particle = particle_dict[leg.get('id')]

                            pow_part = 1 + int(particle.is_boson())

                        lines2.append("M %4d %4d %4d %10d " % \
                                      (iFKS,leg.get('number')-1, iconf + 1, leg.get('id')))
                        lines2.append("P %4d %4d %4d %4d " % \
                                      (iFKS,leg.get('number')-1, iconf + 1, pow_part))

        # Write the file
        open(filename,'w').write('\n'.join(lines+lines2))

        return max_iconfig, max_leg_number
1569 - def write_leshouche_info_declarations(self, writer, nfksconfs, 1570 maxproc, maxflow, nexternal, fortran_model):
1571 """writes the declarations for the variables relevant for leshouche_info 1572 """ 1573 lines = [] 1574 lines.append('integer maxproc_used, maxflow_used') 1575 lines.append('parameter (maxproc_used = %d)' % maxproc) 1576 lines.append('parameter (maxflow_used = %d)' % maxflow) 1577 lines.append('integer idup_d(%d,%d,maxproc_used)' % (nfksconfs, nexternal)) 1578 lines.append('integer mothup_d(%d,%d,%d,maxproc_used)' % (nfksconfs, 2, nexternal)) 1579 lines.append('integer icolup_d(%d,%d,%d,maxflow_used)' % (nfksconfs, 2, nexternal)) 1580 lines.append('integer niprocs_d(%d)' % (nfksconfs)) 1581 1582 writer.writelines(lines)
1583 1584
1585 - def write_genps(self, writer, maxproc,ngraphs,ncolor,maxflow, fortran_model):
1586 """writes the genps.inc file 1587 """ 1588 lines = [] 1589 lines.append("include 'maxparticles.inc'") 1590 lines.append("include 'maxconfigs.inc'") 1591 lines.append("integer maxproc,ngraphs,ncolor,maxflow") 1592 lines.append("parameter (maxproc=%d,ngraphs=%d,ncolor=%d,maxflow=%d)" % \ 1593 (maxproc,ngraphs,ncolor,maxflow)) 1594 writer.writelines(lines)
1595 1596
1597 - def write_leshouche_info_file(self, filename, matrix_element):
1598 """writes the leshouche_info.inc file which contains 1599 the LHA informations for all the real emission processes 1600 """ 1601 lines = [] 1602 lines.append("# I -> IDUP_D") 1603 lines.append("# M -> MOTHUP_D") 1604 lines.append("# C -> ICOLUP_D") 1605 nfksconfs = len(matrix_element.get_fks_info_list()) 1606 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1607 1608 maxproc = 0 1609 maxflow = 0 1610 for i, conf in enumerate(matrix_element.get_fks_info_list()): 1611 (newlines, nprocs, nflows) = self.get_leshouche_lines( 1612 matrix_element.real_processes[conf['n_me'] - 1].matrix_element, i + 1) 1613 lines.extend(newlines) 1614 maxproc = max(maxproc, nprocs) 1615 maxflow = max(maxflow, nflows) 1616 1617 # this is for LOonly 1618 if not matrix_element.get_fks_info_list(): 1619 (newlines, nprocs, nflows) = self.get_leshouche_lines_dummy(matrix_element.born_me, 1) 1620 lines.extend(newlines) 1621 1622 # Write the file 1623 open(filename,'w').write('\n'.join(lines)) 1624 1625 return nfksconfs, maxproc, maxflow, nexternal
1626 1627
    def write_real_wrappers(self, writer_me, writer_lum, matrix_element, sqsolist, fortran_model):
        """writes the wrappers which allows to chose among the different real matrix elements
        and among the different parton luminosities and
        among the various helper functions for the split-orders

        writer_me: FortranWriter for the smatrix_real wrapper
        writer_lum: FortranWriter for the dlum wrapper
        Returns 0.
        """

        # the real me wrapper
        text = \
"""subroutine smatrix_real(p, wgt)
implicit none
include 'nexternal.inc'
double precision p(0:3, nexternal)
double precision wgt
integer nfksprocess
common/c_nfksprocess/nfksprocess
"""
        # the pdf wrapper
        text1 = \
"""\n\ndouble precision function dlum()
implicit none
integer nfksprocess
common/c_nfksprocess/nfksprocess
"""

        if matrix_element.real_processes:
            # dispatch on the current nfksprocess value: one branch per
            # FKS configuration, calling the matching smatrix/dlum routine
            for n, info in enumerate(matrix_element.get_fks_info_list()):
                text += \
"""if (nfksprocess.eq.%(n)d) then
call smatrix%(n_me)d(p, wgt)
else""" % {'n': n + 1, 'n_me' : info['n_me']}
                text1 += \
"""if (nfksprocess.eq.%(n)d) then
call dlum_%(n_me)d(dlum)
else""" % {'n': n + 1, 'n_me' : info['n_me']}

            # close the if/else-if ladders with an error branch
            text += \
"""
write(*,*) 'ERROR: invalid n in real_matrix :', nfksprocess
stop\n endif
return \n end
"""
            text1 += \
"""
write(*,*) 'ERROR: invalid n in dlum :', nfksprocess\n stop\n endif
return \nend
"""
        else:
            # no real-emission processes (LOonly): trivial wrappers
            text += \
"""
wgt=0d0
return
end
"""
            text1 += \
"""
call dlum_0(dlum)
return
end
"""

        # Write the file
        writer_me.writelines(text)
        writer_lum.writelines(text1)
        return 0
1693 - def draw_feynman_diagrams(self, matrix_element):
1694 """Create the ps files containing the feynman diagrams for the born process, 1695 as well as for all the real emission processes""" 1696 1697 filename = 'born.ps' 1698 plot = draw.MultiEpsDiagramDrawer( 1699 matrix_element.born_me.get('base_amplitude').get('diagrams'), 1700 filename, 1701 model=matrix_element.born_me.get('processes')[0].get('model'), 1702 amplitude=True, diagram_type='born') 1703 plot.draw() 1704 1705 for n, fksreal in enumerate(matrix_element.real_processes): 1706 filename = 'matrix_%d.ps' % (n + 1) 1707 plot = draw.MultiEpsDiagramDrawer(fksreal.matrix_element.\ 1708 get('base_amplitude').get('diagrams'), 1709 filename, 1710 model=fksreal.matrix_element.\ 1711 get('processes')[0].get('model'), 1712 amplitude=True, diagram_type='real') 1713 plot.draw()
1714 1715
1716 - def write_real_matrix_elements(self, matrix_element, fortran_model):
1717 """writes the matrix_i.f files which contain the real matrix elements""" 1718 1719 sqsorders_list = [] 1720 for n, fksreal in enumerate(matrix_element.real_processes): 1721 filename = 'matrix_%d.f' % (n + 1) 1722 ncalls, ncolors, nsplitorders, nsqsplitorders = \ 1723 self.write_split_me_fks(\ 1724 writers.FortranWriter(filename), 1725 fksreal.matrix_element, 1726 fortran_model, 'real', "%d" % (n+1)) 1727 sqsorders_list.append(nsqsplitorders) 1728 return sqsorders_list
1729 1730 1731
1732 - def write_extra_cnt_wrapper(self, writer, cnt_me_list, fortran_model):
1733 """write a wrapper for the extra born counterterms that may be 1734 present e.g. if the process has gluon at the born 1735 """ 1736 1737 replace_dict = {'ncnt': max(len(cnt_me_list),1)} 1738 1739 # this is the trivial case with no cnt. 1740 # fill everything with 0s (or 1 for color) 1741 if not cnt_me_list: 1742 replace_dict['cnt_charge_lines'] = \ 1743 "data (cnt_charge(1,i), i=1,nexternalB) / nexternalB * 0d0 /" 1744 replace_dict['cnt_color_lines'] = \ 1745 "data (cnt_color(1,i), i=1,nexternalB) / nexternalB * 1 /" 1746 replace_dict['cnt_pdg_lines'] = \ 1747 "data (cnt_pdg(1,i), i=1,nexternalB) / nexternalB * 0 /" 1748 1749 replace_dict['iflines'] = '' 1750 1751 else: 1752 iflines = '' 1753 cnt_charge_lines = '' 1754 cnt_color_lines = '' 1755 cnt_pdg_lines = '' 1756 1757 for i, cnt in enumerate(cnt_me_list): 1758 icnt = i+1 1759 if not iflines: 1760 iflines = \ 1761 'if (icnt.eq.%d) then\n call sborn_cnt%d(p,cnts)\n' % (icnt, icnt) 1762 else: 1763 iflines += \ 1764 'else if (icnt.eq.%d) then\n call sborn_cnt%d(p,cnts)\n' % (icnt, icnt) 1765 1766 cnt_charge_lines += 'data (cnt_charge(%d,i), i=1,nexternalB) / %s /\n' % \ 1767 (icnt, ', '.join(['%19.15fd0' % l['charge'] for l in cnt['processes'][0]['legs']])) 1768 cnt_color_lines += 'data (cnt_color(%d,i), i=1,nexternalB) / %s /\n' % \ 1769 (icnt, ', '.join(['%d' % l['color'] for l in cnt['processes'][0]['legs']])) 1770 cnt_pdg_lines += 'data (cnt_pdg(%d,i), i=1,nexternalB) / %s /\n' % \ 1771 (icnt, ', '.join(['%d' % l['id'] for l in cnt['processes'][0]['legs']])) 1772 1773 iflines += 'endif\n' 1774 1775 replace_dict['iflines'] = iflines 1776 replace_dict['cnt_color_lines'] = cnt_color_lines 1777 replace_dict['cnt_charge_lines'] = cnt_charge_lines 1778 replace_dict['cnt_pdg_lines'] = cnt_pdg_lines 1779 1780 file = open(pjoin(_file_path, \ 1781 'iolibs/template_files/extra_cnt_wrapper_fks.inc')).read() 1782 1783 file = file % replace_dict 1784 1785 # Write the file 1786 writer.writelines(file)
1787 1788 1789 1790 #=========================================================================== 1791 # write_split_me_fks 1792 #===========================================================================
1793 - def write_split_me_fks(self, writer, matrix_element, fortran_model, 1794 proc_type, proc_prefix='',start_dict={}):
1795 """Export a matrix element using the split_order format 1796 proc_type is either born, bhel, real or cnt, 1797 start_dict contains additional infos to be put in replace_dict""" 1798 1799 if not matrix_element.get('processes') or \ 1800 not matrix_element.get('diagrams'): 1801 return 0 1802 1803 if not isinstance(writer, writers.FortranWriter): 1804 raise writers.FortranWriter.FortranWriterError(\ 1805 "writer not FortranWriter") 1806 1807 if 'sa_symmetry 'not in self.opt: 1808 self.opt['sa_symmetry']=False 1809 1810 # Set lowercase/uppercase Fortran code 1811 writers.FortranWriter.downcase = False 1812 1813 replace_dict = {'global_variable':'', 'amp2_lines':''} 1814 if proc_prefix: 1815 replace_dict['proc_prefix'] = proc_prefix 1816 1817 # update replace_dict according to start_dict 1818 for k,v in start_dict.items(): 1819 replace_dict[k] = v 1820 1821 # Extract helas calls 1822 helas_calls = fortran_model.get_matrix_element_calls(\ 1823 matrix_element) 1824 replace_dict['helas_calls'] = "\n".join(helas_calls) 1825 1826 # Extract version number and date from VERSION file 1827 info_lines = self.get_mg5_info_lines() 1828 replace_dict['info_lines'] = info_lines 1829 1830 # Set the size of Wavefunction 1831 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 1832 replace_dict['wavefunctionsize'] = 20 1833 else: 1834 replace_dict['wavefunctionsize'] = 8 1835 1836 # Extract process info lines 1837 process_lines = self.get_process_info_lines(matrix_element) 1838 replace_dict['process_lines'] = process_lines 1839 1840 # Extract number of external particles 1841 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1842 replace_dict['nexternal'] = nexternal 1843 1844 # Extract ncomb 1845 ncomb = matrix_element.get_helicity_combinations() 1846 replace_dict['ncomb'] = ncomb 1847 1848 # Extract helicity lines 1849 helicity_lines = self.get_helicity_lines(matrix_element) 1850 replace_dict['helicity_lines'] = 
helicity_lines 1851 1852 # Extract overall denominator 1853 # Averaging initial state color, spin, and identical FS particles 1854 replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element) 1855 1856 # Extract ngraphs 1857 ngraphs = matrix_element.get_number_of_amplitudes() 1858 replace_dict['ngraphs'] = ngraphs 1859 1860 # Extract nwavefuncs 1861 nwavefuncs = matrix_element.get_number_of_wavefunctions() 1862 replace_dict['nwavefuncs'] = nwavefuncs 1863 1864 # Extract ncolor 1865 ncolor = max(1, len(matrix_element.get('color_basis'))) 1866 replace_dict['ncolor'] = ncolor 1867 1868 replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor() 1869 1870 # Extract color data lines 1871 color_data_lines = self.get_color_data_lines(matrix_element) 1872 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 1873 1874 if self.opt['export_format']=='standalone_msP': 1875 # For MadSpin need to return the AMP2 1876 amp2_lines = self.get_amp2_lines(matrix_element, [] ) 1877 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 1878 replace_dict['global_variable'] = " Double Precision amp2(NGRAPHS)\n common/to_amps/ amp2\n" 1879 1880 # JAMP definition, depends on the number of independent split orders 1881 split_orders=matrix_element.get('processes')[0].get('split_orders') 1882 if len(split_orders)==0: 1883 replace_dict['nSplitOrders']='' 1884 # Extract JAMP lines 1885 jamp_lines, nb_temp_jamp = self.get_JAMP_lines(matrix_element) 1886 else: 1887 split_orders_name = matrix_element['processes'][0]['split_orders'] 1888 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 1889 replace_dict['nAmpSplitOrders']=len(amp_orders) 1890 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 1891 replace_dict['nSplitOrders']=len(split_orders) 1892 amp_so = self.get_split_orders_lines( 1893 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 1894 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 1895 
replace_dict['ampsplitorders']='\n'.join(amp_so) 1896 # add a comment line 1897 replace_dict['sqsplitorders']= \ 1898 'C the values listed below are for %s\n' % ', '.join(split_orders_name) 1899 replace_dict['sqsplitorders']+='\n'.join(sqamp_so) 1900 jamp_lines, nb_temp_jamp = self.get_JAMP_lines_split_order(\ 1901 matrix_element,amp_orders,split_order_names=split_orders) 1902 1903 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 1904 replace_dict['nb_temp_jamp'] = nb_temp_jamp 1905 1906 if proc_type=='born': 1907 file = open(pjoin(_file_path, \ 1908 'iolibs/template_files/bornmatrix_splitorders_fks.inc')).read() 1909 elif proc_type=='bhel': 1910 file = open(pjoin(_file_path, \ 1911 'iolibs/template_files/born_hel_splitorders_fks.inc')).read() 1912 elif proc_type=='real': 1913 file = open(pjoin(_file_path, \ 1914 'iolibs/template_files/realmatrix_splitorders_fks.inc')).read() 1915 elif proc_type=='cnt': 1916 # MZ this is probably not the best way to go 1917 file = open(pjoin(_file_path, \ 1918 'iolibs/template_files/born_cnt_splitorders_fks.inc')).read() 1919 1920 file = file % replace_dict 1921 1922 # Write the file 1923 writer.writelines(file) 1924 1925 return len(list([call for call in helas_calls if call.find('#') != 0])), ncolor, \ 1926 replace_dict['nAmpSplitOrders'], replace_dict['nSqAmpSplitOrders']
1927 1928
1929 - def write_pdf_calls(self, matrix_element, fortran_model):
1930 """writes the parton_lum_i.f files which contain the real matrix elements. 1931 If no real emission existst, write the one for the born""" 1932 1933 if matrix_element.real_processes: 1934 for n, fksreal in enumerate(matrix_element.real_processes): 1935 filename = 'parton_lum_%d.f' % (n + 1) 1936 self.write_pdf_file(writers.FortranWriter(filename), 1937 fksreal.matrix_element, n + 1, 1938 fortran_model) 1939 else: 1940 filename = 'parton_lum_0.f' 1941 self.write_pdf_file(writers.FortranWriter(filename), 1942 matrix_element.born_me, 0, 1943 fortran_model)
1944 1945
    def generate_born_fks_files(self, matrix_element, fortran_model, me_number, path):
        """generates the files needed for the born amplitude in the P* directory, which will
        be needed by the P* directories

        Writes the born .inc configuration files (configs, props, leshouche,
        nhel, ngraphs, ncombs, coloramps, maxamps), then born.f / born_hel.f
        via write_split_me_fks, the color-linked borns b_sf_*.f and their
        dispatcher sborn_sf.f.  matrix_element is an FKS helas object whose
        born is matrix_element.born_me.
        """
        # current directory, apparently unused below — kept as-is
        pathdir = os.getcwd()

        born_me = matrix_element.born_me

        # the .inc files
        filename = 'born_conf.inc'
        nconfigs, mapconfigs, s_and_t_channels = \
            self.write_born_conf_file(
                writers.FortranWriter(filename),
                born_me, fortran_model)

        filename = 'born_props.inc'
        self.write_born_props_file(
            writers.FortranWriter(filename),
            born_me, s_and_t_channels, fortran_model)

        filename = 'born_leshouche.inc'
        nflows = self.write_born_leshouche_file(writers.FortranWriter(filename),
                                                born_me, fortran_model)

        filename = 'born_nhel.inc'
        self.write_born_nhel_file(writers.FortranWriter(filename),
                                  born_me, nflows, fortran_model)

        filename = 'born_ngraphs.inc'
        self.write_ngraphs_file(writers.FortranWriter(filename), nconfigs)

        filename = 'ncombs.inc'
        self.write_ncombs_file(writers.FortranWriter(filename),
                               born_me, fortran_model)

        filename = 'born_coloramps.inc'
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  mapconfigs, born_me, fortran_model)

        # the born ME's and color/charge links
        sqsorders_list = []
        filename = 'born.f'

        # extra template substitutions passed down to write_split_me_fks
        born_dict = {}
        born_dict['nconfs'] = max(len(matrix_element.get_fks_info_list()), 1)

        den_factor_lines = self.get_den_factor_lines(matrix_element)
        born_dict['den_factor_lines'] = '\n'.join(den_factor_lines)

        ij_lines = self.get_ij_lines(matrix_element)
        born_dict['ij_lines'] = '\n'.join(ij_lines)

        #this is to skip computing amp_split_cnt if the process has no corrections
        if not matrix_element.real_processes:
            born_dict['skip_amp_cnt'] = 'goto 999 ! LOonly, no need to compute amp_split_cnt'
        else:
            born_dict['skip_amp_cnt'] = ''

        calls_born, ncolor_born, norders, nsqorders = \
            self.write_split_me_fks(writers.FortranWriter(filename),
                                    born_me, fortran_model, 'born', '',
                                    start_dict = born_dict)

        filename = 'born_maxamps.inc'
        maxamps = len(matrix_element.get('diagrams'))
        maxflows = ncolor_born
        # NOTE(review): the max(...) below iterates over matrix_element.born_me
        # but ignores the loop variable `me`, and queries
        # matrix_element.get('processes') rather than born_me — looks like it
        # was meant to be len(born_me.get('processes')); confirm before changing
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(matrix_element.get('processes')) for me in \
                                     matrix_element.born_me]), 1)


        # the second call is for the born_hel file. use the same writer
        # function
        filename = 'born_hel.f'
        calls_born, ncolor_born, norders, nsqorders = \
            self.write_split_me_fks(writers.FortranWriter(filename),
                                    born_me, fortran_model, 'bhel', '',
                                    start_dict = born_dict)

        sqsorders_list.append(nsqorders)

        # one b_sf_XXX.f file per color link
        self.color_link_files = []
        for j in range(len(matrix_element.color_links)):
            filename = 'b_sf_%3.3d.f' % (j + 1)
            self.color_link_files.append(filename)
            self.write_b_sf_fks(writers.FortranWriter(filename),
                                matrix_element, j,
                                fortran_model)

        #write the sborn_sf.f and the b_sf_files
        filename = 'sborn_sf.f'
        self.write_sborn_sf(writers.FortranWriter(filename),
                            matrix_element,
                            nsqorders,
                            fortran_model)
2042 2043 2044
    def generate_virtuals_from_OLP(self, process_list, export_path, OLP):
        """Generates the library for computing the loop matrix elements
        necessary for this process using the OLP specified.

        Workflow: write the BLHA OLE_order.lh order file, run the OLP
        generation (GoSam-specific steps are handled explicitly), patch
        Source/make_opts with the linker flags for the produced library,
        then parse the returned OLE_order.olc contract file and propagate
        the process labels into the SubProcesses directories.
        """

        # Start by writing the BLHA order file
        virtual_path = pjoin(export_path, 'OLP_virtuals')
        if not os.path.exists(virtual_path):
            os.makedirs(virtual_path)
        filename = os.path.join(virtual_path, 'OLE_order.lh')
        self.write_lh_order(filename, process_list, OLP)

        fail_msg = 'Generation of the virtuals with %s failed.\n' % OLP + \
            'Please check the virt_generation.log file in %s.' \
            % str(pjoin(virtual_path, 'virt_generation.log'))

        # Perform some tasks specific to certain OLP's
        if OLP == 'GoSam':
            # copy the GoSam driver script and configuration, link the param card
            cp(pjoin(self.mgme_dir, 'Template', 'loop_material', 'OLP_specifics',
                     'GoSam', 'makevirt'), pjoin(virtual_path, 'makevirt'))
            cp(pjoin(self.mgme_dir, 'Template', 'loop_material', 'OLP_specifics',
                     'GoSam', 'gosam.rc'), pjoin(virtual_path, 'gosam.rc'))
            ln(pjoin(export_path, 'Cards', 'param_card.dat'), virtual_path)
            # Now generate the process
            logger.info('Generating the loop matrix elements with %s...' % OLP)
            virt_generation_log = \
                open(pjoin(virtual_path, 'virt_generation.log'), 'w')
            retcode = subprocess.call(['./makevirt'], cwd=virtual_path,
                          stdout=virt_generation_log, stderr=virt_generation_log)
            virt_generation_log.close()
            # Check what extension is used for the share libraries on this system
            possible_other_extensions = ['so', 'dylib']
            shared_lib_ext = 'so'
            for ext in possible_other_extensions:
                if os.path.isfile(pjoin(virtual_path, 'Virtuals', 'lib',
                                        'libgolem_olp.' + ext)):
                    shared_lib_ext = ext

            # Now check that everything got correctly generated
            files_to_check = ['olp_module.mod', str(pjoin('lib',
                                            'libgolem_olp.' + shared_lib_ext))]
            if retcode != 0 or any([not os.path.exists(pjoin(virtual_path,
                            'Virtuals', f)) for f in files_to_check]):
                raise fks_common.FKSProcessError(fail_msg)
            # link the library to the lib folder
            ln(pjoin(virtual_path, 'Virtuals', 'lib', 'libgolem_olp.' + shared_lib_ext),
               pjoin(export_path, 'lib'))

        # Specify in make_opts the right library necessitated by the OLP
        make_opts_content = open(pjoin(export_path, 'Source', 'make_opts')).read()
        make_opts = open(pjoin(export_path, 'Source', 'make_opts'), 'w')
        if OLP == 'GoSam':
            if platform.system().lower() == 'darwin':
                # On mac the -rpath is not supported and the path of the dynamic
                # library is automatically wired in the executable
                make_opts_content = make_opts_content.replace('libOLP=',
                    'libOLP=-Wl,-lgolem_olp')
            else:
                # On other platforms the option , -rpath= path to libgolem.so is necessary
                # Using a relative path is not ideal because the file libgolem.so is not
                # copied on the worker nodes.
                # make_opts_content=make_opts_content.replace('libOLP=',
                #                'libOLP=-Wl,-rpath=../$(LIBDIR) -lgolem_olp')
                # Using the absolute path is working in the case where the disk of the
                # front end machine is mounted on all worker nodes as well.
                make_opts_content = make_opts_content.replace('libOLP=',
                    'libOLP=-Wl,-rpath=' + str(pjoin(export_path, 'lib')) + ' -lgolem_olp')


        make_opts.write(make_opts_content)
        make_opts.close()

        # A priori this is generic to all OLP's

        # Parse the contract file returned and propagate the process label to
        # the include of the BinothLHA.f file
        proc_to_label = self.parse_contract_file(
            pjoin(virtual_path, 'OLE_order.olc'))

        self.write_BinothLHA_inc(process_list, proc_to_label, \
                                 pjoin(export_path, 'SubProcesses'))

        # Link the contract file to within the SubProcess directory
        ln(pjoin(virtual_path, 'OLE_order.olc'), pjoin(export_path, 'SubProcesses'))
2128
2129 - def write_BinothLHA_inc(self, processes, proc_to_label, SubProcPath):
2130 """ Write the file Binoth_proc.inc in each SubProcess directory so as 2131 to provide the right process_label to use in the OLP call to get the 2132 loop matrix element evaluation. The proc_to_label is the dictionary of 2133 the format of the one returned by the function parse_contract_file.""" 2134 2135 for proc in processes: 2136 name = "P%s"%proc.shell_string() 2137 proc_pdgs=(tuple([leg.get('id') for leg in proc.get('legs') if \ 2138 not leg.get('state')]), 2139 tuple([leg.get('id') for leg in proc.get('legs') if \ 2140 leg.get('state')])) 2141 incFile = open(pjoin(SubProcPath, name,'Binoth_proc.inc'),'w') 2142 try: 2143 incFile.write( 2144 """ INTEGER PROC_LABEL 2145 PARAMETER (PROC_LABEL=%d)"""%(proc_to_label[proc_pdgs])) 2146 except KeyError: 2147 raise fks_common.FKSProcessError('Could not found the target'+\ 2148 ' process %s > %s in '%(str(proc_pdgs[0]),str(proc_pdgs[1]))+\ 2149 ' the proc_to_label argument in write_BinothLHA_inc.') 2150 incFile.close()
2151
2152 - def parse_contract_file(self, contract_file_path):
2153 """ Parses the BLHA contract file, make sure all parameters could be 2154 understood by the OLP and return a mapping of the processes (characterized 2155 by the pdg's of the initial and final state particles) to their process 2156 label. The format of the mapping is {((in_pdgs),(out_pdgs)):proc_label}. 2157 """ 2158 2159 proc_def_to_label = {} 2160 2161 if not os.path.exists(contract_file_path): 2162 raise fks_common.FKSProcessError('Could not find the contract file'+\ 2163 ' OLE_order.olc in %s.'%str(contract_file_path)) 2164 2165 comment_re=re.compile(r"^\s*#") 2166 proc_def_re=re.compile( 2167 r"^(?P<in_pdgs>(\s*-?\d+\s*)+)->(?P<out_pdgs>(\s*-?\d+\s*)+)\|"+ 2168 r"\s*(?P<proc_class>\d+)\s*(?P<proc_label>\d+)\s*$") 2169 line_OK_re=re.compile(r"^.*\|\s*OK") 2170 for line in open(contract_file_path): 2171 # Ignore comments 2172 if not comment_re.match(line) is None: 2173 continue 2174 # Check if it is a proc definition line 2175 proc_def = proc_def_re.match(line) 2176 if not proc_def is None: 2177 if int(proc_def.group('proc_class'))!=1: 2178 raise fks_common.FKSProcessError( 2179 'aMCatNLO can only handle loop processes generated by the OLP which have only '+\ 2180 ' process class attribute. Found %s instead in: \n%s'\ 2181 %(proc_def.group('proc_class'),line)) 2182 in_pdgs=tuple([int(in_pdg) for in_pdg in \ 2183 proc_def.group('in_pdgs').split()]) 2184 out_pdgs=tuple([int(out_pdg) for out_pdg in \ 2185 proc_def.group('out_pdgs').split()]) 2186 proc_def_to_label[(in_pdgs,out_pdgs)]=\ 2187 int(proc_def.group('proc_label')) 2188 continue 2189 # For the other types of line, just make sure they end with | OK 2190 if line_OK_re.match(line) is None: 2191 raise fks_common.FKSProcessError( 2192 'The OLP could not process the following line: \n%s'%line) 2193 2194 return proc_def_to_label
2195 2196
    def generate_virt_directory(self, loop_matrix_element, fortran_model, dir_name):
        """writes the V**** directory inside the P**** directories specified in
        dir_name

        Creates dir_name/MadLoop5_resources and dir_name/V<shell_string>,
        chdirs into the latter, writes the loop and born matrix-element files
        plus their .inc companions, draws the diagrams, and links the common
        MadLoop resources.  Returns the number of helas calls (0 on failure).
        """

        # remember where we are: this function works by chdir-ing into the
        # V* directory and restores the cwd at the end
        cwd = os.getcwd()

        matrix_element = loop_matrix_element

        # Create the MadLoop5_resources directory if not already existing
        dirpath = os.path.join(dir_name, 'MadLoop5_resources')
        try:
            os.mkdir(dirpath)
        except os.error as error:
            logger.warning(error.strerror + " " + dirpath)

        # Create the directory PN_xx_xxxxx in the specified path
        name = "V%s" % matrix_element.get('processes')[0].shell_string()
        dirpath = os.path.join(dir_name, name)

        try:
            os.mkdir(dirpath)
        except os.error as error:
            logger.warning(error.strerror + " " + dirpath)

        try:
            os.chdir(dirpath)
        except os.error:
            logger.error('Could not cd to directory %s' % dirpath)
            return 0

        logger.info('Creating files in directory %s' % name)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        calls = self.write_loop_matrix_element_v4(None, matrix_element, fortran_model)
        # The born matrix element, if needed
        # NOTE(review): this rebinds `calls`, discarding the loop-ME count
        # computed just above — confirm this is intended
        filename = 'born_matrix.f'
        calls = self.write_bornmatrix(
            writers.FortranWriter(filename),
            matrix_element,
            fortran_model)

        filename = 'nexternal.inc'
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = 'pmass.inc'
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = 'ngraphs.inc'
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(matrix_element.get_all_amplitudes()))

        # draw at most the first 1000 loop diagrams
        filename = "loop_matrix.ps"
        plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList(
              matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]),
              filename,
              model=matrix_element.get('processes')[0].get('model'),
              amplitude='')
        logger.info("Drawing loop Feynman diagrams for " + \
                    matrix_element.get('processes')[0].nice_string(print_weighted=False))
        plot.draw()

        filename = "born_matrix.ps"
        plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
             get('born_diagrams'), filename, model=matrix_element.get('processes')[0].\
             get('model'), amplitude='')
        logger.info("Generating born Feynman diagrams for " + \
                    matrix_element.get('processes')[0].nice_string(print_weighted=False))
        plot.draw()

        # We also need to write the overall maximum quantities for this group
        # of processes in 'global_specs.inc'. In aMCatNLO, there is always
        # only one process, so this is trivial
        self.write_global_specs(matrix_element, output_path=pjoin(dirpath, 'global_specs.inc'))
        open('unique_id.inc', 'w').write(
"""      integer UNIQUE_ID
      parameter(UNIQUE_ID=1)""")

        linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc',
                     'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f',
                     'MadLoopCommons.f', 'MadLoopParams.inc']

        # We should move to MadLoop5_resources directory from the SubProcesses
        ln(pjoin(os.path.pardir, os.path.pardir, 'MadLoopParams.dat'),
           pjoin('..', 'MadLoop5_resources'))

        for file in linkfiles:
            ln('../../%s' % file)

        # NOTE(review): shell-out for a symlink; ln()/os.symlink would avoid
        # the subshell — kept as-is to preserve behavior
        os.system("ln -s ../../makefile_loop makefile")

        linkfiles = ['mpmodule.mod']

        for file in linkfiles:
            ln('../../../lib/%s' % file)

        linkfiles = ['coef_specs.inc']

        for file in linkfiles:
            ln('../../../Source/DHELAS/%s' % file)

        # Return to original PWD
        os.chdir(cwd)

        if not calls:
            calls = 0
        return calls
2307 2308 2309 #=============================================================================== 2310 # write_lh_order 2311 #=============================================================================== 2312 #test written
2313 - def write_lh_order(self, filename, process_list, OLP='MadLoop'):
2314 """Creates the OLE_order.lh file. This function should be edited according 2315 to the OLP which is used. For now it is generic.""" 2316 2317 2318 if len(process_list)==0: 2319 raise fks_common.FKSProcessError('No matrix elements provided to '+\ 2320 'the function write_lh_order.') 2321 return 2322 2323 # We assume the orders to be common to all Subprocesses 2324 2325 orders = process_list[0].get('orders') 2326 if not orders: 2327 orders = {o : v / 2 for (o, v) in process_list[0].get('squared_orders').items()} 2328 if 'QED' in list(orders.keys()) and 'QCD' in list(orders.keys()): 2329 QED=orders['QED'] 2330 QCD=orders['QCD'] 2331 elif 'QED' in list(orders.keys()): 2332 QED=orders['QED'] 2333 QCD=0 2334 elif 'QCD' in list(orders.keys()): 2335 QED=0 2336 QCD=orders['QCD'] 2337 else: 2338 QED, QCD = fks_common.get_qed_qcd_orders_from_weighted(\ 2339 len(process_list[0].get('legs')), 2340 process_list[0].get('model').get('order_hierarchy'), 2341 orders['WEIGHTED']) 2342 2343 replace_dict = {} 2344 replace_dict['mesq'] = 'CHaveraged' 2345 replace_dict['corr'] = ' '.join(process_list[0].\ 2346 get('perturbation_couplings')) 2347 replace_dict['irreg'] = 'CDR' 2348 replace_dict['aspow'] = QCD 2349 replace_dict['aepow'] = QED 2350 replace_dict['modelfile'] = './param_card.dat' 2351 replace_dict['params'] = 'alpha_s' 2352 proc_lines=[] 2353 for proc in process_list: 2354 proc_lines.append('%s -> %s' % \ 2355 (' '.join(str(l['id']) for l in proc['legs'] if not l['state']), 2356 ' '.join(str(l['id']) for l in proc['legs'] if l['state']))) 2357 replace_dict['pdgs'] = '\n'.join(proc_lines) 2358 replace_dict['symfin'] = 'Yes' 2359 content = \ 2360 "#OLE_order written by MadGraph5_aMC@NLO\n\ 2361 \n\ 2362 MatrixElementSquareType %(mesq)s\n\ 2363 CorrectionType %(corr)s\n\ 2364 IRregularisation %(irreg)s\n\ 2365 AlphasPower %(aspow)d\n\ 2366 AlphaPower %(aepow)d\n\ 2367 NJetSymmetrizeFinal %(symfin)s\n\ 2368 ModelFile %(modelfile)s\n\ 2369 Parameters %(params)s\n\ 2370 \n\ 
2371 # process\n\ 2372 %(pdgs)s\n\ 2373 " % replace_dict 2374 2375 file = open(filename, 'w') 2376 file.write(content) 2377 file.close 2378 return
2379 2380 2381 #=============================================================================== 2382 # write_born_fks 2383 #=============================================================================== 2384 # test written
    def write_born_fks(self, writer, fksborn, fortran_model):
        """Export a matrix element to a born.f file in MadFKS format

        NOTE(review): deprecated path — it logs 'This function should not be
        called' after writing; born.f is normally produced through
        write_split_me_fks.  Returns (ncalls, ncolor), or 0 when the born
        has no processes/diagrams.
        """

        matrix_element = fksborn.born_me

        if not matrix_element.get('processes') or \
           not matrix_element.get('diagrams'):
            return 0

        if not isinstance(writer, writers.FortranWriter):
            raise writers.FortranWriter.FortranWriterError(\
                "writer not FortranWriter")

        # Add information relevant for FxFx matching:
        # Maximum QCD power in all the contributions
        max_qcd_order = 0
        for diag in matrix_element.get('diagrams'):
            orders = diag.calculate_orders()
            if 'QCD' in orders:
                max_qcd_order = max(max_qcd_order, orders['QCD'])
        # count massless colored final-state particles per process
        max_n_light_final_partons = max(len([1 for id in proc.get_final_ids()
                 if proc.get('model').get_particle(id).get('mass')=='ZERO' and
                    proc.get('model').get_particle(id).get('color')>1])
                 for proc in matrix_element.get('processes'))
        # Maximum number of final state light jets to be matched
        self.proc_characteristic['max_n_matched_jets'] = max(
            self.proc_characteristic['max_n_matched_jets'],
            min(max_qcd_order, max_n_light_final_partons))


        # Set lowercase/uppercase Fortran code
        writers.FortranWriter.downcase = False

        # substitutions for the born_fks.inc template
        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines


        # Extract ncomb
        ncomb = matrix_element.get_helicity_combinations()
        replace_dict['ncomb'] = ncomb

        # Extract helicity lines
        helicity_lines = self.get_helicity_lines(matrix_element)
        replace_dict['helicity_lines'] = helicity_lines

        # Extract IC line
        ic_line = self.get_ic_line(matrix_element)
        replace_dict['ic_line'] = ic_line

        # Extract overall denominator
        # Averaging initial state color, spin, and identical FS particles
        #den_factor_line = get_den_factor_line(matrix_element)

        # Extract ngraphs
        ngraphs = matrix_element.get_number_of_amplitudes()
        replace_dict['ngraphs'] = ngraphs

        # Extract nwavefuncs
        nwavefuncs = matrix_element.get_number_of_wavefunctions()
        replace_dict['nwavefuncs'] = nwavefuncs

        # Extract ncolor
        ncolor = max(1, len(matrix_element.get('color_basis')))
        replace_dict['ncolor'] = ncolor

        # Extract color data lines
        color_data_lines = self.get_color_data_lines(matrix_element)
        replace_dict['color_data_lines'] = "\n".join(color_data_lines)

        # Extract helas calls
        helas_calls = fortran_model.get_matrix_element_calls(\
                    matrix_element)
        replace_dict['helas_calls'] = "\n".join(helas_calls)

        # Extract amp2 lines
        amp2_lines = self.get_amp2_lines(matrix_element)
        replace_dict['amp2_lines'] = '\n'.join(amp2_lines)

        # Extract JAMP lines
        jamp_lines, nb_tmp_jamp = self.get_JAMP_lines(matrix_element)
        replace_dict['jamp_lines'] = '\n'.join(jamp_lines)
        replace_dict['nb_temp_jamp'] = nb_tmp_jamp


        # Set the size of Wavefunction: larger if any particle has spin
        # code 4 or 5 (spin 3/2 or 2)
        if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]):
            replace_dict['wavefunctionsize'] = 20
        else:
            replace_dict['wavefunctionsize'] = 8

        # Extract glu_ij_lines
        ij_lines = self.get_ij_lines(fksborn)
        replace_dict['ij_lines'] = '\n'.join(ij_lines)

        # Extract den_factor_lines
        den_factor_lines = self.get_den_factor_lines(fksborn)
        replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines)

        # Extract the number of FKS process
        replace_dict['nconfs'] = max(len(fksborn.get_fks_info_list()), 1)

        file = open(os.path.join(_file_path, \
                          'iolibs/template_files/born_fks.inc')).read()
        file = file % replace_dict

        # Write the file
        writer.writelines(file)
        logger.warning('This function should not be called')

        return len([call for call in helas_calls if call.find('#') != 0]), ncolor
2502 2503
    def write_born_hel(self, writer, fksborn, fortran_model):
        """Export a matrix element to a born_hel.f file in MadFKS format

        Fills the born_fks_hel.inc template with the same quantities as
        write_born_fks (minus the FxFx bookkeeping).  Returns None, or 0
        when the born has no processes/diagrams.
        """

        matrix_element = fksborn.born_me

        if not matrix_element.get('processes') or \
           not matrix_element.get('diagrams'):
            return 0

        if not isinstance(writer, writers.FortranWriter):
            raise writers.FortranWriter.FortranWriterError(\
                "writer not FortranWriter")
        # Set lowercase/uppercase Fortran code
        writers.FortranWriter.downcase = False

        # substitutions for the born_fks_hel.inc template
        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines


        # Extract ncomb
        ncomb = matrix_element.get_helicity_combinations()
        replace_dict['ncomb'] = ncomb

        # Extract helicity lines
        helicity_lines = self.get_helicity_lines(matrix_element)
        replace_dict['helicity_lines'] = helicity_lines

        # Extract IC line
        ic_line = self.get_ic_line(matrix_element)
        replace_dict['ic_line'] = ic_line

        # Extract overall denominator
        # Averaging initial state color, spin, and identical FS particles
        #den_factor_line = get_den_factor_line(matrix_element)

        # Extract ngraphs
        ngraphs = matrix_element.get_number_of_amplitudes()
        replace_dict['ngraphs'] = ngraphs

        # Extract nwavefuncs
        nwavefuncs = matrix_element.get_number_of_wavefunctions()
        replace_dict['nwavefuncs'] = nwavefuncs

        # Extract ncolor
        ncolor = max(1, len(matrix_element.get('color_basis')))
        replace_dict['ncolor'] = ncolor

        # Extract color data lines
        color_data_lines = self.get_color_data_lines(matrix_element)
        replace_dict['color_data_lines'] = "\n".join(color_data_lines)

        # Extract amp2 lines
        amp2_lines = self.get_amp2_lines(matrix_element)
        replace_dict['amp2_lines'] = '\n'.join(amp2_lines)

        # Extract JAMP lines
        jamp_lines, nb_tmp_jamp = self.get_JAMP_lines(matrix_element)
        replace_dict['jamp_lines'] = '\n'.join(jamp_lines)
        replace_dict['nb_temp_jamp'] = nb_tmp_jamp

        # Extract den_factor_lines
        den_factor_lines = self.get_den_factor_lines(fksborn)
        replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines)
        # NOTE(review): misc.sprint is a debug print left in the code, and
        # the line right after discards the den_factor_lines just computed
        # (the template then receives an empty string) — presumably a
        # leftover debugging change; confirm before cleaning up
        misc.sprint(replace_dict['den_factor_lines'])
        replace_dict['den_factor_lines'] = ''

        # Extract the number of FKS process
        replace_dict['nconfs'] = len(fksborn.get_fks_info_list())

        file = open(os.path.join(_file_path, \
                          'iolibs/template_files/born_fks_hel.inc')).read()
        file = file % replace_dict

        # Write the file
        writer.writelines(file)

        return
2588 2589 2590 #=============================================================================== 2591 # write_born_sf_fks 2592 #=============================================================================== 2593 #test written
def write_sborn_sf(self, writer, me, nsqorders, fortran_model):
    """Creates the sborn_sf.f file, containing the calls to the different
    color linked borns"""

    replace_dict = {}
    color_links = me.color_links

    replace_dict['nsqorders'] = nsqorders
    replace_dict['iflines_col'] = ''

    # Build the Fortran if/elseif chain dispatching the spectator pair
    # (m, n) to the matching color-linked born routine sb_sf_XXX
    for i, c_link in enumerate(color_links):
        ilink = i + 1
        iff = 'if' if i == 0 else 'elseif'
        m, n = c_link['link']
        if m != n:
            # a link between two different partons matches either ordering
            replace_dict['iflines_col'] += \
"c link partons %(m)d and %(n)d \n\
%(iff)s ((m.eq.%(m)d .and. n.eq.%(n)d).or.(m.eq.%(n)d .and. n.eq.%(m)d)) then \n\
call sb_sf_%(ilink)3.3d(p_born,wgt_col)\n" \
                % {'m': m, 'n': n, 'iff': iff, 'ilink': ilink}
        else:
            replace_dict['iflines_col'] += \
"c link partons %(m)d and %(n)d \n\
%(iff)s (m.eq.%(m)d .and. n.eq.%(n)d) then \n\
call sb_sf_%(ilink)3.3d(p_born,wgt_col)\n" \
                % {'m': m, 'n': n, 'iff': iff, 'ilink': ilink}

    if replace_dict['iflines_col']:
        replace_dict['iflines_col'] += 'endif\n'
    else:
        # this is when no color links are there
        replace_dict['iflines_col'] += \
            'write(*,*) \'Error in sborn_sf, no color links\'\nstop\n'

    # Context manager guarantees the template file handle is closed
    with open(os.path.join(_file_path,
                           'iolibs/template_files/sborn_sf_fks.inc')) as tfile:
        text = tfile.read() % replace_dict
    writer.writelines(text)
2633 2634
def get_chargeprod(self, charge_list, ninitial, n, m):
    """Return the product of the charges of particles m and n.
    Special sign conventions may be needed for initial/final state particles
    """
    # legs are numbered from 1, the charge list from 0
    charge_n = charge_list[n - 1]
    charge_m = charge_list[m - 1]
    return charge_n * charge_m
2640 2641 2642 #=============================================================================== 2643 # write_b_sf_fks 2644 #=============================================================================== 2645 #test written
def write_b_sf_fks(self, writer, fksborn, ilink, fortran_model):
    """Create the b_sf_xxx.f file for the ilink-th soft linked born.

    Writes, through *writer*, the template b_sf_xxx_splitorders_fks.inc
    filled with process/helicity/color information for the color link
    fksborn.color_links[ilink].  Returns (0, ncolor1) on success and 0 for
    an empty matrix element.
    """

    # shallow copy: the color basis is swapped below (JAMP2) without
    # touching the original born matrix element
    matrix_element = copy.copy(fksborn.born_me)

    if not matrix_element.get('processes') or \
       not matrix_element.get('diagrams'):
        return 0

    if not isinstance(writer, writers.FortranWriter):
        raise writers.FortranWriter.FortranWriterError(
            "writer not FortranWriter")
    # Set lowercase/uppercase Fortran code
    writers.FortranWriter.downcase = False

    link = fksborn.color_links[ilink]

    replace_dict = {}

    # Fortran routines are numbered from 1
    replace_dict['ilink'] = ilink + 1

    # Extract version number and date from VERSION file
    replace_dict['info_lines'] = self.get_mg5_info_lines()

    # Extract process info lines, with the two spectator partons appended
    process_lines = self.get_process_info_lines(matrix_element)
    replace_dict['process_lines'] = process_lines + \
        "\nc spectators: %d %d \n" % tuple(link['link'])

    # Extract ncomb
    replace_dict['ncomb'] = matrix_element.get_helicity_combinations()

    # Extract helicity lines
    replace_dict['helicity_lines'] = self.get_helicity_lines(matrix_element)

    # Extract IC line
    replace_dict['ic_line'] = self.get_ic_line(matrix_element)

    # Extract den_factor_lines
    den_factor_lines = self.get_den_factor_lines(fksborn)
    replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines)

    # Extract ngraphs
    replace_dict['ngraphs'] = matrix_element.get_number_of_amplitudes()

    # Extract nwavefuncs
    replace_dict['nwavefuncs'] = matrix_element.get_number_of_wavefunctions()

    # Sizes of the original and of the linked color basis (at least 1)
    ncolor1 = max(1, len(link['orig_basis']))
    replace_dict['ncolor1'] = ncolor1
    ncolor2 = max(1, len(link['link_basis']))
    replace_dict['ncolor2'] = ncolor2

    # Extract color data lines
    color_data_lines = self.get_color_data_lines_from_color_matrix(
        link['link_matrix'])
    replace_dict['color_data_lines'] = "\n".join(color_data_lines)

    # Extract amp2 lines
    amp2_lines = self.get_amp2_lines(matrix_element)
    replace_dict['amp2_lines'] = '\n'.join(amp2_lines)

    # JAMP definition, depends on the number of independent split orders
    split_orders = matrix_element.get('processes')[0].get('split_orders')
    if len(split_orders) == 0:
        replace_dict['nSplitOrders'] = ''
        # Extract JAMP lines
        jamp_lines, nb_tmp_jamp = self.get_JAMP_lines(
            matrix_element, JAMP_format="JAMP1(%s)")
    else:
        squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
        replace_dict['nAmpSplitOrders'] = len(amp_orders)
        replace_dict['nSqAmpSplitOrders'] = len(squared_orders)
        replace_dict['nSplitOrders'] = len(split_orders)
        amp_so = self.get_split_orders_lines(
            [amp_order[0] for amp_order in amp_orders], 'AMPSPLITORDERS')
        sqamp_so = self.get_split_orders_lines(squared_orders, 'SQSPLITORDERS')
        replace_dict['ampsplitorders'] = '\n'.join(amp_so)
        replace_dict['sqsplitorders'] = '\n'.join(sqamp_so)
        jamp_lines, nb_tmp_jamp = self.get_JAMP_lines_split_order(
            matrix_element, amp_orders, split_order_names=split_orders,
            JAMP_format="JAMP1(%s,{0})")

    replace_dict['jamp1_lines'] = '\n'.join(jamp_lines)
    replace_dict['nb_temp_jamp'] = nb_tmp_jamp

    # Repeat with the linked color basis to obtain the JAMP2 lines
    matrix_element.set('color_basis', link['link_basis'])
    if len(split_orders) == 0:
        replace_dict['nSplitOrders'] = ''
        # Extract JAMP lines
        jamp_lines, nb_tmp_jamp = self.get_JAMP_lines(
            matrix_element, JAMP_format="JAMP2(%s)")
    else:
        jamp_lines, nb_tmp_jamp = self.get_JAMP_lines_split_order(
            matrix_element, amp_orders, split_order_names=split_orders,
            JAMP_format="JAMP2(%s,{0})")
    replace_dict['jamp2_lines'] = '\n'.join(jamp_lines)
    # keep the larger of the two temporary-JAMP counts
    replace_dict['nb_temp_jamp'] = max(nb_tmp_jamp,
                                       replace_dict['nb_temp_jamp'])

    # Extract the number of FKS process
    replace_dict['nconfs'] = len(fksborn.get_fks_info_list())

    # Context manager guarantees the template file handle is closed
    with open(os.path.join(
            _file_path,
            'iolibs/template_files/b_sf_xxx_splitorders_fks.inc')) as tfile:
        text = tfile.read() % replace_dict

    # Write the file
    writer.writelines(text)

    return 0, ncolor1
2763 2764 2765 #=============================================================================== 2766 # write_born_nhel_file_list 2767 #===============================================================================
def write_born_nhel_file(self, writer, me, nflows, fortran_model):
    """Write the born_nhel.inc file for MG4. Write the maximum as they are
    typically used for setting array limits."""

    # number of helicity combinations of the born ME and of color flows
    n_hel = me.get_helicity_combinations()
    content = ("integer max_bhel, max_bcol \n"
               "parameter (max_bhel=%d)\nparameter(max_bcol=%d)"
               % (n_hel, nflows))

    # Write the file
    writer.writelines(content)

    return True
2781 2782 #=============================================================================== 2783 # write_nfksconfigs_file 2784 #===============================================================================
def write_nfksconfigs_file(self, writer, fksborn, fortran_model):
    """Writes the content of nFKSconfigs.inc, which just gives the
    total FKS dirs as a parameter.
    nFKSconfigs is always >=1 (use a fake configuration for LOonly)"""
    # at least one (possibly fake) configuration is always present
    nconfs = max(len(fksborn.get_fks_info_list()), 1)
    content = \
""" INTEGER FKS_CONFIGS
PARAMETER (FKS_CONFIGS=%(nconfs)d)

""" % {'nconfs': nconfs}

    writer.writelines(content)
2798 2799 2800 #=============================================================================== 2801 # write_fks_info_file 2802 #===============================================================================
def write_fks_info_file(self, writer, fksborn, fortran_model):  # test_written
    """Writes the content of fks_info.inc, which lists the informations on the
    possible splittings of the born ME.
    nconfs is always >=1 (use a fake configuration for LOonly).
    The fake configuration use an 'antigluon' (id -21, color=8) as i_fks and
    the last colored particle as j_fks.
    Returns the set of splitting types appearing in the FKS configurations
    (empty for LOonly).
    """

    replace_dict = {}
    fks_info_list = fksborn.get_fks_info_list()
    split_orders = fksborn.born_me['processes'][0]['split_orders']
    replace_dict['nconfs'] = max(len(fks_info_list), 1)
    replace_dict['nsplitorders'] = len(split_orders)
    replace_dict['splitorders_name'] = ', '.join(split_orders)

    bool_dict = {True: '.true.', False: '.false.'}
    # all splitting types appearing in any FKS configuration
    split_types_return = set(sum(
        [info['fks_info']['splitting_type'] for info in fks_info_list], []))

    # this is for processes with 'real' or 'all' as NLO mode
    if len(fks_info_list) > 0:
        replace_dict['fks_i_values'] = ', '.join(
            ['%d' % info['fks_info']['i'] for info in fks_info_list])
        replace_dict['fks_j_values'] = ', '.join(
            ['%d' % info['fks_info']['j'] for info in fks_info_list])
        replace_dict['extra_cnt_values'] = ', '.join(
            ['%d' % (info['fks_info']['extra_cnt_index'] + 1)
             for info in fks_info_list])
        # extra array to be filled, with the type of the splitting of the
        # born and of the extra cnt
        isplitorder_born = []
        isplitorder_cnt = []
        for info in fks_info_list:
            # fill 0 if no extra_cnt is needed
            if info['fks_info']['extra_cnt_index'] == -1:
                isplitorder_born.append(0)
                isplitorder_cnt.append(0)
            else:
                # the 0th component of split_type correspond to the born,
                # the 1st to the extra_cnt
                isplitorder_born.append(split_orders.index(
                    info['fks_info']['splitting_type'][0]) + 1)
                isplitorder_cnt.append(split_orders.index(
                    info['fks_info']['splitting_type'][1]) + 1)

        replace_dict['isplitorder_born_values'] = \
            ', '.join(['%d' % n for n in isplitorder_born])
        replace_dict['isplitorder_cnt_values'] = \
            ', '.join(['%d' % n for n in isplitorder_cnt])

        replace_dict['need_color_links'] = ', '.join(
            [bool_dict[info['fks_info']['need_color_links']]
             for info in fks_info_list])
        replace_dict['need_charge_links'] = ', '.join(
            [bool_dict[info['fks_info']['need_charge_links']]
             for info in fks_info_list])

        col_lines = []
        pdg_lines = []
        charge_lines = []
        fks_j_from_i_lines = []
        split_type_lines = []
        for i, info in enumerate(fks_info_list):
            real_proc = fksborn.real_processes[info['n_me'] - 1]
            col_lines.append(
                'DATA (PARTICLE_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /'
                % (i + 1, ', '.join('%d' % col for col in real_proc.colors)))
            pdg_lines.append(
                'DATA (PDG_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /'
                % (i + 1, ', '.join('%d' % pdg for pdg in info['pdgs'])))
            charge_lines.append(
                'DATA (PARTICLE_CHARGE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /'
                % (i + 1, ', '.join('%19.15fd0' % charg
                                    for charg in real_proc.charges)))
            fks_j_from_i_lines.extend(
                self.get_fks_j_from_i_lines(real_proc, i + 1))
            split_type_lines.append(
                'DATA (SPLIT_TYPE_D (%d, IPOS), IPOS=1, %d) / %s /' %
                (i + 1, len(split_orders),
                 ', '.join([bool_dict[ordd in info['fks_info']['splitting_type']]
                            for ordd in split_orders])))
    else:
        # this is for 'LOonly', generate a fake FKS configuration with
        # - i_fks = nexternal, pdg type = -21 and color =8
        # - j_fks = the last colored particle
        bornproc = fksborn.born_me.get('processes')[0]
        pdgs = [l.get('id') for l in bornproc.get('legs')] + [-21]
        colors = [l.get('color') for l in bornproc.get('legs')] + [8]
        charges = [l.get('charge') for l in bornproc.get('legs')] + [0.]

        fks_i = len(colors)
        # first look for a colored leg (j stays 0 if none is found)
        fks_j = 0
        for cpos, col in enumerate(colors[:-1]):
            if col != 1:
                fks_j = cpos + 1
        # if no colored leg exist, look for a charged leg
        if fks_j == 0:
            for cpos, chg in enumerate(charges[:-1]):
                if chg != 0.:
                    fks_j = cpos + 1
        # no coloured or charged particle found. Pick the final particle
        # in the (Born) process
        if fks_j == 0:
            fks_j = len(colors) - 1

        # this is special for 2->1 processes: j must be picked initial
        # keep in mind that colors include the fake extra particle
        if len(colors) == 4:
            fks_j = 2

        replace_dict['fks_i_values'] = str(fks_i)
        replace_dict['fks_j_values'] = str(fks_j)
        replace_dict['extra_cnt_values'] = '0'
        replace_dict['isplitorder_born_values'] = '0'
        replace_dict['isplitorder_cnt_values'] = '0'
        # set both color/charge links to true
        replace_dict['need_color_links'] = '.true.'
        replace_dict['need_charge_links'] = '.true.'

        col_lines = [
            'DATA (PARTICLE_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /'
            % ', '.join([str(col) for col in colors])]
        pdg_lines = [
            'DATA (PDG_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /'
            % ', '.join([str(pdg) for pdg in pdgs])]
        charge_lines = [
            'DATA (PARTICLE_CHARGE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /'
            % ', '.join('%19.15fd0' % charg for charg in charges)]
        fks_j_from_i_lines = [
            'DATA (FKS_J_FROM_I_D(1, %d, JPOS), JPOS = 0, 1) / 1, %d /'
            % (fks_i, fks_j)]
        split_type_lines = [
            'DATA (SPLIT_TYPE_D (%d, IPOS), IPOS=1, %d) / %s /' %
            (1, len(split_orders),
             ', '.join([bool_dict[False]] * len(split_orders)))]

    replace_dict['col_lines'] = '\n'.join(col_lines)
    replace_dict['pdg_lines'] = '\n'.join(pdg_lines)
    replace_dict['charge_lines'] = '\n'.join(charge_lines)
    replace_dict['fks_j_from_i_lines'] = '\n'.join(fks_j_from_i_lines)
    replace_dict['split_type_lines'] = '\n'.join(split_type_lines)

    # Context manager guarantees the template file handle is closed
    with open(os.path.join(_file_path,
                           'iolibs/template_files/fks_info.inc')) as tfile:
        content = tfile.read() % replace_dict

    if not isinstance(writer, writers.FortranWriter):
        raise writers.FortranWriter.FortranWriterError(
            "writer not FortranWriter")
    # Set lowercase/uppercase Fortran code
    writers.FortranWriter.downcase = False

    writer.writelines(content)

    return split_types_return
2947 2948 2949 #=============================================================================== 2950 # write_pdf_file 2951 #===============================================================================
def write_pdf_file(self, writer, matrix_element, n, fortran_model):
    # test written
    """Write the auto_dsig.f file for MadFKS, which contains
    pdf call information.

    *n* is the index of this matrix element inside the P* directory.
    Returns 0 (writing nothing) for an empty matrix element; raises
    FortranWriterError unless the process is 1 -> N or 2 -> N.
    """

    if not matrix_element.get('processes') or \
       not matrix_element.get('diagrams'):
        return 0

    nexternal, ninitial = matrix_element.get_nexternal_ninitial()

    if ninitial < 1 or ninitial > 2:
        raise writers.FortranWriter.FortranWriterError(
            """Need ninitial = 1 or 2 to write auto_dsig file""")

    replace_dict = {}

    replace_dict['N_me'] = n

    # Extract version number and date from VERSION file
    replace_dict['info_lines'] = self.get_mg5_info_lines()

    # Extract process info lines
    replace_dict['process_lines'] = self.get_process_info_lines(matrix_element)

    # PDF variable declarations, data statements and call lines
    pdf_vars, pdf_data, pdf_lines = \
        self.get_pdf_lines_mir(matrix_element, ninitial, False, False)
    replace_dict['pdf_vars'] = pdf_vars
    replace_dict['pdf_data'] = pdf_data
    replace_dict['pdf_lines'] = pdf_lines

    # Context manager guarantees the template file handle is closed
    with open(os.path.join(
            _file_path,
            'iolibs/template_files/parton_lum_n_fks.inc')) as tfile:
        text = tfile.read() % replace_dict

    # Write the file
    writer.writelines(text)
2990 2991 2992 2993 #=============================================================================== 2994 # write_coloramps_file 2995 #===============================================================================
def write_coloramps_file(self, writer, mapconfigs, me, fortran_model):
    """Write the coloramps.inc file for MadEvent"""

    # array is (ncolorflows, nconfigs, 1); at least one color flow
    n_flows = max(len(list(me.get('color_basis').keys())), 1)
    lines = ["logical icolamp(%d,%d,1)" % (n_flows, len(mapconfigs))]
    lines.extend(self.get_icolamp_lines(mapconfigs, me, 1))

    # Write the file
    writer.writelines(lines)

    return True
3010 3011 3012 #=============================================================================== 3013 # write_leshouche_file_list 3014 #===============================================================================
def write_born_leshouche_file(self, writer, me, fortran_model):
    """Write the leshouche.inc file for MG4"""

    # Extract number of external particles
    (nexternal, ninitial) = me.get_nexternal_ninitial()

    lines = []

    for iproc, proc in enumerate(me.get('processes')):
        legs = proc.get_legs_with_decays()
        lines.append("DATA (IDUP(i,%d),i=1,%d)/%s/" %
                     (iproc + 1, nexternal,
                      ",".join([str(l.get('id')) for l in legs])))
        for i in [1, 2]:
            mothup = [ "%3r" % 0 ] * ninitial + \
                     [ "%3r" % i ] * (nexternal - ninitial)
            lines.append("DATA (MOTHUP(%d,i,%3r),i=1,%2r)/%s/" %
                         (i, iproc + 1, nexternal, ",".join(mothup)))

        # Color connections corresponding to the JAMPs are written for
        # the first subprocess only
        if iproc != 0:
            continue
        if not me.get('color_basis'):
            # If no color basis, just output trivial color flow
            for i in [1, 2]:
                lines.append("DATA (ICOLUP(%d,i, 1),i=1,%2r)/%s/" %
                             (i, nexternal,
                              ",".join([ "%3r" % 0 ] * nexternal)))
            color_flow_list = []
        else:
            # First build a color representation dictionnary
            repr_dict = {}
            for l in legs:
                repr_dict[l.get('number')] = \
                    proc.get('model').get_particle(l.get('id')).get_color() \
                    * (-1) ** (1 + l.get('state'))
            # Get the list of color flows
            color_flow_list = me.get('color_basis').\
                color_flow_decomposition(repr_dict, ninitial)
            # And output them properly
            for cf_i, color_flow_dict in enumerate(color_flow_list):
                for i in [0, 1]:
                    lines.append(
                        "DATA (ICOLUP(%d,i,%3r),i=1,%2r)/%s/" %
                        (i + 1, cf_i + 1, nexternal,
                         ",".join(["%3r" % color_flow_dict[l.get('number')][i]
                                   for l in legs])))

    # Write the file
    writer.writelines(lines)

    # NOTE(review): this returns 0 for a trivial color basis, while the
    # analogous get_leshouche_lines reports nflow = 1 in that case --
    # confirm callers expect the difference.
    return len(color_flow_list)
3067 3068 3069 #=============================================================================== 3070 # write_born_conf_file 3071 #===============================================================================
def write_born_conf_file(self, writer, me, fortran_model):
    """Write the configs.inc file for the list of born matrix-elements.

    Emits the declarations, the mapconfig/iforest/sprop/tprid data and the
    gForceBW data for every 3-vertex diagram of the born amplitude.
    Returns (iconfig, mapconfigs, s_and_t_channels).
    """

    # Extract number of external particles
    (nexternal, ninitial) = me.get_nexternal_ninitial()
    model = me.get('processes')[0].get('model')
    lines = ['', 'C Here are the congifurations']
    lines_BW = ['', 'C Here are the BWs']

    iconfig = 0
    nschannels = []

    # gForceBW is never forced to .true. in the born configs.
    # BUGFIX: the original mapped False to the malformed Fortran literal
    # '.false' (missing trailing dot); both entries now emit '.false.'.
    booldict = {True: '.false.', False: '.false.'}

    max_leg_number = 0

    ###### first get the configurations
    s_and_t_channels = []
    mapconfigs = []
    lines.extend(['C %s' % proc.nice_string()
                  for proc in me.get('processes')])
    base_diagrams = me.get('base_amplitude').get('diagrams')
    minvert = min([max([len(vert.get('legs')) for vert in
                        diag.get('vertices')]) for diag in base_diagrams])

    for idiag, diag in enumerate(base_diagrams):
        if any([len(vert.get('legs')) > minvert for vert in
                diag.get('vertices')]):
            # Only 3-vertices allowed in configs.inc
            continue
        iconfig = iconfig + 1
        helas_diag = me.get('diagrams')[idiag]
        mapconfigs.append(helas_diag.get('number'))
        lines.append("# Diagram %d, Amplitude %d" %
                     (helas_diag.get('number'),
                      helas_diag.get('amplitudes')[0]['number']))
        # Correspondance between the config and the amplitudes
        lines.append("data mapconfig(%4d)/%4d/" %
                     (iconfig, helas_diag.get('amplitudes')[0]['number']))

        # Need to reorganize the topology so that we start with all
        # final state external particles and work our way inwards
        schannels, tchannels = helas_diag.get('amplitudes')[0].\
            get_s_and_t_channels(ninitial, model, 990)

        s_and_t_channels.append([schannels, tchannels])

        # Write out propagators for s-channel and t-channel vertices
        allchannels = schannels
        if len(tchannels) > 1:
            # Write out tchannels only if there are any non-trivial ones
            allchannels = schannels + tchannels

        for vert in allchannels:
            daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
            last_leg = vert.get('legs')[-1]
            lines.append("data (iforest(ifr,%3d,%4d),ifr=1,%d)/%s/" %
                         (last_leg.get('number'), iconfig, len(daughters),
                          ",".join(["%3d" % d for d in daughters])))
            if vert in schannels:
                lines.append("data sprop(%4d,%4d)/%8d/" %
                             (last_leg.get('number'), iconfig,
                              last_leg.get('id')))
            elif vert in tchannels[:-1]:
                lines.append("data tprid(%4d,%4d)/%8d/" %
                             (last_leg.get('number'), iconfig,
                              abs(last_leg.get('id'))))

            # internal legs carry negative numbers; track the deepest one
            max_leg_number = min(max_leg_number, last_leg.get('number'))

    ##### Write out number of configs
    lines.append("# Number of configs")
    lines.append("data mapconfig(0)/%4d/" % (iconfig))

    ###### finally the BWs
    for iconf, config in enumerate(s_and_t_channels):
        schannels = config[0]
        nschannels.append(len(schannels))
        for vertex in schannels:
            # For the resulting leg, pick out whether it comes from
            # decay or not, as given by the from_group flag
            leg = vertex.get('legs')[-1]
            lines_BW.append("data gForceBW(%d,%d)/%s/" %
                            (leg.get('number'), iconf + 1,
                             booldict[leg.get('from_group')]))

    # lines for the declarations
    firstlines = []
    firstlines.append('integer ifr')
    firstlines.append('integer lmaxconfigsb_used\nparameter (lmaxconfigsb_used=%d)' % iconfig)
    firstlines.append('integer max_branchb_used\nparameter (max_branchb_used=%d)' % -max_leg_number)
    firstlines.append('integer mapconfig(0 : lmaxconfigsb_used)')
    firstlines.append('integer iforest(2, -max_branchb_used:-1, lmaxconfigsb_used)')
    firstlines.append('integer sprop(-max_branchb_used:-1, lmaxconfigsb_used)')
    firstlines.append('integer tprid(-max_branchb_used:-1, lmaxconfigsb_used)')
    firstlines.append('logical gforceBW(-max_branchb_used : -1, lmaxconfigsb_used)')

    # Write the file
    writer.writelines(firstlines + lines + lines_BW)

    return iconfig, mapconfigs, s_and_t_channels
3179 3180 3181 #=============================================================================== 3182 # write_born_props_file 3183 #===============================================================================
def write_born_props_file(self, writer, me, s_and_t_channels, fortran_model):
    """Write the configs.inc file for the list of born matrix-elements"""

    prop_lines = ['', 'C Here are the propagators']

    particle_dict = me.get('processes')[0].get('model').get('particle_dict')

    for iconf, configs in enumerate(s_and_t_channels):
        # all s-channels plus every t-channel vertex but the last
        for vertex in configs[0] + configs[1][:-1]:
            leg = vertex.get('legs')[-1]
            pid = leg.get('id')
            if pid not in particle_dict:
                # Fake propagator used in multiparticle vertices
                mass, width, pow_part = 'zero', 'zero', 0
            else:
                particle = particle_dict[pid]
                # Get mass (wrapped in abs() unless exactly 'zero')
                mass = particle.get('mass')
                if mass.lower() != 'zero':
                    mass = "abs(%s)" % mass
                # Get width (same convention)
                width = particle.get('width')
                if width.lower() != 'zero':
                    width = "abs(%s)" % width
                # pow is 2 for bosons, 1 for fermions
                pow_part = 1 + int(particle.is_boson())

            prop_lines.append("pmass(%3d,%4d) = %s" %
                              (leg.get('number'), iconf + 1, mass))
            prop_lines.append("pwidth(%3d,%4d) = %s" %
                              (leg.get('number'), iconf + 1, width))
            prop_lines.append("pow(%3d,%4d) = %d" %
                              (leg.get('number'), iconf + 1, pow_part))

    # Write the file
    writer.writelines(prop_lines)
3225 3226 3227 3228 3229 #=============================================================================== 3230 # write_dname_file 3231 #===============================================================================
def write_dname_file(self, writer, matrix_element, fortran_model):
    """Write the dname.mg file for MG4"""

    # directory name follows the shell string of the first process
    proc_name = matrix_element.get('processes')[0].shell_string()
    writer.write("DIRNAME=P%s" % proc_name + "\n")

    return True
3242 3243 3244 #=============================================================================== 3245 # write_iproc_file 3246 #===============================================================================
def write_iproc_file(self, writer, me_number):
    """Write the iproc.dat file for MG4"""

    # the file stores the 1-based process index
    content = "%d" % (me_number + 1)

    # Write the file, letting the writer format the line
    for formatted_line in writer.write_line(content):
        writer.write(formatted_line)
    return True
3256 3257 3258 #=============================================================================== 3259 # Helper functions 3260 #=============================================================================== 3261 3262 3263 #=============================================================================== 3264 # get_fks_j_from_i_lines 3265 #=============================================================================== 3266
def get_fks_j_from_i_lines(self, me, i=0):
    """generate the lines for fks.inc describing initializating the
    fks_j_from_i array"""
    lines = []
    if not me.isfinite:
        # one DATA statement per i_fks that has at least one allowed j_fks
        for i_fks, j_list in me.fks_j_from_i.items():
            if j_list:
                lines.append(
                    'DATA (FKS_J_FROM_I_D(%d, %d, JPOS), JPOS = 0, %d) / %d, %s /'
                    % (i, i_fks, len(j_list), len(j_list),
                       ', '.join(["%d" % j for j in j_list])))
    else:
        # finite-contribution special case: a single fixed entry
        lines.append(
            'DATA (FKS_J_FROM_I_D(%d, JPOS), JPOS = 0, %d) / %d, %s /'
            % (2, 1, 1, '1'))
    lines.append('')

    return lines


#===============================================================================
# get_leshouche_lines
#===============================================================================
def get_leshouche_lines(self, matrix_element, ime):
    # test written
    """Write the leshouche.inc file for MG4"""

    # Extract number of external particles
    (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

    lines = []
    for iproc, proc in enumerate(matrix_element.get('processes')):
        legs = proc.get_legs_with_decays()
        lines.append("I %4d %4d %s" %
                     (ime, iproc + 1,
                      " ".join([str(l.get('id')) for l in legs])))
        for i in [1, 2]:
            mothers = [ "%3d" % 0 ] * ninitial + \
                      [ "%3d" % i ] * (nexternal - ninitial)
            lines.append("M %4d %4d %4d %s" %
                         (ime, i, iproc + 1, " ".join(mothers)))

        # Color connections corresponding to the JAMPs are written for
        # the first subprocess only
        if iproc != 0:
            continue
        if not matrix_element.get('color_basis'):
            # If no color basis, just output trivial color flow
            for i in [1, 2]:
                lines.append("C %4d %4d 1 %s" %
                             (ime, i,
                              " ".join([ "%3d" % 0 ] * nexternal)))
            color_flow_list = []
            nflow = 1
        else:
            # First build a color representation dictionnary
            repr_dict = {}
            for l in legs:
                repr_dict[l.get('number')] = \
                    proc.get('model').get_particle(l.get('id')).get_color() \
                    * (-1) ** (1 + l.get('state'))
            # Get the list of color flows
            color_flow_list = matrix_element.get('color_basis').\
                color_flow_decomposition(repr_dict, ninitial)
            # And output them properly
            for cf_i, color_flow_dict in enumerate(color_flow_list):
                for i in [0, 1]:
                    lines.append(
                        "C %4d %4d %4d %s" %
                        (ime, i + 1, cf_i + 1,
                         " ".join(["%3d" % color_flow_dict[l.get('number')][i]
                                   for l in legs])))
            nflow = len(color_flow_list)

    nproc = len(matrix_element.get('processes'))

    return lines, nproc, nflow
3342 3343
def get_leshouche_lines_dummy(self, matrix_element, ime):
    #test written
    """As get_leshouche_lines, but for 'fake' real emission processes (LOonly
    In this case, write born color structure times ij -> i,j splitting)
    """

    # born legs' color representations; used to pick j_fks below
    bornproc = matrix_element.get('processes')[0]
    colors = [l.get('color') for l in bornproc.get('legs')]

    # NOTE(review): fks_i is computed but never used below -- confirm
    fks_i = len(colors)
    # use the last colored particle if it exists, or
    # just the last
    fks_j=1
    for cpos, col in enumerate(colors):
        if col != 1:
            fks_j = cpos+1

    # Extract number of external particles
    (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
    nexternal+=1 # remember, in this case matrix_element is born

    lines = []
    for iproc, proc in enumerate(matrix_element.get('processes')):
        # add the fake extra leg (an 'antigluon': id -21, color octet)
        legs = proc.get_legs_with_decays() + \
            [fks_common.FKSLeg({'id': -21,
                                'number': nexternal,
                                'state': True,
                                'fks': 'i',
                                'color': 8,
                                'charge': 0.,
                                'massless': True,
                                'spin': 3,
                                'is_part': True,
                                'self_antipart': True})]

        # "I" line: the PDG ids of all legs (including the fake one)
        lines.append("I %4d %4d %s" % \
            (ime, iproc + 1,
             " ".join([str(l.get('id')) for l in legs])))
        # "M" lines: mother information (0 for initial, i for final legs)
        for i in [1, 2]:
            lines.append("M %4d %4d %4d %s" % \
                (ime, i, iproc + 1,
                 " ".join([ "%3d" % 0 ] * ninitial + \
                          [ "%3d" % i ] * (nexternal - ninitial))))

        # Here goes the color connections corresponding to the JAMPs
        # Only one output, for the first subproc!
        if iproc == 0:
            # If no color basis, just output trivial color flow
            if not matrix_element.get('color_basis'):
                for i in [1, 2]:
                    lines.append("C %4d %4d 1 %s" % \
                        (ime, i,
                         " ".join([ "%3d" % 0 ] * nexternal)))
                color_flow_list = []
                nflow = 1

            else:
                # in this case the last particle (-21) has two color indices
                # and it has to be emitted by j_fks
                # First build a color representation dictionnary
                # (the fake leg legs[-1] is deliberately excluded here)
                repr_dict = {}
                for l in legs[:-1]:
                    repr_dict[l.get('number')] = \
                        proc.get('model').get_particle(l.get('id')).get_color()\
                        * (-1)**(1+l.get('state'))
                # Get the list of color flows
                color_flow_list = \
                    matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
                                                                               ninitial)
                # And output them properly
                for cf_i, color_flow_dict in enumerate(color_flow_list):
                    # we have to add the extra leg (-21), linked to the j_fks leg
                    # first, find the maximum color label
                    maxicol = max(sum(list(color_flow_dict.values()), []))
                    #then, replace the color labels
                    # anti == True when j_fks carries only an anticolor index
                    # NOTE(review): icol_j is assigned but never used -- confirm
                    if color_flow_dict[fks_j][0] == 0:
                        anti = True
                        icol_j = color_flow_dict[fks_j][1]
                    else:
                        anti = False
                        icol_j = color_flow_dict[fks_j][0]

                    # splice the new color line between j_fks and the fake
                    # leg: the fake leg inherits j_fks' old index on one
                    # side and the fresh label maxicol+1 on the other
                    # (the fake-leg entry is a tuple while the existing
                    # entries are lists that get mutated in place)
                    if anti:
                        color_flow_dict[nexternal] = (maxicol + 1, color_flow_dict[fks_j][1])
                        color_flow_dict[fks_j][1] = maxicol + 1
                    else:
                        color_flow_dict[nexternal] = (color_flow_dict[fks_j][0], maxicol + 1)
                        color_flow_dict[fks_j][0] = maxicol + 1

                    # "C" lines: color (i=0) and anticolor (i=1) indices
                    for i in [0, 1]:
                        lines.append("C %4d %4d %4d %s" % \
                            (ime, i + 1, cf_i + 1,
                             " ".join(["%3d" % color_flow_dict[l.get('number')][i] \
                                       for l in legs])))

                nflow = len(color_flow_list)

    nproc = len(matrix_element.get('processes'))

    return lines, nproc, nflow
3445 3446 3447 #=============================================================================== 3448 # get_den_factor_lines 3449 #===============================================================================
3450 - def get_den_factor_lines(self, fks_born, born_me=None):
3451 """returns the lines with the information on the denominator keeping care 3452 of the identical particle factors in the various real emissions 3453 If born_me is procided, it is used instead of fksborn.born_me""" 3454 3455 compensate = True 3456 if not born_me: 3457 born_me = fks_born.born_me 3458 compensate = False 3459 3460 lines = [] 3461 info_list = fks_born.get_fks_info_list() 3462 if info_list: 3463 # if the reals have been generated, fill with the corresponding average factor 3464 lines.append('INTEGER IDEN_VALUES(%d)' % len(info_list)) 3465 if not compensate: 3466 lines.append('DATA IDEN_VALUES /' + \ 3467 ', '.join(['%d' % ( 3468 born_me.get_denominator_factor()) \ 3469 for info in info_list]) + '/') 3470 else: 3471 lines.append('DATA IDEN_VALUES /' + \ 3472 ', '.join(['%d' % ( 3473 born_me.get_denominator_factor() / \ 3474 born_me['identical_particle_factor'] * \ 3475 fks_born.born_me['identical_particle_factor']) \ 3476 for info in info_list]) + '/') 3477 else: 3478 # otherwise use the born 3479 lines.append('INTEGER IDEN_VALUES(1)') 3480 lines.append('DATA IDEN_VALUES / %d /' \ 3481 % fks_born.born_me.get_denominator_factor()) 3482 3483 return lines
3484 3485 3486 #=============================================================================== 3487 # get_ij_lines 3488 #===============================================================================
3489 - def get_ij_lines(self, fks_born):
3490 """returns the lines with the information on the particle number of the born 3491 that splits""" 3492 info_list = fks_born.get_fks_info_list() 3493 lines = [] 3494 if info_list: 3495 # if the reals have been generated, fill with the corresponding value of ij if 3496 # ij is massless, or with 0 if ij is massive (no collinear singularity) 3497 ij_list = [info['fks_info']['ij']if \ 3498 fks_born.born_me['processes'][0]['legs'][info['fks_info']['ij']-1]['massless'] \ 3499 else 0 for info in info_list] 3500 lines.append('INTEGER IJ_VALUES(%d)' % len(info_list)) 3501 lines.append('DATA IJ_VALUES /' + ', '.join(['%d' % ij for ij in ij_list]) + '/') 3502 else: 3503 #otherwise just put zero 3504 lines.append('INTEGER IJ_VALUES(1)') 3505 lines.append('DATA IJ_VALUES / 0 /') 3506 3507 return lines
3508 3509
    def get_pdf_lines_mir(self, matrix_element, ninitial, subproc_group = False,\
                          mirror = False): #test written
        """Generate the PDF lines for the auto_dsig.f file.

        Returns a 3-tuple of Fortran code fragments (strings without the
        trailing newline):
          - DOUBLE PRECISION declarations, one PDF variable per
            initial-state particle and per beam,
          - DATA statements initializing those variables to 1D0,
          - the code evaluating the PDFs and combining them into PD(IPROC).
        When mirror is True the beam kinematics are swapped (ibeam runs 2,1
        instead of 1,2).  When subproc_group is True the beam index is taken
        through the IB() mapping used for grouped subprocesses.
        For ninitial == 1 (decay processes) there are no PDFs and the parton
        densities are simply set to 1d0.
        """

        processes = matrix_element.get('processes')
        model = processes[0].get('model')

        pdf_definition_lines = ""
        pdf_data_lines = ""
        pdf_lines = ""

        if ninitial == 1:
            # decay process: one trivial weight per subprocess
            pdf_lines = "PD(0) = 0d0\nIPROC = 0\n"
            for i, proc in enumerate(processes):
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC) = 1d0\n"
                pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n"
        else:
            # Pick out all initial state particles for the two beams
            initial_states = [sorted(list(set([p.get_initial_pdg(1) for \
                                               p in processes]))),
                              sorted(list(set([p.get_initial_pdg(2) for \
                                               p in processes])))]

            # Prepare all variable names
            pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \
                              sum(initial_states,[])])
            # sanitize particle names so they are valid Fortran identifiers
            for key,val in pdf_codes.items():
                pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m')

            # Set conversion from PDG code to number used in PDF calls
            pdgtopdf = {21: 0, 22: 7, -11: -8, 11: 8, -13: -9, 13: 9, -15: -10, 15: 10}
            # Fill in missing entries of pdgtopdf
            for pdg in sum(initial_states,[]):
                if not pdg in pdgtopdf and not pdg in list(pdgtopdf.values()):
                    pdgtopdf[pdg] = pdg
                elif pdg not in pdgtopdf and pdg in list(pdgtopdf.values()):
                    # If any particle has pdg code 7, we need to use something else
                    pdgtopdf[pdg] = 6000000 + pdg

            # Get PDF variable declarations for all initial states
            for i in [0,1]:
                pdf_definition_lines += "DOUBLE PRECISION " + \
                                        ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                                  for pdg in \
                                                  initial_states[i]]) + \
                                        "\n"

            # Get PDF data lines for all initial states
            for i in [0,1]:
                pdf_data_lines += "DATA " + \
                                  ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                            for pdg in initial_states[i]]) + \
                                  "/%d*1D0/" % len(initial_states[i]) + \
                                  "\n"

            # Get PDF values for the different initial states
            for i, init_states in enumerate(initial_states):
                # ibeam is the kinematic beam slot; with mirror=True the two
                # beams are swapped (2,1 instead of 1,2)
                if not mirror:
                    ibeam = i + 1
                else:
                    ibeam = 2 - i
                if subproc_group:
                    pdf_lines = pdf_lines + \
                           "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \
                                 % (ibeam, ibeam)
                else:
                    pdf_lines = pdf_lines + \
                           "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \
                                 % (ibeam, ibeam)

                for initial_state in init_states:
                    if initial_state in list(pdf_codes.keys()):
                        # |code| <= 10 selects quarks/gluon/photon/leptons, the
                        # flavours the PDG2PDF interface knows about
                        if subproc_group:
                            if abs(pdgtopdf[initial_state]) <= 10:
                                pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP," + \
                                         "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \
                                         (pdf_codes[initial_state],
                                          i + 1, ibeam, pdgtopdf[initial_state],
                                          ibeam, ibeam)
                            else:
                                # setting other partons flavours outside quark, gluon, photon to be 0d0
                                pdf_lines = pdf_lines + \
                                        ("c settings other partons flavours outside quark, gluon, photon to 0d0\n" + \
                                         "%s%d=0d0\n") % \
                                         (pdf_codes[initial_state],i + 1)
                        else:
                            if abs(pdgtopdf[initial_state]) <= 10:
                                pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP," + \
                                         "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \
                                         (pdf_codes[initial_state],
                                          i + 1, ibeam, pdgtopdf[initial_state],
                                          ibeam, ibeam)
                            else:
                                # setting other partons flavours outside quark, gluon, photon to be 0d0
                                pdf_lines = pdf_lines + \
                                        ("c settings other partons flavours outside quark, gluon, photon to 0d0\n" + \
                                         "%s%d=0d0\n") % \
                                         (pdf_codes[initial_state],i + 1)

                pdf_lines = pdf_lines + "ENDIF\n"

            # Add up PDFs for the different initial state particles
            pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n"
            for proc in processes:
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC) = "
                for ibeam in [1, 2]:
                    initial_state = proc.get_initial_pdg(ibeam)
                    if initial_state in list(pdf_codes.keys()):
                        pdf_lines = pdf_lines + "%s%d*" % \
                                    (pdf_codes[initial_state], ibeam)
                    else:
                        pdf_lines = pdf_lines + "1d0*"
                # Remove last "*" from pdf_lines
                pdf_lines = pdf_lines[:-1] + "\n"

        # Remove last line break from pdf_lines
        return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
3636 - def get_color_data_lines_from_color_matrix(self, color_matrix, n=6):
3637 """Return the color matrix definition lines for the given color_matrix. Split 3638 rows in chunks of size n.""" 3639 3640 if not color_matrix: 3641 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"] 3642 else: 3643 ret_list = [] 3644 my_cs = color.ColorString() 3645 for index, denominator in \ 3646 enumerate(color_matrix.get_line_denominators()): 3647 # Then write the numerators for the matrix elements 3648 num_list = color_matrix.get_line_numerators(index, denominator) 3649 for k in range(0, len(num_list), n): 3650 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \ 3651 (index + 1, k + 1, min(k + n, len(num_list)), 3652 ','.join([("%.15e" % (int(i)/denominator)).replace('e','d') for i in num_list[k:k + n]]))) 3653 return ret_list
3654 3655 #=========================================================================== 3656 # write_maxamps_file 3657 #===========================================================================
3658 - def write_maxamps_file(self, writer, maxamps, maxflows, 3659 maxproc,maxsproc):
3660 """Write the maxamps.inc file for MG4.""" 3661 3662 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 3663 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 3664 (maxamps, maxflows) 3665 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 3666 (maxproc, maxsproc) 3667 3668 # Write the file 3669 writer.writelines(file) 3670 3671 return True
3672 3673 #=============================================================================== 3674 # write_ncombs_file 3675 #===============================================================================
3676 - def write_ncombs_file(self, writer, matrix_element, fortran_model):
3677 # #test written 3678 """Write the ncombs.inc file for MadEvent.""" 3679 3680 # Extract number of external particles 3681 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3682 3683 # ncomb (used for clustering) is 2^(nexternal) 3684 file = " integer n_max_cl\n" 3685 file = file + "parameter (n_max_cl=%d)" % (2 ** (nexternal+1)) 3686 3687 # Write the file 3688 writer.writelines(file) 3689 3690 return True
3691 3692 #=========================================================================== 3693 # write_config_subproc_map_file 3694 #===========================================================================
3695 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
3696 """Write a dummy config_subproc.inc file for MadEvent""" 3697 3698 lines = [] 3699 3700 for iconfig in range(len(s_and_t_channels)): 3701 lines.append("DATA CONFSUB(1,%d)/1/" % \ 3702 (iconfig + 1)) 3703 3704 # Write the file 3705 writer.writelines(lines) 3706 3707 return True
3708 3709 #=========================================================================== 3710 # write_colors_file 3711 #===========================================================================
    def write_colors_file(self, writer, matrix_element):
        """Write the get_color.f file for MadEvent, which returns color
        for all particles used in the matrix element.

        matrix_element is an FKS object carrying a born matrix element
        (born_me) and a list of real-emission processes (real_processes).
        The generated Fortran function get_color(ipdg) covers every PDG code
        appearing either as an external leg or as an internal wavefunction
        in any of the born or real-emission matrix elements."""

        # use the first real-emission ME if any reals were generated,
        # otherwise fall back to the born (e.g. for LOonly processes)
        try:
            matrix_elements=matrix_element.real_processes[0].matrix_element
        except IndexError:
            matrix_elements=[matrix_element.born_me]

        # normalize to a list: the attribute above may be a single ME
        if isinstance(matrix_elements, helas_objects.HelasMatrixElement):
            matrix_elements = [matrix_elements]

        model = matrix_elements[0].get('processes')[0].get('model')

        # We need the both particle and antiparticle wf_ids, since the identity
        # depends on the direction of the wf.
        # loop on the real emissions
        wf_ids = set(sum([sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \
                                         for wf in d.get('wavefunctions')],[]) \
                                    for d in me.get('diagrams')],[]) \
                               for me in [real_proc.matrix_element]],[])\
                          for real_proc in matrix_element.real_processes],[]))
        # and also on the born
        wf_ids = wf_ids.union(set(sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \
                                            for wf in d.get('wavefunctions')],[]) \
                                       for d in matrix_element.born_me.get('diagrams')],[])))

        # loop on the real emissions
        leg_ids = set(sum([sum([sum([[l.get('id') for l in \
                                      p.get_legs_with_decays()] for p in \
                                     me.get('processes')], []) for me in \
                                [real_proc.matrix_element]], []) for real_proc in \
                           matrix_element.real_processes],[]))
        # and also on the born
        leg_ids = leg_ids.union(set(sum([[l.get('id') for l in \
                                          p.get_legs_with_decays()] for p in \
                                         matrix_element.born_me.get('processes')], [])))
        particle_ids = sorted(list(wf_ids.union(leg_ids)))

        # Build the Fortran function body: one branch per PDG code
        lines = """function get_color(ipdg)
        implicit none
        integer get_color, ipdg

        if(ipdg.eq.%d)then
        get_color=%d
        return
        """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color())

        for part_id in particle_ids[1:]:
            lines += """else if(ipdg.eq.%d)then
        get_color=%d
        return
        """ % (part_id, model.get_particle(part_id).get_color())
        # Dummy particle for multiparticle vertices with pdg given by
        # first code not in the model
        lines += """else if(ipdg.eq.%d)then
c       This is dummy particle used in multiparticle vertices
        get_color=2
        return
        """ % model.get_first_non_pdg()
        lines += """else
        write(*,*)'Error: No color given for pdg ',ipdg
        get_color=0
        return
        endif
        end
        """

        # Write the file
        writer.writelines(lines)

        return True
3784 3785 #=============================================================================== 3786 # write_props_file 3787 #=============================================================================== 3788 #test_written
3789 - def write_props_file(self, writer, matrix_element, fortran_model, s_and_t_channels):
3790 """Write the props.inc file for MadEvent. Needs input from 3791 write_configs_file. With respect to the parent routine, it has some 3792 more specific formats that allow the props.inc file to be read by the 3793 link program""" 3794 3795 lines = [] 3796 3797 particle_dict = matrix_element.get('processes')[0].get('model').\ 3798 get('particle_dict') 3799 3800 for iconf, configs in enumerate(s_and_t_channels): 3801 for vertex in configs[0] + configs[1][:-1]: 3802 leg = vertex.get('legs')[-1] 3803 if leg.get('id') not in particle_dict: 3804 # Fake propagator used in multiparticle vertices 3805 mass = 'zero' 3806 width = 'zero' 3807 pow_part = 0 3808 else: 3809 particle = particle_dict[leg.get('id')] 3810 # Get mass 3811 if particle.get('mass').lower() == 'zero': 3812 mass = particle.get('mass') 3813 else: 3814 mass = "abs(%s)" % particle.get('mass') 3815 # Get width 3816 if particle.get('width').lower() == 'zero': 3817 width = particle.get('width') 3818 else: 3819 width = "abs(%s)" % particle.get('width') 3820 3821 pow_part = 1 + int(particle.is_boson()) 3822 3823 lines.append("pmass(%3d,%4d) = %s" % \ 3824 (leg.get('number'), iconf + 1, mass)) 3825 lines.append("pwidth(%3d,%4d) = %s" % \ 3826 (leg.get('number'), iconf + 1, width)) 3827 lines.append("pow(%3d,%4d) = %d" % \ 3828 (leg.get('number'), iconf + 1, pow_part)) 3829 3830 # Write the file 3831 writer.writelines(lines) 3832 3833 return True
3834 3835 3836 #=========================================================================== 3837 # write_subproc 3838 #===========================================================================
3839 - def write_subproc(self, writer, subprocdir):
3840 """Append this subprocess to the subproc.mg file for MG4""" 3841 3842 # Write line to file 3843 writer.write(subprocdir + "\n") 3844 3845 return True
3846 3847 3848 3849 #================================================================================= 3850 # Class for using the optimized Loop process 3851 #=================================================================================
class ProcessOptimizedExporterFortranFKS(loop_exporters.LoopProcessOptimizedExporterFortranSA,\
                                         ProcessExporterFortranFKS):
    """Class to take care of exporting a set of matrix elements to
    Fortran (v4) format, using the optimized MadLoop output for the
    virtual contributions."""

    # enable the optimized JAMP (color-flow amplitude) output
    jamp_optim = True
3859 - def finalize(self, *args, **opts):
3861 #export_v4.ProcessExporterFortranSA.finalize(self, *args, **opts) 3862 3863 #=============================================================================== 3864 # copy the Template in a new directory. 3865 #===============================================================================
    def copy_fkstemplate(self):
        """create the directory run_name as a copy of the MadEvent
        Template, and clean the directory
        For now it is just the same as copy_v4template, but it will be modified

        Steps performed: copy the NLO Template tree (plus the Common one),
        optionally clean it, write version info, link the CutTools and TIR
        libraries, generate makefile_loop and make_opts, install the MadLoop
        parameter files and MadLoopCommons.f, link the multiple-precision
        model includes and copy the python helper files.
        """
        mgme_dir = self.mgme_dir
        dir_path = self.dir_path
        clean =self.opt['clean']

        #First copy the full template tree if dir_path doesn't exit
        if not os.path.isdir(dir_path):
            if not mgme_dir:
                raise MadGraph5Error("No valid MG_ME path given for MG4 run directory creation.")
            logger.info('initialize a new directory: %s' % \
                        os.path.basename(dir_path))
            shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True)
            # misc.copytree since dir_path already exists
            misc.copytree(pjoin(self.mgme_dir, 'Template', 'Common'),
                               dir_path)
            # Copy plot_card
            for card in ['plot_card']:
                if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')):
                    try:
                        shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'),
                                    pjoin(self.dir_path, 'Cards', card + '_default.dat'))
                    except IOError:
                        logger.warning("Failed to copy " + card + ".dat to default")

        elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')):
            if not mgme_dir:
                raise MadGraph5Error("No valid MG_ME path given for MG4 run directory creation.")
            try:
                shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path)
            except IOError:
                MG5_version = misc.get_pkg_info()
                open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \
                    "5." + MG5_version['version'])

        #Ensure that the Template is clean
        if clean:
            logger.info('remove old information in %s' % os.path.basename(dir_path))
            if 'MADGRAPH_BASE' in os.environ:
                subprocess.call([os.path.join('bin', 'internal', 'clean_template'),
                                 '--web'], cwd=dir_path)
            else:
                try:
                    subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \
                                                                       cwd=dir_path)
                except Exception as why:
                    raise MadGraph5Error('Failed to clean correctly %s: \n %s' \
                                                % (os.path.basename(dir_path),why))
            #Write version info
            MG_version = misc.get_pkg_info()
            open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write(
                MG_version['version'])

        # We must link the CutTools to the Library folder of the active Template
        self.link_CutTools(dir_path)
        # We must link the TIR to the Library folder of the active Template
        link_tir_libs=[]
        tir_libs=[]
        tir_include=[]
        for tir in self.all_tir:
            # each TIR tool stores its library path in attribute '<tir>_dir'
            tir_dir="%s_dir"%tir
            libpath=getattr(self,tir_dir)
            libpath = self.link_TIR(os.path.join(self.dir_path, 'lib'),
                                       libpath,"lib%s.a"%tir,tir_name=tir)
            setattr(self,tir_dir,libpath)
            if libpath != "":
                if tir in ['pjfry','ninja','golem', 'samurai','collier']:
                    # We should link dynamically when possible, so we use the original
                    # location of these libraries.
                    link_tir_libs.append('-L%s/ -l%s'%(libpath,tir))
                    tir_libs.append('%s/lib%s.$(libext)'%(libpath,tir))
                    # For Ninja, we must also link against OneLoop.
                    if tir in ['ninja']:
                        if not any(os.path.isfile(pjoin(libpath,'libavh_olo.%s'%ext))
                                                  for ext in ['a','dylib','so']):
                            raise MadGraph5Error(
"The OneLOop library 'libavh_olo.(a|dylib|so)' could no be found in path '%s'. Please place a symlink to it there."%libpath)
                        link_tir_libs.append('-L%s/ -l%s'%(libpath,'avh_olo'))
                        tir_libs.append('%s/lib%s.$(libext)'%(libpath,'avh_olo'))
                    # We must add the corresponding includes for these TIR
                    if tir in ['golem','samurai','ninja','collier']:
                        trg_path = pjoin(os.path.dirname(libpath),'include')
                        if os.path.isdir(trg_path):
                            to_include = misc.find_includes_path(trg_path,
                                                        self.include_names[tir])
                        else:
                            to_include = None
                        # Special possible location for collier
                        if to_include is None and tir=='collier':
                            to_include = misc.find_includes_path(
                                       pjoin(libpath,'modules'),self.include_names[tir])
                        if to_include is None:
                            logger.error(
'Could not find the include directory for %s, looking in %s.\n' % (tir ,str(trg_path))+
'Generation carries on but you will need to edit the include path by hand in the makefiles.')
                            to_include = '<Not_found_define_it_yourself>'
                        tir_include.append('-I %s'%to_include)
                else:
                    # statically linked TIR from the local lib directory
                    link_tir_libs.append('-l%s'%tir)
                    tir_libs.append('$(LIBDIR)lib%s.$(libext)'%tir)

        # regenerate makefile_loop and make_opts with the TIR link flags;
        # the writers work in the current directory, hence the chdir dance
        os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc'))
        cwd = os.getcwd()
        dirpath = os.path.join(self.dir_path, 'SubProcesses')
        try:
            os.chdir(dirpath)
        except os.error:
            logger.error('Could not cd to directory %s' % dirpath)
            return 0
        filename = 'makefile_loop'
        calls = self.write_makefile_TIR(writers.MakefileWriter(filename),
                                        link_tir_libs,tir_libs,tir_include=tir_include)
        os.remove(os.path.join(self.dir_path,'Source','make_opts.inc'))
        dirpath = os.path.join(self.dir_path, 'Source')
        try:
            os.chdir(dirpath)
        except os.error:
            logger.error('Could not cd to directory %s' % dirpath)
            return 0
        filename = 'make_opts'
        calls = self.write_make_opts(writers.MakefileWriter(filename),
                                     link_tir_libs,tir_libs)
        # Return to original PWD
        os.chdir(cwd)

        cwd = os.getcwd()
        dirpath = os.path.join(self.dir_path, 'SubProcesses')
        try:
            os.chdir(dirpath)
        except os.error:
            logger.error('Could not cd to directory %s' % dirpath)
            return 0

        # We add here the user-friendly MadLoop option setter.
        cpfiles= ["SubProcesses/MadLoopParamReader.f",
                  "Cards/MadLoopParams.dat",
                  "SubProcesses/MadLoopParams.inc"]

        for file in cpfiles:
            shutil.copy(os.path.join(self.loop_dir,'StandAlone/', file),
                        os.path.join(self.dir_path, file))

        shutil.copy(pjoin(self.dir_path, 'Cards','MadLoopParams.dat'),
                    pjoin(self.dir_path, 'Cards','MadLoopParams_default.dat'))



        if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')):
            self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path,
                                                  'Cards', 'MadLoopParams.dat'))
            # write the output file
            self.MadLoopparam.write(pjoin(self.dir_path,"SubProcesses",
                                                           "MadLoopParams.dat"))

        # We need minimal editing of MadLoopCommons.f
        MadLoopCommon = open(os.path.join(self.loop_dir,'StandAlone',
                            "SubProcesses","MadLoopCommons.inc")).read()
        writer = writers.FortranWriter(os.path.join(self.dir_path,
                                             "SubProcesses","MadLoopCommons.f"))
        writer.writelines(MadLoopCommon%{
                                   'print_banner_commands':self.MadLoop_banner},
                context={'collier_available':self.tir_available_dict['collier']})
        writer.close()

        # link the files from the MODEL
        model_path = self.dir_path + '/Source/MODEL/'
        # Note that for the [real=] mode, these files are not present
        if os.path.isfile(os.path.join(model_path,'mp_coupl.inc')):
            ln(model_path + '/mp_coupl.inc', self.dir_path + '/SubProcesses')
        if os.path.isfile(os.path.join(model_path,'mp_coupl_same_name.inc')):
            ln(model_path + '/mp_coupl_same_name.inc', \
                              self.dir_path + '/SubProcesses')

        # Write the cts_mpc.h and cts_mprec.h files imported from CutTools
        self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\
                            writers.FortranWriter('cts_mpc.h'),)

        self.copy_python_files()


        # We need to create the correct open_data for the pdf
        self.write_pdf_opendata()


        # Return to original PWD
        os.chdir(cwd)
4055
    def generate_virt_directory(self, loop_matrix_element, fortran_model, dir_name):
        """writes the V**** directory inside the P**** directories specified in
        dir_name

        Creates the MadLoop5_resources and V* directories, writes the loop
        matrix element and its auxiliary include files, draws the loop and
        born diagrams, and sets up the symlinks needed to build MadLoop.
        Returns the number of helas calls (0 on failure to cd)."""

        cwd = os.getcwd()

        matrix_element = loop_matrix_element

        # Create the MadLoop5_resources directory if not already existing
        dirpath = os.path.join(dir_name, 'MadLoop5_resources')
        try:
            os.mkdir(dirpath)
        except os.error as error:
            # directory probably exists already; keep going
            logger.warning(error.strerror + " " + dirpath)

        # Create the directory PN_xx_xxxxx in the specified path
        name = "V%s" % matrix_element.get('processes')[0].shell_string()
        dirpath = os.path.join(dir_name, name)

        try:
            os.mkdir(dirpath)
        except os.error as error:
            logger.warning(error.strerror + " " + dirpath)

        try:
            os.chdir(dirpath)
        except os.error:
            logger.error('Could not cd to directory %s' % dirpath)
            return 0

        logger.info('Creating files in directory %s' % name)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        calls=self.write_loop_matrix_element_v4(None,matrix_element,fortran_model)

        # We need a link to coefs.inc from DHELAS
        ln(pjoin(self.dir_path, 'Source', 'DHELAS', 'coef_specs.inc'),
                                                  abspath=False, cwd=None)

        # The born matrix element, if needed
        filename = 'born_matrix.f'
        calls = self.write_bornmatrix(
            writers.FortranWriter(filename),
            matrix_element,
            fortran_model)

        filename = 'nexternal.inc'
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = 'pmass.inc'
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = 'ngraphs.inc'
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(matrix_element.get_all_amplitudes()))

        # placeholder postscript file; only the first 1000 loop diagrams
        # are drawn to keep the output manageable
        filename = "loop_matrix.ps"
        writers.FortranWriter(filename).writelines("""C Post-helas generation loop-drawing is not ready yet.""")
        plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList(
              matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]),
              filename,
              model=matrix_element.get('processes')[0].get('model'),
              amplitude='')
        logger.info("Drawing loop Feynman diagrams for " + \
                    matrix_element.get('processes')[0].nice_string(\
                                                      print_weighted=False))
        plot.draw()

        filename = "born_matrix.ps"
        plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                          get('born_diagrams'),
                                          filename,
                                          model=matrix_element.get('processes')[0].\
                                          get('model'),
                                          amplitude='')
        logger.info("Generating born Feynman diagrams for " + \
                    matrix_element.get('processes')[0].nice_string(\
                                                      print_weighted=False))
        plot.draw()

        # We also need to write the overall maximum quantities for this group
        # of processes in 'global_specs.inc'. In aMCatNLO, there is always
        # only one process, so this is trivial
        self.write_global_specs(matrix_element, output_path=pjoin(dirpath,'global_specs.inc'))

        open('unique_id.inc','w').write(
"""      integer UNIQUE_ID
      parameter(UNIQUE_ID=1)""")

        linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc',
                     'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f',
                     'MadLoopParams.inc','MadLoopCommons.f']

        for file in linkfiles:
            ln('../../%s' % file)

        os.system("ln -s ../../makefile_loop makefile")

        # We should move to MadLoop5_resources directory from the SubProcesses
        ln(pjoin(os.path.pardir,os.path.pardir,'MadLoopParams.dat'),
                                                 pjoin('..','MadLoop5_resources'))

        linkfiles = ['mpmodule.mod']

        for file in linkfiles:
            ln('../../../lib/%s' % file)

        linkfiles = ['coef_specs.inc']

        for file in linkfiles:
            ln('../../../Source/DHELAS/%s' % file)

        # Return to original PWD
        os.chdir(cwd)

        if not calls:
            calls = 0
        return calls
4178 4179 4180 #=============================================================================== 4181 # write_coef_specs 4182 #===============================================================================
4183 - def write_coef_specs_file(self, max_loop_vertex_ranks):
4184 """ writes the coef_specs.inc in the DHELAS folder. Should not be called in the 4185 non-optimized mode""" 4186 filename = os.path.join(self.dir_path, 'Source', 'DHELAS', 'coef_specs.inc') 4187 4188 replace_dict = {} 4189 replace_dict['max_lwf_size'] = 4 4190 replace_dict['vertex_max_coefs'] = max(\ 4191 [q_polynomial.get_number_of_coefs_for_rank(n) 4192 for n in max_loop_vertex_ranks]) 4193 IncWriter=writers.FortranWriter(filename,'w') 4194 IncWriter.writelines("""INTEGER MAXLWFSIZE 4195 PARAMETER (MAXLWFSIZE=%(max_lwf_size)d) 4196 INTEGER VERTEXMAXCOEFS 4197 PARAMETER (VERTEXMAXCOEFS=%(vertex_max_coefs)d)"""\ 4198 % replace_dict) 4199 IncWriter.close()
4200