Package madgraph :: Package iolibs :: Module export_v4
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  from __future__ import absolute_import, division 
  16  from madgraph.iolibs.helas_call_writers import HelasCallWriter 
  17  from six.moves import range 
  18  from six.moves import zip 
  19  from fractions import Fraction 
  20  """Methods and classes to export matrix elements to v4 format.""" 
  21   
  22  import copy 
  23  from six import StringIO 
  24  import itertools 
  25  import fractions 
  26  import glob 
  27  import logging 
  28  import math 
  29  import os 
  30  import io 
  31  import re 
  32  import shutil 
  33  import subprocess 
  34  import sys 
  35  import time 
  36  import traceback 
  37  import  collections 
  38   
  39  import aloha 
  40   
  41  import madgraph.core.base_objects as base_objects 
  42  import madgraph.core.color_algebra as color 
  43  import madgraph.core.helas_objects as helas_objects 
  44  import madgraph.iolibs.drawing_eps as draw 
  45  import madgraph.iolibs.files as files 
  46  import madgraph.iolibs.group_subprocs as group_subprocs 
  47  import madgraph.iolibs.file_writers as writers 
  48  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  49  import madgraph.iolibs.template_files as template_files 
  50  import madgraph.iolibs.ufo_expression_parsers as parsers 
  51  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  52  import madgraph.interface.common_run_interface as common_run_interface 
  53  import madgraph.various.diagram_symmetry as diagram_symmetry 
  54  import madgraph.various.misc as misc 
  55  import madgraph.various.banner as banner_mod 
  56  import madgraph.various.process_checks as process_checks 
  57  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  58  import aloha.create_aloha as create_aloha 
  59  import models.import_ufo as import_ufo 
  60  import models.write_param_card as param_writer 
  61  import models.check_param_card as check_param_card 
  62  from models import UFOError 
  63   
  64   
  65  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  66  from madgraph.iolibs.files import cp, ln, mv 
  67   
  68  from madgraph import InvalidCmd 
  69   
# shorthand used throughout this module for building file-system paths
pjoin = os.path.join

# root of the madgraph installation (parent of the package directory),
# kept with a trailing slash for historical string concatenation
_file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/'
logger = logging.getLogger('madgraph.export_v4')

# default compiler executables per output language; can be overridden
# by the user configuration
default_compiler= {'fortran': 'gfortran',
                       'f2py': 'f2py',
                       'cpp':'g++'}
class VirtualExporter(object):
    """Abstract interface through which madgraph drives an output exporter.

    The class attributes below tell madgraph how to interact with a
    concrete exporter; subclasses override them and the hook methods.
    """

    # Controls which object is handed to 'generate_subprocess_directory':
    #   False       -> no grouping (only identical matrix elements merged)
    #   'madevent'  -> group the massless quarks and massless leptons
    #   'madweight' -> group the gluon with the massless quarks
    grouped_mode = 'madevent'

    # With grouped_mode=False, uu~ and u~u are generated independently.
    # Setting sa_symmetry generates only one of the two matrix elements.
    sa_symmetry = False

    # Ask madgraph to check whether the output directory already exists and
    # propose to the user to remove it first if this is the case.
    check = True

    # Initialisation mode — one of [Template, None, dir]:
    #   'Template' -> madgraph calls copy_template
    #   'dir'      -> madgraph just creates an empty directory
    #   None       -> madgraph does nothing for initialisation
    output = 'Template'

    # Language of the output: 'v4' for Fortran output, 'cpp' for C++ output.
    exporter = 'v4'

    def __init__(self, dir_path="", opt=None):
        # cmd_options is a dictionary with all the optional arguments
        # passed at output time.
        # Monkey-patch the helas call writer so subclasses can customise
        # how aloha routine calls are rendered.
        helas_call_writers.HelasCallWriter.customize_argument_for_all_other_helas_object = \
            self.helas_call_writer_custom

    # helper for customising the helas writer
    @staticmethod
    def custom_helas_call(call, arg):
        """Customise how an aloha function call is written.

        'call' is the default template for the call and 'arg' the
        dictionary used to fill it; return both (possibly modified).
        """
        return call, arg

    # indirection used by the monkey patching above
    helas_call_writer_custom = lambda x, y, z: x.custom_helas_call(y, z)

    def copy_template(self, model):
        return

    def generate_subprocess_directory(self, subproc_group, helicity_model, me=None):
        # ungrouped signature is:
        # generate_subprocess_directory(self, matrix_element, helicity_model, me_number)
        # Must return the number of calls to helicity routines.
        return 0

    def convert_model(self, model, wanted_lorentz=[], wanted_couplings=[]):
        return

    def finalize(self, matrix_element, cmdhistory, MG5options, outputflag):
        return

    def pass_information_from_cmd(self, cmd):
        """pass information from the command interface to the exporter.
        Please do not modify any object of the interface from the exporter.
        """
        return

    def modify_grouping(self, matrix_element):
        return False, matrix_element

    def export_model_files(self, model_v4_path):
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")
        return

    def export_helas(self, HELAS_PATH):
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")
        return
157 #=============================================================================== 158 # ProcessExporterFortran 159 #=============================================================================== 160 -class ProcessExporterFortran(VirtualExporter):
161 """Class to take care of exporting a set of matrix elements to 162 Fortran (v4) format.""" 163 164 default_opt = {'clean': False, 'complex_mass':False, 165 'export_format':'madevent', 'mp': False, 166 'v5_model': True, 167 'output_options':{} 168 } 169 grouped_mode = False 170 jamp_optim = False 171
172 - def __init__(self, dir_path = "", opt=None):
173 """Initiate the ProcessExporterFortran with directory information""" 174 self.mgme_dir = MG5DIR 175 self.dir_path = dir_path 176 self.model = None 177 178 self.opt = dict(self.default_opt) 179 if opt: 180 self.opt.update(opt) 181 self.cmd_options = self.opt['output_options'] 182 183 #place holder to pass information to the run_interface 184 self.proc_characteristic = banner_mod.ProcCharacteristic() 185 # call mother class 186 super(ProcessExporterFortran,self).__init__(dir_path, opt)
187 188 189 #=========================================================================== 190 # process exporter fortran switch between group and not grouped 191 #===========================================================================
192 - def export_processes(self, matrix_elements, fortran_model):
193 """Make the switch between grouped and not grouped output""" 194 195 calls = 0 196 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 197 for (group_number, me_group) in enumerate(matrix_elements): 198 calls = calls + self.generate_subprocess_directory(\ 199 me_group, fortran_model, group_number) 200 else: 201 for me_number, me in enumerate(matrix_elements.get_matrix_elements()): 202 calls = calls + self.generate_subprocess_directory(\ 203 me, fortran_model, me_number) 204 205 return calls
206 207 208 #=========================================================================== 209 # create the run_card 210 #===========================================================================
    def create_run_card(self, matrix_elements, history):
        """Write Cards/run_card_default.dat, tailored to the exported
        processes when possible, and copy it to Cards/run_card.dat."""


        # bypass this for the loop-check
        import madgraph.loop.loop_helas_objects as loop_helas_objects
        if isinstance(matrix_elements, loop_helas_objects.LoopHelasMatrixElement):
            matrix_elements = None

        run_card = banner_mod.RunCard()


        default = True
        if isinstance(matrix_elements, group_subprocs.SubProcessGroupList):
            # grouped output: flatten the processes of every group
            processes = [me.get('processes') for megroup in matrix_elements
                         for me in megroup['matrix_elements']]
        elif matrix_elements:
            processes = [me.get('processes')
                         for me in matrix_elements['matrix_elements']]
        else:
            # no matrix elements available (e.g. loop check):
            # keep the generic default card
            default = False

        if default:
            run_card.create_default_for_process(self.proc_characteristic,
                                                history,
                                                processes)

        run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'))
        # the active run_card starts out identical to the default one
        shutil.copyfile(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'),
                        pjoin(self.dir_path, 'Cards', 'run_card.dat'))
241 242 243 244 #=========================================================================== 245 # copy the Template in a new directory. 246 #===========================================================================
247 - def copy_template(self, model):
248 """create the directory run_name as a copy of the MadEvent 249 Template, and clean the directory 250 """ 251 252 #First copy the full template tree if dir_path doesn't exit 253 if not os.path.isdir(self.dir_path): 254 assert self.mgme_dir, \ 255 "No valid MG_ME path given for MG4 run directory creation." 256 logger.info('initialize a new directory: %s' % \ 257 os.path.basename(self.dir_path)) 258 shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'), 259 self.dir_path, True) 260 # misc.copytree since dir_path already exists 261 misc.copytree(pjoin(self.mgme_dir, 'Template/Common'), 262 self.dir_path) 263 # copy plot_card 264 for card in ['plot_card']: 265 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 266 try: 267 shutil.copy(pjoin(self.dir_path, 'Cards',card + '.dat'), 268 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 269 except IOError: 270 logger.warning("Failed to copy " + card + ".dat to default") 271 elif os.getcwd() == os.path.realpath(self.dir_path): 272 logger.info('working in local directory: %s' % \ 273 os.path.realpath(self.dir_path)) 274 # misc.copytree since dir_path already exists 275 misc.copytree(pjoin(self.mgme_dir, 'Template/LO'), 276 self.dir_path) 277 # for name in misc.glob('Template/LO/*', self.mgme_dir): 278 # name = os.path.basename(name) 279 # filname = pjoin(self.mgme_dir, 'Template','LO',name) 280 # if os.path.isfile(filename): 281 # files.cp(filename, pjoin(self.dir_path,name)) 282 # elif os.path.isdir(filename): 283 # shutil.copytree(filename, pjoin(self.dir_path,name), True) 284 # misc.copytree since dir_path already exists 285 misc.copytree(pjoin(self.mgme_dir, 'Template/Common'), 286 self.dir_path) 287 # Copy plot_card 288 for card in ['plot_card']: 289 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 290 try: 291 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'), 292 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 293 except IOError: 294 logger.warning("Failed to copy " + card 
+ ".dat to default") 295 elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')): 296 assert self.mgme_dir, \ 297 "No valid MG_ME path given for MG4 run directory creation." 298 try: 299 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 300 except IOError: 301 MG5_version = misc.get_pkg_info() 302 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write(MG5_version['version']) 303 304 #Ensure that the Template is clean 305 if self.opt['clean']: 306 logger.info('remove old information in %s' % \ 307 os.path.basename(self.dir_path)) 308 if 'MADGRAPH_BASE' in os.environ: 309 misc.call([pjoin('bin', 'internal', 'clean_template'), 310 '--web'], cwd=self.dir_path) 311 else: 312 try: 313 misc.call([pjoin('bin', 'internal', 'clean_template')], \ 314 cwd=self.dir_path) 315 except Exception as why: 316 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 317 % (os.path.basename(self.dir_path),why)) 318 319 #Write version info 320 MG_version = misc.get_pkg_info() 321 open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 322 MG_version['version']) 323 324 # add the makefile in Source directory 325 filename = pjoin(self.dir_path,'Source','makefile') 326 self.write_source_makefile(writers.FileWriter(filename)) 327 328 # add the DiscreteSampler information 329 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'DiscreteSampler.f'), 330 pjoin(self.dir_path, 'Source')) 331 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'StringCast.f'), 332 pjoin(self.dir_path, 'Source')) 333 334 # We need to create the correct open_data for the pdf 335 self.write_pdf_opendata()
336 337 338 #=========================================================================== 339 # Call MadAnalysis5 to generate the default cards for this process 340 #===========================================================================
    def create_default_madanalysis5_cards(self, history, proc_defs, processes,
                                          ma5_path, output_dir, levels = ['parton','hadron']):
        """ Call MA5 so that it writes default cards for both parton and
        post-shower levels, tailored for this particular process.

        Everything here is best-effort: any failure of the optional
        MadAnalysis5 dependency only results in empty default analyses.
        """

        if len(levels) == 0:
            return
        start = time.time()
        logger.info('Generating MadAnalysis5 default cards tailored to this process')
        try:
            MA5_interpreter = common_run_interface.CommonRunCmd.\
                              get_MadAnalysis5_interpreter(MG5DIR,ma5_path,loglevel=100)
        except (Exception, SystemExit) as e:
            # MA5 is optional: degrade gracefully to an empty analysis
            logger.warning('Fail to create a MadAnalysis5 instance. Therefore the default analysis with MadAnalysis5 will be empty')
            return
        if MA5_interpreter is None:
            return

        MA5_main = MA5_interpreter.main
        for lvl in ['parton','hadron']:
            if lvl in levels:
                card_to_generate = pjoin(output_dir,'madanalysis5_%s_card_default.dat'%lvl)
                try:
                    text = MA5_main.madgraph.generate_card(history, proc_defs, processes,lvl)
                except (Exception, SystemExit) as e:
                    # keep the default card (skip only)
                    logger.warning('MadAnalysis5 failed to write a %s-level'%lvl+
                                   ' default analysis card for this process.')
                    logger.warning('Therefore, %s-level default analysis with MadAnalysis5 will be empty.'%lvl)
                    # dump the traceback at debug level only
                    error=StringIO()
                    traceback.print_exc(file=error)
                    logger.debug('MadAnalysis5 error was:')
                    logger.debug('-'*60)
                    logger.debug(error.getvalue()[:-1])
                    logger.debug('-'*60)
                else:
                    open(card_to_generate,'w').write(text)
        stop = time.time()
        # only mention timing when card generation was noticeably slow
        if stop-start > 1:
            logger.info('Cards created in %.2fs' % (stop-start))
381 382 #=========================================================================== 383 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 384 #===========================================================================
385 - def write_procdef_mg5(self, file_pos, modelname, process_str):
386 """ write an equivalent of the MG4 proc_card in order that all the Madevent 387 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 388 389 proc_card_template = template_files.mg4_proc_card.mg4_template 390 process_template = template_files.mg4_proc_card.process_template 391 process_text = '' 392 coupling = '' 393 new_process_content = [] 394 395 396 # First find the coupling and suppress the coupling from process_str 397 #But first ensure that coupling are define whithout spaces: 398 process_str = process_str.replace(' =', '=') 399 process_str = process_str.replace('= ', '=') 400 process_str = process_str.replace(',',' , ') 401 #now loop on the element and treat all the coupling 402 for info in process_str.split(): 403 if '=' in info: 404 coupling += info + '\n' 405 else: 406 new_process_content.append(info) 407 # Recombine the process_str (which is the input process_str without coupling 408 #info) 409 process_str = ' '.join(new_process_content) 410 411 #format the SubProcess 412 replace_dict = {'process': process_str, 413 'coupling': coupling} 414 process_text += process_template.substitute(replace_dict) 415 416 replace_dict = {'process': process_text, 417 'model': modelname, 418 'multiparticle':''} 419 text = proc_card_template.substitute(replace_dict) 420 421 if file_pos: 422 ff = open(file_pos, 'w') 423 ff.write(text) 424 ff.close() 425 else: 426 return replace_dict
427 428
429 - def pass_information_from_cmd(self, cmd):
430 """Pass information for MA5""" 431 432 self.proc_defs = cmd._curr_proc_defs
433 434 #=========================================================================== 435 # Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz 436 #===========================================================================
437 - def finalize(self, matrix_elements, history='', mg5options={}, flaglist=[]):
438 """Function to finalize v4 directory, for inheritance.""" 439 440 self.create_run_card(matrix_elements, history) 441 self.create_MA5_cards(matrix_elements, history)
442
    def create_MA5_cards(self,matrix_elements,history):
        """ A wrapper around the creation of the MA5 cards so that it can be
        bypassed by daughter classes (i.e. in standalone)."""
        # only act when a MadAnalysis5 path is configured and process
        # definitions were passed from the command interface
        if 'madanalysis5_path' in self.opt and not \
                self.opt['madanalysis5_path'] is None and not self.proc_defs is None:
            processes = None
            if isinstance(matrix_elements, group_subprocs.SubProcessGroupList):
                # grouped output: flatten the processes of every group
                processes = [me.get('processes') for megroup in matrix_elements
                             for me in megroup['matrix_elements']]
            elif matrix_elements:
                processes = [me.get('processes')
                             for me in matrix_elements['matrix_elements']]

            self.create_default_madanalysis5_cards(
                history, self.proc_defs, processes,
                self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'),
                levels = ['hadron','parton'])

            for level in ['hadron','parton']:
                # Copying these cards turn on the use of MadAnalysis5 by default.
                if os.path.isfile(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level)):
                    shutil.copy(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level),
                                pjoin(self.dir_path,'Cards','madanalysis5_%s_card.dat'%level))
466 467 #=========================================================================== 468 # Create the proc_characteristic file passing information to the run_interface 469 #===========================================================================
470 - def create_proc_charac(self, matrix_elements=None, history="", **opts):
471 472 self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics'))
473 474 #=========================================================================== 475 # write_matrix_element_v4 476 #===========================================================================
477 - def write_matrix_element_v4(self):
478 """Function to write a matrix.f file, for inheritance. 479 """ 480 pass
481 482 #=========================================================================== 483 # write_pdf_opendata 484 #===========================================================================
    def write_pdf_opendata(self):
        """ modify the pdf opendata file, to allow direct access to cluster node
        repository if configure

        NOTE(review): the exact leading whitespace of the embedded Fortran
        snippets could not be recovered from the rendered source; the
        FortranWriter re-formats the lines, but confirm against the
        original file before relying on column positions."""

        if not self.opt["cluster_local_path"]:
            changer = {"pdf_systemwide": ""}
        else:
            # chain of opens with ERR= fall-through: try each candidate
            # location of the pdf table on the cluster node in turn
            to_add = """
              tempname='%(path)s'//Tablefile
              open(IU,file=tempname,status='old',ERR=1)
              return
 1            tempname='%(path)s/Pdfdata/'//Tablefile
              open(IU,file=tempname,status='old',ERR=2)
              return
 2            tempname='%(path)s/lhapdf'//Tablefile
              open(IU,file=tempname,status='old',ERR=3)
              return
 3            tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile
              open(IU,file=tempname,status='old',ERR=4)
              return
 4            tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile
              open(IU,file=tempname,status='old',ERR=5)
              return
              """ % {"path" : self.opt["cluster_local_path"]}

            changer = {"pdf_systemwide": to_add}


        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "opendata.f"))
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"),"r").read()
        ff.writelines(template % changer)

        # Do the same for lhapdf set
        if not self.opt["cluster_local_path"]:
            changer = {"cluster_specific_path": ""}
        else:
            # probe the candidate PDFsets locations in order of preference
            to_add="""
              LHAPath='%(path)s/PDFsets'
              Inquire(File=LHAPath, exist=exists)
              if(exists)return
              LHAPath='%(path)s/../lhapdf/pdfsets/6.1/'
              Inquire(File=LHAPath, exist=exists)
              if(exists)return
              LHAPath='%(path)s/../lhapdf/pdfsets/'
              Inquire(File=LHAPath, exist=exists)
              if(exists)return
              LHAPath='./PDFsets'
              """ % {"path" : self.opt["cluster_local_path"]}
            changer = {"cluster_specific_path": to_add}

        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"))
        #ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w")
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"),"r").read()
        ff.writelines(template % changer)


        return
542 543 544 545 #=========================================================================== 546 # write_maxparticles_file 547 #===========================================================================
548 - def write_maxparticles_file(self, writer, matrix_elements):
549 """Write the maxparticles.inc file for MadEvent""" 550 551 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 552 maxparticles = max([me.get_nexternal_ninitial()[0] for me in \ 553 matrix_elements.get('matrix_elements')]) 554 else: 555 maxparticles = max([me.get_nexternal_ninitial()[0] \ 556 for me in matrix_elements]) 557 558 lines = "integer max_particles\n" 559 lines += "parameter(max_particles=%d)" % maxparticles 560 561 # Write the file 562 writer.writelines(lines) 563 564 return True
565 566 567 #=========================================================================== 568 # export the model 569 #===========================================================================
570 - def export_model_files(self, model_path):
571 """Configure the files/link of the process according to the model""" 572 573 # Import the model 574 for file in os.listdir(model_path): 575 if os.path.isfile(pjoin(model_path, file)): 576 shutil.copy2(pjoin(model_path, file), \ 577 pjoin(self.dir_path, 'Source', 'MODEL'))
578 579 593 601 602 603 #=========================================================================== 604 # export the helas routine 605 #===========================================================================
606 - def export_helas(self, helas_path):
607 """Configure the files/link of the process according to the model""" 608 609 # Import helas routine 610 for filename in os.listdir(helas_path): 611 filepos = pjoin(helas_path, filename) 612 if os.path.isfile(filepos): 613 if filepos.endswith('Makefile.template'): 614 cp(filepos, self.dir_path + '/Source/DHELAS/Makefile') 615 elif filepos.endswith('Makefile'): 616 pass 617 else: 618 cp(filepos, self.dir_path + '/Source/DHELAS')
619 # following lines do the same but whithout symbolic link 620 # 621 #def export_helas(mgme_dir, dir_path): 622 # 623 # # Copy the HELAS directory 624 # helas_dir = pjoin(mgme_dir, 'HELAS') 625 # for filename in os.listdir(helas_dir): 626 # if os.path.isfile(pjoin(helas_dir, filename)): 627 # shutil.copy2(pjoin(helas_dir, filename), 628 # pjoin(dir_path, 'Source', 'DHELAS')) 629 # shutil.move(pjoin(dir_path, 'Source', 'DHELAS', 'Makefile.template'), 630 # pjoin(dir_path, 'Source', 'DHELAS', 'Makefile')) 631 # 632 633 #=========================================================================== 634 # generate_subprocess_directory 635 #===========================================================================
636 - def generate_subprocess_directory(self, matrix_element, 637 fortran_model, 638 me_number):
639 """Routine to generate a subprocess directory (for inheritance)""" 640 641 pass
642 643 #=========================================================================== 644 # get_source_libraries_list 645 #===========================================================================
646 - def get_source_libraries_list(self):
647 """ Returns the list of libraries to be compiling when compiling the 648 SOURCE directory. It is different for loop_induced processes and 649 also depends on the value of the 'output_dependencies' option""" 650 651 return ['$(LIBDIR)libdhelas.$(libext)', 652 '$(LIBDIR)libpdf.$(libext)', 653 '$(LIBDIR)libmodel.$(libext)', 654 '$(LIBDIR)libcernlib.$(libext)', 655 '$(LIBDIR)libbias.$(libext)']
656 657 #=========================================================================== 658 # write_source_makefile 659 #===========================================================================
660 - def write_source_makefile(self, writer):
661 """Write the nexternal.inc file for MG4""" 662 663 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 664 set_of_lib = ' '.join(['$(LIBRARIES)']+self.get_source_libraries_list()) 665 if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'): 666 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 667 MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param 668 param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n''' 669 else: 670 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 671 param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n''' 672 673 replace_dict= {'libraries': set_of_lib, 674 'model':model_line, 675 'additional_dsample': '', 676 'additional_dependencies':''} 677 678 if writer: 679 text = open(path).read() % replace_dict 680 writer.write(text) 681 682 return replace_dict
683 684 #=========================================================================== 685 # write_nexternal_madspin 686 #===========================================================================
687 - def write_nexternal_madspin(self, writer, nexternal, ninitial):
688 """Write the nexternal_prod.inc file for madspin""" 689 690 replace_dict = {} 691 692 replace_dict['nexternal'] = nexternal 693 replace_dict['ninitial'] = ninitial 694 695 file = """ \ 696 integer nexternal_prod 697 parameter (nexternal_prod=%(nexternal)d) 698 integer nincoming_prod 699 parameter (nincoming_prod=%(ninitial)d)""" % replace_dict 700 701 # Write the file 702 if writer: 703 writer.writelines(file) 704 return True 705 else: 706 return replace_dict
707 708 #=========================================================================== 709 # write_helamp_madspin 710 #===========================================================================
711 - def write_helamp_madspin(self, writer, ncomb):
712 """Write the helamp.inc file for madspin""" 713 714 replace_dict = {} 715 716 replace_dict['ncomb'] = ncomb 717 718 file = """ \ 719 integer ncomb1 720 parameter (ncomb1=%(ncomb)d) 721 double precision helamp(ncomb1) 722 common /to_helamp/helamp """ % replace_dict 723 724 # Write the file 725 if writer: 726 writer.writelines(file) 727 return True 728 else: 729 return replace_dict
730 731 732 733 #=========================================================================== 734 # write_nexternal_file 735 #===========================================================================
736 - def write_nexternal_file(self, writer, nexternal, ninitial):
737 """Write the nexternal.inc file for MG4""" 738 739 replace_dict = {} 740 741 replace_dict['nexternal'] = nexternal 742 replace_dict['ninitial'] = ninitial 743 744 file = """ \ 745 integer nexternal 746 parameter (nexternal=%(nexternal)d) 747 integer nincoming 748 parameter (nincoming=%(ninitial)d)""" % replace_dict 749 750 # Write the file 751 if writer: 752 writer.writelines(file) 753 return True 754 else: 755 return replace_dict
756 #=========================================================================== 757 # write_pmass_file 758 #===========================================================================
759 - def write_pmass_file(self, writer, matrix_element):
760 """Write the pmass.inc file for MG4""" 761 762 model = matrix_element.get('processes')[0].get('model') 763 764 lines = [] 765 for wf in matrix_element.get_external_wavefunctions(): 766 mass = model.get('particle_dict')[wf.get('pdg_code')].get('mass') 767 if mass.lower() != "zero": 768 mass = "abs(%s)" % mass 769 770 lines.append("pmass(%d)=%s" % \ 771 (wf.get('number_external'), mass)) 772 773 # Write the file 774 writer.writelines(lines) 775 776 return True
777 778 #=========================================================================== 779 # write_ngraphs_file 780 #===========================================================================
781 - def write_ngraphs_file(self, writer, nconfigs):
782 """Write the ngraphs.inc file for MG4. Needs input from 783 write_configs_file.""" 784 785 file = " integer n_max_cg\n" 786 file = file + "parameter (n_max_cg=%d)" % nconfigs 787 788 # Write the file 789 writer.writelines(file) 790 791 return True
792 793 #=========================================================================== 794 # write_leshouche_file 795 #===========================================================================
796 - def write_leshouche_file(self, writer, matrix_element):
797 """Write the leshouche.inc file for MG4""" 798 799 # Write the file 800 writer.writelines(self.get_leshouche_lines(matrix_element, 0)) 801 802 return True
803 804 #=========================================================================== 805 # get_leshouche_lines 806 #===========================================================================
    def get_leshouche_lines(self, matrix_element, numproc):
        """Build the Fortran DATA statements (IDUP/MOTHUP/ICOLUP) that make
        up the leshouche.inc file for MG4."""

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        lines = []
        for iproc, proc in enumerate(matrix_element.get('processes')):
            legs = proc.get_legs_with_decays()
            # PDG codes of the external legs for this subprocess
            lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \
                         (iproc + 1, numproc+1, nexternal,
                          ",".join([str(l.get('id')) for l in legs])))
            if iproc == 0 and numproc == 0:
                # mother info is common to all subprocesses: incoming legs
                # have no mother, outgoing legs point to beams 1 and 2
                for i in [1, 2]:
                    lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \
                                 (i, nexternal,
                                  ",".join([ "%3r" % 0 ] * ninitial + \
                                           [ "%3r" % i ] * (nexternal - ninitial))))

            # Here goes the color connections corresponding to the JAMPs
            # Only one output, for the first subproc!
            if iproc == 0:
                # If no color basis, just output trivial color flow
                if not matrix_element.get('color_basis'):
                    for i in [1, 2]:
                        lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \
                                     (i, numproc+1,nexternal,
                                      ",".join([ "%3r" % 0 ] * nexternal)))

                else:
                    # First build a color representation dictionnary
                    # (sign flips the representation for initial-state legs)
                    repr_dict = {}
                    for l in legs:
                        repr_dict[l.get('number')] = \
                            proc.get('model').get_particle(l.get('id')).get_color()\
                            * (-1)**(1+l.get('state'))
                    # Get the list of color flows
                    color_flow_list = \
                        matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
                                                                                   ninitial)
                    # And output them properly
                    for cf_i, color_flow_dict in enumerate(color_flow_list):
                        for i in [0, 1]:
                            lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \
                                         (i + 1, cf_i + 1, numproc+1, nexternal,
                                          ",".join(["%3r" % color_flow_dict[l.get('number')][i] \
                                                    for l in legs])))

        return lines
856 857 858 859 860 #=========================================================================== 861 # write_maxamps_file 862 #===========================================================================
def write_maxamps_file(self, writer, maxamps, maxflows,
                       maxproc, maxsproc):
    """Write the maxamps.inc file for MG4.

    Emits the integer declarations and two Fortran `parameter` lines
    defining maxamps, maxflow, maxproc and maxsproc.

    Parameters
    ----------
    writer : file-writer object
        Must expose writelines(); receives the whole file content as a
        single string (unchanged behavior).
    maxamps, maxflows, maxproc, maxsproc : int
        Values substituted into the parameter statements.

    Returns
    -------
    bool
        Always True.
    """
    # Build the file content without shadowing the builtin `file`
    # (the previous implementation used `file` as a local variable).
    content = "".join([
        " integer maxamps, maxflow, maxproc, maxsproc\n",
        "parameter (maxamps=%d, maxflow=%d)\n" % (maxamps, maxflows),
        "parameter (maxproc=%d, maxsproc=%d)" % (maxproc, maxsproc),
    ])

    # Write the file
    writer.writelines(content)

    return True
877 878 879 #=========================================================================== 880 # Routines to output UFO models in MG4 format 881 #=========================================================================== 882
def convert_model(self, model, wanted_lorentz = [],
                  wanted_couplings = []):
    """ Create a full valid MG4 model from a MG5 model (coming from UFO)

    Writes the MODEL and DHELAS source directories under self.dir_path,
    builds/reuses an ALOHA model for the requested lorentz structures,
    copies the helas templates, and restores the global aloha state
    (mp_precision, loop_mode) before returning.

    NOTE(review): wanted_lorentz/wanted_couplings are mutable default
    arguments; they appear to be read-only here, but confirm before
    relying on that.
    """

    # Make sure aloha is in quadruple precision if needed
    old_aloha_mp=aloha.mp_precision
    aloha.mp_precision=self.opt['mp']
    self.model = model
    # create the MODEL
    write_dir=pjoin(self.dir_path, 'Source', 'MODEL')
    self.opt['exporter'] = self.__class__
    # NOTE(review): `self.opt + self.proc_characteristic` relies on the
    # project's option-mapping type supporting '+' — plain dicts do not;
    # confirm the type if this ever raises.
    model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic)
    model_builder.build(wanted_couplings)

    # Backup the loop mode, because it can be changed in what follows.
    old_loop_mode = aloha.loop_mode

    # Create the aloha model or use the existing one (for loop exporters
    # this is useful as the aloha model will be used again in the
    # LoopHelasMatrixElements generated). We do not save the model generated
    # here if it didn't exist already because it would be a waste of
    # memory for tree level applications since aloha is only needed at the
    # time of creating the aloha fortran subroutines.
    if hasattr(self, 'aloha_model'):
        aloha_model = self.aloha_model
    else:
        try:
            # Silence the model logger while constructing from the
            # model's basename; fall back to the full path on failure.
            with misc.MuteLogger(['madgraph.models'], [60]):
                aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath')))
        except (ImportError, UFOError):
            aloha_model = create_aloha.AbstractALOHAModel(model.get('modelpath'))
    aloha_model.add_Lorentz_object(model.get('lorentz'))

    # Compute the subroutines
    if wanted_lorentz:
        aloha_model.compute_subset(wanted_lorentz)
    else:
        aloha_model.compute_all(save=False)

    # Write them out
    write_dir=pjoin(self.dir_path, 'Source', 'DHELAS')
    aloha_model.write(write_dir, 'Fortran')

    # Revert the original aloha loop mode
    aloha.loop_mode = old_loop_mode

    #copy Helas Template
    cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile')
    # Use the loop-capable aloha functions when any wanted lorentz
    # structure carries an 'L' (loop) tag.
    if any([any([tag.startswith('L') for tag in d[1]]) for d in wanted_lorentz]):
        cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f',
           write_dir+'/aloha_functions.f')
        aloha_model.loop_mode = False
    else:
        cp(MG5DIR + '/aloha/template_files/aloha_functions.f',
           write_dir+'/aloha_functions.f')
    create_aloha.write_aloha_file_inc(write_dir, '.f', '.o')

    # Make final link in the Process
    self.make_model_symbolic_link()

    # Re-establish original aloha mode
    aloha.mp_precision=old_aloha_mp
945 946 947 #=========================================================================== 948 # Helper functions 949 #===========================================================================
def modify_grouping(self, matrix_element):
    """Hook allowing subclasses to rearrange a grouped matrix element.

    Returns a pair:
      - bool: whether the matrix element was modified (always False here)
      - the (unchanged) matrix element
    """
    # Base class: no grouping modification is performed.
    return (False, matrix_element)
957 958 #=========================================================================== 959 # Helper functions 960 #===========================================================================
def get_mg5_info_lines(self):
    """Return info lines for MG5, suitable to place at beginning of
    Fortran files."""

    banner = ["# Generated by MadGraph5_aMC@NLO"]
    info = misc.get_pkg_info()
    # When package metadata is available, include version and date in
    # the first banner line.
    if info and 'version' in info and 'date' in info:
        banner[0] = "# Generated by MadGraph5_aMC@NLO v. %s, %s" % \
                    (info['version'], info['date'])
    banner.append("# By the MadGraph5_aMC@NLO Development Team")
    banner.append("# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch")
    return "\n".join(banner)
979
def get_process_info_lines(self, matrix_element):
    """Return Fortran comment lines describing the processes contained
    in this matrix element (one 'C ' line per process, continuation
    lines prefixed with 'C * ')."""

    comments = []
    for process in matrix_element.get('processes'):
        comments.append("C " + process.nice_string().replace('\n', '\nC * '))
    return "\n".join(comments)
985 986
def get_helicity_lines(self, matrix_element, array_name='NHEL'):
    """Return the helicity matrix definition lines for this matrix
    element, one Fortran DATA statement per helicity combination."""

    lines = []
    # Rows are numbered from 1 in the Fortran array.
    for row, helicities in enumerate(matrix_element.get_helicity_matrix(), start=1):
        values = ",".join("%2r" % h for h in helicities)
        lines.append("DATA (%s(I,%4r),I=1,%d) /%s/"
                     % (array_name, row, len(helicities), values))
    return "\n".join(lines)
1001
def get_ic_line(self, matrix_element):
    """Return the IC definition line coming after helicities, required
    by switchmom in madevent (identity permutation 1..nexternal)."""

    nexternal = matrix_element.get_nexternal_ninitial()[0]
    indices = ",".join(str(k) for k in range(1, nexternal + 1))
    return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, indices)
1012
def set_chosen_SO_index(self, process, squared_orders):
    """From the squared order constraints set by the user, find which
    indices of `squared_orders` the user intends to pick, and return a
    comma-separated string of '.true.'/'.false.' flags, one per index."""

    user_squared_orders = process.get('squared_orders')
    split_orders = process.get('split_orders')

    # No user constraint: every squared-order combination is selected.
    if not user_squared_orders:
        return ','.join(['.true.'] * len(squared_orders))

    flags = []
    for sqsos in squared_orders:
        matched = True
        for name, value in user_squared_orders.items():
            target = sqsos[split_orders.index(name)]
            ctype = process.get_squared_order_type(name)
            # A combination survives the constraint when:
            #   '=='        : target equals the user value
            #   '<=' or '=' : target does not exceed the user value
            #   '>'         : target strictly exceeds the user value
            # Any other constraint type never rejects.
            if ctype == '==':
                ok = (value == target)
            elif ctype in ['<=', '=']:
                ok = (value >= target)
            elif ctype == '>':
                ok = (value < target)
            else:
                ok = True
            if not ok:
                matched = False
                break
        flags.append('.true.' if matched else '.false.')

    return ','.join(flags)
1040
def get_split_orders_lines(self, orders, array_name, n=5):
    """Return the split orders definition as defined in the list
    `orders` and for the name of the array `array_name`. Rows are split
    in chunks of size n."""

    lines = []
    for index, order in enumerate(orders, start=1):
        # Emit one DATA statement per chunk of at most n values.
        for start in range(0, len(order), n):
            chunk = order[start:start + n]
            lines.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/"
                         % (array_name, index, start + 1,
                            start + len(chunk),
                            ','.join("%5r" % v for v in chunk)))
    return lines
1052
def format_integer_list(self, list, name, n=5):
    """Return an initialization of the python list in argument following
    the fortran syntax using the DATA keyword assignment, filling an
    array of name `name`. Rows are split in chunks of size n.

    Parameters
    ----------
    list : sequence of int
        Values to write. (Kept named ``list`` for backward compatibility
        with keyword callers even though it shadows the builtin; it is
        aliased to ``values`` internally so the builtin stays usable.)
    name : str
        Fortran array name.
    n : int
        Maximum number of values per DATA statement.

    Returns
    -------
    list of str
        Fortran DATA statements; empty list for empty input.
    """
    values = list  # avoid using the shadowed builtin name below

    ret_list = []
    for k in range(0, len(values), n):
        chunk = values[k:k + n]
        ret_list.append("DATA (%s(i),i=%3r,%3r) /%s/" % \
            (name, k + 1, min(k + n, len(values)),
             ','.join(["%5r" % i for i in chunk])))
    return ret_list
1064
def get_color_data_lines(self, matrix_element, n=6):
    """Return the color matrix definition lines for this matrix element. Split
    rows in chunks of size n.

    Each CF entry is written as numerator/denominator in Fortran
    double-precision scientific notation ('d' exponent). A trivial
    matrix (no color) yields the unit 1x1 matrix.
    """

    if not matrix_element.get('color_matrix'):
        # No color matrix: trivial single-entry color factor.
        return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"]
    else:
        ret_list = []
        my_cs = color.ColorString()
        for index, denominator in \
            enumerate(matrix_element.get('color_matrix').\
                                         get_line_denominators()):
            # First write the common denominator for this color matrix line
            #ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator))
            # Then write the numerators for the matrix elements
            num_list = matrix_element.get('color_matrix').\
                                        get_line_numerators(index, denominator)

            # Sanity check: numerators must be (float-representable)
            # integers before the exact int conversion below.
            assert all([int(i)==i for i in num_list])

            for k in range(0, len(num_list), n):
                # 15-digit scientific notation with Fortran 'd' exponent.
                ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \
                    (index + 1, k + 1, min(k + n, len(num_list)),
                     ','.join([("%.15e" % (int(i)/denominator)).replace('e','d') for i in num_list[k:k + n]])))

            # Comment line describing the color basis element of this row.
            my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index])
            ret_list.append("C %s" % repr(my_cs))
        return ret_list
1093 1094
def get_den_factor_line(self, matrix_element):
    """Return the IDEN (denominator factor) DATA line for this matrix
    element."""

    iden = matrix_element.get_denominator_factor()
    return "DATA IDEN/%2r/" % iden
1100
def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
    """Return the ICOLAMP matrix, showing which JAMPs contribute to
    which configs (diagrams).

    Side effect: sets self.proc_characteristic['single_color'] = False
    whenever some color-basis contribution is subleading in Nc.
    """

    ret_list = []

    booldict = {False: ".false.", True: ".true."}

    if not matrix_element.get('color_basis'):
        # No color, so only one color factor. Simply write a ".true."
        # for each config (i.e., each diagram with only 3 particle
        # vertices
        configs = len(mapconfigs)
        ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \
                        (num_matrix_element, configs,
                         ','.join([".true." for i in range(configs)])))
        return ret_list


    # There is a color basis - create a list showing which JAMPs have
    # contributions to which configs

    # Only want to include leading color flows, so find max_Nc
    color_basis = matrix_element.get('color_basis')

    # We don't want to include the power of Nc's which come from the potential
    # loop color trace (i.e. in the case of a closed fermion loop for example)
    # so we subtract it here when computing max_Nc
    max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in
                      color_basis.values()],[]))

    # Crate dictionary between diagram number and JAMP number
    diag_jamp = {}
    for ijamp, col_basis_elem in \
            enumerate(sorted(matrix_element.get('color_basis').keys())):
        for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]:
            # Only use color flows with Nc == max_Nc. However, notice that
            # we don't want to include the Nc power coming from the loop
            # in this counting.
            if (diag_tuple[4]-diag_tuple[5]) == max_Nc:
                diag_num = diag_tuple[0] + 1
                # Add this JAMP number to this diag_num
                diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \
                                      [ijamp+1]
            else:
                self.proc_characteristic['single_color'] = False

    # NOTE(review): relies on the loop above running at least once;
    # an empty color basis would leave `ijamp` undefined.
    colamps = ijamp + 1
    for iconfig, num_diag in enumerate(mapconfigs):
        if num_diag == 0:
            continue

        # List of True or False
        # NOTE(review): raises KeyError if a mapped diagram has no
        # leading-color JAMP entry — presumably never happens; confirm.
        bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)]
        # Add line
        ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \
                        (iconfig+1, num_matrix_element, colamps,
                         ','.join(["%s" % booldict[b] for b in \
                                   bool_list])))

    return ret_list
1162
def get_amp2_lines(self, matrix_element, config_map=None, replace_dict=None):
    """Return the amp2(i) = sum(amp for diag(i))^2 lines.

    Parameters
    ----------
    matrix_element : HelasMatrixElement-like
        Provides get_nexternal_ninitial() and get('diagrams').
    config_map : list of int, optional
        Topology/config index for each diagram; 0 entries are skipped.
        When given, amplitudes sharing a config are summed coherently
        into the AMP2 of the first diagram of that config.
        (Was a mutable default argument ``[]``; normalized to None —
        behavior is identical since both are falsy and it was never
        mutated.)
    replace_dict : unused
        Kept for interface compatibility with callers/overrides.

    Returns
    -------
    list of str
        Fortran assignment lines accumulating AMP2.
    """
    if config_map is None:
        config_map = []

    nexternal, ninitial = matrix_element.get_nexternal_ninitial()
    # Get minimum legs in a vertex (diagrams with larger vertices are
    # excluded from the per-diagram AMP2 output below).
    vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \
                 matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]]
    minvert = min(vert_list) if vert_list!=[] else 0

    ret_lines = []
    if config_map:
        # In this case, we need to sum up all amplitudes that have
        # identical topologies, as given by the config_map (which
        # gives the topology/config for each of the diagrams
        diagrams = matrix_element.get('diagrams')
        # Combine the diagrams with identical topologies
        config_to_diag_dict = {}
        for idiag, diag in enumerate(matrix_element.get('diagrams')):
            if config_map[idiag] == 0:
                continue
            try:
                config_to_diag_dict[config_map[idiag]].append(idiag)
            except KeyError:
                config_to_diag_dict[config_map[idiag]] = [idiag]
        # Write out the AMP2s summing squares of amplitudes belonging
        # to either the same diagram or different diagrams with
        # identical propagator properties. Note that we need to use
        # AMP2 number corresponding to the first diagram number used
        # for that AMP2.
        for config in sorted(config_to_diag_dict.keys()):

            line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \
                   {"num": (config_to_diag_dict[config][0] + 1)}

            amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \
                            sum([diagrams[idiag].get('amplitudes') for \
                                 idiag in config_to_diag_dict[config]], [])])

            # Not using \sum |M|^2 anymore since this creates troubles
            # when ckm is not diagonal due to the JIM mechanism.
            if '+' in amp:
                amp = "(%s)*dconjg(%s)" % (amp, amp)
            else:
                amp = "%s*dconjg(%s)" % (amp, amp)

            line = line + "%s" % (amp)
            ret_lines.append(line)
    else:
        for idiag, diag in enumerate(matrix_element.get('diagrams')):
            # Ignore any diagrams with 4-particle vertices.
            if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert:
                continue
            # Now write out the expression for AMP2, meaning the sum of
            # squared amplitudes belonging to the same diagram
            line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)}
            line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \
                              {"num": a.get('number')} for a in \
                              diag.get('amplitudes')])
            ret_lines.append(line)

    return ret_lines
1225 1226 #=========================================================================== 1227 # Returns the data statements initializing the coeffictients for the JAMP 1228 # decomposition. It is used when the JAMP initialization is decided to be 1229 # done through big arrays containing the projection coefficients. 1230 #===========================================================================
def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",
                   n=50, Nc_value=3):
    """Return the DATA statements setting the coefficients building the
    JAMPs out of the AMPs. Rows are split in bunches of size n.

    One can specify the color_basis from which the color amplitudes
    originate so that comments tell what color structure each JAMP
    corresponds to.

    Parameters
    ----------
    color_amplitudes : list of list
        One coefficient list per JAMP; each entry is
        ((frac_extra, frac, is_imag, Nc_power), amp_number).
    color_basis : mapping, optional
        If given, a comment with the color string is emitted per JAMP.
    tag_letter : str
        Suffix inserted in the Fortran array names.
    n : int
        Maximum values per DATA statement.
    Nc_value : int
        Numerical value substituted for Nc.

    Raises
    ------
    MadGraph5Error
        If color_amplitudes is not a non-empty list of lists.
    """

    if(not isinstance(color_amplitudes,list) or
       not (color_amplitudes and isinstance(color_amplitudes[0],list))):
        raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_coefs")

    res_list = []
    my_cs = color.ColorString()
    for index, coeff_list in enumerate(color_amplitudes):
        # Create the list of the complete numerical coefficient.
        coefs_list=[coefficient[0][0]*coefficient[0][1]*\
                    (fractions.Fraction(Nc_value)**coefficient[0][3]) for \
                    coefficient in coeff_list]
        # Create the list of the numbers of the contributing amplitudes.
        # Multiply by -1 for those which have an imaginary coefficient.
        ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \
                         for coefficient in coeff_list]
        # Find the common denominator as the lcm of all coefficient
        # denominators. The previous code used
        # reduce(fractions.gcd, coefs_list).denominator, which breaks on
        # Python 3 (`reduce` is no longer a builtin and fractions.gcd
        # was removed in 3.9); the gcd of fractions has exactly the lcm
        # of the denominators as its denominator, computed directly here.
        commondenom = 1
        for coef in coefs_list:
            den = fractions.Fraction(coef).denominator
            commondenom = commondenom * den // math.gcd(commondenom, den)
        num_list=[(coefficient*commondenom).numerator \
                  for coefficient in coefs_list]
        res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\
                        index+1,len(num_list)))
        res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\
                        index+1,commondenom))
        if color_basis:
            my_cs.from_immutable(sorted(color_basis.keys())[index])
            res_list.append("C %s" % repr(my_cs))
        for k in range(0, len(num_list), n):
            res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \
                (tag_letter,index + 1, k + 1, min(k + n, len(num_list)),
                 ','.join(["%6r" % i for i in num_list[k:k + n]])))
            res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \
                (tag_letter,index + 1, k + 1, min(k + n, len(num_list)),
                 ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]])))
    return res_list
1275 1276
def get_JAMP_lines_split_order(self, col_amps, split_order_amps,
                               split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
    """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
    defined as a matrix element or directly as a color_amplitudes dictionary.
    The split_order_amps specifies the group of amplitudes sharing the same
    amplitude orders which should be put in together in a given set of JAMPS.
    The split_order_amps is supposed to have the format of the second output
    of the function get_split_orders_mapping function in helas_objects.py.
    The split_order_names is optional (it should correspond to the process
    'split_orders' attribute) and only present to provide comments in the
    JAMP definitions in the code.

    Returns (lines, max_tmp) where max_tmp is the maximum number of
    TMP_JAMP contractions reported by get_JAMP_lines across orders.
    """

    # Let the user call get_JAMP_lines_split_order directly from a
    # matrix element or from a raw color-amplitude list.
    error_msg="Malformed '%s' argument passed to the "+\
              "get_JAMP_lines_split_order function: %s"%str(split_order_amps)
    if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
        color_amplitudes=col_amps.get_color_amplitudes()
    elif(isinstance(col_amps,list)):
        if(col_amps and isinstance(col_amps[0],list)):
            color_amplitudes=col_amps
        else:
            raise MadGraph5Error(error_msg%'col_amps')
    else:
        raise MadGraph5Error(error_msg%'col_amps')

    # Verify the sanity of the split_order_amps and split_order_names args
    if isinstance(split_order_amps,list):
        for elem in split_order_amps:
            if len(elem)!=2:
                raise MadGraph5Error(error_msg%'split_order_amps')
            # Check the first element of the two lists to make sure they are
            # integers, although in principle they should all be integers.
            if not isinstance(elem[0],tuple) or \
               not isinstance(elem[1],tuple) or \
               not isinstance(elem[0][0],int) or \
               not isinstance(elem[1][0],int):
                raise MadGraph5Error(error_msg%'split_order_amps')
    else:
        raise MadGraph5Error(error_msg%'split_order_amps')

    if not split_order_names is None:
        if isinstance(split_order_names,list):
            # Should specify the same number of names as there are elements
            # in the key of the split_order_amps.
            if len(split_order_names)!=len(split_order_amps[0][0]):
                raise MadGraph5Error(error_msg%'split_order_names')
            # Check the first element of the list to be a string
            if not isinstance(split_order_names[0],str):
                raise MadGraph5Error(error_msg%'split_order_names')
        else:
            raise MadGraph5Error(error_msg%'split_order_names')

    # Now scan all contributing orders to be individually computed and
    # construct the list of color_amplitudes for JAMP to be constructed
    # accordingly.
    res_list=[]
    max_tmp = 0
    for i, amp_order in enumerate(split_order_amps):
        # Keep, for each JAMP, only the contributions whose amplitude
        # number belongs to this order group.
        col_amps_order = []
        for jamp in color_amplitudes:
            col_amps_order.append([col_amp for col_amp in jamp if col_amp[1] in amp_order[1]])
        if split_order_names:
            res_list.append('C JAMPs contributing to orders '+' '.join(
                ['%s=%i'%order for order in zip(split_order_names,
                                                amp_order[0])]))
        # MatchBox also needs the leading-color JAMP variant.
        if self.opt['export_format'] in ['madloop_matchbox']:
            res_list.extend(self.get_JAMP_lines(col_amps_order,
                            JAMP_format="JAMP(%s,{0})".format(str(i+1)),
                            JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1)))[0])
        else:
            toadd, nb_tmp = self.get_JAMP_lines(col_amps_order,
                            JAMP_format="JAMP(%s,{0})".format(str(i+1)))
            res_list.extend(toadd)
            max_tmp = max(max_tmp, nb_tmp)

    return res_list, max_tmp
1353 1354
def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)",
                   split=-1):
    """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
    defined as a matrix element or directly as a color_amplitudes dictionary,
    Jamp_formatLC should be define to allow to add LeadingColor computation
    (usefull for MatchBox)
    The split argument defines how the JAMP lines should be split in order
    not to be too long.

    Returns (lines, n_tmp): the Fortran lines and the number of
    TMP_JAMP contractions introduced by the color-flow optimization
    (0 when the optimization is disabled).
    """

    # Let the user call get_JAMP_lines directly from a MatrixElement or from
    # the color amplitudes lists.
    if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
        color_amplitudes=col_amps.get_color_amplitudes()
    elif(isinstance(col_amps,list)):
        if(col_amps and isinstance(col_amps[0],list)):
            color_amplitudes=col_amps
        else:
            raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_lines")
    else:
        raise MadGraph5Error("Incorrect col_amps argument passed to get_JAMP_lines")

    # all_element maps (jamp_index, amp_number) -> complex coefficient;
    # it feeds the optional optimise_jamp pass below.
    all_element = {}
    res_list = []
    for i, coeff_list in enumerate(color_amplitudes):
        # It might happen that coeff_list is empty if this function was
        # called from get_JAMP_lines_split_order (i.e. if some color flow
        # does not contribute at all for a given order).
        # In this case we simply set it to 0.
        if coeff_list==[]:
            res_list.append(((JAMP_format+"=0D0") % str(i + 1)))
            continue
        # Break the JAMP definition into 'n=split' pieces to avoid having
        # arbitrarly long lines.
        first=True
        n = (len(coeff_list)+1 if split<=0 else split)
        while coeff_list!=[]:
            coefs=coeff_list[:n]
            coeff_list=coeff_list[n:]
            # Continuation chunks re-add the JAMP itself on the rhs.
            res = ((JAMP_format+"=") % str(i + 1)) + \
                  ((JAMP_format % str(i + 1)) if not first and split>0 else '')

            first=False
            # Optimization: if all contributions to that color basis element have
            # the same coefficient (up to a sign), put it in front
            list_fracs = [abs(coefficient[0][1]) for coefficient in coefs]
            common_factor = False
            diff_fracs = list(set(list_fracs))
            if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1:
                common_factor = True
                global_factor = diff_fracs[0]
                res = res + '%s(' % self.coeff(1, global_factor, False, 0)

            # loop for JAMP
            for (coefficient, amp_number) in coefs:
                if not coefficient:
                    continue
                # Numerical value of this contribution (Nc fixed to 3),
                # accumulated for the optimization pass.
                value = (1j if coefficient[2] else 1)* coefficient[0] * coefficient[1] * fractions.Fraction(3)**coefficient[3]
                if (i+1, amp_number) not in all_element:
                    all_element[(i+1, amp_number)] = value
                else:
                    all_element[(i+1, amp_number)] += value
                if common_factor:
                    res = (res + "%s" + AMP_format) % \
                                               (self.coeff(coefficient[0],
                                               coefficient[1] / abs(coefficient[1]),
                                               coefficient[2],
                                               coefficient[3]),
                                               str(amp_number))
                else:
                    res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0],
                                               coefficient[1],
                                               coefficient[2],
                                               coefficient[3]),
                                               str(amp_number))

            if common_factor:
                res = res + ')'
            res_list.append(res)

    # Decide whether to run the color-flow optimization (command-line
    # option overrides the class default).
    if 'jamp_optim' in self.cmd_options:
        jamp_optim = banner_mod.ConfigFile.format_variable(self.cmd_options['jamp_optim'], bool, 'jamp_optim')
    else:
        # class default
        jamp_optim = self.jamp_optim

    if not jamp_optim:
        return res_list, 0
    else:
        # NOTE(review): `saved` is not used afterwards.
        saved = list(res_list)

    if len(all_element) > 1000:
        logger.info("Computing Color-Flow optimization [%s term]", len(all_element))
        start_time = time.time()
    else:
        start_time = 0

    res_list = []

    self.myjamp_count = 0
    for key in all_element:
        all_element[key] = complex(all_element[key])
    new_mat, defs = self.optimise_jamp(all_element)
    if start_time:
        logger.info("Color-Flow passed to %s term in %ss. Introduce %i contraction", len(new_mat), int(time.time()-start_time), len(defs))

    # Format a numeric factor as a Fortran double-precision literal.
    # NOTE(review): this local helper shadows the builtin `format`.
    def format(frac):
        if isinstance(frac, Fraction):
            if frac.denominator == 1:
                return str(frac.numerator)
            else:
                return "%id0/%id0" % (frac.numerator, frac.denominator)
        elif frac.real == frac:
            return ('%.15e' % frac.real).replace('e','d')
        else:
            return ('(%.15e,%.15e)' % (frac.real, frac.imag)).replace('e','d')

    # Emit the TMP_JAMP contractions found by optimise_jamp; negative
    # indices denote previously-defined TMP_JAMP entries.
    for i, amp1, amp2, frac, nb in defs:
        if amp1 > 0:
            amp1 = AMP_format % amp1
        else:
            amp1 = "TMP_JAMP(%d)" % -amp1
        if amp2 > 0:
            amp2 = AMP_format % amp2
        else:
            amp2 = "TMP_JAMP(%d)" % -amp2

        if frac not in [1., -1]:
            res_list.append(' TMP_JAMP(%d) = %s + (%s) * %s ! used %d times' % (i,amp1, format(frac), amp2, nb))
        elif frac == 1.:
            res_list.append(' TMP_JAMP(%d) = %s + %s ! used %d times' % (i,amp1, amp2, nb))
        else:
            res_list.append(' TMP_JAMP(%d) = %s - %s ! used %d times' % (i,amp1, amp2, nb))

    # Rebuild each JAMP from the optimized matrix.
    jamp_res = collections.defaultdict(list)
    max_jamp=0
    for (jamp, var), factor in new_mat.items():
        if var > 0:
            name = AMP_format % var
        else:
            name = "TMP_JAMP(%d)" % -var
        if factor not in [1.]:
            jamp_res[jamp].append("(%s)*%s" % (format(factor), name))
        elif factor ==1:
            jamp_res[jamp].append("%s" % (name))
        max_jamp = max(max_jamp, jamp)

    for i in range(1,max_jamp+1):
        name = JAMP_format % i
        if not jamp_res[i]:
            res_list.append(" %s = 0d0" %(name))
        else:
            res_list.append(" %s = %s" %(name, '+'.join(jamp_res[i])))

    return res_list, len(defs)
1520
def optimise_jamp(self, all_element, nb_line=0, nb_col=0, added=0):
    """ optimise problem of type Y = A X
        A is a matrix (all_element)
        X is the fortran name of the input.
        The code iteratively add sub-expression jtemp[sub_add]
        and recall itself (this is add to the X size)

    Parameters
    ----------
    all_element : dict
        (row, col) -> coefficient sparse matrix; MUTATED in place
        (pairs of columns are contracted into new negative-index
        columns -added, -added-1, ...).
    nb_line, nb_col : int
        Matrix dimensions; computed from the keys when nb_line is 0.
    added : int
        Number of contractions already introduced by outer calls.

    Returns
    -------
    (dict, list)
        The optimized matrix and the list of contraction definitions
        (index, col1, col2, ratio, use_count), outermost first.
    """
    self.myjamp_count +=1

    # Derive matrix dimensions from the sparse keys on the first call.
    if not nb_line:
        for i,j in all_element:
            if i+1 > nb_line:
                nb_line = i+1
            if j+1> nb_col:
                nb_col = j+1

    # Count, for every column pair (j1,j2), how many rows share the
    # same ratio R = A[i,j2]/A[i,j1]; keep the most frequent pattern(s).
    max_count = 0
    all_index = []
    operation = collections.defaultdict(lambda: collections.defaultdict(int))
    for i in range(nb_line):
        # Negative columns are the TMP contractions added so far.
        for j1 in range(-added, nb_col):
            v1 = all_element.get((i,j1), 0)
            if not v1:
                continue
            for j2 in range(j1+1, nb_col):
                R = all_element.get((i,j2), 0)/v1
                if not R:
                    continue

                operation[(j1,j2)][R] +=1
                if operation[(j1,j2)][R] > max_count:
                    max_count = operation[(j1,j2)][R]
                    all_index = [(j1,j2, R)]
                elif operation[(j1,j2)][R] == max_count:
                    all_index.append((j1,j2, R))
    # No pattern shared by at least two rows: nothing left to contract.
    if max_count <= 1:
        return all_element, []
    # Replace each detected (j1 + R*j2) pattern by a new column -added.
    to_add = []
    for index in all_index:
        j1,j2,R = index
        first = True
        for i in range(nb_line):
            v1 = all_element.get((i,j1), 0)
            v2 = all_element.get((i,j2), 0)
            if not v1 or not v2:
                continue
            if v2/v1 == R:
                if first:
                    first = False
                    added +=1
                    to_add.append((added,j1,j2,R, max_count))

                all_element[(i,-added)] = v1
                del all_element[(i,j1)] #= 0
                del all_element[(i,j2)] #= 0

    logger.log(5,"Define %d new shortcut reused %d times", len(to_add), max_count)
    # Recurse until no pattern repeats; prepend this level's definitions
    # so they are emitted before the contractions that use them.
    new_element, new_def = self.optimise_jamp(all_element, nb_line=nb_line, nb_col=nb_col, added=added)
    for one_def in to_add:
        new_def.insert(0, one_def)
    return new_element, new_def
1585 1586 1587 1588 1589
def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
    """Generate the PDF lines for the auto_dsig.f file.

    Parameters
    ----------
    matrix_element : HelasMatrixElement-like
        Provides get('processes'); each process provides base_string()
        and get_initial_pdg().
    ninitial : int
        Number of initial-state particles (1 = decay, no PDFs).
    subproc_group : bool
        When True, beam indices are wrapped in IB(...) lookups.

    Returns
    -------
    (str, str, str)
        pdf_definition_lines, pdf_data_lines, pdf_lines, each with the
        trailing newline stripped.
    """

    processes = matrix_element.get('processes')
    model = processes[0].get('model')

    pdf_definition_lines = ""
    pdf_data_lines = ""
    pdf_lines = ""

    if ninitial == 1:
        # Decay process: no PDF convolution, every channel weight is 1.
        pdf_lines = "PD(0) = 0d0\nIPROC = 0\n"
        for i, proc in enumerate(processes):
            process_line = proc.base_string()
            pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
            pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n"
            pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n"
    else:
        # Pick out all initial state particles for the two beams
        initial_states = [sorted(list(set([p.get_initial_pdg(1) for \
                                         p in processes]))),
                          sorted(list(set([p.get_initial_pdg(2) for \
                                         p in processes])))]

        # Prepare all variable names (sanitized so they are valid
        # Fortran identifiers: ~ -> x, + -> p, - -> m).
        pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \
                          sum(initial_states,[])])
        for key,val in pdf_codes.items():
            pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m')

        # Set conversion from PDG code to number used in PDF calls
        pdgtopdf = {21: 0, 22: 7}

        # Fill in missing entries of pdgtopdf
        for pdg in sum(initial_states,[]):
            if not pdg in pdgtopdf and not pdg in list(pdgtopdf.values()):
                pdgtopdf[pdg] = pdg
            elif pdg not in pdgtopdf and pdg in list(pdgtopdf.values()):
                # If any particle has pdg code 7, we need to use something else
                pdgtopdf[pdg] = 6000000 + pdg

        # Get PDF variable declarations for all initial states
        for i in [0,1]:
            pdf_definition_lines += "DOUBLE PRECISION " + \
                                   ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                             for pdg in \
                                             initial_states[i]]) + \
                                             "\n"

        # Get PDF data lines for all initial states
        for i in [0,1]:
            pdf_data_lines += "DATA " + \
                              ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                        for pdg in initial_states[i]]) + \
                              "/%d*1D0/" % len(initial_states[i]) + \
                              "\n"

        # Get PDF lines for all different initial states
        for i, init_states in enumerate(initial_states):
            if subproc_group:
                pdf_lines = pdf_lines + \
                       "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \
                             % (i + 1, i + 1)
            else:
                pdf_lines = pdf_lines + \
                       "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \
                             % (i + 1, i + 1)

            for nbi,initial_state in enumerate(init_states):
                if initial_state in list(pdf_codes.keys()):
                    if subproc_group:
                        pdf_lines = pdf_lines + \
                                    ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP, 1," + \
                                     "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \
                                     (pdf_codes[initial_state],
                                      i + 1, i + 1, pdgtopdf[initial_state],
                                      i + 1, i + 1)
                    else:
                        pdf_lines = pdf_lines + \
                                    ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP, %d," + \
                                     "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \
                                     (pdf_codes[initial_state],
                                      i + 1, i + 1, pdgtopdf[initial_state],
                                      i + 1,
                                      i + 1, i + 1)
            pdf_lines = pdf_lines + "ENDIF\n"

        # Add up PDFs for the different initial state particles
        pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n"
        for proc in processes:
            process_line = proc.base_string()
            pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
            pdf_lines = pdf_lines + "\nPD(IPROC)="
            for ibeam in [1, 2]:
                initial_state = proc.get_initial_pdg(ibeam)
                if initial_state in list(pdf_codes.keys()):
                    pdf_lines = pdf_lines + "%s%d*" % \
                                (pdf_codes[initial_state], ibeam)
                else:
                    pdf_lines = pdf_lines + "1d0*"
            # Remove last "*" from pdf_lines
            pdf_lines = pdf_lines[:-1] + "\n"
            pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n"

    # Remove last line break from the return variables
    return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1696 1697 #=========================================================================== 1698 # write_props_file 1699 #===========================================================================
1700 - def write_props_file(self, writer, matrix_element, s_and_t_channels):
1701 """Write the props.inc file for MadEvent. Needs input from 1702 write_configs_file.""" 1703 1704 lines = [] 1705 1706 particle_dict = matrix_element.get('processes')[0].get('model').\ 1707 get('particle_dict') 1708 1709 for iconf, configs in enumerate(s_and_t_channels): 1710 for vertex in configs[0] + configs[1][:-1]: 1711 leg = vertex.get('legs')[-1] 1712 if leg.get('id') not in particle_dict: 1713 # Fake propagator used in multiparticle vertices 1714 mass = 'zero' 1715 width = 'zero' 1716 pow_part = 0 1717 else: 1718 particle = particle_dict[leg.get('id')] 1719 # Get mass 1720 if particle.get('mass').lower() == 'zero': 1721 mass = particle.get('mass') 1722 else: 1723 mass = "abs(%s)" % particle.get('mass') 1724 # Get width 1725 if particle.get('width').lower() == 'zero': 1726 width = particle.get('width') 1727 else: 1728 width = "abs(%s)" % particle.get('width') 1729 1730 pow_part = 1 + int(particle.is_boson()) 1731 1732 lines.append("prmass(%d,%d) = %s" % \ 1733 (leg.get('number'), iconf + 1, mass)) 1734 lines.append("prwidth(%d,%d) = %s" % \ 1735 (leg.get('number'), iconf + 1, width)) 1736 lines.append("pow(%d,%d) = %d" % \ 1737 (leg.get('number'), iconf + 1, pow_part)) 1738 1739 # Write the file 1740 writer.writelines(lines) 1741 1742 return True
1743 1744 #=========================================================================== 1745 # write_configs_file 1746 #===========================================================================
1747 - def write_configs_file(self, writer, matrix_element):
1748 """Write the configs.inc file for MadEvent""" 1749 1750 # Extract number of external particles 1751 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1752 1753 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 1754 mapconfigs = [c[0] for c in configs] 1755 model = matrix_element.get('processes')[0].get('model') 1756 return mapconfigs, self.write_configs_file_from_diagrams(writer, 1757 [[c[1]] for c in configs], 1758 mapconfigs, 1759 nexternal, ninitial, 1760 model)
1761 1762 #=========================================================================== 1763 # write_configs_file_from_diagrams 1764 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses."""

        lines = []

        # Collected (s-channels, t-channels) per written config; returned to
        # the caller so write_props_file can reuse the decomposition.
        s_and_t_channels = []

        # Maximum vertex multiplicity per config (from the first contributing
        # diagram); used below to drop configs with >3-point vertices.
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        # Number of subprocesses
        nsubprocs = len(configs[0])

        # Running count of configs actually written (configs can be skipped).
        nconfigs = 0

        # Dummy PDG code used for propagators with no real particle id.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any(vert > minvert for vert in [d for d in helas_diags if d]\
                   [0].get_vertex_leg_numbers()) :
                # Only 3-vertices allowed in configs.inc except for vertices
                # which originate from a shrunk loop.
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model,
                                                           new_pdg))
                else:
                    # Placeholder shared list; resized below once the real
                    # s-channel length is known.
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = list(zip(*[s for s,t in stchannels]))
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                     mapconfigs[iconfig]))

            for verts in allchannels:
                # s-channel entries are per-subprocess tuples; t-channel
                # entries are single vertices.
                if verts in schannels:
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, len(daughters),
                              ",".join([str(d) for d in daughters])))
                if verts in schannels:
                    # One PDG per subprocess (0 where the subprocess does not
                    # contribute to this config).
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join([str(d) for d in pdgs])))
                    lines.append("data tprid(%d,%d)/0/" % \
                                 (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append("data tprid(%d,%d)/%d/" % \
                                 (last_leg.get('number'), nconfigs,
                                  abs(last_leg.get('id'))))
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        lines.append("# Number of configs")
        lines.append("data mapconfig(0)/%d/" % nconfigs)

        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
1883 1884 #=========================================================================== 1885 # Global helper methods 1886 #=========================================================================== 1887
1888 - def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1889 """Returns a nicely formatted string for the coefficients in JAMP lines""" 1890 1891 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power 1892 1893 if total_coeff == 1: 1894 if is_imaginary: 1895 return '+imag1*' 1896 else: 1897 return '+' 1898 elif total_coeff == -1: 1899 if is_imaginary: 1900 return '-imag1*' 1901 else: 1902 return '-' 1903 1904 res_str = '%+iD0' % total_coeff.numerator 1905 1906 if total_coeff.denominator != 1: 1907 # Check if total_coeff is an integer 1908 res_str = res_str + '/%iD0' % total_coeff.denominator 1909 1910 if is_imaginary: 1911 res_str = res_str + '*imag1' 1912 1913 return res_str + '*'
1914 1915
1916 - def set_fortran_compiler(self, default_compiler, force=False):
1917 """Set compiler based on what's available on the system""" 1918 1919 # Check for compiler 1920 if default_compiler['fortran'] and misc.which(default_compiler['fortran']): 1921 f77_compiler = default_compiler['fortran'] 1922 elif misc.which('gfortran'): 1923 f77_compiler = 'gfortran' 1924 elif misc.which('g77'): 1925 f77_compiler = 'g77' 1926 elif misc.which('f77'): 1927 f77_compiler = 'f77' 1928 elif default_compiler['fortran']: 1929 logger.warning('No Fortran Compiler detected! Please install one') 1930 f77_compiler = default_compiler['fortran'] # maybe misc fail so try with it 1931 else: 1932 raise MadGraph5Error('No Fortran Compiler detected! Please install one') 1933 logger.info('Use Fortran compiler ' + f77_compiler) 1934 1935 1936 # Check for compiler. 1. set default. 1937 if default_compiler['f2py']: 1938 f2py_compiler = default_compiler['f2py'] 1939 else: 1940 f2py_compiler = '' 1941 # Try to find the correct one. 1942 if default_compiler['f2py'] and misc.which(default_compiler['f2py']): 1943 f2py_compiler = default_compiler['f2py'] 1944 elif misc.which('f2py'): 1945 f2py_compiler = 'f2py' 1946 elif sys.version_info[1] == 6: 1947 if misc.which('f2py-2.6'): 1948 f2py_compiler = 'f2py-2.6' 1949 elif misc.which('f2py2.6'): 1950 f2py_compiler = 'f2py2.6' 1951 elif sys.version_info[1] == 7: 1952 if misc.which('f2py-2.7'): 1953 f2py_compiler = 'f2py-2.7' 1954 elif misc.which('f2py2.7'): 1955 f2py_compiler = 'f2py2.7' 1956 1957 to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler} 1958 1959 1960 self.replace_make_opt_f_compiler(to_replace) 1961 # Replace also for Template but not for cluster 1962 if 'MADGRAPH_DATA' not in os.environ and ReadWrite: 1963 self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO')) 1964 1965 return f77_compiler
1966 1967 # an alias for backward compatibility 1968 set_compiler = set_fortran_compiler 1969 1970
1971 - def set_cpp_compiler(self, default_compiler, force=False):
1972 """Set compiler based on what's available on the system""" 1973 1974 # Check for compiler 1975 if default_compiler and misc.which(default_compiler): 1976 compiler = default_compiler 1977 elif misc.which('g++'): 1978 #check if clang version 1979 p = misc.Popen(['g++', '--version'], stdout=subprocess.PIPE, 1980 stderr=subprocess.PIPE) 1981 out, _ = p.communicate() 1982 out = out.decode() 1983 if 'clang' in str(out) and misc.which('clang'): 1984 compiler = 'clang' 1985 else: 1986 compiler = 'g++' 1987 elif misc.which('c++'): 1988 compiler = 'c++' 1989 elif misc.which('clang'): 1990 compiler = 'clang' 1991 elif default_compiler: 1992 logger.warning('No c++ Compiler detected! Please install one') 1993 compiler = default_compiler # maybe misc fail so try with it 1994 else: 1995 raise MadGraph5Error('No c++ Compiler detected! Please install one') 1996 logger.info('Use c++ compiler ' + compiler) 1997 self.replace_make_opt_c_compiler(compiler) 1998 # Replace also for Template but not for cluster 1999 if 'MADGRAPH_DATA' not in os.environ and ReadWrite and \ 2000 not __debug__ and not os.path.exists(pjoin(MG5DIR,'bin','create_release.py')): 2001 self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO')) 2002 2003 return compiler
2004 2005
2006 - def replace_make_opt_f_compiler(self, compilers, root_dir = ""):
2007 """Set FC=compiler in Source/make_opts""" 2008 2009 assert isinstance(compilers, dict) 2010 2011 mod = False #avoid to rewrite the file if not needed 2012 if not root_dir: 2013 root_dir = self.dir_path 2014 2015 compiler= compilers['fortran'] 2016 f2py_compiler = compilers['f2py'] 2017 if not f2py_compiler: 2018 f2py_compiler = 'f2py' 2019 for_update= {'DEFAULT_F_COMPILER':compiler, 2020 'DEFAULT_F2PY_COMPILER':f2py_compiler} 2021 make_opts = pjoin(root_dir, 'Source', 'make_opts') 2022 2023 try: 2024 common_run_interface.CommonRunCmd.update_make_opts_full( 2025 make_opts, for_update) 2026 except IOError: 2027 if root_dir == self.dir_path: 2028 logger.info('Fail to set compiler. Trying to continue anyway.')
2029
2030 - def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
2031 """Set CXX=compiler in Source/make_opts. 2032 The version is also checked, in order to set some extra flags 2033 if the compiler is clang (on MACOS)""" 2034 2035 is_clang = misc.detect_if_cpp_compiler_is_clang(compiler) 2036 is_lc = misc.detect_cpp_std_lib_dependence(compiler) == '-lc++' 2037 2038 2039 # list of the variable to set in the make_opts file 2040 for_update= {'DEFAULT_CPP_COMPILER':compiler, 2041 'MACFLAG':'-mmacosx-version-min=10.7' if is_clang and is_lc else '', 2042 'STDLIB': '-lc++' if is_lc else '-lstdc++', 2043 'STDLIB_FLAG': '-stdlib=libc++' if is_lc and is_clang else '' 2044 } 2045 2046 # for MOJAVE remove the MACFLAG: 2047 if is_clang: 2048 import platform 2049 version, _, _ = platform.mac_ver() 2050 if not version:# not linux 2051 version = 14 # set version to remove MACFLAG 2052 else: 2053 majversion, version = [int(x) for x in version.split('.',3)[:2]] 2054 2055 if majversion >= 11 or (majversion ==10 and version >= 14): 2056 for_update['MACFLAG'] = '-mmacosx-version-min=10.8' if is_lc else '' 2057 2058 if not root_dir: 2059 root_dir = self.dir_path 2060 make_opts = pjoin(root_dir, 'Source', 'make_opts') 2061 2062 try: 2063 common_run_interface.CommonRunCmd.update_make_opts_full( 2064 make_opts, for_update) 2065 except IOError: 2066 if root_dir == self.dir_path: 2067 logger.info('Fail to set compiler. Trying to continue anyway.') 2068 2069 return
2070
2071 #=============================================================================== 2072 # ProcessExporterFortranSA 2073 #=============================================================================== 2074 -class ProcessExporterFortranSA(ProcessExporterFortran):
2075 """Class to take care of exporting a set of matrix elements to 2076 MadGraph v4 StandAlone format.""" 2077 2078 matrix_template = "matrix_standalone_v4.inc" 2079
2080 - def __init__(self, *args,**opts):
2081 """add the format information compare to standard init""" 2082 2083 if 'format' in opts: 2084 self.format = opts['format'] 2085 del opts['format'] 2086 else: 2087 self.format = 'standalone' 2088 2089 self.prefix_info = {} 2090 ProcessExporterFortran.__init__(self, *args, **opts)
2091
    def copy_template(self, model):
        """Additional actions needed for setup of Template.

        Creates the standalone directory skeleton, copies version
        information, the subprocess makefile, the check_sa driver and the
        Source make_opts/makefile.  Does nothing if dir_path already exists.
        """

        #First copy the full template tree if dir_path doesn't exit
        if os.path.isdir(self.dir_path):
            return

        logger.info('initialize a new standalone directory: %s' % \
                    os.path.basename(self.dir_path))
        temp_dir = pjoin(self.mgme_dir, 'Template/LO')

        # Create the directory structure
        os.mkdir(self.dir_path)
        os.mkdir(pjoin(self.dir_path, 'Source'))
        os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL'))
        os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS'))
        os.mkdir(pjoin(self.dir_path, 'SubProcesses'))
        os.mkdir(pjoin(self.dir_path, 'bin'))
        os.mkdir(pjoin(self.dir_path, 'bin', 'internal'))
        os.mkdir(pjoin(self.dir_path, 'lib'))
        os.mkdir(pjoin(self.dir_path, 'Cards'))

        # Information at top-level
        #Write version info
        shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path)
        try:
            shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path)
        except IOError:
            # No MG4 version file around (pure MG5 install): synthesize one
            # from the MG5 package information.
            MG5_version = misc.get_pkg_info()
            open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \
                "5." + MG5_version['version'])


        # Add file in SubProcesses
        shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'),
                    pjoin(self.dir_path, 'SubProcesses', 'makefileP'))

        if self.format == 'standalone':
            shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'),
                        pjoin(self.dir_path, 'SubProcesses', 'check_sa.f'))

        # Add file in Source
        shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'),
                    pjoin(self.dir_path, 'Source'))
        # add the makefile
        filename = pjoin(self.dir_path, 'Source', 'makefile')
        self.write_source_makefile(writers.FileWriter(filename))
2140 2141 #=========================================================================== 2142 # export model files 2143 #===========================================================================
2144 - def export_model_files(self, model_path):
2145 """export the model dependent files for V4 model""" 2146 2147 super(ProcessExporterFortranSA,self).export_model_files(model_path) 2148 # Add the routine update_as_param in v4 model 2149 # This is a function created in the UFO 2150 text=""" 2151 subroutine update_as_param() 2152 call setpara('param_card.dat',.false.) 2153 return 2154 end 2155 """ 2156 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 2157 ff.write(text) 2158 ff.close() 2159 2160 text = open(pjoin(self.dir_path,'SubProcesses','check_sa.f')).read() 2161 text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)') 2162 fsock = open(pjoin(self.dir_path,'SubProcesses','check_sa.f'), 'w') 2163 fsock.write(text) 2164 fsock.close() 2165 2166 self.make_model_symbolic_link()
2167 2168 #=========================================================================== 2169 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 2170 #===========================================================================
2171 - def write_procdef_mg5(self, file_pos, modelname, process_str):
2172 """ write an equivalent of the MG4 proc_card in order that all the Madevent 2173 Perl script of MadEvent4 are still working properly for pure MG5 run. 2174 Not needed for StandAlone so just return 2175 """ 2176 2177 return
2178 2179 2180 #=========================================================================== 2181 # Make the Helas and Model directories for Standalone directory 2182 #===========================================================================
2183 - def make(self):
2184 """Run make in the DHELAS and MODEL directories, to set up 2185 everything for running standalone 2186 """ 2187 2188 source_dir = pjoin(self.dir_path, "Source") 2189 logger.info("Running make for Helas") 2190 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2191 logger.info("Running make for Model") 2192 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran')
2193 2194 #=========================================================================== 2195 # Create proc_card_mg5.dat for Standalone directory 2196 #===========================================================================
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize Standalone MG4 directory by
           generation proc_card_mg5.dat
           generate a global makefile

        Also selects the compilers, compiles DHELAS/MODEL, and writes either
        the f2py splitter files (when prefixed output was requested) or a
        global SubProcesses makefile.
        """

        compiler = {'fortran': mg5options['fortran_compiler'],
                    'cpp': mg5options['cpp_compiler'],
                    'f2py': mg5options['f2py_compiler']}

        self.compiler_choice(compiler)
        self.make()

        # Write command history as proc_card_mg5
        if history and os.path.isdir(pjoin(self.dir_path, 'Cards')):
            output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        ProcessExporterFortran.finalize(self, matrix_elements,
                                        history, mg5options, flaglist)
        # Make the output importable as a python package.
        open(pjoin(self.dir_path,'__init__.py'),'w')
        open(pjoin(self.dir_path,'SubProcesses','__init__.py'),'w')

        # Deliberately disabled code path kept for reference (reweight mode).
        if False:#'mode' in self.opt and self.opt['mode'] == "reweight":
            #add the module to hande the NLO weight
            files.copytree(pjoin(MG5DIR, 'Template', 'RWGTNLO'),
                           pjoin(self.dir_path, 'Source'))
            files.copytree(pjoin(MG5DIR, 'Template', 'NLO', 'Source', 'PDF'),
                           pjoin(self.dir_path, 'Source', 'PDF'))
            self.write_pdf_opendata()

        if self.prefix_info:
            # Prefixed matrix elements: provide the f2py dispatch layer.
            self.write_f2py_splitter()
            self.write_f2py_makefile()
            self.write_f2py_check_sa(matrix_elements,
                                     pjoin(self.dir_path,'SubProcesses','check_sa.py'))
        else:
            # create a single makefile to compile all the subprocesses
            text = '''\n# For python linking (require f2py part of numpy)\nifeq ($(origin MENUM),undefined)\n MENUM=2\nendif\n'''
            deppython = ''
            # One rule per Pxxxx subprocess directory, plus a global 'all'.
            for Pdir in os.listdir(pjoin(self.dir_path,'SubProcesses')):
                if os.path.isdir(pjoin(self.dir_path, 'SubProcesses', Pdir)):
                    text += '%(0)s/matrix$(MENUM)py.so:\n\tcd %(0)s;make matrix$(MENUM)py.so\n'% {'0': Pdir}
                    deppython += ' %(0)s/matrix$(MENUM)py.so ' % {'0': Pdir}
            text+='all: %s\n\techo \'done\'' % deppython

            ff = open(pjoin(self.dir_path, 'SubProcesses', 'makefile'),'a')
            ff.write(text)
            ff.close()
2246
    def write_f2py_splitter(self):
        """write a function to call the correct matrix element.

        Generates SubProcesses/all_matrix.f containing smatrixhel (which
        dispatches on the pdg list / process id to the prefixed per-process
        smatrixhel routines), plus f2py-facing helpers (INITIALISE,
        CHANGE_PARA, update_all_coup, get_pdg_order, get_prefix)."""

        # NOTE(review): the Fortran template below is reproduced from the
        # original source; FortranWriter re-formats it on output.
        template = """
%(python_information)s
      subroutine smatrixhel(pdgs, procid, npdg, p, ALPHAS, SCALE2, nhel, ANS)
      IMPLICIT NONE
C     ALPHAS is given at scale2 (SHOULD be different of 0 for loop induced, ignore for LO)

CF2PY double precision, intent(in), dimension(0:3,npdg) :: p
CF2PY integer, intent(in), dimension(npdg) :: pdgs
CF2PY integer, intent(in):: procid
CF2PY integer, intent(in) :: npdg
CF2PY double precision, intent(out) :: ANS
CF2PY double precision, intent(in) :: ALPHAS
CF2PY double precision, intent(in) :: SCALE2
      integer pdgs(*)
      integer npdg, nhel, procid
      double precision p(*)
      double precision ANS, ALPHAS, PI,SCALE2
      include 'coupl.inc'

      PI = 3.141592653589793D0
      G = 2* DSQRT(ALPHAS*PI)
      CALL UPDATE_AS_PARAM()
c     if (scale2.ne.0d0) stop 1

%(smatrixhel)s

      return
      end

      SUBROUTINE INITIALISE(PATH)
C     ROUTINE FOR F2PY to read the benchmark point.
      IMPLICIT NONE
      CHARACTER*512 PATH
CF2PY INTENT(IN) :: PATH
      CALL SETPARA(PATH) !first call to setup the paramaters
      RETURN
      END


      subroutine CHANGE_PARA(name, value)
      implicit none
CF2PY intent(in) :: name
CF2PY intent(in) :: value

      character*512 name
      double precision value

%(helreset_def)s

      include '../Source/MODEL/input.inc'
      include '../Source/MODEL/coupl.inc'

%(helreset_setup)s

      SELECT CASE (name)
%(parameter_setup)s
      CASE DEFAULT
          write(*,*) 'no parameter matching', name, value
      END SELECT

      return
      end

      subroutine update_all_coup()
      implicit none
      call coup()
      return
      end


      subroutine get_pdg_order(PDG, ALLPROC)
      IMPLICIT NONE
CF2PY INTEGER, intent(out) :: PDG(%(nb_me)i,%(maxpart)i)
CF2PY INTEGER, intent(out) :: ALLPROC(%(nb_me)i)
      INTEGER PDG(%(nb_me)i,%(maxpart)i), PDGS(%(nb_me)i,%(maxpart)i)
      INTEGER ALLPROC(%(nb_me)i),PIDs(%(nb_me)i)
      DATA PDGS/ %(pdgs)s /
      DATA PIDS/ %(pids)s /
      PDG = PDGS
      ALLPROC = PIDS
      RETURN
      END

      subroutine get_prefix(PREFIX)
      IMPLICIT NONE
CF2PY CHARACTER*20, intent(out) :: PREFIX(%(nb_me)i)
      character*20 PREFIX(%(nb_me)i),PREF(%(nb_me)i)
      DATA PREF / '%(prefix)s'/
      PREFIX = PREF
      RETURN
      END


"""

        # prefix_info maps (pdg-tuple, process id) -> [prefix, tag].
        allids = list(self.prefix_info.keys())
        allprefix = [self.prefix_info[key][0] for key in allids]
        min_nexternal = min([len(ids[0]) for ids in allids])
        max_nexternal = max([len(ids[0]) for ids in allids])

        # Human-readable index of the available processes (kept as comments
        # in the generated file).
        info = []
        for (key, pid), (prefix, tag) in self.prefix_info.items():
            info.append('#PY %s : %s # %s %s' % (tag, key, prefix, pid))


        # Build the dispatch: first on number of external legs, then on the
        # exact pdg list and process id.
        text = []
        for n_ext in range(min_nexternal, max_nexternal+1):
            current_id = [ids[0] for ids in allids if len(ids[0])==n_ext]
            current_pid = [ids[1] for ids in allids if len(ids[0])==n_ext]
            if not current_id:
                continue
            if min_nexternal != max_nexternal:
                if n_ext == min_nexternal:
                    text.append(' if (npdg.eq.%i)then' % n_ext)
                else:
                    text.append(' else if (npdg.eq.%i)then' % n_ext)
            for ii,pdgs in enumerate(current_id):
                pid = current_pid[ii]
                condition = '.and.'.join(['%i.eq.pdgs(%i)' %(pdg, i+1) for i, pdg in enumerate(pdgs)])
                if ii==0:
                    text.append( ' if(%s.and.(procid.le.0.or.procid.eq.%d)) then ! %i' % (condition, pid, ii))
                else:
                    text.append( ' else if(%s.and.(procid.le.0.or.procid.eq.%d)) then ! %i' % (condition,pid,ii))
                text.append(' call %ssmatrixhel(p, nhel, ans)' % self.prefix_info[(pdgs,pid)][0])
            text.append(' endif')
        #close the function
        if min_nexternal != max_nexternal:
            text.append('endif')

        # One SELECT CASE entry per external model parameter.
        params = self.get_model_parameter(self.model)
        parameter_setup =[]
        for key, var in params.items():
            parameter_setup.append(' CASE ("%s")\n %s = value'
                                   % (key, var))

        # part for the resetting of the helicity
        helreset_def = []
        helreset_setup = []
        for prefix in set(allprefix):
            helreset_setup.append(' %shelreset = .true. ' % prefix)
            helreset_def.append(' logical %shelreset \n common /%shelreset/ %shelreset' % (prefix, prefix, prefix))


        formatting = {'python_information':'\n'.join(info),
                      'smatrixhel': '\n'.join(text),
                      'maxpart': max_nexternal,
                      'nb_me': len(allids),
                      # DATA statement fills PDG(nb_me, maxpart) column-major:
                      # for each leg slot i, loop over all matrix elements.
                      'pdgs': ','.join(str(pdg[i]) if i<len(pdg) else '0'
                                       for i in range(max_nexternal) for (pdg,pid) in allids),
                      'prefix':'\',\''.join(allprefix),
                      'pids': ','.join(str(pid) for (pdg,pid) in allids),
                      'parameter_setup': '\n'.join(parameter_setup),
                      'helreset_def' : '\n'.join(helreset_def),
                      'helreset_setup' : '\n'.join(helreset_setup),
                      }
        formatting['lenprefix'] = len(formatting['prefix'])
        text = template % formatting
        fsock = writers.FortranWriter(pjoin(self.dir_path, 'SubProcesses', 'all_matrix.f'),'w')
        fsock.writelines(text)
        fsock.close()
2410
2411 - def get_model_parameter(self, model):
2412 """ returns all the model parameter 2413 """ 2414 params = {} 2415 for p in model.get('parameters')[('external',)]: 2416 name = p.name 2417 nopref = name[4:] if name.startswith('mdl_') else name 2418 params[nopref] = name 2419 2420 block = p.lhablock 2421 lha = '_'.join([str(i) for i in p.lhacode]) 2422 params['%s_%s' % (block.upper(), lha)] = name 2423 2424 return params
2425 2426 2427 2428 2429
2430 - def write_f2py_check_sa(self, matrix_element, writer):
2431 """ Write the general check_sa.py in SubProcesses that calls all processes successively.""" 2432 # To be implemented. It is just an example file, i.e. not crucial. 2433 return
2434
2435 - def write_f2py_makefile(self):
2436 """ """ 2437 # Add file in SubProcesses 2438 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f2py'), 2439 pjoin(self.dir_path, 'SubProcesses', 'makefile'))
2440
2441 - def create_MA5_cards(self,*args,**opts):
2442 """ Overload the function of the mother so as to bypass this in StandAlone.""" 2443 pass
2444
2445 - def compiler_choice(self, compiler):
2446 """ Different daughter classes might want different compilers. 2447 So this function is meant to be overloaded if desired.""" 2448 2449 self.set_compiler(compiler)
2450 2451 #=========================================================================== 2452 # generate_subprocess_directory 2453 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model, number):
        """Generate the Pxxxxx directory for a subprocess in MG4 standalone,
        including the necessary matrix.f and nexternal.inc files.

        matrix_element: the HelasMatrixElement to export.
        fortran_model: helas call writer used for matrix.f.
        number: running index of this matrix element (used for 'int' prefix).
        Returns the number of helas calls written (0 when the directory is a
        symmetric duplicate and is skipped)."""

        cwd = os.getcwd()
        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = pjoin(self.dir_path, 'SubProcesses', \
                   "P%s" % matrix_element.get('processes')[0].shell_string())

        if self.opt['sa_symmetry']:
            # avoid symmetric output: if any permutation of the final-state
            # legs (with initial legs possibly swapped) already has a
            # directory, skip this matrix element entirely.
            for i,proc in enumerate(matrix_element.get('processes')):

                tag = proc.get_tag()
                legs = proc.get('legs')[:]  # saved copy to restore below
                leg0 = proc.get('legs')[0]
                leg1 = proc.get('legs')[1]
                if not leg1.get('state'):
                    # both legs initial-state: try the swapped ordering too
                    proc.get('legs')[0] = leg1
                    proc.get('legs')[1] = leg0
                flegs = proc.get('legs')[2:]
                for perm in itertools.permutations(flegs):
                    for i,p in enumerate(perm):
                        proc.get('legs')[i+2] = p
                    dirpath2 = pjoin(self.dir_path, 'SubProcesses', \
                               "P%s" % proc.shell_string())
                    #restore original order
                    proc.get('legs')[2:] = legs[2:]
                    if os.path.exists(dirpath2):
                        proc.get('legs')[:] = legs
                        return 0
                proc.get('legs')[:] = legs

        try:
            os.mkdir(dirpath)
        except os.error as error:
            logger.warning(error.strerror + " " + dirpath)

        #try:
        #    os.chdir(dirpath)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % dirpath)
        #    return 0

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        if self.opt['export_format']=='standalone_msP':
            filename = pjoin(dirpath, 'matrix_prod.f')
        else:
            filename = pjoin(dirpath, 'matrix.f')

        # Optional routine prefix ('--prefix=int' or '--prefix=proc');
        # also records the pdg list / pid in prefix_info for the f2py
        # splitter written at finalize time.
        proc_prefix = ''
        if 'prefix' in self.cmd_options:
            if self.cmd_options['prefix'] == 'int':
                proc_prefix = 'M%s_' % number
            elif self.cmd_options['prefix'] == 'proc':
                proc_prefix = matrix_element.get('processes')[0].shell_string().split('_',1)[1]
            else:
                raise Exception('--prefix options supports only \'int\' and \'proc\'')
            for proc in matrix_element.get('processes'):
                ids = [l.get('id') for l in proc.get('legs_with_decays')]
                self.prefix_info[(tuple(ids), proc.get('id'))] = [proc_prefix, proc.get_tag()]

        calls = self.write_matrix_element_v4(
            writers.FortranWriter(filename),
            matrix_element,
            fortran_model,
            proc_prefix=proc_prefix)

        if self.opt['export_format'] == 'standalone_msP':
            # MadSpin production output: extra configs/props/nexternal files.
            filename = pjoin(dirpath,'configs_production.inc')
            mapconfigs, s_and_t_channels = self.write_configs_file(\
                writers.FortranWriter(filename),
                matrix_element)

            filename = pjoin(dirpath,'props_production.inc')
            self.write_props_file(writers.FortranWriter(filename),
                                  matrix_element,
                                  s_and_t_channels)

            filename = pjoin(dirpath,'nexternal_prod.inc')
            self.write_nexternal_madspin(writers.FortranWriter(filename),
                                         nexternal, ninitial)

        if self.opt['export_format']=='standalone_msF':
            # MadSpin full output: helicity-amplitude include file.
            filename = pjoin(dirpath, 'helamp.inc')
            ncomb=matrix_element.get_helicity_combinations()
            self.write_helamp_madspin(writers.FortranWriter(filename),
                                      ncomb)

        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(matrix_element.get_all_amplitudes()))

        # Generate diagrams (EPS drawing, unless disabled via output_options)
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = pjoin(dirpath, "matrix.ps")
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                              get('diagrams'),
                                              filename,
                                              model=matrix_element.get('processes')[0].\
                                              get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        linkfiles = ['check_sa.f', 'coupl.inc']

        # For prefixed routines, write a local check_sa.f with the smatrix
        # calls renamed instead of linking the shared one.
        if proc_prefix and os.path.exists(pjoin(dirpath, '..', 'check_sa.f')):
            text = open(pjoin(dirpath, '..', 'check_sa.f')).read()
            pat = re.compile('smatrix', re.I)
            new_text, n = re.subn(pat, '%ssmatrix' % proc_prefix, text)
            with open(pjoin(dirpath, 'check_sa.f'),'w') as f:
                f.write(new_text)
            linkfiles.pop(0)

        for file in linkfiles:
            ln('../%s' % file, cwd=dirpath)
        ln('../makefileP', name='makefile', cwd=dirpath)
        # Return to original PWD
        #os.chdir(cwd)

        if not calls:
            calls = 0
        return calls
2593 2594 2595 #=========================================================================== 2596 # write_source_makefile 2597 #===========================================================================
    def write_source_makefile(self, writer):
        """Write the Source-directory makefile for MG4 standalone output.

        Fills the 'madevent_makefile_source' template with the set of
        libraries to build (DHELAS and MODEL) and returns the substitution
        dictionary so subclasses can extend it.
        (The previous docstring claimed this wrote nexternal.inc; it writes
        Source/makefile.)
        """

        path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source')
        # Only DHELAS and MODEL are needed for standalone output.
        set_of_lib = '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)'
        model_line='''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n'''

        replace_dict= {'libraries': set_of_lib,
                       'model':model_line,
                       'additional_dsample': '',
                       'additional_dependencies':''}

        text = open(path).read() % replace_dict

        # writer may be None: in that case only the dictionary is returned.
        if writer:
            writer.write(text)

        return replace_dict
2616 2617 #=========================================================================== 2618 # write_matrix_element_v4 2619 #===========================================================================
    def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                                write=True, proc_prefix=''):
        """Export a matrix element to a matrix.f file in MG4 standalone format.

        If write is False, just return the replace_dict and do not write
        anything.

        Parameters:
            writer: writers.FortranWriter (or None) receiving the Fortran code.
            matrix_element: the HelasMatrixElement to export.
            fortran_model: helas call writer producing the amplitude calls.
            write: when False, only the substitution dictionary is returned.
            proc_prefix: string prepended to the generated routine names.

        Returns the number of non-comment helas calls when writing, otherwise
        the populated replace_dict (for subclass update).
        """

        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            # Nothing to export for an empty matrix element.
            return 0

        if writer:
            if not isinstance(writer, writers.FortranWriter):
                raise writers.FortranWriter.FortranWriterError(\
                "writer not FortranWriter but %s" % type(writer))
            # Set lowercase/uppercase Fortran code
            writers.FortranWriter.downcase = False

        if 'sa_symmetry' not in self.opt:
            self.opt['sa_symmetry']=False

        # The proc_id is for MadEvent grouping which is never used in SA.
        replace_dict = {'global_variable':'', 'amp2_lines':'',
                        'proc_prefix':proc_prefix, 'proc_id':''}

        # Extract helas calls
        helas_calls = fortran_model.get_matrix_element_calls(\
                    matrix_element)

        replace_dict['helas_calls'] = "\n".join(helas_calls)

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
        replace_dict['nexternal'] = nexternal
        replace_dict['nincoming'] = ninitial

        # Extract ncomb (number of helicity combinations)
        ncomb = matrix_element.get_helicity_combinations()
        replace_dict['ncomb'] = ncomb

        # Extract helicity lines
        helicity_lines = self.get_helicity_lines(matrix_element)
        replace_dict['helicity_lines'] = helicity_lines

        # Extract overall denominator
        # Averaging initial state color, spin, and identical FS particles
        replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element)

        # Extract ngraphs
        ngraphs = matrix_element.get_number_of_amplitudes()
        replace_dict['ngraphs'] = ngraphs

        # Extract nwavefuncs
        nwavefuncs = matrix_element.get_number_of_wavefunctions()
        replace_dict['nwavefuncs'] = nwavefuncs

        # Extract ncolor (at least 1 even for a trivial color basis)
        ncolor = max(1, len(matrix_element.get('color_basis')))
        replace_dict['ncolor'] = ncolor

        replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor()
        replace_dict['beamone_helavgfactor'], replace_dict['beamtwo_helavgfactor'] =\
                                       matrix_element.get_beams_hel_avg_factor()

        # Extract color data lines
        color_data_lines = self.get_color_data_lines(matrix_element)
        replace_dict['color_data_lines'] = "\n".join(color_data_lines)

        if self.opt['export_format']=='standalone_msP':
            # For MadSpin need to return the AMP2
            amp2_lines = self.get_amp2_lines(matrix_element, [] )
            replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
            replace_dict['global_variable'] = \
                " Double Precision amp2(NGRAPHS)\n common/to_amps/ amp2\n"

        # JAMP definition, depends on the number of independent split orders
        split_orders=matrix_element.get('processes')[0].get('split_orders')

        if len(split_orders)==0:
            replace_dict['nSplitOrders']=''
            # Extract JAMP lines
            jamp_lines, nb_tmp_jamp = self.get_JAMP_lines(matrix_element)
            # Consider the output of a dummy order 'ALL_ORDERS' for which we
            # set all amplitude order to weight 1 and only one squared order
            # contribution which is of course ALL_ORDERS=2.
            squared_orders = [(2,),]
            amp_orders = [((1,),tuple(range(1,ngraphs+1)))]
            replace_dict['chosen_so_configs'] = '.TRUE.'
            replace_dict['nSqAmpSplitOrders']=1
            replace_dict['split_order_str_list']=''
            replace_dict['nb_temp_jamp'] = nb_tmp_jamp
        else:
            squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
            replace_dict['nAmpSplitOrders']=len(amp_orders)
            replace_dict['nSqAmpSplitOrders']=len(squared_orders)
            replace_dict['nSplitOrders']=len(split_orders)
            replace_dict['split_order_str_list']=str(split_orders)
            amp_so = self.get_split_orders_lines(
                    [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS')
            sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS')
            replace_dict['ampsplitorders']='\n'.join(amp_so)
            replace_dict['sqsplitorders']='\n'.join(sqamp_so)
            jamp_lines, nb_tmp_jamp = self.get_JAMP_lines_split_order(\
                       matrix_element,amp_orders,split_order_names=split_orders)
            replace_dict['nb_temp_jamp'] = nb_tmp_jamp
            # Now setup the array specifying what squared split order is chosen
            replace_dict['chosen_so_configs']=self.set_chosen_SO_index(
                              matrix_element.get('processes')[0],squared_orders)

            # For convenience we also write the driver check_sa_splitOrders.f
            # that explicitely writes out the contribution from each squared order.
            # The original driver still works and is compiled with 'make' while
            # the splitOrders one is compiled with 'make check_sa_born_splitOrders'
            check_sa_writer=writers.FortranWriter('check_sa_born_splitOrders.f')
            self.write_check_sa_splitOrders(squared_orders,split_orders,
              nexternal,ninitial,proc_prefix,check_sa_writer)

        if write:
            writers.FortranWriter('nsqso_born.inc').writelines(
                """INTEGER NSQSO_BORN
                   PARAMETER (NSQSO_BORN=%d)"""%replace_dict['nSqAmpSplitOrders'])

        replace_dict['jamp_lines'] = '\n'.join(jamp_lines)

        # Select the matrix.f template matching the export format.
        matrix_template = self.matrix_template
        if self.opt['export_format']=='standalone_msP' :
            matrix_template = 'matrix_standalone_msP_v4.inc'
        elif self.opt['export_format']=='standalone_msF':
            matrix_template = 'matrix_standalone_msF_v4.inc'
        elif self.opt['export_format']=='matchbox':
            replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id')
            replace_dict["color_information"] = self.get_color_string_lines(matrix_element)

        if len(split_orders)>0:
            if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']:
                logger.debug("Warning: The export format %s is not "+\
                  " available for individual ME evaluation of given coupl. orders."+\
                  " Only the total ME will be computed.", self.opt['export_format'])
            elif self.opt['export_format'] in ['madloop_matchbox']:
                replace_dict["color_information"] = self.get_color_string_lines(matrix_element)
                matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc"
            else:
                matrix_template = "matrix_standalone_splitOrders_v4.inc"

        replace_dict['template_file'] = pjoin(_file_path, 'iolibs', 'template_files', matrix_template)
        replace_dict['template_file2'] = pjoin(_file_path, \
                                 'iolibs/template_files/split_orders_helping_functions.inc')
        if write and writer:
            path = replace_dict['template_file']
            content = open(path).read()
            content = content % replace_dict
            # Write the file
            writer.writelines(content)
            # Add the helper functions.
            if len(split_orders)>0:
                content = '\n' + open(replace_dict['template_file2'])\
                                   .read()%replace_dict
                writer.writelines(content)
            # Count only real helas calls, not comment lines.
            return len([call for call in helas_calls if call.find('#') != 0])
        else:
            replace_dict['return_value'] = len([call for call in helas_calls if call.find('#') != 0])
            return replace_dict # for subclass update
2792
2793 - def write_check_sa_splitOrders(self,squared_orders, split_orders, nexternal, 2794 nincoming, proc_prefix, writer):
2795 """ Write out a more advanced version of the check_sa drivers that 2796 individually returns the matrix element for each contributing squared 2797 order.""" 2798 2799 check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \ 2800 'template_files', 'check_sa_splitOrders.f')).read() 2801 printout_sq_orders=[] 2802 for i, squared_order in enumerate(squared_orders): 2803 sq_orders=[] 2804 for j, sqo in enumerate(squared_order): 2805 sq_orders.append('%s=%d'%(split_orders[j],sqo)) 2806 printout_sq_orders.append(\ 2807 "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\ 2808 %(i+1,' '.join(sq_orders),i+1)) 2809 printout_sq_orders='\n'.join(printout_sq_orders) 2810 replace_dict = {'printout_sqorders':printout_sq_orders, 2811 'nSplitOrders':len(squared_orders), 2812 'nexternal':nexternal, 2813 'nincoming':nincoming, 2814 'proc_prefix':proc_prefix} 2815 2816 if writer: 2817 writer.writelines(check_sa_content % replace_dict) 2818 else: 2819 return replace_dict
2820
class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
    """Class to take care of exporting a set of matrix elements for the
    Matchbox code in the case of Born-only routines."""

    # Default exporter options: keep directory, real masses, matchbox format,
    # no multi-precision, and enable standalone symmetry handling.
    default_opt = {'clean': False, 'complex_mass':False,
                   'export_format':'matchbox', 'mp': False,
                   'sa_symmetry': True}

    # Specific matrix.f template of the born.
    matrix_template = "matrix_standalone_matchbox.inc"

    @staticmethod
    def get_color_string_lines(matrix_element):
        """Return the color matrix definition lines for this matrix element.

        Builds a chain of Fortran if/elseif branches mapping (in1, in2)
        index pairs to color-structure arguments; falls back to out = -1
        when no entry matches.
        """

        if not matrix_element.get('color_matrix'):
            return "\n".join(["out = 1"])

        #start the real work
        color_denominators = matrix_element.get('color_matrix').\
                                                         get_line_denominators()
        matrix_strings = []
        my_cs = color.ColorString()
        for i_color in range(len(color_denominators)):
            # Then write the numerators for the matrix elements
            my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color])
            t_str=repr(my_cs)
            t_match=re.compile(r"(\w+)\(([\s\d+\,]*)\)")
            # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall:
            # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')]
            all_matches = t_match.findall(t_str)
            output = {}
            arg=[]
            for match in all_matches:
                ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')]
                if ctype in ['ColorOne' ]:
                    continue
                if ctype not in ['T', 'Tr' ]:
                    # Only fundamental T and trace Tr structures are supported.
                    raise MadGraph5Error('Color Structure not handled by Matchbox: %s' % ctype)
                # '0' acts as a separator between consecutive structures.
                tmparg += ['0']
                arg +=tmparg
            for j, v in enumerate(arg):
                output[(i_color,j)] = v

            for key in output:
                if matrix_strings == []:
                    #first entry
                    matrix_strings.append("""
          if (in1.eq.%s.and.in2.eq.%s)then
          out = %s
          """ % (key[0], key[1], output[key]))
                else:
                    #not first entry
                    matrix_strings.append("""
          elseif (in1.eq.%s.and.in2.eq.%s)then
          out = %s
          """ % (key[0], key[1], output[key]))
        if len(matrix_strings):
            matrix_strings.append(" else \n out = - 1 \n endif")
        else:
            return "\n out = - 1 \n "
        return "\n".join(matrix_strings)

    def make(self,*args,**opts):
        # Matchbox output has no compilation step of its own.
        pass

    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1,
                       JAMP_formatLC=None):
        """Adding leading color part of the colorflow.

        On top of the standard JAMP lines, emit a second set (prefixed 'LN')
        computed from the color amplitudes with all 1/NC-suppressed terms
        filtered out.
        NOTE(review): the incoming 'split' argument is ignored; split=-1 is
        always passed to the parent, and the returned count is hard-coded 0 —
        confirm this is intended.
        """

        if not JAMP_formatLC:
            JAMP_formatLC= "LN%s" % JAMP_format

        error_msg="Malformed '%s' argument passed to the get_JAMP_lines"
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            col_amps=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                col_amps=col_amps
            else:
                raise MadGraph5Error(error_msg % 'col_amps')
        else:
            raise MadGraph5Error(error_msg % 'col_amps')

        text, nb = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps,
                                                      JAMP_format=JAMP_format,
                                                      AMP_format=AMP_format,
                                                      split=-1)

        # Filter the col_amps to generate only those without any 1/NC terms
        LC_col_amps = []
        for coeff_list in col_amps:
            to_add = []
            for (coefficient, amp_number) in coeff_list:
                # coefficient[3] is the power of 1/NC; keep leading color only.
                if coefficient[3]==0:
                    to_add.append( (coefficient, amp_number) )
            LC_col_amps.append(to_add)

        text2, nb = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps,
                                                      JAMP_format=JAMP_formatLC,
                                                      AMP_format=AMP_format,
                                                      split=-1)
        text += text2

        return text, 0
2932
#===============================================================================
# ProcessExporterFortranMW
#===============================================================================
class ProcessExporterFortranMW(ProcessExporterFortran):
    """Class to take care of exporting a set of matrix elements to
    MadGraph v4 - MadWeight format."""

    # matrix.f template used by write_matrix_element_v4.
    matrix_file="matrix_standalone_v4.inc"
    # NOTE(review): presumably disables the optimised JAMP construction for
    # MadWeight output — confirm against the base-class usage.
    jamp_optim = False
2946 - def copy_template(self, model):
2947 """Additional actions needed for setup of Template 2948 """ 2949 2950 super(ProcessExporterFortranMW, self).copy_template(model) 2951 2952 # Add the MW specific file 2953 shutil.copytree(pjoin(MG5DIR,'Template','MadWeight'), 2954 pjoin(self.dir_path, 'Source','MadWeight'), True) 2955 shutil.copytree(pjoin(MG5DIR,'madgraph','madweight'), 2956 pjoin(self.dir_path, 'bin','internal','madweight'), True) 2957 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','setrun.f'), 2958 pjoin(self.dir_path, 'Source','setrun.f')) 2959 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','run.inc'), 2960 pjoin(self.dir_path, 'Source','run.inc')) 2961 # File created from Template (Different in some child class) 2962 filename = os.path.join(self.dir_path,'Source','run_config.inc') 2963 self.write_run_config_file(writers.FortranWriter(filename)) 2964 2965 try: 2966 subprocess.call([os.path.join(self.dir_path, 'Source','MadWeight','bin','internal','pass_to_madweight')], 2967 stdout = os.open(os.devnull, os.O_RDWR), 2968 stderr = os.open(os.devnull, os.O_RDWR), 2969 cwd=self.dir_path) 2970 except OSError: 2971 # Probably madweight already called 2972 pass 2973 2974 # Copy the different python file in the Template 2975 self.copy_python_file() 2976 # create the appropriate cuts.f 2977 self.get_mw_cuts_version() 2978 2979 # add the makefile in Source directory 2980 filename = os.path.join(self.dir_path,'Source','makefile') 2981 self.write_source_makefile(writers.FortranWriter(filename))
2982 2983 2984 2985 2986 #=========================================================================== 2987 # convert_model 2988 #===========================================================================
    def convert_model(self, model, wanted_lorentz = [],
                      wanted_couplings = []):
        """Convert and install the model for MadWeight output.

        Runs the base conversion, then copies the UFO model directory into
        bin/internal/ufomodel (replacing any previous copy) and, when the
        model carries a restriction card, installs it as restrict_default.dat.
        """

        super(ProcessExporterFortranMW,self).convert_model(model,
                                               wanted_lorentz, wanted_couplings)

        IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~')
        try:
            # Remove a stale copy from a previous export, if any.
            shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel'))
        except OSError as error:
            pass
        model_path = model.get('modelpath')
        # This is not safe if there is a '##' or '-' in the path.
        shutil.copytree(model_path,
                        pjoin(self.dir_path,'bin','internal','ufomodel'),
                        ignore=shutil.ignore_patterns(*IGNORE_PATTERNS))
        if hasattr(model, 'restrict_card'):
            out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel',
                             'restrict_default.dat')
            if isinstance(model.restrict_card, check_param_card.ParamCard):
                model.restrict_card.write(out_path)
            else:
                files.cp(model.restrict_card, out_path)
3012 3013 #=========================================================================== 3014 # generate_subprocess_directory 3015 #===========================================================================
    def copy_python_file(self):
        """Copy the python files required for the MadWeight Template into
        bin/internal (interfaces, io helpers, run-management modules and the
        logging configuration)."""

        # madevent interface
        cp(_file_path+'/interface/madweight_interface.py',
                            self.dir_path+'/bin/internal/madweight_interface.py')
        cp(_file_path+'/interface/extended_cmd.py',
                                  self.dir_path+'/bin/internal/extended_cmd.py')
        cp(_file_path+'/interface/common_run_interface.py',
                            self.dir_path+'/bin/internal/common_run_interface.py')
        cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py')
        cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py')
        cp(_file_path+'/iolibs/save_load_object.py',
                              self.dir_path+'/bin/internal/save_load_object.py')
        cp(_file_path+'/madevent/gen_crossxhtml.py',
                                self.dir_path+'/bin/internal/gen_crossxhtml.py')
        cp(_file_path+'/madevent/sum_html.py',
                                self.dir_path+'/bin/internal/sum_html.py')
        cp(_file_path+'/various/FO_analyse_card.py',
                                self.dir_path+'/bin/internal/FO_analyse_card.py')
        cp(_file_path+'/iolibs/file_writers.py',
                              self.dir_path+'/bin/internal/file_writers.py')
        #model file
        cp(_file_path+'../models/check_param_card.py',
                              self.dir_path+'/bin/internal/check_param_card.py')

        #madevent file
        cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py')
        cp(_file_path+'/various/lhe_parser.py',
                                self.dir_path+'/bin/internal/lhe_parser.py')

        cp(_file_path+'/various/banner.py',
                                   self.dir_path+'/bin/internal/banner.py')
        cp(_file_path+'/various/shower_card.py',
                                   self.dir_path+'/bin/internal/shower_card.py')
        cp(_file_path+'/various/cluster.py',
                                   self.dir_path+'/bin/internal/cluster.py')

        # logging configuration
        cp(_file_path+'/interface/.mg5_logging.conf',
                 self.dir_path+'/bin/internal/me5_logging.conf')
        cp(_file_path+'/interface/coloring_logging.py',
                 self.dir_path+'/bin/internal/coloring_logging.py')
3059 3060 3061 #=========================================================================== 3062 # Change the version of cuts.f to the one compatible with MW 3063 #===========================================================================
    def get_mw_cuts_version(self, outpath=None):
        """Create the appropriate cuts.f for MadWeight.

        This is based on the one associated to ME output but:
        1) No clustering (=> remove initcluster/setclscales)
        2) Adding the definition of cut_bw at the end of the file.

        outpath: None (default SubProcesses/cuts.f), a path string, or an
        open file-like object to write into.
        """

        template = open(pjoin(MG5DIR,'Template','LO','SubProcesses','cuts.f'))

        text = StringIO()
        #1) remove all dependencies in ickkw >1:
        # nb_if counts the nesting depth of the if-block started at the
        # xqcut test; lines are dropped until the matching endif closes it.
        # NOTE(review): the xqcut line itself would also match the
        # 'if(...)then' regex below and bump the counter twice — this only
        # balances if that line has no 'then' (continuation line); confirm
        # against the cuts.f template.
        nb_if = 0
        for line in template:
            if 'if(xqcut.gt.0d0' in line:
                nb_if = 1
            if nb_if == 0:
                text.write(line)
                continue
            if re.search(r'if\(.*\)\s*then', line):
                nb_if += 1
            elif 'endif' in line:
                nb_if -= 1

        #2) add fake cut_bw (have to put the true one later)
        text.write("""
      logical function cut_bw(p)
      include 'madweight_param.inc'
      double precision p(*)
      if (bw_cut) then
         cut_bw = .true.
      else
         stop 1
      endif
      return
      end
        """)

        final = text.getvalue()
        #3) remove the call to initcluster:
        template = final.replace('call initcluster', '! Remove for MW!call initcluster')
        template = template.replace('genps.inc', 'maxparticles.inc')
        #Now we can write it
        if not outpath:
            fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w')
        elif isinstance(outpath, str):
            fsock = open(outpath, 'w')
        else:
            fsock = outpath
        fsock.write(template)
3113 3114 3115 3116 #=========================================================================== 3117 # Make the Helas and Model directories for Standalone directory 3118 #===========================================================================
3119 - def make(self):
3120 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 3121 everything for running madweight 3122 """ 3123 3124 source_dir = os.path.join(self.dir_path, "Source") 3125 logger.info("Running make for Helas") 3126 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 3127 logger.info("Running make for Model") 3128 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 3129 logger.info("Running make for PDF") 3130 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 3131 logger.info("Running make for CERNLIB") 3132 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 3133 logger.info("Running make for GENERIC") 3134 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 3135 logger.info("Running make for blocks") 3136 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 3137 logger.info("Running make for tools") 3138 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
3139 3140 #=========================================================================== 3141 # Create proc_card_mg5.dat for MadWeight directory 3142 #===========================================================================
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize the MadWeight MG4 directory.

        Writes proc_characteristics and maxparticles.inc (linked into the
        MadWeight blocks/tools dirs), sets the compilers, compiles everything,
        stores the command history as proc_card_mg5.dat and delegates the rest
        to the base-class finalize.
        """

        compiler =  {'fortran': mg5options['fortran_compiler'],
                     'cpp': mg5options['cpp_compiler'],
                     'f2py': mg5options['f2py_compiler']}

        #proc_charac
        self.create_proc_charac()

        # Write maxparticles.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path,'Source','maxparticles.inc')
        self.write_maxparticles_file(writers.FortranWriter(filename),
                                     matrix_elements)
        # MadWeight's blocks and tools libraries include maxparticles.inc too.
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source','MadWeight','blocks'))
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source','MadWeight','tools'))

        self.set_compiler(compiler)
        self.make()

        # Write command history as proc_card_mg5
        if os.path.isdir(os.path.join(self.dir_path, 'Cards')):
            output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        ProcessExporterFortran.finalize(self, matrix_elements,
                                        history, mg5options, flaglist)
3174 3175 3176 3177 #=========================================================================== 3178 # create the run_card for MW 3179 #===========================================================================
3180 - def create_run_card(self, matrix_elements, history):
3181 """ """ 3182 3183 run_card = banner_mod.RunCard() 3184 3185 # pass to default for MW 3186 run_card["run_tag"] = "\'not_use\'" 3187 run_card["fixed_ren_scale"] = "T" 3188 run_card["fixed_fac_scale"] = "T" 3189 run_card.remove_all_cut() 3190 3191 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 3192 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 3193 python_template=True) 3194 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'), 3195 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 3196 python_template=True)
3197 3198 #=========================================================================== 3199 # export model files 3200 #===========================================================================
    def export_model_files(self, model_path):
        """Export the model dependent files for a V4 model (MadWeight).

        On top of the base export: append the UFO-style update_as_param
        routine to MODEL/couplings.f, patch setrun.f and initialization.f to
        call setpara with the explicit read-flag, and create the model
        symbolic links.
        """

        super(ProcessExporterFortranMW,self).export_model_files(model_path)
        # Add the routine update_as_param in v4 model
        # This is a function created in the UFO
        text="""
        subroutine update_as_param()
          call setpara('param_card.dat',.false.)
          return
        end
        """
        ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a')
        ff.write(text)
        ff.close()

        # Modify setrun.f
        text = open(os.path.join(self.dir_path,'Source','setrun.f')).read()
        text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)')
        fsock = open(os.path.join(self.dir_path,'Source','setrun.f'), 'w')
        fsock.write(text)
        fsock.close()

        # Modify initialization.f
        text = open(os.path.join(self.dir_path,'SubProcesses','initialization.f')).read()
        text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)')
        fsock = open(os.path.join(self.dir_path,'SubProcesses','initialization.f'), 'w')
        fsock.write(text)
        fsock.close()

        self.make_model_symbolic_link()
3233 3234 #=========================================================================== 3235 # generate_subprocess_directory 3236 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model,number):
        """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight
        format, including the necessary matrix.f and nexternal.inc files.

        Returns the number of helas calls written for matrix.f (0 if none).
        """

        cwd = os.getcwd()
        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = os.path.join(self.dir_path, 'SubProcesses', \
                       "P%s" % matrix_element.get('processes')[0].shell_string())

        try:
            os.mkdir(dirpath)
        except os.error as error:
            # Directory may already exist; warn and keep going.
            logger.warning(error.strerror + " " + dirpath)

        #try:
        #    os.chdir(dirpath)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % dirpath)
        #    return 0

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        filename = pjoin(dirpath,'matrix.f')
        calls,ncolor = self.write_matrix_element_v4(
                                      writers.FortranWriter(filename),
                                      matrix_element,
                                      fortran_model)

        filename = pjoin(dirpath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(dirpath, 'configs.inc')
        mapconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(dirpath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(dirpath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(matrix_element.get_all_amplitudes()))

        filename = pjoin(dirpath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                ncolor,
                                len(matrix_element.get('processes')),
                                1)

        filename = pjoin(dirpath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                )

        # Generate diagrams (skipped when output option noeps is 'True')
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = pjoin(dirpath, "matrix.ps")
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                 get('diagrams'),
                                              filename,
                                              model=matrix_element.get('processes')[0].\
                                                 get('model'),
                                              amplitude='')
            logger.info("Generating Feynman diagrams for " + \
                         matrix_element.get('processes')[0].nice_string())
            plot.draw()

        #import genps.inc and maxconfigs.inc into Subprocesses
        ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False)
        #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False)

        linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f', 'genps.inc']

        for file in linkfiles:
            # NOTE(review): other exporters pass cwd=dirpath here; confirm
            # that starting_dir=cwd is intended for the MadWeight links.
            ln('../%s' % file, starting_dir=cwd)

        ln('nexternal.inc', '../../Source', log=False, cwd=dirpath)
        ln('leshouche.inc', '../../Source', log=False, cwd=dirpath)
        ln('maxamps.inc', '../../Source', log=False, cwd=dirpath)
        ln('phasespace.inc', '../', log=True, cwd=dirpath)
        # Return to original PWD
        #os.chdir(cwd)

        if not calls:
            calls = 0
        return calls
3344 3345 #=========================================================================== 3346 # write_matrix_element_v4 3347 #===========================================================================
3348 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
3349 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 3350 3351 if not matrix_element.get('processes') or \ 3352 not matrix_element.get('diagrams'): 3353 return 0 3354 3355 if writer: 3356 if not isinstance(writer, writers.FortranWriter): 3357 raise writers.FortranWriter.FortranWriterError(\ 3358 "writer not FortranWriter") 3359 3360 # Set lowercase/uppercase Fortran code 3361 writers.FortranWriter.downcase = False 3362 3363 replace_dict = {} 3364 3365 # Extract version number and date from VERSION file 3366 info_lines = self.get_mg5_info_lines() 3367 replace_dict['info_lines'] = info_lines 3368 3369 # Extract process info lines 3370 process_lines = self.get_process_info_lines(matrix_element) 3371 replace_dict['process_lines'] = process_lines 3372 3373 # Set proc_id 3374 replace_dict['proc_id'] = proc_id 3375 3376 # Extract number of external particles 3377 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3378 replace_dict['nexternal'] = nexternal 3379 3380 # Extract ncomb 3381 ncomb = matrix_element.get_helicity_combinations() 3382 replace_dict['ncomb'] = ncomb 3383 3384 # Extract helicity lines 3385 helicity_lines = self.get_helicity_lines(matrix_element) 3386 replace_dict['helicity_lines'] = helicity_lines 3387 3388 # Extract overall denominator 3389 # Averaging initial state color, spin, and identical FS particles 3390 den_factor_line = self.get_den_factor_line(matrix_element) 3391 replace_dict['den_factor_line'] = den_factor_line 3392 3393 # Extract ngraphs 3394 ngraphs = matrix_element.get_number_of_amplitudes() 3395 replace_dict['ngraphs'] = ngraphs 3396 3397 # Extract nwavefuncs 3398 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3399 replace_dict['nwavefuncs'] = nwavefuncs 3400 3401 # Extract ncolor 3402 ncolor = max(1, len(matrix_element.get('color_basis'))) 3403 replace_dict['ncolor'] = ncolor 3404 3405 # Extract color data lines 3406 color_data_lines = self.get_color_data_lines(matrix_element) 3407 
replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3408 3409 # Extract helas calls 3410 helas_calls = fortran_model.get_matrix_element_calls(\ 3411 matrix_element) 3412 3413 replace_dict['helas_calls'] = "\n".join(helas_calls) 3414 3415 # Extract JAMP lines 3416 jamp_lines, nb = self.get_JAMP_lines(matrix_element) 3417 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 3418 3419 replace_dict['template_file'] = os.path.join(_file_path, \ 3420 'iolibs/template_files/%s' % self.matrix_file) 3421 replace_dict['template_file2'] = '' 3422 3423 if writer: 3424 file = open(replace_dict['template_file']).read() 3425 file = file % replace_dict 3426 # Write the file 3427 writer.writelines(file) 3428 return len([call for call in helas_calls if call.find('#') != 0]),ncolor 3429 else: 3430 replace_dict['return_value'] = (len([call for call in helas_calls if call.find('#') != 0]),ncolor)
3431 3432 #=========================================================================== 3433 # write_source_makefile 3434 #===========================================================================
3435 - def write_source_makefile(self, writer):
3436 """Write the nexternal.inc file for madweight""" 3437 3438 3439 path = os.path.join(_file_path,'iolibs','template_files','madweight_makefile_source') 3440 set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)' 3441 text = open(path).read() % {'libraries': set_of_lib} 3442 writer.write(text) 3443 3444 return True
3445
3446 - def write_phasespace_file(self, writer, nb_diag):
3447 """ """ 3448 3449 template = """ include 'maxparticles.inc' 3450 integer max_branches 3451 parameter (max_branches=max_particles-1) 3452 integer max_configs 3453 parameter (max_configs=%(nb_diag)s) 3454 3455 c channel position 3456 integer config_pos,perm_pos 3457 common /to_config/config_pos,perm_pos 3458 3459 """ 3460 3461 writer.write(template % {'nb_diag': nb_diag})
3462 3463 3464 #=========================================================================== 3465 # write_auto_dsig_file 3466 #===========================================================================
    def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
        """Write the auto_dsig.f file for the differential cross section
        calculation, includes pdf call information (MadWeight format).

        writer         -- FortranWriter (or falsy to get the raw dict back)
        matrix_element -- helas matrix element providing processes/diagrams
        proc_id        -- non-empty string when part of a subprocess group

        Returns 0 for an empty matrix element, the replacement dict when
        no writer is given, and None after writing otherwise.
        """

        # Nothing to write for an empty matrix element
        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        nexternal, ninitial = matrix_element.get_nexternal_ninitial()

        # Only decay (1) or scattering (2) topologies are supported
        if ninitial < 1 or ninitial > 2:
            raise writers.FortranWriter.FortranWriterError("""Need ninitial = 1 or 2 to write auto_dsig file""")

        # Dictionary of substitutions applied to the Fortran template
        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Set proc_id
        replace_dict['proc_id'] = proc_id
        replace_dict['numproc'] = 1

        # Set dsig_line
        if ninitial == 1:
            # No conversion, since result of decay should be given in GeV
            dsig_line = "pd(0)*dsiguu"
        else:
            # Convert result (in GeV) to pb
            dsig_line = "pd(0)*conv*dsiguu"

        replace_dict['dsig_line'] = dsig_line

        # Extract pdf lines (third argument flags subprocess-group mode)
        pdf_vars, pdf_data, pdf_lines = \
                  self.get_pdf_lines(matrix_element, ninitial, proc_id != "")
        replace_dict['pdf_vars'] = pdf_vars
        replace_dict['pdf_data'] = pdf_data
        replace_dict['pdf_lines'] = pdf_lines

        # Lines that differ between subprocess group and regular
        if proc_id:
            # Subprocess-group version: cuts handled elsewhere
            replace_dict['numproc'] = int(proc_id)
            replace_dict['passcuts_begin'] = ""
            replace_dict['passcuts_end'] = ""
            # Set lines for subprocess group version
            # Set define_iconfigs_lines
            replace_dict['define_subdiag_lines'] = \
                 """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
              COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
        else:
            # Standalone version: wrap the computation in the cut check
            replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN"
            replace_dict['passcuts_end'] = "ENDIF"
            replace_dict['define_subdiag_lines'] = ""

        if writer:
            # NOTE(review): 'file' shadows the builtin and the handle is
            # not closed -- consider a context manager if this is revised
            file = open(os.path.join(_file_path, \
                         'iolibs/template_files/auto_dsig_mw.inc')).read()

            file = file % replace_dict
            # Write the file
            writer.writelines(file)
        else:
            return replace_dict
3535 #=========================================================================== 3536 # write_configs_file 3537 #===========================================================================
3538 - def write_configs_file(self, writer, matrix_element):
3539 """Write the configs.inc file for MadEvent""" 3540 3541 # Extract number of external particles 3542 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3543 3544 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 3545 mapconfigs = [c[0] for c in configs] 3546 model = matrix_element.get('processes')[0].get('model') 3547 return mapconfigs, self.write_configs_file_from_diagrams(writer, 3548 [[c[1]] for c in configs], 3549 mapconfigs, 3550 nexternal, ninitial,matrix_element, model)
3551 3552 #=========================================================================== 3553 # write_run_configs_file 3554 #===========================================================================
3555 - def write_run_config_file(self, writer):
3556 """Write the run_configs.inc file for MadWeight""" 3557 3558 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 3559 text = open(path).read() % {'chanperjob':'5'} 3560 writer.write(text) 3561 return True
3562 3563 #=========================================================================== 3564 # write_configs_file_from_diagrams 3565 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, matrix_element, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns the list of (s-channels, t-channels) per written config.
        """

        # Output lines accumulated here, written in one go at the end
        lines = []

        particle_dict = matrix_element.get('processes')[0].get('model').\
                        get('particle_dict')

        s_and_t_channels = []

        # Max number of legs on any vertex of each config's first diagram
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]

        # Smallest of those maxima: configs with bigger vertices are skipped
        minvert = min(vert_list) if vert_list!=[] else 0
        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Fresh fake PDG code for multiparticle-vertex propagators
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any([vert > minvert for vert in
                    [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
                # Only 3-vertices allowed in configs.inc
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model, new_pdg))
                else:
                    # Shared placeholder, resized below once the real
                    # vertex count is known
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = list(zip(*[s for s,t in stchannels]))
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            #lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("* %d %d " % (nconfigs,
                                       mapconfigs[iconfig]))

            for verts in allchannels:
                if verts in schannels:
                    # s-channel: pick the first contributing subprocess
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                line = str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1])

                if last_leg.get('id') == 21 and 21 not in particle_dict:
                    # Fake propagator used in multiparticle vertices
                    mass = 'zero'
                    width = 'zero'
                    # NOTE(review): pow_part is assigned but never used here
                    pow_part = 0
                else:
                    if (last_leg.get('id') != 7):
                        particle = particle_dict[last_leg.get('id')]
                        # Get mass
                        mass = particle.get('mass')
                        # Get width
                        width = particle.get('width')
                    else: # fake propagator used in multiparticle vertices
                        mass = 'zero'
                        width = 'zero'

                line = line+" "+mass+" "+width+" "

                if verts in schannels:
                    # One PDG per subprocess (0 where it does not contribute)
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append(line+" S "+str(last_leg.get('id')))
                elif verts in tchannels[:-1]:
                    lines.append(line+" T "+str(last_leg.get('id')))

        # Write out number of configs
        lines.append(" * ") # a line with just a star indicates this is the end of file
        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
3708
3709 3710 3711 #=============================================================================== 3712 # ProcessExporterFortranME 3713 #=============================================================================== 3714 -class ProcessExporterFortranME(ProcessExporterFortran):
3715 """Class to take care of exporting a set of matrix elements to 3716 MadEvent format.""" 3717 3718 matrix_file = "matrix_madevent_v4.inc" 3719 done_warning_tchannel = False 3720 3721 default_opt = {'clean': False, 'complex_mass':False, 3722 'export_format':'madevent', 'mp': False, 3723 'v5_model': True, 3724 'output_options':{}, 3725 'hel_recycling': False 3726 } 3727 jamp_optim = True 3728
3729 - def __init__(self, dir_path = "", opt=None):
3730 3731 super(ProcessExporterFortranME, self).__init__(dir_path, opt) 3732 3733 # check and format the hel_recycling options as it should if provided 3734 if opt and isinstance(opt['output_options'], dict) and \ 3735 'hel_recycling' in opt['output_options']: 3736 self.opt['hel_recycling'] = banner_mod.ConfigFile.format_variable( 3737 opt['output_options']['hel_recycling'], bool, 'hel_recycling') 3738 3739 if opt and isinstance(opt['output_options'], dict) and \ 3740 't_strategy' in opt['output_options']: 3741 self.opt['t_strategy'] = banner_mod.ConfigFile.format_variable( 3742 opt['output_options']['t_strategy'], int, 't_strategy')
3743 3744 # helper function for customise helas writter 3745 @staticmethod
3746 - def custom_helas_call(call, arg):
3747 if arg['mass'] == '%(M)s,%(W)s,': 3748 arg['mass'] = '%(M)s, fk_%(W)s,' 3749 elif '%(W)s' in arg['mass']: 3750 raise Exception 3751 return call, arg
3752
3753 - def copy_template(self, model):
3754 """Additional actions needed for setup of Template 3755 """ 3756 3757 super(ProcessExporterFortranME, self).copy_template(model) 3758 3759 # File created from Template (Different in some child class) 3760 filename = pjoin(self.dir_path,'Source','run_config.inc') 3761 self.write_run_config_file(writers.FortranWriter(filename)) 3762 3763 # The next file are model dependant (due to SLAH convention) 3764 self.model_name = model.get('name') 3765 # Add the symmetry.f 3766 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3767 self.write_symmetry(writers.FortranWriter(filename)) 3768 # 3769 filename = pjoin(self.dir_path,'SubProcesses','addmothers.f') 3770 self.write_addmothers(writers.FortranWriter(filename)) 3771 # Copy the different python file in the Template 3772 self.copy_python_file()
3773 3774 3775 3776 3777 3778 3779 #=========================================================================== 3780 # generate_subprocess_directory 3781 #===========================================================================
3782 - def copy_python_file(self):
3783 """copy the python file require for the Template""" 3784 3785 # madevent interface 3786 cp(_file_path+'/interface/madevent_interface.py', 3787 self.dir_path+'/bin/internal/madevent_interface.py') 3788 cp(_file_path+'/interface/extended_cmd.py', 3789 self.dir_path+'/bin/internal/extended_cmd.py') 3790 cp(_file_path+'/interface/common_run_interface.py', 3791 self.dir_path+'/bin/internal/common_run_interface.py') 3792 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3793 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3794 cp(_file_path+'/iolibs/save_load_object.py', 3795 self.dir_path+'/bin/internal/save_load_object.py') 3796 cp(_file_path+'/iolibs/file_writers.py', 3797 self.dir_path+'/bin/internal/file_writers.py') 3798 #model file 3799 cp(_file_path+'../models/check_param_card.py', 3800 self.dir_path+'/bin/internal/check_param_card.py') 3801 3802 #copy all the file present in madevent directory 3803 for name in os.listdir(pjoin(_file_path, 'madevent')): 3804 if name not in ['__init__.py'] and name.endswith('.py'): 3805 cp(_file_path+'/madevent/'+name, self.dir_path+'/bin/internal/') 3806 3807 #madevent file 3808 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3809 cp(_file_path+'/various/lhe_parser.py', 3810 self.dir_path+'/bin/internal/lhe_parser.py') 3811 cp(_file_path+'/various/banner.py', 3812 self.dir_path+'/bin/internal/banner.py') 3813 cp(_file_path+'/various/histograms.py', 3814 self.dir_path+'/bin/internal/histograms.py') 3815 cp(_file_path+'/various/plot_djrs.py', 3816 self.dir_path+'/bin/internal/plot_djrs.py') 3817 cp(_file_path+'/various/systematics.py', self.dir_path+'/bin/internal/systematics.py') 3818 3819 cp(_file_path+'/various/cluster.py', 3820 self.dir_path+'/bin/internal/cluster.py') 3821 cp(_file_path+'/madevent/combine_runs.py', 3822 self.dir_path+'/bin/internal/combine_runs.py') 3823 # logging configuration 3824 cp(_file_path+'/interface/.mg5_logging.conf', 3825 
self.dir_path+'/bin/internal/me5_logging.conf') 3826 cp(_file_path+'/interface/coloring_logging.py', 3827 self.dir_path+'/bin/internal/coloring_logging.py') 3828 # shower card and FO_analyse_card. 3829 # Although not needed, it is imported by banner.py 3830 cp(_file_path+'/various/shower_card.py', 3831 self.dir_path+'/bin/internal/shower_card.py') 3832 cp(_file_path+'/various/FO_analyse_card.py', 3833 self.dir_path+'/bin/internal/FO_analyse_card.py')
3834 3835
3836 - def convert_model(self, model, wanted_lorentz = [], 3837 wanted_couplings = []):
3838 3839 super(ProcessExporterFortranME,self).convert_model(model, 3840 wanted_lorentz, wanted_couplings) 3841 3842 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 3843 try: 3844 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 3845 except OSError as error: 3846 pass 3847 model_path = model.get('modelpath') 3848 # This is not safe if there is a '##' or '-' in the path. 3849 shutil.copytree(model_path, 3850 pjoin(self.dir_path,'bin','internal','ufomodel'), 3851 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 3852 if hasattr(model, 'restrict_card'): 3853 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 3854 'restrict_default.dat') 3855 if isinstance(model.restrict_card, check_param_card.ParamCard): 3856 model.restrict_card.write(out_path) 3857 else: 3858 files.cp(model.restrict_card, out_path)
3859 3860 #=========================================================================== 3861 # export model files 3862 #===========================================================================
3863 - def export_model_files(self, model_path):
3864 """export the model dependent files""" 3865 3866 super(ProcessExporterFortranME,self).export_model_files(model_path) 3867 3868 # Add the routine update_as_param in v4 model 3869 # This is a function created in the UFO 3870 text=""" 3871 subroutine update_as_param() 3872 call setpara('param_card.dat',.false.) 3873 return 3874 end 3875 """ 3876 ff = open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3877 ff.write(text) 3878 ff.close() 3879 3880 # Add the symmetry.f 3881 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3882 self.write_symmetry(writers.FortranWriter(filename), v5=False) 3883 3884 # Modify setrun.f 3885 text = open(pjoin(self.dir_path,'Source','setrun.f')).read() 3886 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3887 fsock = open(pjoin(self.dir_path,'Source','setrun.f'), 'w') 3888 fsock.write(text) 3889 fsock.close() 3890 3891 self.make_model_symbolic_link()
3892 3893 #=========================================================================== 3894 # generate_subprocess_directory 3895 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model,
                                      me_number):
        """Generate the Pxxxxx directory for a subprocess in MG4 madevent,
        including the necessary matrix.f and various helper files.

        matrix_element -- the helas matrix element for this subprocess
        fortran_model  -- helas call writer used for matrix.f
        me_number      -- sequential number of this matrix element

        Returns the number of helas calls written (0 if none).
        """

        # retained from the old chdir-based flow (chdir calls are
        # commented out below); currently unused
        cwd = os.getcwd()
        path = pjoin(self.dir_path, 'SubProcesses')


        if not self.model:
            self.model = matrix_element.get('processes')[0].get('model')

        #os.chdir(path)
        # Create the directory PN_xx_xxxxx in the specified path
        subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string()
        try:
            os.mkdir(pjoin(path, subprocdir))
        except os.error as error:
            # directory may already exist; warn and reuse it
            logger.warning(error.strerror + " " + subprocdir)

        #try:
        #    os.chdir(subprocdir)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % subprocdir)
        #    return 0

        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(path, subprocdir)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Add the driver.f
        ncomb = matrix_element.get_helicity_combinations()
        filename = pjoin(Ppath, 'driver.f')
        self.write_driver(writers.FortranWriter(filename), ncomb, n_grouped_proc=1,
                          v5=self.opt['v5_model'])


        # Create the matrix.f file, auto_dsig.f file and all inc files.
        # With helicity recycling the real matrix.f is derived later from
        # matrix_orig.f
        if self.opt['hel_recycling']:
            filename = pjoin(Ppath, 'matrix_orig.f')
        else:
            filename = pjoin(Ppath, 'matrix.f')
        calls, ncolor = \
               self.write_matrix_element_v4(writers.FortranWriter(filename),
                          matrix_element, fortran_model, subproc_number = me_number)

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(Ppath, 'configs.inc')
        mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(Ppath, 'config_nqcd.inc')
        self.write_config_nqcd_file(writers.FortranWriter(filename),
                                    nqcd_list)

        filename = pjoin(Ppath, 'config_subproc_map.inc')
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           s_and_t_channels)

        filename = pjoin(Ppath, 'coloramps.inc')
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  mapconfigs,
                                  matrix_element)

        filename = pjoin(Ppath, 'get_color.f')
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'decayBW.inc')
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        filename = pjoin(Ppath, 'dname.mg')
        self.write_dname_file(writers.FileWriter(filename),
                              "P"+matrix_element.get('processes')[0].shell_string())

        filename = pjoin(Ppath, 'iproc.dat')
        self.write_iproc_file(writers.FortranWriter(filename),
                              me_number)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                ncolor,
                                len(matrix_element.get('processes')),
                                1)

        filename = pjoin(Ppath, 'mg.sym')
        self.write_mg_sym_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'ncombs.inc')
        self.write_ncombs_file(writers.FortranWriter(filename),
                               nexternal)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(Ppath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(mapconfigs))


        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # Find config symmetries and permutations
        symmetry, perms, ident_perms = \
                  diagram_symmetry.find_symmetry(matrix_element)

        filename = pjoin(Ppath, 'symswap.inc')
        self.write_symswap_file(writers.FortranWriter(filename),
                                ident_perms)

        filename = pjoin(Ppath, 'symfact_orig.dat')
        # NOTE(review): this file handle is never explicitly closed
        self.write_symfact_file(open(filename, 'w'), symmetry)

        # Generate diagrams (unless eps output was disabled by the user)
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = pjoin(Ppath, "matrix.ps")
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                              get('diagrams'),
                                              filename,
                                              model=matrix_element.get('processes')[0].\
                                              get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        self.link_files_in_SubProcess(Ppath)

        #import nexternal/leshouche in Source
        ln(pjoin(Ppath, 'nexternal.inc'), pjoin(self.dir_path, 'Source'), log=False)
        ln(pjoin(Ppath, 'leshouche.inc'), pjoin(self.dir_path, 'Source'), log=False)
        ln(pjoin(Ppath, 'maxamps.inc'), pjoin(self.dir_path, 'Source'), log=False)
        # Return to SubProcesses dir
        #os.chdir(os.path.pardir)

        # Add subprocess to subproc.mg
        filename = pjoin(path, 'subproc.mg')
        files.append_to_file(filename,
                             self.write_subproc,
                             subprocdir)

        # Return to original dir
        #os.chdir(cwd)

        # Generate info page
        gen_infohtml.make_info_html(self.dir_path)


        # Normalise the return value: empty matrix element gives 0 calls
        if not calls:
            calls = 0
        return calls
4069 4070 link_Sub_files = ['addmothers.f', 4071 'cluster.f', 4072 'cluster.inc', 4073 'coupl.inc', 4074 'cuts.f', 4075 'cuts.inc', 4076 'genps.f', 4077 'genps.inc', 4078 'idenparts.f', 4079 'initcluster.f', 4080 'makefile', 4081 'message.inc', 4082 'myamp.f', 4083 'reweight.f', 4084 'run.inc', 4085 'maxconfigs.inc', 4086 'maxparticles.inc', 4087 'run_config.inc', 4088 'lhe_event_infos.inc', 4089 'setcuts.f', 4090 'setscales.f', 4091 'sudakov.inc', 4092 'symmetry.f', 4093 'unwgt.f', 4094 'dummy_fct.f' 4095 ] 4096 4110 4111
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize ME v4 directory by creating jpeg diagrams, html
        pages,proc_card_mg5.dat and madevent.tar.gz.

        matrix_elements -- either a list of grouped matrix elements or a
                           single container (both shapes handled below)
        history         -- command history, written out as proc_card_mg5.dat
        mg5options      -- global MG5 option dictionary (compilers, etc.)
        flaglist        -- flags: 'nojpeg' disables diagram conversion,
                           'online' records the channel count
        """

        if 'nojpeg' in flaglist:
            makejpg = False
        else:
            makejpg = True
        if 'online' in flaglist:
            online = True
        else:
            online = False

        compiler = {'fortran': mg5options['fortran_compiler'],
                    'cpp': mg5options['cpp_compiler'],
                    'f2py': mg5options['f2py_compiler']}

        # indicate that the output type is not grouped
        if not isinstance(self, ProcessExporterFortranMEGroup):
            self.proc_characteristic['grouped_matrix'] = False

        self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme']

        # set limitation linked to the model


        # indicate the PDG of all initial particle
        # (the try/except handles both the grouped-list and the single
        # container shapes of matrix_elements)
        try:
            pdgs1 = [p.get_initial_pdg(1) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)]
            pdgs2 = [p.get_initial_pdg(2) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)]
        except AttributeError:
            pdgs1 = [p.get_initial_pdg(1) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)]
            pdgs2 = [p.get_initial_pdg(2) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)]
        self.proc_characteristic['pdg_initial1'] = pdgs1
        self.proc_characteristic['pdg_initial2'] = pdgs2


        # mssm param cards need conversion to the MG5 convention first
        modelname = self.opt['model']
        if modelname == 'mssm' or modelname.startswith('mssm-'):
            param_card = pjoin(self.dir_path, 'Cards', 'param_card.dat')
            mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat')
            check_param_card.convert_to_mg5card(param_card, mg5_param)
            check_param_card.check_valid_param_card(mg5_param)

        # Add the combine_events.f modify param_card path/number of @X
        filename = pjoin(self.dir_path, 'Source', 'combine_events.f')
        try:
            nb_proc = [p.get('id') for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')]
        except AttributeError:
            nb_proc = [p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')]
        nb_proc = len(set(nb_proc))
        self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted
        # Write maxconfigs.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path, 'Source', 'maxconfigs.inc')
        self.write_maxconfigs_file(writers.FortranWriter(filename),
                                   matrix_elements)

        # Write maxparticles.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path, 'Source', 'maxparticles.inc')
        self.write_maxparticles_file(writers.FortranWriter(filename),
                                     matrix_elements)

        # Touch "done" file
        os.system('touch %s/done' % pjoin(self.dir_path, 'SubProcesses'))

        # Check for compiler
        self.set_compiler(compiler)
        self.set_cpp_compiler(compiler['cpp'])


        old_pos = os.getcwd()
        subpath = pjoin(self.dir_path, 'SubProcesses')

        P_dir_list = [proc for proc in os.listdir(subpath)
                      if os.path.isdir(pjoin(subpath, proc)) and proc[0] == 'P']

        devnull = os.open(os.devnull, os.O_RDWR)
        # Convert the poscript in jpg files (if authorize)
        if makejpg:
            try:
                os.remove(pjoin(self.dir_path, 'HTML', 'card.jpg'))
            except Exception as error:
                # best effort: the file may not exist
                pass

            # ghostscript is required for the conversion
            if misc.which('gs'):
                logger.info("Generate jpeg diagrams")
                for Pdir in P_dir_list:
                    misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')],
                              stdout = devnull, cwd=pjoin(subpath, Pdir))

        logger.info("Generate web pages")
        # Create the WebPage using perl script

        misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \
                  stdout = devnull, cwd=pjoin(self.dir_path))

        #os.chdir(os.path.pardir)

        obj = gen_infohtml.make_info_html(self.dir_path)

        if online:
            nb_channel = obj.rep_rule['nb_gen_diag']
            # NOTE(review): file handle not explicitly closed here
            open(pjoin(self.dir_path, 'Online'), 'w').write(str(nb_channel))
        #add the information to proc_charac
        self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag']

        # Write command history as proc_card_mg5
        if os.path.isdir(pjoin(self.dir_path, 'Cards')):
            output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
                  stdout = devnull)

        #crate the proc_characteristic file
        self.create_proc_charac(matrix_elements, history)

        # create the run_card
        ProcessExporterFortran.finalize(self, matrix_elements, history, mg5options, flaglist)

        # Run "make" to generate madevent.tar.gz file
        if os.path.exists(pjoin(self.dir_path, 'SubProcesses', 'subproc.mg')):
            if os.path.exists(pjoin(self.dir_path, 'madevent.tar.gz')):
                os.remove(pjoin(self.dir_path, 'madevent.tar.gz'))
            misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')],
                      stdout = devnull, cwd=self.dir_path)

        misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
                  stdout = devnull, cwd=self.dir_path)
4241 4242 4243 4244 4245 4246 4247 #return to the initial dir 4248 #os.chdir(old_pos) 4249 4250 #=========================================================================== 4251 # write_matrix_element_v4 4252 #===========================================================================
4253 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 4254 proc_id = "", config_map = [], subproc_number = ""):
4255 """Export a matrix element to a matrix.f file in MG4 madevent format""" 4256 4257 if not matrix_element.get('processes') or \ 4258 not matrix_element.get('diagrams'): 4259 return 0 4260 4261 if writer: 4262 if not isinstance(writer, writers.FortranWriter): 4263 raise writers.FortranWriter.FortranWriterError(\ 4264 "writer not FortranWriter") 4265 # Set lowercase/uppercase Fortran code 4266 writers.FortranWriter.downcase = False 4267 4268 # check if MLM/.../ is supported for this matrix-element and update associate flag 4269 if self.model and 'MLM' in self.model["limitations"]: 4270 if 'MLM' not in self.proc_characteristic["limitations"]: 4271 used_couplings = matrix_element.get_used_couplings(output="set") 4272 for vertex in self.model.get('interactions'): 4273 particles = [p for p in vertex.get('particles')] 4274 if 21 in [p.get('pdg_code') for p in particles]: 4275 colors = [par.get('color') for par in particles] 4276 if 1 in colors: 4277 continue 4278 elif 'QCD' not in vertex.get('orders'): 4279 for bad_coup in vertex.get('couplings').values(): 4280 if bad_coup in used_couplings: 4281 self.proc_characteristic["limitations"].append('MLM') 4282 break 4283 4284 # The proc prefix is not used for MadEvent output so it can safely be set 4285 # to an empty string. 
4286 replace_dict = {'proc_prefix':''} 4287 4288 4289 # Extract helas calls 4290 helas_calls = fortran_model.get_matrix_element_calls(\ 4291 matrix_element) 4292 if fortran_model.width_tchannel_set_tozero and not ProcessExporterFortranME.done_warning_tchannel: 4293 logger.info("Some T-channel width have been set to zero [new since 2.8.0]\n if you want to keep this width please set \"zerowidth_tchannel\" to False", '$MG:BOLD') 4294 ProcessExporterFortranME.done_warning_tchannel = True 4295 4296 replace_dict['helas_calls'] = "\n".join(helas_calls) 4297 4298 4299 #adding the support for the fake width (forbidding too small width) 4300 mass_width = matrix_element.get_all_mass_widths() 4301 mass_width = sorted(list(mass_width)) 4302 width_list = set([e[1] for e in mass_width]) 4303 4304 replace_dict['fake_width_declaration'] = \ 4305 (' double precision fk_%s \n' * len(width_list)) % tuple(width_list) 4306 replace_dict['fake_width_declaration'] += \ 4307 (' save fk_%s \n' * len(width_list)) % tuple(width_list) 4308 fk_w_defs = [] 4309 one_def = ' IF(%(w)s.ne.0d0) fk_%(w)s = SIGN(MAX(ABS(%(w)s), ABS(%(m)s*small_width_treatment)), %(w)s)' 4310 for m, w in mass_width: 4311 if w == 'zero': 4312 if ' fk_zero = 0d0' not in fk_w_defs: 4313 fk_w_defs.append(' fk_zero = 0d0') 4314 continue 4315 fk_w_defs.append(one_def %{'m':m, 'w':w}) 4316 replace_dict['fake_width_definitions'] = '\n'.join(fk_w_defs) 4317 4318 # Extract version number and date from VERSION file 4319 info_lines = self.get_mg5_info_lines() 4320 replace_dict['info_lines'] = info_lines 4321 4322 # Extract process info lines 4323 process_lines = self.get_process_info_lines(matrix_element) 4324 replace_dict['process_lines'] = process_lines 4325 4326 # Set proc_id 4327 replace_dict['proc_id'] = proc_id 4328 4329 # Extract ncomb 4330 ncomb = matrix_element.get_helicity_combinations() 4331 replace_dict['ncomb'] = ncomb 4332 4333 # Extract helicity lines 4334 helicity_lines = self.get_helicity_lines(matrix_element) 4335 
replace_dict['helicity_lines'] = helicity_lines 4336 4337 # Extract IC line 4338 ic_line = self.get_ic_line(matrix_element) 4339 replace_dict['ic_line'] = ic_line 4340 4341 # Extract overall denominator 4342 # Averaging initial state color, spin, and identical FS particles 4343 den_factor_line = self.get_den_factor_line(matrix_element) 4344 replace_dict['den_factor_line'] = den_factor_line 4345 4346 # Extract ngraphs 4347 ngraphs = matrix_element.get_number_of_amplitudes() 4348 replace_dict['ngraphs'] = ngraphs 4349 4350 # Extract ndiags 4351 ndiags = len(matrix_element.get('diagrams')) 4352 replace_dict['ndiags'] = ndiags 4353 4354 # Set define_iconfigs_lines 4355 replace_dict['define_iconfigs_lines'] = \ 4356 """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG 4357 COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG""" 4358 4359 if proc_id: 4360 # Set lines for subprocess group version 4361 # Set define_iconfigs_lines 4362 replace_dict['define_iconfigs_lines'] += \ 4363 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 4364 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 4365 # Set set_amp2_line 4366 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \ 4367 proc_id 4368 else: 4369 # Standard running 4370 # Set set_amp2_line 4371 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT" 4372 4373 # Extract nwavefuncs 4374 nwavefuncs = matrix_element.get_number_of_wavefunctions() 4375 replace_dict['nwavefuncs'] = nwavefuncs 4376 4377 # Extract ncolor 4378 ncolor = max(1, len(matrix_element.get('color_basis'))) 4379 replace_dict['ncolor'] = ncolor 4380 4381 # Extract color data lines 4382 color_data_lines = self.get_color_data_lines(matrix_element) 4383 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 4384 4385 4386 # Set the size of Wavefunction 4387 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 4388 replace_dict['wavefunctionsize'] = 18 4389 else: 4390 replace_dict['wavefunctionsize'] = 6 4391 4392 # Extract amp2 lines 
4393 amp2_lines = self.get_amp2_lines(matrix_element, config_map, replace_dict) 4394 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 4395 4396 # The JAMP definition depends on the splitting order 4397 split_orders=matrix_element.get('processes')[0].get('split_orders') 4398 if len(split_orders)>0: 4399 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 4400 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 4401 matrix_element.get('processes')[0],squared_orders) 4402 replace_dict['select_configs_if'] = ' IF (CHOSEN_SO_CONFIGS(SQSOINDEX%(proc_id)s(M,N))) THEN' % replace_dict 4403 replace_dict['select_configs_endif'] = ' endif' 4404 else: 4405 # Consider the output of a dummy order 'ALL_ORDERS' for which we 4406 # set all amplitude order to weight 1 and only one squared order 4407 # contribution which is of course ALL_ORDERS=2. 4408 squared_orders = [(2,),] 4409 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 4410 replace_dict['chosen_so_configs'] = '.TRUE.' 4411 # addtionally set the function to NOT be called 4412 replace_dict['select_configs_if'] = '' 4413 replace_dict['select_configs_endif'] = '' 4414 4415 replace_dict['nAmpSplitOrders']=len(amp_orders) 4416 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 4417 replace_dict['split_order_str_list']=str(split_orders) 4418 replace_dict['nSplitOrders']=max(len(split_orders),1) 4419 amp_so = self.get_split_orders_lines( 4420 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 4421 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 4422 replace_dict['ampsplitorders']='\n'.join(amp_so) 4423 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 4424 4425 4426 # Extract JAMP lines 4427 # If no split_orders then artificiall add one entry called 'ALL_ORDERS' 4428 jamp_lines, nb_temp = self.get_JAMP_lines_split_order(\ 4429 matrix_element,amp_orders,split_order_names= 4430 split_orders if len(split_orders)>0 else ['ALL_ORDERS']) 4431 replace_dict['jamp_lines'] = 
'\n'.join(jamp_lines) 4432 replace_dict['nb_temp_jamp'] = nb_temp 4433 4434 replace_dict['template_file'] = pjoin(_file_path, \ 4435 'iolibs/template_files/%s' % self.matrix_file) 4436 replace_dict['template_file2'] = pjoin(_file_path, \ 4437 'iolibs/template_files/split_orders_helping_functions.inc') 4438 4439 s1,s2 = matrix_element.get_spin_state_initial() 4440 replace_dict['nb_spin_state1'] = s1 4441 replace_dict['nb_spin_state2'] = s2 4442 4443 if writer: 4444 file = open(replace_dict['template_file']).read() 4445 file = file % replace_dict 4446 # Add the split orders helper functions. 4447 file = file + '\n' + open(replace_dict['template_file2'])\ 4448 .read()%replace_dict 4449 # Write the file 4450 writer.writelines(file) 4451 return len([call for call in helas_calls if call.find('#') != 0]), ncolor 4452 else: 4453 replace_dict['return_value'] = (len([call for call in helas_calls if call.find('#') != 0]), ncolor) 4454 return replace_dict
4455 4456 #=========================================================================== 4457 # write_auto_dsig_file 4458 #===========================================================================
    def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
        """Write the auto_dsig.f file for the differential cross section
        calculation, includes pdf call information.

        writer: FortranWriter, or a false-ish value to get the
            (replace_dict, context) pair back instead of writing the file.
        matrix_element: helas matrix element being exported.
        proc_id: subprocess-group id as a string; empty for standalone mode.

        Returns 0 for an empty matrix element, (replace_dict, context) when
        no writer is given, otherwise writes the file and returns None.
        Side effect: updates self.proc_characteristic (ninitial, nexternal,
        max_n_matched_jets, colored_pdgs).

        Raises FortranWriterError unless ninitial is 1 or 2.
        """

        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        nexternal, ninitial = matrix_element.get_nexternal_ninitial()
        self.proc_characteristic['ninitial'] = ninitial
        # keep the running maximum over all exported matrix elements
        self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal)

        # Add information relevant for MLM matching:
        # Maximum QCD power in all the contributions
        max_qcd_order = 0
        for diag in matrix_element.get('diagrams'):
            orders = diag.calculate_orders()
            if 'QCD' in orders:
                max_qcd_order = max(max_qcd_order, orders['QCD'])
        # count massless colored final-state particles per process, keep max
        max_n_light_final_partons = max(len([1 for id in proc.get_final_ids()
                if proc.get('model').get_particle(id).get('mass') == 'ZERO' and
                proc.get('model').get_particle(id).get('color') > 1])
                                    for proc in matrix_element.get('processes'))
        # Maximum number of final state light jets to be matched
        self.proc_characteristic['max_n_matched_jets'] = max(
                               self.proc_characteristic['max_n_matched_jets'],
                               min(max_qcd_order, max_n_light_final_partons))

        # List of default pdgs to be considered for the CKKWl merging cut
        self.proc_characteristic['colored_pdgs'] = \
            sorted(list(set([abs(p.get('pdg_code')) for p in
              matrix_element.get('processes')[0].get('model').get('particles') if
              p.get('color') > 1])))

        if ninitial < 1 or ninitial > 2:
            raise writers.FortranWriter.FortranWriterError("""Need ninitial = 1 or 2 to write auto_dsig file""")

        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Set proc_id
        replace_dict['proc_id'] = proc_id
        replace_dict['numproc'] = 1

        # Set dsig_line
        if ninitial == 1:
            # No conversion, since result of decay should be given in GeV
            dsig_line = "pd(0)*dsiguu"
        else:
            # Convert result (in GeV) to pb
            dsig_line = "pd(0)*conv*dsiguu"

        replace_dict['dsig_line'] = dsig_line

        # Extract pdf lines
        pdf_vars, pdf_data, pdf_lines = \
                  self.get_pdf_lines(matrix_element, ninitial, proc_id != "")
        replace_dict['pdf_vars'] = pdf_vars
        replace_dict['pdf_data'] = pdf_data
        replace_dict['pdf_lines'] = pdf_lines

        # Lines that differ between subprocess group and regular
        if proc_id:
            replace_dict['numproc'] = int(proc_id)
            replace_dict['passcuts_begin'] = ""
            replace_dict['passcuts_end'] = ""
            # Set lines for subprocess group version
            # Set define_iconfigs_lines
            replace_dict['define_subdiag_lines'] = \
                 """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
                 COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
            replace_dict['cutsdone'] = ""
        else:
            replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN"
            replace_dict['passcuts_end'] = "ENDIF"
            replace_dict['define_subdiag_lines'] = ""
            replace_dict['cutsdone'] = " cutsdone=.false.\n cutspassed=.false."

        # grouped output provides its own good-helicity bookkeeping, so only
        # the non-grouped exporter embeds the read/write routines here
        if not isinstance(self, ProcessExporterFortranMEGroup):
            ncomb = matrix_element.get_helicity_combinations()
            replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb)
        else:
            replace_dict['read_write_good_hel'] = ""

        context = {'read_write_good_hel': True}

        if writer:
            file = open(pjoin(_file_path, \
                             'iolibs/template_files/auto_dsig_v4.inc')).read()
            file = file % replace_dict

            # Write the file
            writer.writelines(file, context=context)
        else:
            return replace_dict, context
4561 #=========================================================================== 4562 # write_coloramps_file 4563 #===========================================================================
4564 - def write_coloramps_file(self, writer, mapconfigs, matrix_element):
4565 """Write the coloramps.inc file for MadEvent""" 4566 4567 lines = self.get_icolamp_lines(mapconfigs, matrix_element, 1) 4568 lines.insert(0, "logical icolamp(%d,%d,1)" % \ 4569 (max(len(list(matrix_element.get('color_basis').keys())), 1), 4570 len(mapconfigs))) 4571 4572 4573 # Write the file 4574 writer.writelines(lines) 4575 4576 return True
4577 4578 #=========================================================================== 4579 # write_colors_file 4580 #===========================================================================
    def write_colors_file(self, writer, matrix_elements):
        """Write the get_color.f file for MadEvent, which returns color
        for all particles used in the matrix element.

        matrix_elements may be a single HelasMatrixElement or a list of them;
        a single element is wrapped into a list first.  Emits one Fortran
        if/else-if branch per distinct pdg code, plus a branch for the dummy
        multiparticle-vertex particle and a fall-through error branch.
        Returns True after writing.
        """

        if isinstance(matrix_elements, helas_objects.HelasMatrixElement):
            matrix_elements = [matrix_elements]

        # model taken from the first process; assumes all matrix elements
        # share one model
        model = matrix_elements[0].get('processes')[0].get('model')

        # We need the both particle and antiparticle wf_ids, since the identity
        # depends on the direction of the wf.
        wf_ids = set(sum([sum([sum([[wf.get_pdg_code(), wf.get_anti_pdg_code()] \
                                    for wf in d.get('wavefunctions')], []) \
                               for d in me.get('diagrams')], []) \
                          for me in matrix_elements], []))

        # same for external legs (including decay chains)
        leg_ids = set(sum([sum([sum([[l.get('id'),
                                      model.get_particle(l.get('id')).get_anti_pdg_code()] \
                                     for l in p.get_legs_with_decays()], []) \
                                for p in me.get('processes')], []) \
                           for me in matrix_elements], []))
        particle_ids = sorted(list(wf_ids.union(leg_ids)))

        # first branch opens the if-chain
        lines = """function get_color(ipdg)
        implicit none
        integer get_color, ipdg

        if(ipdg.eq.%d)then
        get_color=%d
        return
        """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color())

        for part_id in particle_ids[1:]:
            lines += """else if(ipdg.eq.%d)then
        get_color=%d
        return
        """ % (part_id, model.get_particle(part_id).get_color())
        # Dummy particle for multiparticle vertices with pdg given by
        # first code not in the model
        lines += """else if(ipdg.eq.%d)then
c       This is dummy particle used in multiparticle vertices
        get_color=2
        return
        """ % model.get_first_non_pdg()
        lines += """else
        write(*,*)'Error: No color given for pdg ',ipdg
        get_color=0
        return
        endif
        end
        """

        # Write the file
        writer.writelines(lines)

        return True
4637 4638 #=========================================================================== 4639 # write_config_nqcd_file 4640 #===========================================================================
4641 - def write_config_nqcd_file(self, writer, nqcd_list):
4642 """Write the config_nqcd.inc with the number of QCD couplings 4643 for each config""" 4644 4645 lines = [] 4646 for iconf, n in enumerate(nqcd_list): 4647 lines.append("data nqcd(%d)/%d/" % (iconf+1, n)) 4648 4649 # Write the file 4650 writer.writelines(lines) 4651 4652 return True
4653 4654 #=========================================================================== 4655 # write_maxconfigs_file 4656 #===========================================================================
4657 - def write_maxconfigs_file(self, writer, matrix_elements):
4658 """Write the maxconfigs.inc file for MadEvent""" 4659 4660 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 4661 maxconfigs = max([me.get_num_configs() for me in \ 4662 matrix_elements.get('matrix_elements')]) 4663 else: 4664 maxconfigs = max([me.get_num_configs() for me in matrix_elements]) 4665 4666 lines = "integer lmaxconfigs\n" 4667 lines += "parameter(lmaxconfigs=%d)" % maxconfigs 4668 4669 # Write the file 4670 writer.writelines(lines) 4671 4672 return True
4673 4674 #=========================================================================== 4675 # read_write_good_hel 4676 #===========================================================================
4677 - def read_write_good_hel(self, ncomb):
4678 """return the code to read/write the good_hel common_block""" 4679 4680 convert = {'ncomb' : ncomb} 4681 output = """ 4682 subroutine write_good_hel(stream_id) 4683 implicit none 4684 integer stream_id 4685 INTEGER NCOMB 4686 PARAMETER ( NCOMB=%(ncomb)d) 4687 LOGICAL GOODHEL(NCOMB) 4688 INTEGER NTRY 4689 common/BLOCK_GOODHEL/NTRY,GOODHEL 4690 write(stream_id,*) GOODHEL 4691 return 4692 end 4693 4694 4695 subroutine read_good_hel(stream_id) 4696 implicit none 4697 include 'genps.inc' 4698 integer stream_id 4699 INTEGER NCOMB 4700 PARAMETER ( NCOMB=%(ncomb)d) 4701 LOGICAL GOODHEL(NCOMB) 4702 INTEGER NTRY 4703 common/BLOCK_GOODHEL/NTRY,GOODHEL 4704 read(stream_id,*) GOODHEL 4705 NTRY = MAXTRIES + 1 4706 return 4707 end 4708 4709 subroutine init_good_hel() 4710 implicit none 4711 INTEGER NCOMB 4712 PARAMETER ( NCOMB=%(ncomb)d) 4713 LOGICAL GOODHEL(NCOMB) 4714 INTEGER NTRY 4715 INTEGER I 4716 4717 do i=1,NCOMB 4718 GOODHEL(I) = .false. 4719 enddo 4720 NTRY = 0 4721 end 4722 4723 integer function get_maxsproc() 4724 implicit none 4725 get_maxsproc = 1 4726 return 4727 end 4728 4729 """ % convert 4730 4731 return output
4732 4733 #=========================================================================== 4734 # write_config_subproc_map_file 4735 #===========================================================================
4736 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
4737 """Write a dummy config_subproc.inc file for MadEvent""" 4738 4739 lines = [] 4740 4741 for iconfig in range(len(s_and_t_channels)): 4742 lines.append("DATA CONFSUB(1,%d)/1/" % \ 4743 (iconfig + 1)) 4744 4745 # Write the file 4746 writer.writelines(lines) 4747 4748 return True
4749 4750 #=========================================================================== 4751 # write_configs_file 4752 #===========================================================================
4753 - def write_configs_file(self, writer, matrix_element):
4754 """Write the configs.inc file for MadEvent""" 4755 4756 # Extract number of external particles 4757 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 4758 4759 model = matrix_element.get('processes')[0].get('model') 4760 configs = [(i+1, d) for (i, d) in \ 4761 enumerate(matrix_element.get('diagrams'))] 4762 mapconfigs = [c[0] for c in configs] 4763 return mapconfigs, self.write_configs_file_from_diagrams(writer, 4764 [[c[1]] for c in configs], 4765 mapconfigs, 4766 nexternal, ninitial, 4767 model)
4768 4769 #=========================================================================== 4770 # write_run_configs_file 4771 #===========================================================================
4772 - def write_run_config_file(self, writer):
4773 """Write the run_configs.inc file for MadEvent""" 4774 4775 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 4776 4777 if self.proc_characteristic['loop_induced']: 4778 job_per_chan = 1 4779 else: 4780 job_per_chan = 5 4781 4782 if writer: 4783 text = open(path).read() % {'chanperjob': job_per_chan} 4784 writer.write(text) 4785 return True 4786 else: 4787 return {'chanperjob': job_per_chan}
4788 4789 #=========================================================================== 4790 # write_configs_file_from_diagrams 4791 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns (s_and_t_channels, nqcd_list) where s_and_t_channels holds,
        per written config, [first non-empty s-channels, reordered t-channels,
        t-channel strategy code], and nqcd_list the QCD coupling count per
        written config.
        """

        lines = []

        s_and_t_channels = []

        nqcd_list = []

        # largest leg multiplicity appearing in each config's first
        # contributing diagram; configs containing any vertex above the
        # overall minimum are skipped below (only 3-vertices allowed)
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers() != []]
        minvert = min(vert_list) if vert_list != [] else 0

        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # pdg code used as placeholder propagator id (first code not in model)
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any([vert > minvert for vert in
                    [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
                # Only 3-vertices allowed in configs.inc
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model,
                                                           new_pdg))
                else:
                    # placeholder shares the one empty_verts list, resized below
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s, t in stchannels if t != None][0]

            # pass to ping-pong strategy for t-channel for 3 ore more T-channel
            # this is directly related to change in genps.f
            tstrat = self.opt.get('t_strategy', 0)
            tchannels, tchannels_strategy = ProcessExporterFortranME.reorder_tchannels(tchannels, tstrat, self.model)

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s, t in stchannels if t != None][0],
                                     tchannels, tchannels_strategy])

            # Make sure empty_verts is same length as real vertices
            if any([s for s, t in stchannels]):
                empty_verts[:] = [None] * max([len(s) for s, t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = list(zip(*[s for s, t in stchannels]))
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                     mapconfigs[iconfig]))
            lines.append("data tstrategy(%d)/%d/" % (nconfigs, tchannels_strategy))
            # Number of QCD couplings in this diagram
            # (taken from the first contributing subprocess diagram)
            nqcd = 0
            for h in helas_diags:
                if h:
                    try:
                        nqcd = h.calculate_orders()['QCD']
                    except KeyError:
                        # diagram has no QCD order: keep 0
                        pass
                    break
                else:
                    continue

            nqcd_list.append(nqcd)

            for verts in allchannels:
                if verts in schannels:
                    # s-channel entry: tuple over subprocesses, pick first real vertex
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, len(daughters),
                              ",".join([str(d) for d in daughters])))
                if verts in schannels:
                    # one propagator pdg per subprocess (0 if not contributing)
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join([str(d) for d in pdgs])))
                    lines.append("data tprid(%d,%d)/0/" % \
                                 (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    # t-channel propagator (last t-channel entry carries none)
                    lines.append("data tprid(%d,%d)/%d/" % \
                                 (last_leg.get('number'), nconfigs,
                                  abs(last_leg.get('id'))))
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        lines.append("# Number of configs")
        lines.append("data mapconfig(0)/%d/" % nconfigs)

        # Write the file
        writer.writelines(lines)

        return s_and_t_channels, nqcd_list



    #===========================================================================
    # reoder t-channels
    #===========================================================================

    #ordering = 0
    @staticmethod
    def reorder_tchannels(tchannels, tstrat, model):
        """Select and apply a T-channel ordering strategy.

        tchannels: list of t-channel vertices (as produced by
            get_s_and_t_channels); tstrat: requested strategy from the
            't_strategy' option (0 = auto-select); model: used to test
            whether boundary propagators are massless.

        Returns (possibly reordered tchannels, strategy code), where the code
        is 2 (order kept), 1 (flipped side), -1 or -2 (ping-pong variants).
        For tstrat == 0 the heuristics below pick a strategy from the
        masslessness of the outermost (and next-to-outermost) t-channel
        propagators.  NOTE(review): reorder_tchannels_pingpong pops from its
        input list, so the original list may be consumed by these calls.
        """
        # no need to modified anything if 1 or less T-Channel
        #Note that this counts the number of vertex (one more vertex compare to T)
        #ProcessExporterFortranME.ordering +=1
        if len(tchannels) < 3 or tstrat == 2 or not model:
            return tchannels, 2
        elif tstrat == 1:
            return ProcessExporterFortranME.reorder_tchannels_flipside(tchannels), 1
        elif tstrat == -2:
            return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels), -2
        elif tstrat == -1:
            return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels, 1), -1
        elif len(tchannels) < 4:
            # exactly two T-channel propagators: decide from which side is massless
            first = tchannels[0]['legs'][1]['number']
            t1 = tchannels[0]['legs'][-1]['id']
            last = tchannels[-1]['legs'][1]['number']
            t2 = tchannels[-1]['legs'][0]['id']
            m1 = model.get_particle(t1).get('mass') == 'ZERO'
            m2 = model.get_particle(t2).get('mass') == 'ZERO'
            if m2 and not m1:
                return ProcessExporterFortranME.reorder_tchannels_flipside(tchannels), 1
            elif m1 and not m2:
                return tchannels, 2
            elif first < last:
                # tie-break on external leg numbering
                return ProcessExporterFortranME.reorder_tchannels_flipside(tchannels), 1
            else:
                return tchannels, 2
        else:
            # three or more T-channel propagators: also inspect the
            # next-to-outermost propagators (t12/t22) before deciding
            first = tchannels[0]['legs'][1]['number']
            t1 = tchannels[0]['legs'][-1]['id']
            last = tchannels[-1]['legs'][1]['number']
            t2 = tchannels[-1]['legs'][0]['id']
            m1 = model.get_particle(t1).get('mass') == 'ZERO'
            m2 = model.get_particle(t2).get('mass') == 'ZERO'

            t12 = tchannels[1]['legs'][-1]['id']
            m12 = model.get_particle(t12).get('mass') == 'ZERO'
            t22 = tchannels[-2]['legs'][0]['id']
            m22 = model.get_particle(t22).get('mass') == 'ZERO'
            if m2 and not m1:
                if m22:
                    return ProcessExporterFortranME.reorder_tchannels_flipside(tchannels), 1
                else:
                    return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels), -2
            elif m1 and not m2:
                if m12:
                    return tchannels, 2
                else:
                    return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels), -2
            elif m1 and m2 and len(tchannels) == 4 and not m12: # 3 T propa
                return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels), -2
            # this case seems quite sensitive we tested method 2 specifically and this was not helping in general
            elif not m1 and not m2 and len(tchannels) == 4 and m12:
                if first < last:
                    return ProcessExporterFortranME.reorder_tchannels_flipside(tchannels), 1
                return tchannels, 2
            else:
                return ProcessExporterFortranME.reorder_tchannels_pingpong(tchannels), -2




    @staticmethod
    def reorder_tchannels_flipside(tchannels):
        """Reverse the side from which the T-channel chain is written.
        assume ninitial == 2.

        The incoming chain is coded from beam 1 downwards:
            (1 -2 > -X) (-X 4 > -X-1) ... ((-n+1) P > -N)
        and is rewritten starting from beam 2 upwards:
            (2 P > -X) (-X L > -X-1) ... (-X-L -2 > -N)
        i.e. the vertex list is traversed from the back, each vertex is
        flipped so the propagator/beam leg comes first, and propagator
        numbers are relabelled to stay consecutive from -X downwards.
        Returns a new list of copied vertices; the input list is consumed
        (popped empty).
        """

        # no need to modified anything if 1 or less T-Channel
        #Note that this counts the number of vertex (one more vertex compare to T)
        if len(tchannels) < 2:
            return tchannels

        out = []
        # map: old propagator/beam leg number -> new number
        oldid2new = {}

        # initialisation
        # id of the first T-channel (-X)
        propa_id = tchannels[0]['legs'][-1]['number']
        #
        # Setup the last vertex to refenence the second id beam
        # -N (need to setup it to 2.
        initialid = tchannels[-1]['legs'][-1]['number']
        oldid2new[initialid] = 2
        oldid2new[1] = initialid

        i = 0
        while tchannels:
            # traverse the chain from the back (beam-2 side first)
            old_vert = tchannels.pop()

            #copy the vertex /leglist to avoid side effects
            new_vert = base_objects.Vertex(old_vert)
            new_vert['legs'] = base_objects.LegList([base_objects.Leg(l) for l in old_vert['legs']])
            # vertex taken from the bottom we have
            # (-N+1 X > -N) we need to flip to pass to
            # -N X > -N+1 (and then relabel -N and -N+1
            legs = new_vert['legs'] # shorcut
            id1 = legs[0]['number']
            id2 = legs[1]['number']
            id3 = legs[2]['number']
            # to be secure we also support (X -N+1 > -N)
            if id3 == id2 - 1 and id1 != 1:
                legs[0], legs[1] = legs[1], legs[0]
            #flipping side
            legs[0], legs[2] = legs[2], legs[0]

            # the only new relabelling is the last element of the list
            # always thanks to the above flipping
            old_propa_id = new_vert['legs'][-1]['number']
            oldid2new[old_propa_id] = propa_id

            #pass to new convention for leg numbering:
            for l in new_vert['legs']:
                if l['number'] in oldid2new:
                    l['number'] = oldid2new[l['number']]

            # new_vert is now ready
            out.append(new_vert)
            # prepare next iteration
            propa_id -= 1
            i += 1

        return out
    @staticmethod
    def reorder_tchannels_pingpong(tchannels, id=2):
        """change the tchannel ordering to pass to a ping-pong strategy.
        assume ninitial == 2

        NOTE: the parameter name ``id`` shadows the builtin but is part of
        the public signature and therefore kept. id=2 starts the ping-pong
        from the top of the chain, id=1 from the bottom.

        We assume that we receive something like this

        1 ----- X ------- -2
                |
                | (-X)
                |
                X -------- 4
                |
                | (-X-1)
                |
                X --------- -1

                X---------- 3
                |
                | (-N+2)
                |
                X --------- L
                |
                | (-N+1)
                |
        -N ----- X ------- P

        coded as
        (1 -2 > -X) (-X 4 > -X-1) (-X-1 -1 > -X-2) ...
        ((-N+3) 3 > (-N+2)) ((-n+2) L > (-n+1)) ((-n+1) P > -N)

        we want to convert this as:
        1 ----- X ------- -2
                |
                | (-X)
                |
                X -------- 4
                |
                | (-X-2)
                |
                X --------- -1

                X---------- 3
                |
                | (-X-3)
                |
                X --------- L
                |
                | (-X-1)
                |
        2 ----- X ------- P

        coded as
        (1 -2 > -X) (2 P > -X-1) (-X 4 > -X-2) (-X-1 L > -X-3) ...
        """

        # no need to modify anything if 1 or less T-channel
        # Note that this counts the number of vertices (one more vertex compared to T)
        if len(tchannels) < 2:
            return tchannels

        out = []
        # map: old propagator number -> new propagator number
        oldid2new = {}

        # initialisation
        # id of the first T-channel (-X); new propagators are numbered
        # downwards from this value in the order vertices are emitted
        propa_id = tchannels[0]['legs'][-1]['number']
        #
        # Setup the last vertex to reference the second beam id
        # -N needs to be relabelled to the requested beam (2 by default)
        initialid = tchannels[-1]['legs'][-1]['number']
        oldid2new[initialid] = id

        i = 0
        while tchannels:
            # ping-pong by taking first/last element in alternance;
            # which side starts depends on the requested beam id
            if id == 2:
                if i % 2 == 0:
                    old_vert = tchannels.pop(0)
                else:
                    old_vert = tchannels.pop()
            else:
                if i % 2 != 0:
                    old_vert = tchannels.pop(0)
                else:
                    old_vert = tchannels.pop()

            # copy the vertex / leglist to avoid side effects on the input
            new_vert = base_objects.Vertex(old_vert)
            new_vert['legs'] = base_objects.LegList([base_objects.Leg(l) for l in old_vert['legs']])
            # if vertex taken from the bottom we have
            # (-N+1 X > -N) we need to flip to pass to
            # -N X > -N+1 (and then relabel -N and -N+1)
            # to be secure we also support (X -N+1 > -N)
            if (i % 2 == 1 and id == 2) or (i % 2 == 0 and id == 1):
                legs = new_vert['legs']  # shortcut
                id1 = legs[0]['number']
                id2 = legs[1]['number']
                if id1 > id2:
                    legs[0], legs[1] = legs[1], legs[0]
                else:
                    legs[0], legs[2] = legs[2], legs[0]

            # the only new relabelling is the last element of the list
            # always thanks to the above flipping
            old_propa_id = new_vert['legs'][-1]['number']
            oldid2new[old_propa_id] = propa_id

            # first vertex from the bottom: force the incoming leg to beam 2
            # (legs is always bound here since the flip branch ran for
            # i == 0 and id == 1)
            if i == 0 and id == 1:
                legs[0]['number'] = 2

            # pass to new convention for leg numbering:
            for l in new_vert['legs']:
                if l['number'] in oldid2new:
                    l['number'] = oldid2new[l['number']]

            # new_vert is now ready
            out.append(new_vert)
            # prepare next iteration
            propa_id -= 1
            i += 1

        return out
5242 5243 5244 5245 5246 5247 #=========================================================================== 5248 # write_decayBW_file 5249 #===========================================================================
5250 - def write_decayBW_file(self, writer, s_and_t_channels):
5251 """Write the decayBW.inc file for MadEvent""" 5252 5253 lines = [] 5254 5255 booldict = {None: "0", True: "1", False: "2"} 5256 5257 for iconf, config in enumerate(s_and_t_channels): 5258 schannels = config[0] 5259 for vertex in schannels: 5260 # For the resulting leg, pick out whether it comes from 5261 # decay or not, as given by the onshell flag 5262 leg = vertex.get('legs')[-1] 5263 lines.append("data gForceBW(%d,%d)/%s/" % \ 5264 (leg.get('number'), iconf + 1, 5265 booldict[leg.get('onshell')])) 5266 5267 # Write the file 5268 writer.writelines(lines) 5269 5270 return True
5271 5272 #=========================================================================== 5273 # write_dname_file 5274 #===========================================================================
5275 - def write_dname_file(self, writer, dir_name):
5276 """Write the dname.mg file for MG4""" 5277 5278 line = "DIRNAME=%s" % dir_name 5279 5280 # Write the file 5281 writer.write(line + "\n") 5282 5283 return True
5284 5285 #=========================================================================== 5286 # write_driver 5287 #===========================================================================
5288 - def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
5289 """Write the SubProcess/driver.f file for MG4""" 5290 5291 path = pjoin(_file_path,'iolibs','template_files','madevent_driver.f') 5292 5293 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 5294 card = 'Source/MODEL/MG5_param.dat' 5295 else: 5296 card = 'param_card.dat' 5297 # Requiring each helicity configuration to be probed by 10 points for 5298 # matrix element before using the resulting grid for MC over helicity 5299 # sampling. 5300 # We multiply this by 2 because each grouped subprocess is called at most 5301 # twice for each IMIRROR. 5302 replace_dict = {'param_card_name':card, 5303 'ncomb':ncomb, 5304 'hel_init_points':n_grouped_proc*10*2} 5305 if not v5: 5306 replace_dict['secondparam']=',.true.' 5307 else: 5308 replace_dict['secondparam']='' 5309 5310 if writer: 5311 text = open(path).read() % replace_dict 5312 writer.write(text) 5313 return True 5314 else: 5315 return replace_dict
5316 5317 #=========================================================================== 5318 # write_addmothers 5319 #===========================================================================
5320 - def write_addmothers(self, writer):
5321 """Write the SubProcess/addmothers.f""" 5322 5323 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 5324 5325 text = open(path).read() % {'iconfig': 'diag_number'} 5326 writer.write(text) 5327 5328 return True
5329 5330 5331 #=========================================================================== 5332 # write_combine_events 5333 #===========================================================================
5334 - def write_combine_events(self, writer, nb_proc=100):
5335 """Write the SubProcess/driver.f file for MG4""" 5336 5337 path = pjoin(_file_path,'iolibs','template_files','madevent_combine_events.f') 5338 5339 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 5340 card = 'Source/MODEL/MG5_param.dat' 5341 else: 5342 card = 'param_card.dat' 5343 5344 #set maxpup (number of @X in the process card) 5345 5346 text = open(path).read() % {'param_card_name':card, 'maxpup':nb_proc+1} 5347 #the +1 is just a security. This is not needed but I feel(OM) safer with it. 5348 writer.write(text) 5349 5350 return True
5351 5352 5353 #=========================================================================== 5354 # write_symmetry 5355 #===========================================================================
5356 - def write_symmetry(self, writer, v5=True):
5357 """Write the SubProcess/driver.f file for ME""" 5358 5359 5360 path = pjoin(_file_path,'iolibs','template_files','madevent_symmetry.f') 5361 5362 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 5363 card = 'Source/MODEL/MG5_param.dat' 5364 else: 5365 card = 'param_card.dat' 5366 5367 if v5: 5368 replace_dict = {'param_card_name':card, 'setparasecondarg':''} 5369 else: 5370 replace_dict= {'param_card_name':card, 'setparasecondarg':',.true.'} 5371 5372 if writer: 5373 text = open(path).read() 5374 text = text % replace_dict 5375 writer.write(text) 5376 return True 5377 else: 5378 return replace_dict
5379 5380 5381 5382 #=========================================================================== 5383 # write_iproc_file 5384 #===========================================================================
5385 - def write_iproc_file(self, writer, me_number):
5386 """Write the iproc.dat file for MG4""" 5387 line = "%d" % (me_number + 1) 5388 5389 # Write the file 5390 for line_to_write in writer.write_line(line): 5391 writer.write(line_to_write) 5392 return True
5393 5394 #=========================================================================== 5395 # write_mg_sym_file 5396 #===========================================================================
5397 - def write_mg_sym_file(self, writer, matrix_element):
5398 """Write the mg.sym file for MadEvent.""" 5399 5400 lines = [] 5401 5402 # Extract process with all decays included 5403 final_legs = [leg for leg in matrix_element.get('processes')[0].get_legs_with_decays() if leg.get('state') == True] 5404 5405 ninitial = len([leg for leg in matrix_element.get('processes')[0].get('legs') if leg.get('state') == False]) 5406 5407 identical_indices = {} 5408 5409 # Extract identical particle info 5410 for i, leg in enumerate(final_legs): 5411 if leg.get('id') in identical_indices: 5412 identical_indices[leg.get('id')].append(\ 5413 i + ninitial + 1) 5414 else: 5415 identical_indices[leg.get('id')] = [i + ninitial + 1] 5416 5417 # Remove keys which have only one particle 5418 for key in list(identical_indices.keys()): 5419 if len(identical_indices[key]) < 2: 5420 del identical_indices[key] 5421 5422 # Write mg.sym file 5423 lines.append(str(len(list(identical_indices.keys())))) 5424 for key in identical_indices.keys(): 5425 lines.append(str(len(identical_indices[key]))) 5426 for number in identical_indices[key]: 5427 lines.append(str(number)) 5428 5429 # Write the file 5430 writer.writelines(lines) 5431 5432 return True
5433 5434 #=========================================================================== 5435 # write_mg_sym_file 5436 #===========================================================================
5437 - def write_default_mg_sym_file(self, writer):
5438 """Write the mg.sym file for MadEvent.""" 5439 5440 lines = "0" 5441 5442 # Write the file 5443 writer.writelines(lines) 5444 5445 return True
5446 5447 #=========================================================================== 5448 # write_ncombs_file 5449 #===========================================================================
5450 - def write_ncombs_file(self, writer, nexternal):
5451 """Write the ncombs.inc file for MadEvent.""" 5452 5453 # ncomb (used for clustering) is 2^nexternal 5454 file = " integer n_max_cl\n" 5455 file = file + "parameter (n_max_cl=%d)" % (2 ** nexternal) 5456 5457 # Write the file 5458 writer.writelines(file) 5459 5460 return True
5461 5462 #=========================================================================== 5463 # write_processes_file 5464 #===========================================================================
5465 - def write_processes_file(self, writer, subproc_group):
5466 """Write the processes.dat file with info about the subprocesses 5467 in this group.""" 5468 5469 lines = [] 5470 5471 for ime, me in \ 5472 enumerate(subproc_group.get('matrix_elements')): 5473 lines.append("%s %s" % (str(ime+1) + " " * (7-len(str(ime+1))), 5474 ",".join(p.base_string() for p in \ 5475 me.get('processes')))) 5476 if me.get('has_mirror_process'): 5477 mirror_procs = [copy.copy(p) for p in me.get('processes')] 5478 for proc in mirror_procs: 5479 legs = copy.copy(proc.get('legs_with_decays')) 5480 legs.insert(0, legs.pop(1)) 5481 proc.set("legs_with_decays", legs) 5482 lines.append("mirror %s" % ",".join(p.base_string() for p in \ 5483 mirror_procs)) 5484 else: 5485 lines.append("mirror none") 5486 5487 # Write the file 5488 writer.write("\n".join(lines)) 5489 5490 return True
5491 5492 #=========================================================================== 5493 # write_symswap_file 5494 #===========================================================================
5495 - def write_symswap_file(self, writer, ident_perms):
5496 """Write the file symswap.inc for MG4 by comparing diagrams using 5497 the internal matrix element value functionality.""" 5498 5499 lines = [] 5500 5501 # Write out lines for symswap.inc file (used to permute the 5502 # external leg momenta 5503 for iperm, perm in enumerate(ident_perms): 5504 lines.append("data (isym(i,%d),i=1,nexternal)/%s/" % \ 5505 (iperm+1, ",".join([str(i+1) for i in perm]))) 5506 lines.append("data nsym/%d/" % len(ident_perms)) 5507 5508 # Write the file 5509 writer.writelines(lines) 5510 5511 return True
5512 5513 #=========================================================================== 5514 # write_symfact_file 5515 #===========================================================================
5516 - def write_symfact_file(self, writer, symmetry):
5517 """Write the files symfact.dat for MG4 by comparing diagrams using 5518 the internal matrix element value functionality.""" 5519 5520 pos = max(2, int(math.ceil(math.log10(len(symmetry))))) 5521 form = "%"+str(pos)+"r %"+str(pos+1)+"r" 5522 # Write out lines for symswap.inc file (used to permute the 5523 # external leg momenta 5524 lines = [ form %(i+1, s) for i,s in enumerate(symmetry) if s != 0] 5525 # Write the file 5526 writer.write('\n'.join(lines)) 5527 writer.write('\n') 5528 5529 return True
5530 5531 #=========================================================================== 5532 # write_symperms_file 5533 #===========================================================================
5534 - def write_symperms_file(self, writer, perms):
5535 """Write the symperms.inc file for subprocess group, used for 5536 symmetric configurations""" 5537 5538 lines = [] 5539 for iperm, perm in enumerate(perms): 5540 lines.append("data (perms(i,%d),i=1,nexternal)/%s/" % \ 5541 (iperm+1, ",".join([str(i+1) for i in perm]))) 5542 5543 # Write the file 5544 writer.writelines(lines) 5545 5546 return True
5547 5548 #=========================================================================== 5549 # write_subproc 5550 #===========================================================================
5551 - def write_subproc(self, writer, subprocdir):
5552 """Append this subprocess to the subproc.mg file for MG4""" 5553 5554 # Write line to file 5555 writer.write(subprocdir + "\n") 5556 5557 return True
5558
#===============================================================================
# ProcessExporterFortranMEGroup
#===============================================================================
class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
    """Class to take care of exporting a set of matrix elements to
    MadEvent subprocess group format."""

    # Fortran template used for each grouped matrix<i>.f source file.
    matrix_file = "matrix_madevent_group_v4.inc"
    # Output flavour produced by this exporter.
    grouped_mode = 'madevent'
    # Default exporter options for the grouped MadEvent output.
    default_opt = {'clean': False, 'complex_mass':False,
                   'export_format':'madevent', 'mp': False,
                   'v5_model': True,
                   'output_options':{},
                   'hel_recycling': True
                   }

    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                         fortran_model,
                                         group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files.

        subproc_group: group_subprocs.SubProcessGroup holding the grouped
            matrix elements to export.
        fortran_model: helas-call writer used for the matrix<i>.f files.
        group_number: index of this group, written to iproc.dat.

        Returns the total number of helas calls written, or 0 when the
        subprocess directory cannot be entered.

        NOTE(review): this method chdirs into the subprocess directory and
        back; the original cwd is restored on success, but the early
        ``return 0`` path leaves the process in the SubProcesses directory.
        """

        assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \
                                      "subproc_group object not SubProcessGroup"

        # Lazily pick up the model from the first process if not set yet.
        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        cwd = os.getcwd()
        path = pjoin(self.dir_path, 'SubProcesses')

        os.chdir(path)
        pathdir = os.getcwd()

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(subprocdir)
        except os.error as error:
            # An existing directory is not fatal: warn and reuse it.
            logger.warning(error.strerror + " " + subprocdir)

        try:
            os.chdir(subprocdir)
        except os.error:
            logger.error('Could not cd to directory %s' % subprocdir)
            return 0

        logger.info('Creating files in directory %s' % subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0
        maxflows = 0
        tot_calls = 0

        matrix_elements = subproc_group.get('matrix_elements')

        # Add the driver.f, all grouped ME's must share the same number of
        # helicity configuration
        ncomb = matrix_elements[0].get_helicity_combinations()
        for me in matrix_elements[1:]:
            if ncomb != me.get_helicity_combinations():
                raise MadGraph5Error("All grouped processes must share the "+\
                                     "same number of helicity configurations.")

        filename = 'driver.f'
        self.write_driver(writers.FortranWriter(filename), ncomb,
                          n_grouped_proc=len(matrix_elements),
                          v5=self.opt['v5_model'])

        # Record (and default) the helicity-recycling option.
        try:
            self.proc_characteristic['hel_recycling'] = self.opt['hel_recycling']
        except KeyError:
            self.proc_characteristic['hel_recycling'] = False
            self.opt['hel_recycling'] = False

        for ime, matrix_element in \
                enumerate(matrix_elements):
            if self.opt['hel_recycling']:
                # Helicity recycling: write matrix<i>_orig.f plus a
                # dedicated template_matrix<i>.f used at run time.
                filename = 'matrix%d_orig.f' % (ime+1)
                replace_dict = self.write_matrix_element_v4(None,
                                matrix_element,
                                fortran_model,
                                proc_id=str(ime+1),
                                config_map=subproc_group.get('diagram_maps')[ime],
                                subproc_number=group_number)
                calls, ncolor = replace_dict['return_value']
                tfile = open(replace_dict['template_file']).read()
                file = tfile % replace_dict
                # Add the split orders helper functions.
                file = file + '\n' + open(replace_dict['template_file2'])\
                                                           .read()%replace_dict
                # Write the file
                writer = writers.FortranWriter(filename)
                writer.writelines(file)

                #
                # write the dedicated template for helicity recycling
                #
                tfile = open(replace_dict['template_file'].replace('.inc',"_hel.inc")).read()
                file = tfile % replace_dict
                # Add the split orders helper functions.
                file = file + '\n' + open(replace_dict['template_file2'])\
                                                           .read()%replace_dict
                # Write the file (case must be preserved for the templates)
                writer = writers.FortranWriter('template_matrix%d.f' % (ime+1))
                writer.uniformcase = False
                writer.writelines(file)

            else:
                filename = 'matrix%d.f' % (ime+1)
                calls, ncolor = \
                    self.write_matrix_element_v4(writers.FortranWriter(filename),
                                matrix_element,
                                fortran_model,
                                proc_id=str(ime+1),
                                config_map=subproc_group.get('diagram_maps')[ime],
                                subproc_number=group_number)

            filename = 'auto_dsig%d.f' % (ime+1)
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime+1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams (skipped when the 'noeps' output option is 'True')
            if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
                filename = "matrix%d.ps" % (ime+1)
                plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                  get('diagrams'),
                                                  filename,
                                                  model = \
                                                  matrix_element.get('processes')[0].\
                                                  get('model'),
                                                  amplitude=True)
                logger.info("Generating Feynman diagrams for " + \
                            matrix_element.get('processes')[0].nice_string())
                plot.draw()

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
        # If a subprocess has no diagrams for this config, the number is 0
        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = 'auto_dsig.f'
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = 'coloramps.inc'
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  subproc_diagrams_for_config,
                                  maxflows,
                                  matrix_elements)

        filename = 'get_color.f'
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_elements)

        filename = 'config_subproc_map.inc'
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           subproc_diagrams_for_config)

        filename = 'configs.inc'
        nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = 'config_nqcd.inc'
        self.write_config_nqcd_file(writers.FortranWriter(filename),
                                    nqcd_list)

        filename = 'decayBW.inc'
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        filename = 'dname.mg'
        self.write_dname_file(writers.FortranWriter(filename),
                              subprocdir)

        filename = 'iproc.dat'
        self.write_iproc_file(writers.FortranWriter(filename),
                              group_number)

        filename = 'leshouche.inc'
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = 'maxamps.inc'
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        # Note that mg.sym is not relevant for this case
        filename = 'mg.sym'
        self.write_default_mg_sym_file(writers.FortranWriter(filename))

        filename = 'mirrorprocs.inc'
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = 'ncombs.inc'
        self.write_ncombs_file(writers.FortranWriter(filename),
                               nexternal)

        filename = 'nexternal.inc'
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = 'ngraphs.inc'
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                nconfigs)

        # NOTE(review): pmass.inc/props.inc below use `matrix_element`,
        # i.e. the last element of the loop above — confirm this is intended.
        filename = 'pmass.inc'
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = 'props.inc'
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = 'processes.dat'
        files.write_to_file(filename,
                            self.write_processes_file,
                            subproc_group)

        # Find config symmetries and permutations
        symmetry, perms, ident_perms = \
                  diagram_symmetry.find_symmetry(subproc_group)

        filename = 'symswap.inc'
        self.write_symswap_file(writers.FortranWriter(filename),
                                ident_perms)

        filename = 'symfact_orig.dat'
        self.write_symfact_file(open(filename, 'w'), symmetry)

        # check consistency: symmetry-identified configs must carry the
        # same QCD power
        for i, sym_fact in enumerate(symmetry):

            if sym_fact >= 0:
                continue
            if nqcd_list[i] != nqcd_list[abs(sym_fact)-1]:
                # NOTE(review): the debug print below indexes with
                # abs(sym_fact); the comparison above uses abs(sym_fact)-1.
                misc.sprint(i, sym_fact, nqcd_list[i], nqcd_list[abs(sym_fact)])
                raise Exception("identical diagram with different QCD powwer")


        filename = 'symperms.inc'
        self.write_symperms_file(writers.FortranWriter(filename),
                                 perms)

        # Generate jpgs -> pass in make_html
        #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl'))

        self.link_files_in_SubProcess(pjoin(pathdir,subprocdir))

        #import nexternal/leshouch in Source
        ln('nexternal.inc', '../../Source', log=False)
        ln('leshouche.inc', '../../Source', log=False)
        ln('maxamps.inc', '../../Source', log=False)

        # Return to SubProcesses dir)
        os.chdir(pathdir)

        # Add subprocess to subproc.mg
        filename = 'subproc.mg'
        files.append_to_file(filename,
                             self.write_subproc,
                             subprocdir)

        # Return to original dir
        os.chdir(cwd)

        if not tot_calls:
            tot_calls = 0
        return tot_calls
5857 5858 #=========================================================================== 5859 # write_super_auto_dsig_file 5860 #===========================================================================
5861 - def write_super_auto_dsig_file(self, writer, subproc_group):
5862 """Write the auto_dsig.f file selecting between the subprocesses 5863 in subprocess group mode""" 5864 5865 replace_dict = {} 5866 5867 # Extract version number and date from VERSION file 5868 info_lines = self.get_mg5_info_lines() 5869 replace_dict['info_lines'] = info_lines 5870 5871 matrix_elements = subproc_group.get('matrix_elements') 5872 5873 # Extract process info lines 5874 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 5875 matrix_elements]) 5876 replace_dict['process_lines'] = process_lines 5877 5878 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 5879 replace_dict['nexternal'] = nexternal 5880 5881 replace_dict['nsprocs'] = 2*len(matrix_elements) 5882 5883 # Generate dsig definition line 5884 dsig_def_line = "DOUBLE PRECISION " + \ 5885 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 5886 range(len(matrix_elements))]) 5887 replace_dict["dsig_def_line"] = dsig_def_line 5888 5889 # Generate dsig process lines 5890 call_dsig_proc_lines = [] 5891 for iproc in range(len(matrix_elements)): 5892 call_dsig_proc_lines.append(\ 5893 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! 
%(proc)s" % \ 5894 {"num": iproc + 1, 5895 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 5896 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 5897 5898 ncomb=matrix_elements[0].get_helicity_combinations() 5899 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 5900 5901 s1,s2 = matrix_elements[0].get_spin_state_initial() 5902 replace_dict['nb_spin_state1'] = s1 5903 replace_dict['nb_spin_state2'] = s2 5904 5905 printzeroamp = [] 5906 for iproc in range(len(matrix_elements)): 5907 printzeroamp.append(\ 5908 " call print_zero_amp_%i()" % ( iproc + 1)) 5909 replace_dict['print_zero_amp'] = "\n".join(printzeroamp) 5910 5911 5912 if writer: 5913 file = open(pjoin(_file_path, \ 5914 'iolibs/template_files/super_auto_dsig_group_v4.inc')).read() 5915 file = file % replace_dict 5916 5917 # Write the file 5918 writer.writelines(file) 5919 else: 5920 return replace_dict
5921 5922 #=========================================================================== 5923 # write_mirrorprocs 5924 #===========================================================================
5925 - def write_mirrorprocs(self, writer, subproc_group):
5926 """Write the mirrorprocs.inc file determining which processes have 5927 IS mirror process in subprocess group mode.""" 5928 5929 lines = [] 5930 bool_dict = {True: '.true.', False: '.false.'} 5931 matrix_elements = subproc_group.get('matrix_elements') 5932 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 5933 (len(matrix_elements), 5934 ",".join([bool_dict[me.get('has_mirror_process')] for \ 5935 me in matrix_elements]))) 5936 # Write the file 5937 writer.writelines(lines)
5938 5939 #=========================================================================== 5940 # write_addmothers 5941 #===========================================================================
5942 - def write_addmothers(self, writer):
5943 """Write the SubProcess/addmothers.f""" 5944 5945 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 5946 5947 text = open(path).read() % {'iconfig': 'lconfig'} 5948 writer.write(text) 5949 5950 return True
5951 5952 5953 #=========================================================================== 5954 # write_coloramps_file 5955 #===========================================================================
5956 - def write_coloramps_file(self, writer, diagrams_for_config, maxflows, 5957 matrix_elements):
5958 """Write the coloramps.inc file for MadEvent in Subprocess group mode""" 5959 5960 # Create a map from subprocess (matrix element) to a list of 5961 # the diagrams corresponding to each config 5962 5963 lines = [] 5964 5965 subproc_to_confdiag = {} 5966 for config in diagrams_for_config: 5967 for subproc, diag in enumerate(config): 5968 try: 5969 subproc_to_confdiag[subproc].append(diag) 5970 except KeyError: 5971 subproc_to_confdiag[subproc] = [diag] 5972 5973 for subproc in sorted(subproc_to_confdiag.keys()): 5974 lines.extend(self.get_icolamp_lines(subproc_to_confdiag[subproc], 5975 matrix_elements[subproc], 5976 subproc + 1)) 5977 5978 lines.insert(0, "logical icolamp(%d,%d,%d)" % \ 5979 (maxflows, 5980 len(diagrams_for_config), 5981 len(matrix_elements))) 5982 5983 # Write the file 5984 writer.writelines(lines) 5985 5986 return True
5987 5988 #=========================================================================== 5989 # write_config_subproc_map_file 5990 #===========================================================================
5991 - def write_config_subproc_map_file(self, writer, config_subproc_map):
5992 """Write the config_subproc_map.inc file for subprocess groups""" 5993 5994 lines = [] 5995 # Output only configs that have some corresponding diagrams 5996 iconfig = 0 5997 for config in config_subproc_map: 5998 if set(config) == set([0]): 5999 continue 6000 lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" % \ 6001 (iconfig + 1, len(config), 6002 ",".join([str(i) for i in config]))) 6003 iconfig += 1 6004 # Write the file 6005 writer.writelines(lines) 6006 6007 return True
6008 6009 #=========================================================================== 6010 # read_write_good_hel 6011 #===========================================================================
    def read_write_good_hel(self, ncomb):
        """return the code to read/write the good_hel common_block

        Returns a string of Fortran helpers (write_good_hel, read_good_hel,
        init_good_hel, get_maxsproc) with the number of helicity
        combinations substituted for %(ncomb)d.
        """

        convert = {'ncomb' : ncomb}

        output = """
        subroutine write_good_hel(stream_id)
        implicit none
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        write(stream_id,*) GOODHEL
        return
        end


        subroutine read_good_hel(stream_id)
        implicit none
        include 'genps.inc'
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        read(stream_id,*) GOODHEL
        NTRY(1) = MAXTRIES + 1
        NTRY(2) = MAXTRIES + 1
        return
        end

        subroutine init_good_hel()
        implicit none
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        INTEGER I

        do i=1,NCOMB
        GOODHEL(I,1) = .false.
        GOODHEL(I,2) = .false.
        enddo
        NTRY(1) = 0
        NTRY(2) = 0
        end

        integer function get_maxsproc()
        implicit none
        include 'maxamps.inc'

        get_maxsproc = maxsproc
        return
        end

        """ % convert

        return output
6073 6074 6075 6076 #=========================================================================== 6077 # write_configs_file 6078 #===========================================================================
6079 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
6080 """Write the configs.inc file with topology information for a 6081 subprocess group. Use the first subprocess with a diagram for each 6082 configuration.""" 6083 6084 matrix_elements = subproc_group.get('matrix_elements') 6085 model = matrix_elements[0].get('processes')[0].get('model') 6086 6087 diagrams = [] 6088 config_numbers = [] 6089 for iconfig, config in enumerate(diagrams_for_config): 6090 # Check if any diagrams correspond to this config 6091 if set(config) == set([0]): 6092 continue 6093 subproc_diags = [] 6094 for s,d in enumerate(config): 6095 if d: 6096 subproc_diags.append(matrix_elements[s].\ 6097 get('diagrams')[d-1]) 6098 else: 6099 subproc_diags.append(None) 6100 diagrams.append(subproc_diags) 6101 config_numbers.append(iconfig + 1) 6102 6103 # Extract number of external particles 6104 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 6105 6106 return len(diagrams), \ 6107 self.write_configs_file_from_diagrams(writer, diagrams, 6108 config_numbers, 6109 nexternal, ninitial, 6110 model)
6111 6112 #=========================================================================== 6113 # write_run_configs_file 6114 #===========================================================================
6115 - def write_run_config_file(self, writer):
6116 """Write the run_configs.inc file for MadEvent""" 6117 6118 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 6119 if self.proc_characteristic['loop_induced']: 6120 job_per_chan = 1 6121 else: 6122 job_per_chan = 2 6123 text = open(path).read() % {'chanperjob':job_per_chan} 6124 writer.write(text) 6125 return True
6126 6127 6128 #=========================================================================== 6129 # write_leshouche_file 6130 #===========================================================================
6131 - def write_leshouche_file(self, writer, subproc_group):
6132 """Write the leshouche.inc file for MG4""" 6133 6134 all_lines = [] 6135 6136 for iproc, matrix_element in \ 6137 enumerate(subproc_group.get('matrix_elements')): 6138 all_lines.extend(self.get_leshouche_lines(matrix_element, 6139 iproc)) 6140 # Write the file 6141 writer.writelines(all_lines) 6142 return True
6143 6144
6145 - def finalize(self,*args, **opts):
6146 6147 super(ProcessExporterFortranMEGroup, self).finalize(*args, **opts) 6148 #ensure that the grouping information is on the correct value 6149 self.proc_characteristic['grouped_matrix'] = True
6150 6151 6152 #=============================================================================== 6153 # UFO_model_to_mg4 6154 #=============================================================================== 6155 6156 python_to_fortran = lambda x: parsers.UFOExpressionParserFortran().parse(x)

class UFO_model_to_mg4(object):
    """ A converter of the UFO-MG5 Model to the MG4 format """

    # The list below shows the only variables the user is allowed to change by
    # himself for each PS point. If he changes any other, then calling
    # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not
    # correctly account for the change.
    PS_dependent_key = ['aS','MU_R']
    # Fortran types used when writing the multiple (quadruple) precision
    # variants of the model files.
    mp_complex_format = 'complex*32'
    mp_real_format = 'real*16'
    # Warning, it is crucial none of the couplings/parameters of the model
    # starts with this prefix. I should add a check for this.
    # You can change it as the global variable to check_param_card.ParamCard
    mp_prefix = check_param_card.ParamCard.mp_prefix
    def __init__(self, model, output_path, opt=None):
        """ initialization of the objects

        model: the UFO-MG5 model to convert
        output_path: directory where all the MG4 model files are written
        opt: optional dict overriding the default export options below
        """

        self.model = model
        self.model_name = model['name']
        self.dir_path = output_path

        # Default export options; the caller may override any subset.
        self.opt = {'complex_mass': False, 'export_format': 'madevent', 'mp':True,
                    'loop_induced': False}
        if opt:
            self.opt.update(opt)

        # Couplings/parameters split by refactorize() into alphaS-dependent
        # and independent sets.
        self.coups_dep = []    # (name, expression, type)
        self.coups_indep = []  # (name, expression, type)
        self.params_dep = []   # (name, expression, type)
        self.params_indep = [] # (name, expression, type)
        self.params_ext = []   # external parameter
        # Expression translators: UFO/python -> Fortran (double and
        # multiple precision respectively).
        self.p_to_f = parsers.UFOExpressionParserFortran(self.model)
        self.mp_p_to_f = parsers.UFOExpressionParserMPFortran(self.model)
6192 6193
6195 """modify the parameter if some of them are identical up to the case""" 6196 6197 lower_dict={} 6198 duplicate = set() 6199 keys = list(self.model['parameters'].keys()) 6200 keys.sort() 6201 for key in keys: 6202 for param in self.model['parameters'][key]: 6203 lower_name = param.name.lower() 6204 if not lower_name: 6205 continue 6206 try: 6207 lower_dict[lower_name].append(param) 6208 except KeyError as error: 6209 lower_dict[lower_name] = [param] 6210 else: 6211 duplicate.add(lower_name) 6212 logger.debug('%s is define both as lower case and upper case.' 6213 % lower_name) 6214 if not duplicate: 6215 return 6216 6217 re_expr = r'''\b(%s)\b''' 6218 to_change = [] 6219 change={} 6220 for value in duplicate: 6221 for i, var in enumerate(lower_dict[value]): 6222 to_change.append(var.name) 6223 new_name = '%s%s' % (var.name.lower(), 6224 ('__%d'%(i+1) if i>0 else '')) 6225 change[var.name] = new_name 6226 var.name = new_name 6227 6228 # Apply the modification to the map_CTcoup_CTparam of the model 6229 # if it has one (giving for each coupling the CT parameters whcih 6230 # are necessary and which should be exported to the model. 
6231 if hasattr(self.model,'map_CTcoup_CTparam'): 6232 for coup, ctparams in self.model.map_CTcoup_CTparam: 6233 for i, ctparam in enumerate(ctparams): 6234 try: 6235 self.model.map_CTcoup_CTparam[coup][i] = change[ctparam] 6236 except KeyError: 6237 pass 6238 6239 replace = lambda match_pattern: change[match_pattern.groups()[0]] 6240 rep_pattern = re.compile(re_expr % '|'.join(to_change)) 6241 6242 # change parameters 6243 for key in keys: 6244 if key == ('external',): 6245 continue 6246 for param in self.model['parameters'][key]: 6247 param.expr = rep_pattern.sub(replace, param.expr) 6248 6249 # change couplings 6250 for key in self.model['couplings'].keys(): 6251 for coup in self.model['couplings'][key]: 6252 coup.expr = rep_pattern.sub(replace, coup.expr) 6253 6254 # change mass/width 6255 for part in self.model['particles']: 6256 if str(part.get('mass')) in to_change: 6257 part.set('mass', rep_pattern.sub(replace, str(part.get('mass')))) 6258 if str(part.get('width')) in to_change: 6259 part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
6260
    def refactorize(self, wanted_couplings = []):
        """modify the couplings to fit with MG4 convention

        Splits the model parameters into external / alphaS-dependent /
        independent lists and the couplings into alphaS-dependent /
        independent lists, keeping only wanted_couplings when given."""

        # Keep only separation in alphaS
        keys = list(self.model['parameters'].keys())
        keys.sort(key=len)
        for key in keys:
            # drop unnamed entries
            to_add = [o for o in self.model['parameters'][key] if o.name]

            if key == ('external',):
                self.params_ext += to_add
            elif any([(k in key) for k in self.PS_dependent_key]):
                # depends on aS or MU_R -> must be recomputed per PS point
                self.params_dep += to_add
            else:
                self.params_indep += to_add
        # same for couplings
        # NOTE(review): the sorted 'keys' list below is not used for the
        # couplings loop, which iterates items() directly -- confirm the
        # ordering is irrelevant here.
        keys = list(self.model['couplings'].keys())
        keys.sort(key=len)
        for key, coup_list in self.model['couplings'].items():
            if any([(k in key) for k in self.PS_dependent_key]):
                self.coups_dep += [c for c in coup_list if
                                   (not wanted_couplings or c.name in \
                                    wanted_couplings)]
            else:
                self.coups_indep += [c for c in coup_list if
                                     (not wanted_couplings or c.name in \
                                      wanted_couplings)]

        # MG4 use G and not aS as it basic object for alphas related computation
        #Pass G in the independant list
        # NOTE(review): the membership tests below compare strings against
        # ModelVariable objects -- presumably ModelVariable equality matches
        # its name; confirm in base_objects.
        if 'G' in self.params_dep:
            index = self.params_dep.index('G')
            G = self.params_dep.pop(index)
        #    G.expr = '2*cmath.sqrt(as*pi)'
        #    self.params_indep.insert(0, self.params_dep.pop(index))
        # No need to add it if not defined

        if 'aS' not in self.params_ext:
            logger.critical('aS not define as external parameter adding it!')
            #self.model['parameters']['aS'] = base_objects.ParamCardVariable('aS', 0.138,'DUMMY',(1,))
            self.params_indep.append( base_objects. ModelVariable('aS', '0.138','real'))
            self.params_indep.append( base_objects. ModelVariable('G', '4.1643','real'))
6303 - def build(self, wanted_couplings = [], full=True):
6304 """modify the couplings to fit with MG4 convention and creates all the 6305 different files""" 6306 6307 self.pass_parameter_to_case_insensitive() 6308 self.refactorize(wanted_couplings) 6309 6310 # write the files 6311 if full: 6312 if wanted_couplings: 6313 # extract the wanted ct parameters 6314 self.extract_needed_CTparam(wanted_couplings=wanted_couplings) 6315 self.write_all()
6316 6317
    def open(self, name, comment='c', format='default'):
        """ Open the file name in the correct directory and with a valid
        header.

        name: file name, relative to self.dir_path
        comment: comment character used to build the banner
        format: 'fortran' returns a writers.FortranWriter, anything else a
                plain file object; both have the banner already written."""

        file_path = pjoin(self.dir_path, name)

        if format == 'fortran':
            fsock = writers.FortranWriter(file_path, 'w')
            # Write the banner via the unbound io.FileIO method so that the
            # FortranWriter line-formatting machinery is bypassed.
            # NOTE(review): assumes FortranWriter derives from io.FileIO --
            # confirm in iolibs.file_writers.
            write_class = io.FileIO

            write_class.writelines(fsock, comment * 77 + '\n')
            write_class.writelines(fsock, '%(comment)s written by the UFO converter\n' % \
                                   {'comment': comment + (6 - len(comment)) * ' '})
            write_class.writelines(fsock, comment * 77 + '\n\n')
        else:
            fsock = open(file_path, 'w')
            fsock.writelines(comment * 77 + '\n')
            fsock.writelines('%(comment)s written by the UFO converter\n' % \
                             {'comment': comment + (6 - len(comment)) * ' '})
            fsock.writelines(comment * 77 + '\n\n')
        return fsock
6339 6340
    def write_all(self):
        """ write all the files

        Drives the creation of every MG4 model file: card readers,
        parameter/coupling definitions, makefile, model functions and the
        default param_card.dat."""
        #write the part related to the external parameter
        self.create_ident_card()
        self.create_param_read()

        #write the definition of the parameter
        self.create_input()
        self.create_intparam_def(dp=True,mp=False)
        if self.opt['mp']:
            self.create_intparam_def(dp=False,mp=True)

        # definition of the coupling.
        self.create_actualize_mp_ext_param_inc()
        self.create_coupl_inc()
        self.create_write_couplings()
        self.create_couplings()

        # the makefile
        self.create_makeinc()
        self.create_param_write()

        # The model functions
        self.create_model_functions_inc()
        self.create_model_functions_def()

        # The param_card.dat
        self.create_param_card()


        # All the standard files
        self.copy_standard_file()
6373 6374 ############################################################################ 6375 ## ROUTINE CREATING THE FILES ############################################ 6376 ############################################################################ 6377
    def copy_standard_file(self):
        """Copy the standard files for the fortran model.

        Links the fixed library files, specializes rw_para.f (includes and
        param_card loading call) for the export format, picks the matching
        lha_read implementation and installs the appropriate makefile."""

        #copy the library files
        file_to_link = ['formats.inc','printout.f', \
                        'rw_para.f', 'testprog.f']

        for filename in file_to_link:
            cp( MG5DIR + '/models/template_files/fortran/' + filename, \
                self.dir_path)

        # rw_para.f is a template: re-read it so it can be specialized below
        file = open(os.path.join(MG5DIR,\
                              'models/template_files/fortran/rw_para.f')).read()

        includes=["include \'coupl.inc\'","include \'input.inc\'",
                  "include \'model_functions.inc\'"]
        if self.opt['mp']:
            includes.extend(["include \'mp_coupl.inc\'","include \'mp_input.inc\'"])
        # In standalone and madloop we do no use the compiled param card but
        # still parse the .dat one so we must load it.
        if self.opt['loop_induced']:
            #loop induced follow MadEvent way to handle the card.
            load_card = ''
            lha_read_filename='lha_read.f'
        elif self.opt['export_format'] in ['madloop','madloop_optimized', 'madloop_matchbox']:
            load_card = 'call LHA_loadcard(param_name,npara,param,value)'
            lha_read_filename='lha_read_mp.f'
        elif self.opt['export_format'].startswith('standalone') \
            or self.opt['export_format'] in ['madweight', 'plugin']\
            or self.opt['export_format'].startswith('matchbox'):
            load_card = 'call LHA_loadcard(param_name,npara,param,value)'
            lha_read_filename='lha_read.f'
        else:
            load_card = ''
            lha_read_filename='lha_read.f'
        # The chosen reader is always installed under the name lha_read.f
        cp( MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \
            os.path.join(self.dir_path,'lha_read.f'))

        file=file%{'includes':'\n '.join(includes),
                   'load_card':load_card}
        writer=open(os.path.join(self.dir_path,'rw_para.f'),'w')
        writer.writelines(file)
        writer.close()

        if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \
            or self.opt['loop_induced']:
            cp( MG5DIR + '/models/template_files/fortran/makefile_madevent',
                self.dir_path + '/makefile')
            if self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']:
                # aMC@NLO reuses the madevent makefile with a renamed target
                path = pjoin(self.dir_path, 'makefile')
                text = open(path).read()
                text = text.replace('madevent','aMCatNLO')
                open(path, 'w').writelines(text)
        elif self.opt['export_format'] in ['standalone', 'standalone_msP','standalone_msF',
                                  'madloop','madloop_optimized', 'standalone_rw',
                                  'madweight','matchbox','madloop_matchbox', 'plugin']:
            cp( MG5DIR + '/models/template_files/fortran/makefile_standalone',
                self.dir_path + '/makefile')
        #elif self.opt['export_format'] in []:
        #    pass
        else:
            raise MadGraph5Error('Unknown format')
6440
    def create_coupl_inc(self):
        """ write coupling.inc

        Declares in Fortran common blocks: the strong/weak couplings, the
        renormalization scale, masses, widths, all model couplings and
        (optionally) the complex masses; both in double precision
        (coupl.inc) and, when opt['mp'] is set, in multiple precision
        (mp_coupl.inc with prefixed names, mp_coupl_same_name.inc without)."""

        fsock = self.open('coupl.inc', format='fortran')
        if self.opt['mp']:
            mp_fsock = self.open('mp_coupl.inc', format='fortran')
            mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\
                                            format='fortran')

        # Write header
        header = """double precision G
                common/strong/ G

                double complex gal(2)
                common/weak/ gal

                double precision MU_R
                common/rscale/ MU_R

                double precision Nf
                parameter(Nf=%d)
                """ % self.model.get_nflav()

        fsock.writelines(header)

        if self.opt['mp']:
            header = """%(real_mp_format)s %(mp_prefix)sG
                common/MP_strong/ %(mp_prefix)sG

                %(complex_mp_format)s %(mp_prefix)sgal(2)
                common/MP_weak/ %(mp_prefix)sgal

                %(complex_mp_format)s %(mp_prefix)sMU_R
                common/MP_rscale/ %(mp_prefix)sMU_R

"""



            mp_fsock.writelines(header%{'real_mp_format':self.mp_real_format,
                                  'complex_mp_format':self.mp_complex_format,
                                  'mp_prefix':self.mp_prefix})
            mp_fsock_same_name.writelines(header%{'real_mp_format':self.mp_real_format,
                                  'complex_mp_format':self.mp_complex_format,
                                  'mp_prefix':''})

        # Write the Mass definition/ common block
        masses = set()
        widths = set()
        if self.opt['complex_mass']:
            complex_mass = set()

        for particle in self.model.get('particles'):
            #find masses
            one_mass = particle.get('mass')
            if one_mass.lower() != 'zero':
                masses.add(one_mass)

            # find width
            one_width = particle.get('width')
            if one_width.lower() != 'zero':
                widths.add(one_width)
                # complex mass only for massive particles with a width
                if self.opt['complex_mass'] and one_mass.lower() != 'zero':
                    complex_mass.add('CMASS_%s' % one_mass)

        if masses:
            fsock.writelines('double precision '+','.join(masses)+'\n')
            fsock.writelines('common/masses/ '+','.join(masses)+'\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                              ','.join(masses)+'\n')
                mp_fsock_same_name.writelines('common/MP_masses/ '+\
                                              ','.join(masses)+'\n\n')
                mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                    self.mp_prefix+m for m in masses])+'\n')
                mp_fsock.writelines('common/MP_masses/ '+\
                                    ','.join([self.mp_prefix+m for m in masses])+'\n\n')

        if widths:
            fsock.writelines('double precision '+','.join(widths)+'\n')
            fsock.writelines('common/widths/ '+','.join(widths)+'\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                              ','.join(widths)+'\n')
                mp_fsock_same_name.writelines('common/MP_widths/ '+\
                                              ','.join(widths)+'\n\n')
                mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                    self.mp_prefix+w for w in widths])+'\n')
                mp_fsock.writelines('common/MP_widths/ '+\
                                    ','.join([self.mp_prefix+w for w in widths])+'\n\n')

        # Write the Couplings
        coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep]
        fsock.writelines('double complex '+', '.join(coupling_list)+'\n')
        fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n')
        if self.opt['mp']:
            mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                          ','.join(coupling_list)+'\n')
            mp_fsock_same_name.writelines('common/MP_couplings/ '+\
                                          ','.join(coupling_list)+'\n\n')
            mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                self.mp_prefix+c for c in coupling_list])+'\n')
            mp_fsock.writelines('common/MP_couplings/ '+\
                                ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n')

        # Write complex mass for complex mass scheme (if activated)
        if self.opt['complex_mass'] and complex_mass:
            fsock.writelines('double complex '+', '.join(complex_mass)+'\n')
            fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                              ','.join(complex_mass)+'\n')
                mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\
                                              ','.join(complex_mass)+'\n\n')
                mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                    self.mp_prefix+cm for cm in complex_mass])+'\n')
                mp_fsock.writelines('common/MP_complex_mass/ '+\
                                    ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
6560
6561 - def create_write_couplings(self):
6562 """ write the file coupl_write.inc """ 6563 6564 fsock = self.open('coupl_write.inc', format='fortran') 6565 6566 fsock.writelines("""write(*,*) ' Couplings of %s' 6567 write(*,*) ' ---------------------------------' 6568 write(*,*) ' '""" % self.model_name) 6569 def format(coupl): 6570 return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}
6571 6572 # Write the Couplings 6573 lines = [format(coupl) for coupl in self.coups_dep + self.coups_indep] 6574 fsock.writelines('\n'.join(lines)) 6575 6576
    def create_input(self):
        """create input.inc containing the definition of the parameters

        Declares the real and complex internal parameters in Fortran common
        blocks (and their multiple-precision twins when opt['mp'] is set),
        skipping masses/widths already declared in coupl.inc."""

        fsock = self.open('input.inc', format='fortran')
        if self.opt['mp']:
            mp_fsock = self.open('mp_input.inc', format='fortran')

        #find mass/ width since they are already define
        already_def = set()
        for particle in self.model.get('particles'):
            already_def.add(particle.get('mass').lower())
            already_def.add(particle.get('width').lower())
            if self.opt['complex_mass']:
                already_def.add('cmass_%s' % particle.get('mass').lower())

        # G, MU_R and ZERO have dedicated declarations elsewhere
        is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \
                                name.lower() not in already_def

        real_parameters = [param.name for param in self.params_dep +
                           self.params_indep if param.type == 'real'
                           and is_valid(param.name)]

        # NOTE(review): external parameters are added only to the real list,
        # not the complex one below -- presumably externals are always real;
        # confirm against the param_card handling.
        real_parameters += [param.name for param in self.params_ext
                            if param.type == 'real' and
                            is_valid(param.name)]

        # check the parameter is a CT parameter or not
        # if yes, just use the needed ones
        real_parameters = [param for param in real_parameters \
                           if self.check_needed_param(param)]

        fsock.writelines('double precision '+','.join(real_parameters)+'\n')
        fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n')
        if self.opt['mp']:
            mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                self.mp_prefix+p for p in real_parameters])+'\n')
            mp_fsock.writelines('common/MP_params_R/ '+','.join([\
                                self.mp_prefix+p for p in real_parameters])+'\n\n')

        complex_parameters = [param.name for param in self.params_dep +
                              self.params_indep if param.type == 'complex' and
                              is_valid(param.name)]

        # check the parameter is a CT parameter or not
        # if yes, just use the needed ones
        complex_parameters = [param for param in complex_parameters \
                              if self.check_needed_param(param)]

        if complex_parameters:
            fsock.writelines('double complex '+','.join(complex_parameters)+'\n')
            fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n')
            if self.opt['mp']:
                mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                    self.mp_prefix+p for p in complex_parameters])+'\n')
                mp_fsock.writelines('common/MP_params_C/ '+','.join([\
                                    self.mp_prefix+p for p in complex_parameters])+'\n\n')
6633
6634 - def check_needed_param(self, param):
6635 """ Returns whether the parameter in argument is needed for this 6636 specific computation or not.""" 6637 6638 # If this is a leading order model or if there was no CT parameter 6639 # employed in this NLO model, one can directly return that the 6640 # parameter is needed since only CTParameters are filtered. 6641 if not hasattr(self, 'allCTparameters') or \ 6642 self.allCTparameters is None or self.usedCTparameters is None or \ 6643 len(self.allCTparameters)==0: 6644 return True 6645 6646 # We must allow the conjugate shorthand for the complex parameter as 6647 # well so we check wether either the parameter name or its name with 6648 # 'conjg__' substituted with '' is present in the list. 6649 # This is acceptable even if some parameter had an original name 6650 # including 'conjg__' in it, because at worst we export a parameter 6651 # was not needed. 6652 param = param.lower() 6653 cjg_param = param.replace('conjg__','',1) 6654 6655 # First make sure it is a CTparameter 6656 if param not in self.allCTparameters and \ 6657 cjg_param not in self.allCTparameters: 6658 return True 6659 6660 # Now check if it is in the list of CTparameters actually used 6661 return (param in self.usedCTparameters or \ 6662 cjg_param in self.usedCTparameters)
6663
6664 - def extract_needed_CTparam(self,wanted_couplings=[]):
6665 """ Extract what are the needed CT parameters given the wanted_couplings""" 6666 6667 if not hasattr(self.model,'map_CTcoup_CTparam') or not wanted_couplings: 6668 # Setting these lists to none wil disable the filtering in 6669 # check_needed_param 6670 self.allCTparameters = None 6671 self.usedCTparameters = None 6672 return 6673 6674 # All CTparameters appearin in all CT couplings 6675 allCTparameters=list(self.model.map_CTcoup_CTparam.values()) 6676 # Define in this class the list of all CT parameters 6677 self.allCTparameters=list(\ 6678 set(itertools.chain.from_iterable(allCTparameters))) 6679 6680 # All used CT couplings 6681 w_coupls = [coupl.lower() for coupl in wanted_couplings] 6682 allUsedCTCouplings = [coupl for coupl in 6683 self.model.map_CTcoup_CTparam.keys() if coupl.lower() in w_coupls] 6684 6685 # Now define the list of all CT parameters that are actually used 6686 self.usedCTparameters=list(\ 6687 set(itertools.chain.from_iterable([ 6688 self.model.map_CTcoup_CTparam[coupl] for coupl in allUsedCTCouplings 6689 ]))) 6690 6691 # Now at last, make these list case insensitive 6692 self.allCTparameters = [ct.lower() for ct in self.allCTparameters] 6693 self.usedCTparameters = [ct.lower() for ct in self.usedCTparameters]
6694
    def create_intparam_def(self, dp=True, mp=False):
        """ create intparam_definition.inc setting the internal parameters.
        Output the double precision and/or the multiple precision parameters
        depending on the parameters dp and mp. If mp only, then the file names
        get the 'mp_' prefix.
        """

        fsock = self.open('%sintparam_definition.inc'%
                             ('mp_' if mp and not dp else ''), format='fortran')

        fsock.write_comments(\
                "Parameters that should not be recomputed event by event.\n")
        fsock.writelines("if(readlha) then\n")
        if dp:
            fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n")
        if mp:
            fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n")

        for param in self.params_indep:
            if param.name == 'ZERO':
                continue
            # check whether the parameter is a CT parameter
            # if yes,just used the needed ones
            if not self.check_needed_param(param.name):
                continue
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                            self.p_to_f.parse(param.expr)))
            if mp:
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                            self.mp_p_to_f.parse(param.expr)))

        fsock.writelines('endif')

        fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n')
        if dp:
            fsock.writelines("aS = G**2/4/pi\n")
        if mp:
            fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n")
        for param in self.params_dep:
            # check whether the parameter is a CT parameter
            # if yes,just used the needed ones
            if not self.check_needed_param(param.name):
                continue
            # NOTE(review): 'elif mp' here (vs 'if mp' in the independent
            # loop above) means dependent mp lines are skipped when dp is
            # also True -- harmless given write_all() always calls with
            # dp/mp exclusive, but confirm before calling with both True.
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                            self.p_to_f.parse(param.expr)))
            elif mp:
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                            self.mp_p_to_f.parse(param.expr)))

        fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n")
        if ('aEWM1',) in self.model['parameters']:
            if dp:
                fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1)
 gal(2) = 1d0
""")
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1)
 %(mp_prefix)sgal(2) = 1d0
""" %{'mp_prefix':self.mp_prefix})
            pass
        # in Gmu scheme, aEWM1 is not external but Gf is an exteranl variable
        elif ('Gf',) in self.model['parameters']:
            if dp:
                fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*DSQRT(1D0-MDL_MW**2/MDL_MZ**2)*DSQRT(MDL_Gf)
 gal(2) = 1d0
""")
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*SQRT(1e0_16-MP__MDL_MW**2/MP__MDL_MZ**2)*SQRT(SQRT(2e0_16)*MP__MDL_Gf)
 %(mp_prefix)sgal(2) = 1d0
""" %{'mp_prefix':self.mp_prefix})
            pass
        else:
            if dp:
                logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE')
                fsock.writelines(""" gal(1) = 1d0
 gal(2) = 1d0
""")
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16
 %(mp_prefix)sgal(2) = 1e0_16
"""%{'mp_prefix':self.mp_prefix})
6778 6779
6780 - def create_couplings(self):
6781 """ create couplings.f and all couplingsX.f """ 6782 6783 nb_def_by_file = 25 6784 6785 self.create_couplings_main(nb_def_by_file) 6786 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6787 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6788 6789 for i in range(nb_coup_indep): 6790 # For the independent couplings, we compute the double and multiple 6791 # precision ones together 6792 data = self.coups_indep[nb_def_by_file * i: 6793 min(len(self.coups_indep), nb_def_by_file * (i+1))] 6794 self.create_couplings_part(i + 1, data, dp=True, mp=self.opt['mp']) 6795 6796 for i in range(nb_coup_dep): 6797 # For the dependent couplings, we compute the double and multiple 6798 # precision ones in separate subroutines. 6799 data = self.coups_dep[nb_def_by_file * i: 6800 min(len(self.coups_dep), nb_def_by_file * (i+1))] 6801 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6802 dp=True,mp=False) 6803 if self.opt['mp']: 6804 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6805 dp=False,mp=True)
6806 6807
    def create_couplings_main(self, nb_def_by_file=25):
        """ create couplings.f

        Writes the driver subroutines: coup() (full initialization),
        update_as_param() / update_as_param2() (recompute alphaS-dependent
        couplings) and, when opt['mp'] is set, mp_update_as_param()."""

        fsock = self.open('couplings.f', format='fortran')

        fsock.writelines("""subroutine coup()

            implicit none
            double precision PI, ZERO
            logical READLHA
            parameter  (PI=3.141592653589793d0)
            parameter  (ZERO=0d0)
            include \'model_functions.inc\'""")
        if self.opt['mp']:
            fsock.writelines("""%s MP__PI, MP__ZERO
                parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                parameter (MP__ZERO=0e0_16)
                include \'mp_input.inc\'
                include \'mp_coupl.inc\'
        """%self.mp_real_format)
        fsock.writelines("""include \'input.inc\'
            include \'coupl.inc\'
            READLHA = .true.
            include \'intparam_definition.inc\'""")
        if self.opt['mp']:
            fsock.writelines("""include \'mp_intparam_definition.inc\'\n""")

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        fsock.writelines('\n'.join(\
                    ['call coup%s()' %  (i + 1) for i in range(nb_coup_indep)]))

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        fsock.writelines('\n'.join(\
                    ['call coup%s()' %  (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        if self.opt['mp']:
            fsock.writelines('\n'.join(\
                    ['call mp_coup%s()' %  (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        fsock.writelines("""subroutine update_as_param()

            implicit none
            double precision PI, ZERO
            logical READLHA
            parameter  (PI=3.141592653589793d0)
            parameter  (ZERO=0d0)
            include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
            include \'coupl.inc\'
            READLHA = .false.""")
        fsock.writelines("""
            include \'intparam_definition.inc\'\n
            """)

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        fsock.writelines('\n'.join(\
                    ['call coup%s()' %  (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        fsock.writelines("""subroutine update_as_param2(mu_r2,as2)

            implicit none
            double precision PI
            parameter  (PI=3.141592653589793d0)
            double precision mu_r2, as2
            include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
            include \'coupl.inc\'""")
        fsock.writelines("""
            if (mu_r2.gt.0d0) MU_R = mu_r2
            G = SQRT(4.0d0*PI*AS2)
            AS = as2

            CALL UPDATE_AS_PARAM()
            """)
        fsock.writelines('''\n return \n end\n''')

        if self.opt['mp']:
            fsock.writelines("""subroutine mp_update_as_param()

                implicit none
                logical READLHA
                include \'model_functions.inc\'""")
            fsock.writelines("""%s MP__PI, MP__ZERO
                parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                parameter (MP__ZERO=0e0_16)
                include \'mp_input.inc\'
                include \'mp_coupl.inc\'
        """%self.mp_real_format)
            fsock.writelines("""include \'input.inc\'
                include \'coupl.inc\'
                include \'actualize_mp_ext_params.inc\'
                READLHA = .false.
                include \'mp_intparam_definition.inc\'\n
            """)

            nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
            nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

            fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

            fsock.writelines('\n'.join(\
                    ['call mp_coup%s()' %  (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
            fsock.writelines('''\n return \n end\n''')
6923
    def create_couplings_part(self, nb_file, data, dp=True, mp=False):
        """ create couplings[nb_file].f containing information coming from data.
        Outputs the computation of the double precision and/or the multiple
        precision couplings depending on the parameters dp and mp.
        If mp is True and dp is False, then the prefix 'MP_' is appended to the
        filename and subroutine name.
        """

        fsock = self.open('%scouplings%s.f' %('mp_' if mp and not dp else '',
                                              nb_file), format='fortran')
        fsock.writelines("""subroutine %scoup%s()

          implicit none
          include \'model_functions.inc\'"""%('mp_' if mp and not dp else '',nb_file))
        if dp:
            fsock.writelines("""
          double precision PI, ZERO
          parameter  (PI=3.141592653589793d0)
          parameter  (ZERO=0d0)
          include 'input.inc'
          include 'coupl.inc'""")
        if mp:
            fsock.writelines("""%s MP__PI, MP__ZERO
                        parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                        parameter (MP__ZERO=0e0_16)
                        include \'mp_input.inc\'
                        include \'mp_coupl.inc\'
                        """%self.mp_real_format)

        # One assignment per coupling, translated to (MP) Fortran
        for coupling in data:
            if dp:
                fsock.writelines('%s = %s' % (coupling.name,
                                          self.p_to_f.parse(coupling.expr)))
            if mp:
                fsock.writelines('%s%s = %s' % (self.mp_prefix,coupling.name,
                                          self.mp_p_to_f.parse(coupling.expr)))
        fsock.writelines('end')
6961
    def create_model_functions_inc(self):
        """ Create model_functions.inc which contains the various declarations
        of auxiliary functions which might be used in the couplings expressions
        """

        additional_fct = []
        # check for functions define in the UFO model
        ufo_fct = self.model.get('functions')
        if ufo_fct:
            for fct in ufo_fct:
                # already handle by default
                if str(fct.name) not in ["complexconjugate", "re", "im", "sec",
                       "csc", "asec", "acsc", "theta_function", "cond",
                       "condif", "reglogp", "reglogm", "reglog", "recms", "arg", "cot",
                       "grreglog","regsqrt"]:
                    additional_fct.append(fct.name)

        # Declare the built-in helpers plus any extra UFO-defined function
        fsock = self.open('model_functions.inc', format='fortran')
        fsock.writelines("""double complex cond
          double complex condif
          double complex reglog
          double complex reglogp
          double complex reglogm
          double complex recms
          double complex arg
          double complex grreglog
          double complex regsqrt
          %s
          """ % "\n".join(["          double complex %s" % i for i in additional_fct]))


        if self.opt['mp']:
            fsock.writelines("""%(complex_mp_format)s mp_cond
          %(complex_mp_format)s mp_condif
          %(complex_mp_format)s mp_reglog
          %(complex_mp_format)s mp_reglogp
          %(complex_mp_format)s mp_reglogm
          %(complex_mp_format)s mp_recms
          %(complex_mp_format)s mp_arg
          %(complex_mp_format)s mp_grreglog
          %(complex_mp_format)s mp_regsqrt
          %(additional)s
          """ %\
          {"additional": "\n".join(["          %s mp_%s" % (self.mp_complex_format, i) for i in additional_fct]),
           'complex_mp_format':self.mp_complex_format
           })
7008
7009 - def create_model_functions_def(self):
7010 """ Create model_functions.f which contains the various definitions 7011 of auxiliary functions which might be used in the couplings expressions 7012 Add the functions.f functions for formfactors support 7013 """ 7014 7015 fsock = self.open('model_functions.f', format='fortran') 7016 fsock.writelines("""double complex function cond(condition,truecase,falsecase) 7017 implicit none 7018 double complex condition,truecase,falsecase 7019 if(condition.eq.(0.0d0,0.0d0)) then 7020 cond=truecase 7021 else 7022 cond=falsecase 7023 endif 7024 end 7025 7026 double complex function condif(condition,truecase,falsecase) 7027 implicit none 7028 logical condition 7029 double complex truecase,falsecase 7030 if(condition) then 7031 condif=truecase 7032 else 7033 condif=falsecase 7034 endif 7035 end 7036 7037 double complex function recms(condition,expr) 7038 implicit none 7039 logical condition 7040 double complex expr 7041 if(condition)then 7042 recms=expr 7043 else 7044 recms=dcmplx(dble(expr)) 7045 endif 7046 end 7047 7048 double complex function reglog(arg) 7049 implicit none 7050 double complex TWOPII 7051 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 7052 double complex arg 7053 if(arg.eq.(0.0d0,0.0d0)) then 7054 reglog=(0.0d0,0.0d0) 7055 else 7056 reglog=log(arg) 7057 endif 7058 end 7059 7060 double complex function reglogp(arg) 7061 implicit none 7062 double complex TWOPII 7063 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 7064 double complex arg 7065 if(arg.eq.(0.0d0,0.0d0))then 7066 reglogp=(0.0d0,0.0d0) 7067 else 7068 if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then 7069 reglogp=log(arg) + TWOPII 7070 else 7071 reglogp=log(arg) 7072 endif 7073 endif 7074 end 7075 7076 double complex function reglogm(arg) 7077 implicit none 7078 double complex TWOPII 7079 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 7080 double complex arg 7081 if(arg.eq.(0.0d0,0.0d0))then 7082 reglogm=(0.0d0,0.0d0) 7083 else 7084 
if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then 7085 reglogm=log(arg) - TWOPII 7086 else 7087 reglogm=log(arg) 7088 endif 7089 endif 7090 end 7091 7092 double complex function regsqrt(arg_in) 7093 implicit none 7094 double complex arg_in 7095 double complex arg 7096 arg=arg_in 7097 if(dabs(dimag(arg)).eq.0.0d0)then 7098 arg=dcmplx(dble(arg),0.0d0) 7099 endif 7100 if(dabs(dble(arg)).eq.0.0d0)then 7101 arg=dcmplx(0.0d0,dimag(arg)) 7102 endif 7103 regsqrt=sqrt(arg) 7104 end 7105 7106 double complex function grreglog(logsw,expr1_in,expr2_in) 7107 implicit none 7108 double complex TWOPII 7109 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 7110 double complex expr1_in,expr2_in 7111 double complex expr1,expr2 7112 double precision logsw 7113 double precision imagexpr 7114 logical firstsheet 7115 expr1=expr1_in 7116 expr2=expr2_in 7117 if(dabs(dimag(expr1)).eq.0.0d0)then 7118 expr1=dcmplx(dble(expr1),0.0d0) 7119 endif 7120 if(dabs(dble(expr1)).eq.0.0d0)then 7121 expr1=dcmplx(0.0d0,dimag(expr1)) 7122 endif 7123 if(dabs(dimag(expr2)).eq.0.0d0)then 7124 expr2=dcmplx(dble(expr2),0.0d0) 7125 endif 7126 if(dabs(dble(expr2)).eq.0.0d0)then 7127 expr2=dcmplx(0.0d0,dimag(expr2)) 7128 endif 7129 if(expr1.eq.(0.0d0,0.0d0))then 7130 grreglog=(0.0d0,0.0d0) 7131 else 7132 imagexpr=dimag(expr1)*dimag(expr2) 7133 firstsheet=imagexpr.ge.0.0d0 7134 firstsheet=firstsheet.or.dble(expr1).ge.0.0d0 7135 firstsheet=firstsheet.or.dble(expr2).ge.0.0d0 7136 if(firstsheet)then 7137 grreglog=log(expr1) 7138 else 7139 if(dimag(expr1).gt.0.0d0)then 7140 grreglog=log(expr1) - logsw*TWOPII 7141 else 7142 grreglog=log(expr1) + logsw*TWOPII 7143 endif 7144 endif 7145 endif 7146 end 7147 7148 double complex function arg(comnum) 7149 implicit none 7150 double complex comnum 7151 double complex iim 7152 iim = (0.0d0,1.0d0) 7153 if(comnum.eq.(0.0d0,0.0d0)) then 7154 arg=(0.0d0,0.0d0) 7155 else 7156 arg=log(comnum/abs(comnum))/iim 7157 endif 7158 end""") 7159 if self.opt['mp']: 7160 
fsock.writelines(""" 7161 7162 %(complex_mp_format)s function mp_cond(condition,truecase,falsecase) 7163 implicit none 7164 %(complex_mp_format)s condition,truecase,falsecase 7165 if(condition.eq.(0.0e0_16,0.0e0_16)) then 7166 mp_cond=truecase 7167 else 7168 mp_cond=falsecase 7169 endif 7170 end 7171 7172 %(complex_mp_format)s function mp_condif(condition,truecase,falsecase) 7173 implicit none 7174 logical condition 7175 %(complex_mp_format)s truecase,falsecase 7176 if(condition) then 7177 mp_condif=truecase 7178 else 7179 mp_condif=falsecase 7180 endif 7181 end 7182 7183 %(complex_mp_format)s function mp_recms(condition,expr) 7184 implicit none 7185 logical condition 7186 %(complex_mp_format)s expr 7187 if(condition)then 7188 mp_recms=expr 7189 else 7190 mp_recms=cmplx(real(expr),kind=16) 7191 endif 7192 end 7193 7194 %(complex_mp_format)s function mp_reglog(arg) 7195 implicit none 7196 %(complex_mp_format)s TWOPII 7197 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 7198 %(complex_mp_format)s arg 7199 if(arg.eq.(0.0e0_16,0.0e0_16)) then 7200 mp_reglog=(0.0e0_16,0.0e0_16) 7201 else 7202 mp_reglog=log(arg) 7203 endif 7204 end 7205 7206 %(complex_mp_format)s function mp_reglogp(arg) 7207 implicit none 7208 %(complex_mp_format)s TWOPII 7209 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 7210 %(complex_mp_format)s arg 7211 if(arg.eq.(0.0e0_16,0.0e0_16))then 7212 mp_reglogp=(0.0e0_16,0.0e0_16) 7213 else 7214 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then 7215 mp_reglogp=log(arg) + TWOPII 7216 else 7217 mp_reglogp=log(arg) 7218 endif 7219 endif 7220 end 7221 7222 %(complex_mp_format)s function mp_reglogm(arg) 7223 implicit none 7224 %(complex_mp_format)s TWOPII 7225 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 7226 %(complex_mp_format)s arg 7227 if(arg.eq.(0.0e0_16,0.0e0_16))then 7228 mp_reglogm=(0.0e0_16,0.0e0_16) 
7229 else 7230 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then 7231 mp_reglogm=log(arg) - TWOPII 7232 else 7233 mp_reglogm=log(arg) 7234 endif 7235 endif 7236 end 7237 7238 %(complex_mp_format)s function mp_regsqrt(arg_in) 7239 implicit none 7240 %(complex_mp_format)s arg_in 7241 %(complex_mp_format)s arg 7242 arg=arg_in 7243 if(abs(imagpart(arg)).eq.0.0e0_16)then 7244 arg=cmplx(real(arg,kind=16),0.0e0_16) 7245 endif 7246 if(abs(real(arg,kind=16)).eq.0.0e0_16)then 7247 arg=cmplx(0.0e0_16,imagpart(arg)) 7248 endif 7249 mp_regsqrt=sqrt(arg) 7250 end 7251 7252 7253 %(complex_mp_format)s function mp_grreglog(logsw,expr1_in,expr2_in) 7254 implicit none 7255 %(complex_mp_format)s TWOPII 7256 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 7257 %(complex_mp_format)s expr1_in,expr2_in 7258 %(complex_mp_format)s expr1,expr2 7259 %(real_mp_format)s logsw 7260 %(real_mp_format)s imagexpr 7261 logical firstsheet 7262 expr1=expr1_in 7263 expr2=expr2_in 7264 if(abs(imagpart(expr1)).eq.0.0e0_16)then 7265 expr1=cmplx(real(expr1,kind=16),0.0e0_16) 7266 endif 7267 if(abs(real(expr1,kind=16)).eq.0.0e0_16)then 7268 expr1=cmplx(0.0e0_16,imagpart(expr1)) 7269 endif 7270 if(abs(imagpart(expr2)).eq.0.0e0_16)then 7271 expr2=cmplx(real(expr2,kind=16),0.0e0_16) 7272 endif 7273 if(abs(real(expr2,kind=16)).eq.0.0e0_16)then 7274 expr2=cmplx(0.0e0_16,imagpart(expr2)) 7275 endif 7276 if(expr1.eq.(0.0e0_16,0.0e0_16))then 7277 mp_grreglog=(0.0e0_16,0.0e0_16) 7278 else 7279 imagexpr=imagpart(expr1)*imagpart(expr2) 7280 firstsheet=imagexpr.ge.0.0e0_16 7281 firstsheet=firstsheet.or.real(expr1,kind=16).ge.0.0e0_16 7282 firstsheet=firstsheet.or.real(expr2,kind=16).ge.0.0e0_16 7283 if(firstsheet)then 7284 mp_grreglog=log(expr1) 7285 else 7286 if(imagpart(expr1).gt.0.0e0_16)then 7287 mp_grreglog=log(expr1) - logsw*TWOPII 7288 else 7289 mp_grreglog=log(expr1) + logsw*TWOPII 7290 endif 7291 endif 7292 endif 7293 end 7294 7295 
%(complex_mp_format)s function mp_arg(comnum) 7296 implicit none 7297 %(complex_mp_format)s comnum 7298 %(complex_mp_format)s imm 7299 imm = (0.0e0_16,1.0e0_16) 7300 if(comnum.eq.(0.0e0_16,0.0e0_16)) then 7301 mp_arg=(0.0e0_16,0.0e0_16) 7302 else 7303 mp_arg=log(comnum/abs(comnum))/imm 7304 endif 7305 end"""%{'complex_mp_format':self.mp_complex_format,'real_mp_format':self.mp_real_format}) 7306 7307 7308 #check for the file functions.f 7309 model_path = self.model.get('modelpath') 7310 if os.path.exists(pjoin(model_path,'Fortran','functions.f')): 7311 fsock.write_comment_line(' USER DEFINE FUNCTIONS ') 7312 input = pjoin(model_path,'Fortran','functions.f') 7313 fsock.writelines(open(input).read()) 7314 fsock.write_comment_line(' END USER DEFINE FUNCTIONS ') 7315 7316 # check for functions define in the UFO model 7317 ufo_fct = self.model.get('functions') 7318 if ufo_fct: 7319 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ') 7320 done = [] 7321 for fct in ufo_fct: 7322 # already handle by default 7323 if str(fct.name.lower()) not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif", 7324 "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms","arg", 7325 "grreglog","regsqrt"] + done: 7326 done.append(str(fct.name.lower())) 7327 ufo_fct_template = """ 7328 double complex function %(name)s(%(args)s) 7329 implicit none 7330 double complex %(args)s 7331 %(definitions)s 7332 %(name)s = %(fct)s 7333 7334 return 7335 end 7336 """ 7337 str_fct = self.p_to_f.parse(fct.expr) 7338 if not self.p_to_f.to_define: 7339 definitions = [] 7340 else: 7341 definitions=[] 7342 for d in self.p_to_f.to_define: 7343 if d == 'pi': 7344 definitions.append(' double precision pi') 7345 definitions.append(' data pi /3.1415926535897932d0/') 7346 else: 7347 definitions.append(' double complex %s' % d) 7348 7349 text = ufo_fct_template % { 7350 'name': fct.name, 7351 'args': ", ".join(fct.arguments), 7352 'fct': str_fct, 7353 'definitions': 
'\n'.join(definitions) 7354 } 7355 7356 fsock.writelines(text) 7357 if self.opt['mp']: 7358 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP') 7359 for fct in ufo_fct: 7360 # already handle by default 7361 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc","condif", 7362 "theta_function", "cond", "reglog", "reglogp","reglogm", "recms","arg", 7363 "grreglog","regsqrt"]: 7364 ufo_fct_template = """ 7365 %(complex_mp_format)s function mp_%(name)s(mp__%(args)s) 7366 implicit none 7367 %(complex_mp_format)s mp__%(args)s 7368 %(definitions)s 7369 mp_%(name)s = %(fct)s 7370 7371 return 7372 end 7373 """ 7374 str_fct = self.mp_p_to_f.parse(fct.expr) 7375 if not self.mp_p_to_f.to_define: 7376 definitions = [] 7377 else: 7378 definitions=[] 7379 for d in self.mp_p_to_f.to_define: 7380 if d == 'pi': 7381 definitions.append(' %s mp__pi' % self.mp_real_format) 7382 definitions.append(' data mp__pi /3.141592653589793238462643383279502884197e+00_16/') 7383 else: 7384 definitions.append(' %s mp_%s' % (self.mp_complex_format,d)) 7385 text = ufo_fct_template % { 7386 'name': fct.name, 7387 'args': ", mp__".join(fct.arguments), 7388 'fct': str_fct, 7389 'definitions': '\n'.join(definitions), 7390 'complex_mp_format': self.mp_complex_format 7391 } 7392 fsock.writelines(text) 7393 7394 7395 7396 fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
7397 7398 7399
7400 - def create_makeinc(self):
7401 """create makeinc.inc containing the file to compile """ 7402 7403 fsock = self.open('makeinc.inc', comment='#') 7404 text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o' 7405 text += ' model_functions.o ' 7406 7407 nb_coup_indep = 1 + len(self.coups_dep) // 25 7408 nb_coup_dep = 1 + len(self.coups_indep) // 25 7409 couplings_files=['couplings%s.o' % (i+1) \ 7410 for i in range(nb_coup_dep + nb_coup_indep) ] 7411 if self.opt['mp']: 7412 couplings_files+=['mp_couplings%s.o' % (i+1) for i in \ 7413 range(nb_coup_dep,nb_coup_dep + nb_coup_indep) ] 7414 text += ' '.join(couplings_files) 7415 fsock.writelines(text)
7416
7417 - def create_param_write(self):
7418 """ create param_write """ 7419 7420 fsock = self.open('param_write.inc', format='fortran') 7421 7422 fsock.writelines("""write(*,*) ' External Params' 7423 write(*,*) ' ---------------------------------' 7424 write(*,*) ' '""") 7425 def format(name): 7426 return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
7427 7428 # Write the external parameter 7429 lines = [format(param.name) for param in self.params_ext] 7430 fsock.writelines('\n'.join(lines)) 7431 7432 fsock.writelines("""write(*,*) ' Internal Params' 7433 write(*,*) ' ---------------------------------' 7434 write(*,*) ' '""") 7435 lines = [format(data.name) for data in self.params_indep 7436 if data.name != 'ZERO' and self.check_needed_param(data.name)] 7437 fsock.writelines('\n'.join(lines)) 7438 fsock.writelines("""write(*,*) ' Internal Params evaluated point by point' 7439 write(*,*) ' ----------------------------------------' 7440 write(*,*) ' '""") 7441 lines = [format(data.name) for data in self.params_dep \ 7442 if self.check_needed_param(data.name)] 7443 7444 fsock.writelines('\n'.join(lines)) 7445 7446 7447
7448 - def create_ident_card(self):
7449 """ create the ident_card.dat """ 7450 7451 def format(parameter): 7452 """return the line for the ident_card corresponding to this parameter""" 7453 colum = [parameter.lhablock.lower()] + \ 7454 [str(value) for value in parameter.lhacode] + \ 7455 [parameter.name] 7456 if not parameter.name: 7457 return '' 7458 return ' '.join(colum)+'\n'
7459 7460 fsock = self.open('ident_card.dat') 7461 7462 external_param = [format(param) for param in self.params_ext] 7463 fsock.writelines('\n'.join(external_param)) 7464
7465 - def create_actualize_mp_ext_param_inc(self):
7466 """ create the actualize_mp_ext_params.inc code """ 7467 7468 # In principle one should actualize all external, but for now, it is 7469 # hardcoded that only AS and MU_R can by dynamically changed by the user 7470 # so that we only update those ones. 7471 # Of course, to be on the safe side, one could decide to update all 7472 # external parameters. 7473 update_params_list=[p for p in self.params_ext if p.name in 7474 self.PS_dependent_key] 7475 7476 res_strings = ["%(mp_prefix)s%(name)s=%(name)s"\ 7477 %{'mp_prefix':self.mp_prefix,'name':param.name}\ 7478 for param in update_params_list] 7479 # When read_lha is false, it is G which is taken in input and not AS, so 7480 # this is what should be reset here too. 7481 if 'aS' in [param.name for param in update_params_list]: 7482 res_strings.append("%(mp_prefix)sG=G"%{'mp_prefix':self.mp_prefix}) 7483 7484 fsock = self.open('actualize_mp_ext_params.inc', format='fortran') 7485 fsock.writelines('\n'.join(res_strings))
7486
    def create_param_read(self):
        """create param_read"""

        # For madevent-style and loop-induced outputs the card is read via a
        # pre-generated include file instead of explicit LHA_get_real calls.
        if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \
           or self.opt['loop_induced']:
            fsock = self.open('param_read.inc', format='fortran')
            fsock.writelines(' include \'../param_card.inc\'')
            return

        def format_line(parameter):
            """return the line for the ident_card corresponding to this
            parameter"""
            template = \
            """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \
            % {'name': parameter.name,
               'value': self.p_to_f.parse(str(parameter.value.real))}
            # Also refresh the multiple precision copy when mp is enabled.
            if self.opt['mp']:
                template = template + \
                ("\n call MP_LHA_get_real(npara,param,value,'%(name)s'," +
                 "%(mp_prefix)s%(name)s,%(value)s)") \
                 % {'name': parameter.name, 'mp_prefix': self.mp_prefix,
                    'value': self.mp_p_to_f.parse(str(parameter.value.real))}

            # Parameters in the 'loop' block may legitimately be missing from
            # the card, so read them without emitting a warning.
            if parameter.lhablock.lower() == 'loop':
                template = template.replace('LHA_get_real', 'LHA_get_real_silent')

            return template

        fsock = self.open('param_read.inc', format='fortran')
        res_strings = [format_line(param) \
                       for param in self.params_ext]

        # Correct width sign for Majorana particles (where the width
        # and mass need to have the same sign)
        for particle in self.model.get('particles'):
            if particle.is_fermion() and particle.get('self_antipart') and \
                   particle.get('width').lower() != 'zero':

                res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \
                 {'width': particle.get('width'), 'mass': particle.get('mass')})
                if self.opt['mp']:
                    res_strings.append(\
                      ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,' + \
                       '%(mp_pref)s%(mass)s)') % {'width': particle.get('width'), \
                       'mass': particle.get('mass'), 'mp_pref': self.mp_prefix})

        fsock.writelines('\n'.join(res_strings))


    @staticmethod
    def create_param_card_static(model, output_path, rule_card_path=False,
                                 mssm_convert=True, write_special=True):
        """ create the param_card.dat for a givent model --static method-- """
        #1. Check if a default param_card is present:
        done = False
        # A restricted model may ship its own default card named
        # paramcard_<restriction>.dat; when present it is copied verbatim.
        # The [9:-4] slice strips what is presumably the 'restrict_' prefix
        # and '.dat' suffix of the restriction card name -- TODO confirm.
        if hasattr(model, 'restrict_card') and isinstance(model.restrict_card, str):
            restrict_name = os.path.basename(model.restrict_card)[9:-4]
            model_path = model.get('modelpath')
            if os.path.exists(pjoin(model_path, 'paramcard_%s.dat' % restrict_name)):
                done = True
                files.cp(pjoin(model_path, 'paramcard_%s.dat' % restrict_name),
                         output_path)
        # Otherwise generate a fresh card from the model content.
        if not done:
            param_writer.ParamCardWriter(model, output_path, write_special=write_special)

        if rule_card_path:
            if hasattr(model, 'rule_card'):
                model.rule_card.write_file(rule_card_path)

        if mssm_convert:
            model_name = model.get('name')
            # IF MSSM convert the card to SLAH1
            if model_name == 'mssm' or model_name.startswith('mssm-'):
                import models.check_param_card as translator
                # Check the format of the param_card for Pythia and make it correct
                if rule_card_path:
                    translator.make_valid_param_card(output_path, rule_card_path)
                translator.convert_to_slha1(output_path)
7565
    def create_param_card(self, write_special=True):
        """ create the param_card.dat """

        rule_card = pjoin(self.dir_path, 'param_card_rule.dat')
        if not hasattr(self.model, 'rule_card'):
            rule_card = False
        # NOTE(review): the write_special argument appears to be overridden
        # unconditionally here; the effective value is decided solely from
        # the exporter class below -- confirm this is intended.
        write_special = True
        if 'exporter' in self.opt:
            import madgraph.loop.loop_exporters as loop_exporters
            import madgraph.iolibs.export_fks as export_fks
            write_special = False
            # Standalone loop output keeps the special entries; the later
            # check takes precedence for loop-induced / FKS exporters.
            if issubclass(self.opt['exporter'], loop_exporters.LoopProcessExporterFortranSA):
                write_special = True
            if issubclass(self.opt['exporter'], (loop_exporters.LoopInducedExporterME, export_fks.ProcessExporterFortranFKS)):
                write_special = False

        # Delegate the actual writing to the static helper.
        self.create_param_card_static(self.model,
                                      output_path=pjoin(self.dir_path, 'param_card.dat'),
                                      rule_card_path=rule_card,
                                      mssm_convert=True,
                                      write_special=write_special)
7587
def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True,
                    cmd_options=None):
    """ Determine which Export_v4 class is required. cmd is the command
        interface containing all potential usefull information.
        The output_type argument specifies from which context the output
        is called. It is 'madloop' for MadLoop5, 'amcatnlo' for FKS5 output
        and 'default' for tree-level outputs.

        Fixes w.r.t. the previous revision:
          - cmd_options no longer uses a shared mutable default dict;
          - the final error message now interpolates output_type (the '%'
            operand was previously missing, so the message contained a
            literal '%s').
    """

    # Avoid the shared-mutable-default pitfall: each call gets its own dict.
    if cmd_options is None:
        cmd_options = {}

    opt = dict(cmd.options)
    opt['output_options'] = cmd_options

    # ==========================================================================
    # First check whether Ninja must be installed.
    # Ninja would only be required if:
    #  a) Loop optimized output is selected
    #  b) the process gathered from the amplitude generated use loops

    if len(cmd._curr_amps) > 0:
        try:
            curr_proc = cmd._curr_amps[0].get('process')
        except base_objects.PhysicsObject.PhysicsObjectError:
            curr_proc = None
    elif hasattr(cmd, '_fks_multi_proc') and \
            len(cmd._fks_multi_proc.get('process_definitions')) > 0:
        curr_proc = cmd._fks_multi_proc.get('process_definitions')[0]
    else:
        curr_proc = None

    requires_reduction_tool = opt['loop_optimized_output'] and \
        (not curr_proc is None) and \
        (curr_proc.get('perturbation_couplings') != [] and \
         not curr_proc.get('NLO_mode') in [None, 'real', 'tree', 'LO', 'LOonly'])

    # An installation is required then, but only if the specified path is the
    # default local one and that the Ninja library appears missing.
    if requires_reduction_tool:
        cmd.install_reduction_library()

    # ==========================================================================
    # First treat the MadLoop5 standalone case
    MadLoop_SA_options = {'clean': not noclean,
        'complex_mass': cmd.options['complex_mass_scheme'],
        'export_format': 'madloop',
        'mp': True,
        'loop_dir': os.path.join(cmd._mgme_dir, 'Template', 'loop_material'),
        'cuttools_dir': cmd._cuttools_dir,
        'iregi_dir': cmd._iregi_dir,
        'golem_dir': cmd.options['golem'],
        'samurai_dir': cmd.options['samurai'],
        'ninja_dir': cmd.options['ninja'],
        'collier_dir': cmd.options['collier'],
        'fortran_compiler': cmd.options['fortran_compiler'],
        'f2py_compiler': cmd.options['f2py_compiler'],
        'output_dependencies': cmd.options['output_dependencies'],
        'SubProc_prefix': 'P',
        'compute_color_flows': cmd.options['loop_color_flows'],
        'mode': 'reweight' if cmd._export_format == "standalone_rw" else '',
        'cluster_local_path': cmd.options['cluster_local_path'],
        'output_options': cmd_options
    }

    if output_type.startswith('madloop'):
        import madgraph.loop.loop_exporters as loop_exporters
        if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')):
            ExporterClass = None
            if not cmd.options['loop_optimized_output']:
                ExporterClass = loop_exporters.LoopProcessExporterFortranSA
            else:
                if output_type == "madloop":
                    ExporterClass = loop_exporters.LoopProcessOptimizedExporterFortranSA
                    MadLoop_SA_options['export_format'] = 'madloop_optimized'
                elif output_type == "madloop_matchbox":
                    ExporterClass = loop_exporters.LoopProcessExporterFortranMatchBox
                    MadLoop_SA_options['export_format'] = 'madloop_matchbox'
                else:
                    raise Exception("output_type not recognize %s" % output_type)
            return ExporterClass(cmd._export_dir, MadLoop_SA_options)
        else:
            raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory' + \
                                 ' in %s' % str(cmd._mgme_dir))

    # Then treat the aMC@NLO output
    elif output_type == 'amcatnlo':
        import madgraph.iolibs.export_fks as export_fks
        ExporterClass = None
        amcatnlo_options = dict(opt)
        amcatnlo_options.update(MadLoop_SA_options)
        # mp code is only needed when there are virtual amplitudes.
        amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0
        if not cmd.options['loop_optimized_output']:
            logger.info("Writing out the aMC@NLO code")
            ExporterClass = export_fks.ProcessExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_default'
        else:
            logger.info("Writing out the aMC@NLO code, using optimized Loops")
            ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_optimized'
        return ExporterClass(cmd._export_dir, amcatnlo_options)


    # Then the default tree-level output
    elif output_type == 'default':
        assert group_subprocesses in [True, False]

        opt = dict(opt)
        opt.update({'clean': not noclean,
                    'complex_mass': cmd.options['complex_mass_scheme'],
                    'export_format': cmd._export_format,
                    'mp': False,
                    'sa_symmetry': False,
                    'model': cmd._curr_model.get('name'),
                    'v5_model': False if cmd._model_v4_path else True})

        # renamed from 'format' to avoid shadowing the builtin
        output_format = cmd._export_format  # shortcut

        if output_format in ['standalone_msP', 'standalone_msF', 'standalone_rw']:
            opt['sa_symmetry'] = True
        elif output_format == 'plugin':
            opt['sa_symmetry'] = cmd._export_plugin.sa_symmetry

        loop_induced_opt = dict(opt)
        loop_induced_opt.update(MadLoop_SA_options)
        loop_induced_opt['export_format'] = 'madloop_optimized'
        loop_induced_opt['SubProc_prefix'] = 'PV'
        # For loop_induced output with MadEvent, we must have access to the
        # color flows.
        loop_induced_opt['compute_color_flows'] = True
        for key in opt:
            if key not in loop_induced_opt:
                loop_induced_opt[key] = opt[key]

        # Madevent output supports MadAnalysis5
        if output_format in ['madevent']:
            opt['madanalysis5'] = cmd.options['madanalysis5_path']

        if output_format == 'matrix' or output_format.startswith('standalone'):
            return ProcessExporterFortranSA(cmd._export_dir, opt, format=output_format)

        elif output_format in ['madevent'] and group_subprocesses:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMEGroup(
                    cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranMEGroup(cmd._export_dir, opt)
        elif output_format in ['madevent']:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMENoGroup(
                    cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranME(cmd._export_dir, opt)
        elif output_format in ['matchbox']:
            return ProcessExporterFortranMatchBox(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight'] and group_subprocesses:

            return ProcessExporterFortranMWGroup(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight']:
            return ProcessExporterFortranMW(cmd._export_dir, opt)
        elif output_format == 'plugin':
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                return cmd._export_plugin(cmd._export_dir, loop_induced_opt)
            else:
                return cmd._export_plugin(cmd._export_dir, opt)

        else:
            raise Exception('Wrong export_v4 format')
    else:
        # BUGFIX: output_type was previously never interpolated into the
        # message (the '%' operand was missing).
        raise MadGraph5Error('Output type %s not reckognized in ExportV4Factory.'
                             % output_type)
7758
#===============================================================================
# ProcessExporterFortranMWGroup
#===============================================================================
class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
    """Class to take care of exporting a set of matrix elements to
    MadEvent subprocess group format."""

    # Fortran template used when writing the matrix_N.f files.
    matrix_file = "matrix_madweight_group_v4.inc"
    # Marks this exporter as running in grouped MadWeight mode.
    grouped_mode = 'madweight'
    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                      fortran_model,
                                      group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files."""

        if not isinstance(subproc_group, group_subprocs.SubProcessGroup):
            raise base_objects.PhysicsObject.PhysicsObjectError("subproc_group object not SubProcessGroup")

        # Lazily bind the model from the first process if not already set.
        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        pathdir = os.path.join(self.dir_path, 'SubProcesses')

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(pjoin(pathdir, subprocdir))
        except os.error as error:
            # The directory may already exist; warn and continue.
            logger.warning(error.strerror + " " + subprocdir)


        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(pathdir, subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0    # largest number of diagrams over the group
        maxflows = 0   # largest number of color flows over the group
        tot_calls = 0  # total number of helas calls (returned at the end)

        matrix_elements = subproc_group.get('matrix_elements')

        for ime, matrix_element in \
                enumerate(matrix_elements):
            filename = pjoin(Ppath, 'matrix%d.f' % (ime + 1))
            calls, ncolor = \
                self.write_matrix_element_v4(writers.FortranWriter(filename),
                                             matrix_element,
                                             fortran_model,
                                             str(ime + 1),
                                             subproc_group.get('diagram_maps')[\
                                                                          ime])

            filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime + 1))
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime + 1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams
            filename = pjoin(Ppath, "matrix%d.ps" % (ime + 1))
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                              get('diagrams'),
                                              filename,
                                              model = \
                                              matrix_element.get('processes')[0].\
                                              get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
        # If a subprocess has no diagrams for this config, the number is 0

        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = pjoin(Ppath, 'configs.inc')
        nconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = pjoin(Ppath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                   nconfigs)


        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        filename = pjoin(Ppath, 'mirrorprocs.inc')
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # filename = pjoin(Ppath, 'processes.dat')
        # files.write_to_file(filename,
        #                     self.write_processes_file,
        #                     subproc_group)

        # Generate jpgs -> pass in make_html
        #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl'))

        # Symlink the shared per-process infrastructure files into Ppath.
        linkfiles = ['driver.f', 'cuts.f', 'initialization.f', 'gen_ps.f', 'makefile', 'coupl.inc', 'madweight_param.inc', 'run.inc', 'setscales.f']

        for file in linkfiles:
            ln('../%s' % file, cwd=Ppath)

        ln('nexternal.inc', '../../Source', cwd=Ppath, log=False)
        ln('leshouche.inc', '../../Source', cwd=Ppath, log=False)
        ln('maxamps.inc', '../../Source', cwd=Ppath, log=False)
        ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath)
        ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath)
        ln('phasespace.inc', '../', log=True, cwd=Ppath)
        if not tot_calls:
            tot_calls = 0
        return tot_calls
7920 7921 7922 #=========================================================================== 7923 # Helper functions 7924 #===========================================================================
def modify_grouping(self, matrix_element):
    """Split lepton groupings out of the given matrix element.

    Returns a pair:
      - True/False: whether the matrix_element was modified
      - the new (or unchanged) matrix element
    """
    regrouped = matrix_element.split_lepton_grouping()
    return True, regrouped
7932 7933 #=========================================================================== 7934 # write_super_auto_dsig_file 7935 #===========================================================================
def write_super_auto_dsig_file(self, writer, subproc_group):
    """Write the auto_dsig.f file selecting between the subprocesses
    in subprocess group mode.

    Builds the substitution dictionary for the
    super_auto_dsig_mw_group_v4.inc template.  If *writer* is given,
    the filled template is written through it; otherwise the
    substitution dictionary is returned (useful for testing and for
    subclasses that post-process it).
    """

    replace_dict = {}

    # Extract version number and date from VERSION file
    info_lines = self.get_mg5_info_lines()
    replace_dict['info_lines'] = info_lines

    matrix_elements = subproc_group.get('matrix_elements')

    # Extract process info lines (one comment block per matrix element)
    process_lines = '\n'.join([self.get_process_info_lines(me) for me in \
                               matrix_elements])
    replace_dict['process_lines'] = process_lines

    nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial()
    replace_dict['nexternal'] = nexternal

    # Factor 2 accounts for the mirrored copy of each subprocess
    replace_dict['nsprocs'] = 2*len(matrix_elements)

    # Generate dsig definition line: one DSIG<i> function per subprocess
    dsig_def_line = "DOUBLE PRECISION " + \
                    ",".join(["DSIG%d" % (iproc + 1) for iproc in \
                              range(len(matrix_elements))])
    replace_dict["dsig_def_line"] = dsig_def_line

    # Generate dsig process lines dispatching on IPROC
    call_dsig_proc_lines = []
    for iproc in range(len(matrix_elements)):
        call_dsig_proc_lines.append(\
            "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \
            {"num": iproc + 1,
             "proc": matrix_elements[iproc].get('processes')[0].base_string()})
    replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines)

    if writer:
        template_path = os.path.join(_file_path, \
            'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')
        # Context manager closes the template handle promptly instead of
        # leaking it until garbage collection (was: open(...).read()).
        # Also avoids shadowing the builtin name 'file'.
        with open(template_path) as template:
            text = template.read() % replace_dict
        # Write the file
        writer.writelines(text)
    else:
        return replace_dict
7981 7982 #=========================================================================== 7983 # write_mirrorprocs 7984 #===========================================================================
def write_mirrorprocs(self, writer, subproc_group):
    """Write the mirrorprocs.inc file determining which processes have
    IS mirror process in subprocess group mode."""

    # Map each matrix element's mirror flag to its Fortran logical literal
    bool_dict = {True: '.true.', False: '.false.'}
    matrix_elements = subproc_group.get('matrix_elements')
    flags = ",".join(bool_dict[me.get('has_mirror_process')]
                     for me in matrix_elements)
    lines = ["DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \
             (len(matrix_elements), flags)]
    # Write the file
    writer.writelines(lines)
7998 7999 #=========================================================================== 8000 # write_configs_file 8001 #===========================================================================
def write_configs_file(self, writer, subproc_group, diagrams_for_config):
    """Write the configs.inc file with topology information for a
    subprocess group.  Use the first subprocess with a diagram for each
    configuration."""

    matrix_elements = subproc_group.get('matrix_elements')
    model = matrix_elements[0].get('processes')[0].get('model')

    diagrams = []
    config_numbers = []
    # Config numbers are 1-based, hence the enumerate offset
    for iconfig, config in enumerate(diagrams_for_config, start=1):
        # Skip configurations to which no subprocess contributes
        if set(config) == {0}:
            continue
        # For each subprocess pick its diagram for this config (diagram
        # numbers are 1-based; 0 means "no diagram" and maps to None)
        diagrams.append([matrix_elements[s].get('diagrams')[d - 1]
                         if d else None
                         for s, d in enumerate(config)])
        config_numbers.append(iconfig)

    # Extract number of external particles
    (nexternal, ninitial) = subproc_group.get_nexternal_ninitial()

    return len(diagrams), \
           self.write_configs_file_from_diagrams(writer, diagrams,
                                                 config_numbers,
                                                 nexternal, ninitial,
                                                 matrix_elements[0], model)
8034 8035 #=========================================================================== 8036 # write_run_configs_file 8037 #===========================================================================
def write_run_config_file(self, writer):
    """Write the run_configs.inc file for MadEvent.

    Fills the madweight run-config template (fixing the number of
    channels per job to 2) and writes the result through *writer*.
    Returns True on success.
    """

    path = os.path.join(_file_path, 'iolibs', 'template_files',
                        'madweight_run_config.inc')
    # Context manager closes the template handle deterministically
    # instead of leaking it until garbage collection (was: open(path).read()).
    with open(path) as template:
        text = template.read() % {'chanperjob': '2'}
    writer.write(text)
    return True
8045 8046 8047 #=========================================================================== 8048 # write_leshouche_file 8049 #===========================================================================
def write_leshouche_file(self, writer, subproc_group):
    """Write the leshouche.inc file for MG4"""

    # Collect the leshouche lines of every matrix element, in order
    all_lines = [line
                 for iproc, matrix_element in
                 enumerate(subproc_group.get('matrix_elements'))
                 for line in self.get_leshouche_lines(matrix_element,
                                                      iproc)]

    # Write the file
    writer.writelines(all_lines)

    return True
8064