Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Methods and classes to export matrix elements to v4 format.""" 
  16   
  17  import copy 
  18  from cStringIO import StringIO 
  19  from distutils import dir_util 
  20  import itertools 
  21  import fractions 
  22  import glob 
  23  import logging 
  24  import math 
  25  import os 
  26  import re 
  27  import shutil 
  28  import subprocess 
  29  import sys 
  30   
  31   
  32  import aloha 
  33   
  34  import madgraph.core.base_objects as base_objects 
  35  import madgraph.core.color_algebra as color 
  36  import madgraph.core.helas_objects as helas_objects 
  37  import madgraph.iolibs.drawing_eps as draw 
  38  import madgraph.iolibs.files as files 
  39  import madgraph.iolibs.group_subprocs as group_subprocs 
  40  import madgraph.iolibs.file_writers as writers 
  41  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  42  import madgraph.iolibs.template_files as template_files 
  43  import madgraph.iolibs.ufo_expression_parsers as parsers 
  44  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  45  import madgraph.interface.common_run_interface as common_run_interface 
  46  import madgraph.various.diagram_symmetry as diagram_symmetry 
  47  import madgraph.various.misc as misc 
  48  import madgraph.various.banner as banner_mod 
  49  import madgraph.various.process_checks as process_checks 
  50  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  51  import aloha.create_aloha as create_aloha 
  52  import models.import_ufo as import_ufo 
  53  import models.write_param_card as param_writer 
  54  import models.check_param_card as check_param_card 
  55   
  56   
  57  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  58  from madgraph.iolibs.files import cp, ln, mv 
  59   
  60  from madgraph import InvalidCmd 
  61   
  62  pjoin = os.path.join 
  63   
  64  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  65  logger = logging.getLogger('madgraph.export_v4') 
  66   
  67  default_compiler= {'fortran': 'gfortran', 
  68                         'f2py': 'f2py', 
  69                         'cpp':'g++'} 
70 71 #=============================================================================== 72 # ProcessExporterFortran 73 #=============================================================================== 74 -class ProcessExporterFortran(object):
75 """Class to take care of exporting a set of matrix elements to 76 Fortran (v4) format.""" 77 78 default_opt = {'clean': False, 'complex_mass':False, 79 'export_format':'madevent', 'mp': False, 80 'v5_model': True 81 } 82
83 - def __init__(self, mgme_dir = "", dir_path = "", opt=None):
84 """Initiate the ProcessExporterFortran with directory information""" 85 self.mgme_dir = mgme_dir 86 self.dir_path = dir_path 87 self.model = None 88 89 self.opt = dict(self.default_opt) 90 if opt: 91 self.opt.update(opt) 92 93 #place holder to pass information to the run_interface 94 self.proc_characteristic = banner_mod.ProcCharacteristic()
95 96 97 #=========================================================================== 98 # process exporter fortran switch between group and not grouped 99 #===========================================================================
100 - def export_processes(self, matrix_elements, fortran_model):
101 """Make the switch between grouped and not grouped output""" 102 103 calls = 0 104 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 105 for (group_number, me_group) in enumerate(matrix_elements): 106 calls = calls + self.generate_subprocess_directory_v4(\ 107 me_group, fortran_model, group_number) 108 else: 109 for me_number, me in enumerate(matrix_elements.get_matrix_elements()): 110 calls = calls + self.generate_subprocess_directory_v4(\ 111 me, fortran_model, me_number) 112 113 return calls
114 115 116 117 #=========================================================================== 118 # create the run_card 119 #===========================================================================
120 - def create_run_card(self, matrix_elements, history):
121 """ """ 122 123 run_card = banner_mod.RunCard() 124 125 126 default=True 127 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 128 processes = [me.get('processes') for megroup in matrix_elements 129 for me in megroup['matrix_elements']] 130 elif matrix_elements: 131 processes = [me.get('processes') 132 for me in matrix_elements['matrix_elements']] 133 else: 134 default =False 135 136 if default: 137 run_card.create_default_for_process(self.proc_characteristic, 138 history, 139 processes) 140 141 142 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat')) 143 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))
144 145 146 #=========================================================================== 147 # copy the Template in a new directory. 148 #===========================================================================
149 - def copy_v4template(self, modelname):
150 """create the directory run_name as a copy of the MadEvent 151 Template, and clean the directory 152 """ 153 154 #First copy the full template tree if dir_path doesn't exit 155 if not os.path.isdir(self.dir_path): 156 assert self.mgme_dir, \ 157 "No valid MG_ME path given for MG4 run directory creation." 158 logger.info('initialize a new directory: %s' % \ 159 os.path.basename(self.dir_path)) 160 shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'), 161 self.dir_path, True) 162 # distutils.dir_util.copy_tree since dir_path already exists 163 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 164 self.dir_path) 165 # Duplicate run_card and plot_card 166 for card in ['plot_card']: 167 try: 168 shutil.copy(pjoin(self.dir_path, 'Cards', 169 card + '.dat'), 170 pjoin(self.dir_path, 'Cards', 171 card + '_default.dat')) 172 except IOError: 173 logger.warning("Failed to copy " + card + ".dat to default") 174 elif os.getcwd() == os.path.realpath(self.dir_path): 175 logger.info('working in local directory: %s' % \ 176 os.path.realpath(self.dir_path)) 177 # distutils.dir_util.copy_tree since dir_path already exists 178 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/LO'), 179 self.dir_path) 180 # for name in misc.glob('Template/LO/*', self.mgme_dir): 181 # name = os.path.basename(name) 182 # filname = pjoin(self.mgme_dir, 'Template','LO',name) 183 # if os.path.isfile(filename): 184 # files.cp(filename, pjoin(self.dir_path,name)) 185 # elif os.path.isdir(filename): 186 # shutil.copytree(filename, pjoin(self.dir_path,name), True) 187 # distutils.dir_util.copy_tree since dir_path already exists 188 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 189 self.dir_path) 190 # Duplicate run_card and plot_card 191 for card in ['plot_card']: 192 try: 193 shutil.copy(pjoin(self.dir_path, 'Cards', 194 card + '.dat'), 195 pjoin(self.dir_path, 'Cards', 196 card + '_default.dat')) 197 except IOError: 198 logger.warning("Failed to copy " + card + ".dat to default") 199 elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')): 200 assert self.mgme_dir, \ 201 "No valid MG_ME path given for MG4 run directory creation." 202 try: 203 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 204 except IOError: 205 MG5_version = misc.get_pkg_info() 206 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \ 207 "5." 
+ MG5_version['version']) 208 209 #Ensure that the Template is clean 210 if self.opt['clean']: 211 logger.info('remove old information in %s' % \ 212 os.path.basename(self.dir_path)) 213 if os.environ.has_key('MADGRAPH_BASE'): 214 misc.call([pjoin('bin', 'internal', 'clean_template'), 215 '--web'], cwd=self.dir_path) 216 else: 217 try: 218 misc.call([pjoin('bin', 'internal', 'clean_template')], \ 219 cwd=self.dir_path) 220 except Exception, why: 221 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 222 % (os.path.basename(self.dir_path),why)) 223 224 #Write version info 225 MG_version = misc.get_pkg_info() 226 open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 227 MG_version['version']) 228 229 230 # add the makefile in Source directory 231 filename = pjoin(self.dir_path,'Source','makefile') 232 self.write_source_makefile(writers.FileWriter(filename)) 233 234 # add the DiscreteSampler information 235 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'DiscreteSampler.f'), 236 pjoin(self.dir_path, 'Source')) 237 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'StringCast.f'), 238 pjoin(self.dir_path, 'Source')) 239 240 # We need to create the correct open_data for the pdf 241 self.write_pdf_opendata()
242 243 244 245 246 #=========================================================================== 247 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 248 #===========================================================================
249 - def write_procdef_mg5(self, file_pos, modelname, process_str):
250 """ write an equivalent of the MG4 proc_card in order that all the Madevent 251 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 252 253 proc_card_template = template_files.mg4_proc_card.mg4_template 254 process_template = template_files.mg4_proc_card.process_template 255 process_text = '' 256 coupling = '' 257 new_process_content = [] 258 259 260 # First find the coupling and suppress the coupling from process_str 261 #But first ensure that coupling are define whithout spaces: 262 process_str = process_str.replace(' =', '=') 263 process_str = process_str.replace('= ', '=') 264 process_str = process_str.replace(',',' , ') 265 #now loop on the element and treat all the coupling 266 for info in process_str.split(): 267 if '=' in info: 268 coupling += info + '\n' 269 else: 270 new_process_content.append(info) 271 # Recombine the process_str (which is the input process_str without coupling 272 #info) 273 process_str = ' '.join(new_process_content) 274 275 #format the SubProcess 276 process_text += process_template.substitute({'process': process_str, \ 277 'coupling': coupling}) 278 279 text = proc_card_template.substitute({'process': process_text, 280 'model': modelname, 281 'multiparticle':''}) 282 ff = open(file_pos, 'w') 283 ff.write(text) 284 ff.close()
285 286 #=========================================================================== 287 # Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz 288 #===========================================================================
289 - def finalize_v4_directory(self, matrix_elements, history = "", makejpg = False, 290 online = False, compiler=default_compiler):
291 """Function to finalize v4 directory, for inheritance. 292 """ 293 294 self.create_run_card(matrix_elements, history) 295 296 pass
297 298 #=========================================================================== 299 # Create the proc_characteristic file passing information to the run_interface 300 #===========================================================================
301 - def create_proc_charac(self, matrix_elements=None, history= "", **opts):
302 303 self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics'))
304 305 #=========================================================================== 306 # write_matrix_element_v4 307 #===========================================================================
308 - def write_matrix_element_v4(self):
309 """Function to write a matrix.f file, for inheritance. 310 """ 311 pass
312 313 #=========================================================================== 314 # write_pdf_opendata 315 #===========================================================================
316 - def write_pdf_opendata(self):
317 """ modify the pdf opendata file, to allow direct access to cluster node 318 repository if configure""" 319 320 if not self.opt["cluster_local_path"]: 321 changer = {"pdf_systemwide": ""} 322 else: 323 to_add = """ 324 tempname='%(path)s'//Tablefile 325 open(IU,file=tempname,status='old',ERR=1) 326 return 327 1 tempname='%(path)s/Pdfdata/'//Tablefile 328 open(IU,file=tempname,status='old',ERR=2) 329 return 330 2 tempname='%(path)s/lhapdf'//Tablefile 331 open(IU,file=tempname,status='old',ERR=3) 332 return 333 3 tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile 334 open(IU,file=tempname,status='old',ERR=4) 335 return 336 4 tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile 337 open(IU,file=tempname,status='old',ERR=5) 338 return 339 """ % {"path" : self.opt["cluster_local_path"]} 340 341 changer = {"pdf_systemwide": to_add} 342 343 344 ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "opendata.f")) 345 template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"),"r").read() 346 ff.writelines(template % changer) 347 348 # Do the same for lhapdf set 349 if not self.opt["cluster_local_path"]: 350 changer = {"cluster_specific_path": ""} 351 else: 352 to_add=""" 353 LHAPath='%(path)s/PDFsets' 354 Inquire(File=LHAPath, exist=exists) 355 if(exists)return 356 LHAPath='%(path)s/../lhapdf/pdfsets/6.1/' 357 Inquire(File=LHAPath, exist=exists) 358 if(exists)return 359 LHAPath='%(path)s/../lhapdf/pdfsets/' 360 Inquire(File=LHAPath, exist=exists) 361 if(exists)return 362 LHAPath='./PDFsets' 363 """ % {"path" : self.opt["cluster_local_path"]} 364 changer = {"cluster_specific_path": to_add} 365 366 ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f")) 367 #ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w") 368 template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"),"r").read() 369 ff.writelines(template % changer) 370 371 372 return
373 374 375 376 #=========================================================================== 377 # write_maxparticles_file 378 #===========================================================================
379 - def write_maxparticles_file(self, writer, matrix_elements):
380 """Write the maxparticles.inc file for MadEvent""" 381 382 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 383 maxparticles = max([me.get_nexternal_ninitial()[0] for me in \ 384 matrix_elements.get('matrix_elements')]) 385 else: 386 maxparticles = max([me.get_nexternal_ninitial()[0] \ 387 for me in matrix_elements]) 388 389 lines = "integer max_particles\n" 390 lines += "parameter(max_particles=%d)" % maxparticles 391 392 # Write the file 393 writer.writelines(lines) 394 395 return True
396 397 398 #=========================================================================== 399 # export the model 400 #===========================================================================
401 - def export_model_files(self, model_path):
402 """Configure the files/link of the process according to the model""" 403 404 # Import the model 405 for file in os.listdir(model_path): 406 if os.path.isfile(pjoin(model_path, file)): 407 shutil.copy2(pjoin(model_path, file), \ 408 pjoin(self.dir_path, 'Source', 'MODEL'))
409 410 424 431 432 #=========================================================================== 433 # export the helas routine 434 #===========================================================================
435 - def export_helas(self, helas_path):
436 """Configure the files/link of the process according to the model""" 437 438 # Import helas routine 439 for filename in os.listdir(helas_path): 440 filepos = pjoin(helas_path, filename) 441 if os.path.isfile(filepos): 442 if filepos.endswith('Makefile.template'): 443 cp(filepos, self.dir_path + '/Source/DHELAS/Makefile') 444 elif filepos.endswith('Makefile'): 445 pass 446 else: 447 cp(filepos, self.dir_path + '/Source/DHELAS')
448 # following lines do the same but whithout symbolic link 449 # 450 #def export_helas(mgme_dir, dir_path): 451 # 452 # # Copy the HELAS directory 453 # helas_dir = pjoin(mgme_dir, 'HELAS') 454 # for filename in os.listdir(helas_dir): 455 # if os.path.isfile(pjoin(helas_dir, filename)): 456 # shutil.copy2(pjoin(helas_dir, filename), 457 # pjoin(dir_path, 'Source', 'DHELAS')) 458 # shutil.move(pjoin(dir_path, 'Source', 'DHELAS', 'Makefile.template'), 459 # pjoin(dir_path, 'Source', 'DHELAS', 'Makefile')) 460 # 461 462 #=========================================================================== 463 # generate_subprocess_directory_v4 464 #===========================================================================
465 - def generate_subprocess_directory_v4(self, matrix_element, 466 fortran_model, 467 me_number):
468 """Routine to generate a subprocess directory (for inheritance)""" 469 470 pass
471 472 #=========================================================================== 473 # get_source_libraries_list 474 #===========================================================================
475 - def get_source_libraries_list(self):
476 """ Returns the list of libraries to be compiling when compiling the 477 SOURCE directory. It is different for loop_induced processes and 478 also depends on the value of the 'output_dependencies' option""" 479 480 return ['$(LIBDIR)libdhelas.$(libext)', 481 '$(LIBDIR)libpdf.$(libext)', 482 '$(LIBDIR)libmodel.$(libext)', 483 '$(LIBDIR)libcernlib.$(libext)']
484 485 #=========================================================================== 486 # write_source_makefile 487 #===========================================================================
488 - def write_source_makefile(self, writer):
489 """Write the nexternal.inc file for MG4""" 490 491 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 492 set_of_lib = ' '.join(['$(LIBRARIES)']+self.get_source_libraries_list()) 493 if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'): 494 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 495 MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param 496 param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n''' 497 else: 498 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 499 param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n''' 500 text = open(path).read() % {'libraries': set_of_lib, 'model':model_line} 501 writer.write(text) 502 503 return True
504 505 #=========================================================================== 506 # write_nexternal_madspin 507 #===========================================================================
508 - def write_nexternal_madspin(self, writer, nexternal, ninitial):
509 """Write the nexternal_prod.inc file for madspin""" 510 511 replace_dict = {} 512 513 replace_dict['nexternal'] = nexternal 514 replace_dict['ninitial'] = ninitial 515 516 file = """ \ 517 integer nexternal_prod 518 parameter (nexternal_prod=%(nexternal)d) 519 integer nincoming_prod 520 parameter (nincoming_prod=%(ninitial)d)""" % replace_dict 521 522 # Write the file 523 writer.writelines(file) 524 525 return True
526 527 #=========================================================================== 528 # write_helamp_madspin 529 #===========================================================================
530 - def write_helamp_madspin(self, writer, ncomb):
531 """Write the helamp.inc file for madspin""" 532 533 replace_dict = {} 534 535 replace_dict['ncomb'] = ncomb 536 537 file = """ \ 538 integer ncomb1 539 parameter (ncomb1=%(ncomb)d) 540 double precision helamp(ncomb1) 541 common /to_helamp/helamp """ % replace_dict 542 543 # Write the file 544 writer.writelines(file) 545 546 return True
547 548 549 #=========================================================================== 550 # write_nexternal_file 551 #===========================================================================
552 - def write_nexternal_file(self, writer, nexternal, ninitial):
553 """Write the nexternal.inc file for MG4""" 554 555 replace_dict = {} 556 557 replace_dict['nexternal'] = nexternal 558 replace_dict['ninitial'] = ninitial 559 560 file = """ \ 561 integer nexternal 562 parameter (nexternal=%(nexternal)d) 563 integer nincoming 564 parameter (nincoming=%(ninitial)d)""" % replace_dict 565 566 # Write the file 567 writer.writelines(file) 568 569 return True
570 571 #=========================================================================== 572 # write_pmass_file 573 #===========================================================================
574 - def write_pmass_file(self, writer, matrix_element):
575 """Write the pmass.inc file for MG4""" 576 577 model = matrix_element.get('processes')[0].get('model') 578 579 lines = [] 580 for wf in matrix_element.get_external_wavefunctions(): 581 mass = model.get('particle_dict')[wf.get('pdg_code')].get('mass') 582 if mass.lower() != "zero": 583 mass = "abs(%s)" % mass 584 585 lines.append("pmass(%d)=%s" % \ 586 (wf.get('number_external'), mass)) 587 588 # Write the file 589 writer.writelines(lines) 590 591 return True
592 593 #=========================================================================== 594 # write_ngraphs_file 595 #===========================================================================
596 - def write_ngraphs_file(self, writer, nconfigs):
597 """Write the ngraphs.inc file for MG4. Needs input from 598 write_configs_file.""" 599 600 file = " integer n_max_cg\n" 601 file = file + "parameter (n_max_cg=%d)" % nconfigs 602 603 # Write the file 604 writer.writelines(file) 605 606 return True
607 608 #=========================================================================== 609 # write_leshouche_file 610 #===========================================================================
611 - def write_leshouche_file(self, writer, matrix_element):
612 """Write the leshouche.inc file for MG4""" 613 614 # Write the file 615 writer.writelines(self.get_leshouche_lines(matrix_element, 0)) 616 617 return True
618 619 #=========================================================================== 620 # get_leshouche_lines 621 #===========================================================================
622 - def get_leshouche_lines(self, matrix_element, numproc):
623 """Write the leshouche.inc file for MG4""" 624 625 # Extract number of external particles 626 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 627 628 lines = [] 629 for iproc, proc in enumerate(matrix_element.get('processes')): 630 legs = proc.get_legs_with_decays() 631 lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \ 632 (iproc + 1, numproc+1, nexternal, 633 ",".join([str(l.get('id')) for l in legs]))) 634 if iproc == 0 and numproc == 0: 635 for i in [1, 2]: 636 lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \ 637 (i, nexternal, 638 ",".join([ "%3r" % 0 ] * ninitial + \ 639 [ "%3r" % i ] * (nexternal - ninitial)))) 640 641 # Here goes the color connections corresponding to the JAMPs 642 # Only one output, for the first subproc! 643 if iproc == 0: 644 # If no color basis, just output trivial color flow 645 if not matrix_element.get('color_basis'): 646 for i in [1, 2]: 647 lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \ 648 (i, numproc+1,nexternal, 649 ",".join([ "%3r" % 0 ] * nexternal))) 650 651 else: 652 # First build a color representation dictionnary 653 repr_dict = {} 654 for l in legs: 655 repr_dict[l.get('number')] = \ 656 proc.get('model').get_particle(l.get('id')).get_color()\ 657 * (-1)**(1+l.get('state')) 658 # Get the list of color flows 659 color_flow_list = \ 660 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 661 ninitial) 662 # And output them properly 663 for cf_i, color_flow_dict in enumerate(color_flow_list): 664 for i in [0, 1]: 665 lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \ 666 (i + 1, cf_i + 1, numproc+1, nexternal, 667 ",".join(["%3r" % color_flow_dict[l.get('number')][i] \ 668 for l in legs]))) 669 670 return lines
671 672 673 674 675 #=========================================================================== 676 # write_maxamps_file 677 #===========================================================================
678 - def write_maxamps_file(self, writer, maxamps, maxflows, 679 maxproc,maxsproc):
680 """Write the maxamps.inc file for MG4.""" 681 682 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 683 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 684 (maxamps, maxflows) 685 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 686 (maxproc, maxsproc) 687 688 # Write the file 689 writer.writelines(file) 690 691 return True
692 693 #=========================================================================== 694 # write_props_file 695 #===========================================================================
696 - def write_props_file(self, writer, matrix_element, s_and_t_channels):
697 """Write the props.inc file for MadEvent. Needs input from 698 write_configs_file.""" 699 700 lines = [] 701 702 particle_dict = matrix_element.get('processes')[0].get('model').\ 703 get('particle_dict') 704 705 for iconf, configs in enumerate(s_and_t_channels): 706 for vertex in configs[0] + configs[1][:-1]: 707 leg = vertex.get('legs')[-1] 708 if leg.get('id') not in particle_dict: 709 # Fake propagator used in multiparticle vertices 710 mass = 'zero' 711 width = 'zero' 712 pow_part = 0 713 else: 714 particle = particle_dict[leg.get('id')] 715 # Get mass 716 if particle.get('mass').lower() == 'zero': 717 mass = particle.get('mass') 718 else: 719 mass = "abs(%s)" % particle.get('mass') 720 # Get width 721 if particle.get('width').lower() == 'zero': 722 width = particle.get('width') 723 else: 724 width = "abs(%s)" % particle.get('width') 725 726 pow_part = 1 + int(particle.is_boson()) 727 728 lines.append("prmass(%d,%d) = %s" % \ 729 (leg.get('number'), iconf + 1, mass)) 730 lines.append("prwidth(%d,%d) = %s" % \ 731 (leg.get('number'), iconf + 1, width)) 732 lines.append("pow(%d,%d) = %d" % \ 733 (leg.get('number'), iconf + 1, pow_part)) 734 735 # Write the file 736 writer.writelines(lines) 737 738 return True
739 740 741 742 743 744 #=========================================================================== 745 # Routines to output UFO models in MG4 format 746 #=========================================================================== 747
748 - def convert_model_to_mg4(self, model, wanted_lorentz = [], 749 wanted_couplings = []):
750 """ Create a full valid MG4 model from a MG5 model (coming from UFO)""" 751 752 # Make sure aloha is in quadruple precision if needed 753 old_aloha_mp=aloha.mp_precision 754 aloha.mp_precision=self.opt['mp'] 755 756 # create the MODEL 757 write_dir=pjoin(self.dir_path, 'Source', 'MODEL') 758 model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic) 759 model_builder.build(wanted_couplings) 760 761 # Backup the loop mode, because it can be changed in what follows. 762 old_loop_mode = aloha.loop_mode 763 764 # Create the aloha model or use the existing one (for loop exporters 765 # this is useful as the aloha model will be used again in the 766 # LoopHelasMatrixElements generated). We do not save the model generated 767 # here if it didn't exist already because it would be a waste of 768 # memory for tree level applications since aloha is only needed at the 769 # time of creating the aloha fortran subroutines. 770 if hasattr(self, 'aloha_model'): 771 aloha_model = self.aloha_model 772 else: 773 aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath'))) 774 aloha_model.add_Lorentz_object(model.get('lorentz')) 775 776 # Compute the subroutines 777 if wanted_lorentz: 778 aloha_model.compute_subset(wanted_lorentz) 779 else: 780 aloha_model.compute_all(save=False) 781 782 # Write them out 783 write_dir=pjoin(self.dir_path, 'Source', 'DHELAS') 784 aloha_model.write(write_dir, 'Fortran') 785 786 # Revert the original aloha loop mode 787 aloha.loop_mode = old_loop_mode 788 789 #copy Helas Template 790 cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile') 791 if any([any(['L' in tag for tag in d[1]]) for d in wanted_lorentz]): 792 cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f', 793 write_dir+'/aloha_functions.f') 794 aloha_model.loop_mode = False 795 else: 796 cp(MG5DIR + '/aloha/template_files/aloha_functions.f', 797 write_dir+'/aloha_functions.f') 798 create_aloha.write_aloha_file_inc(write_dir, '.f', '.o') 799 800 # Make final link in the Process 801 self.make_model_symbolic_link() 802 803 # Re-establish original aloha mode 804 aloha.mp_precision=old_aloha_mp
805 806 #=========================================================================== 807 # Helper functions 808 #===========================================================================
809 - def get_mg5_info_lines(self):
810 """Return info lines for MG5, suitable to place at beginning of 811 Fortran files""" 812 813 info = misc.get_pkg_info() 814 info_lines = "" 815 if info and info.has_key('version') and info.has_key('date'): 816 info_lines = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \ 817 (info['version'], info['date']) 818 info_lines = info_lines + \ 819 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 820 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 821 else: 822 info_lines = "# Generated by MadGraph5_aMC@NLO\n" + \ 823 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 824 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 825 826 return info_lines
827
828 - def get_process_info_lines(self, matrix_element):
829 """Return info lines describing the processes for this matrix element""" 830 831 return"\n".join([ "C " + process.nice_string().replace('\n', '\nC * ') \ 832 for process in matrix_element.get('processes')])
833 834
835 - def get_helicity_lines(self, matrix_element,array_name='NHEL'):
836 """Return the Helicity matrix definition lines for this matrix element""" 837 838 helicity_line_list = [] 839 i = 0 840 for helicities in matrix_element.get_helicity_matrix(): 841 i = i + 1 842 int_list = [i, len(helicities)] 843 int_list.extend(helicities) 844 helicity_line_list.append(\ 845 ("DATA ("+array_name+"(I,%4r),I=1,%d) /" + \ 846 ",".join(['%2r'] * len(helicities)) + "/") % tuple(int_list)) 847 848 return "\n".join(helicity_line_list)
849
850 - def get_ic_line(self, matrix_element):
851 """Return the IC definition line coming after helicities, required by 852 switchmom in madevent""" 853 854 nexternal = matrix_element.get_nexternal_ninitial()[0] 855 int_list = range(1, nexternal + 1) 856 857 return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, 858 ",".join([str(i) for \ 859 i in int_list]))
860
861 - def set_chosen_SO_index(self, process, squared_orders):
862 """ From the squared order constraints set by the user, this function 863 finds what indices of the squared_orders list the user intends to pick. 864 It returns this as a string of comma-separated successive '.true.' or 865 '.false.' for each index.""" 866 867 user_squared_orders = process.get('squared_orders') 868 split_orders = process.get('split_orders') 869 870 if len(user_squared_orders)==0: 871 return ','.join(['.true.']*len(squared_orders)) 872 873 res = [] 874 for sqsos in squared_orders: 875 is_a_match = True 876 for user_sqso, value in user_squared_orders.items(): 877 if (process.get_squared_order_type(user_sqso) =='==' and \ 878 value!=sqsos[split_orders.index(user_sqso)]) or \ 879 (process.get_squared_order_type(user_sqso) in ['<=','='] and \ 880 value<sqsos[split_orders.index(user_sqso)]) or \ 881 (process.get_squared_order_type(user_sqso) == '>' and \ 882 value>=sqsos[split_orders.index(user_sqso)]): 883 is_a_match = False 884 break 885 res.append('.true.' if is_a_match else '.false.') 886 887 return ','.join(res)
888
889 - def get_split_orders_lines(self, orders, array_name, n=5):
890 """ Return the split orders definition as defined in the list orders and 891 for the name of the array 'array_name'. Split rows in chunks of size n.""" 892 893 ret_list = [] 894 for index, order in enumerate(orders): 895 for k in xrange(0, len(order), n): 896 ret_list.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" % \ 897 (array_name,index + 1, k + 1, min(k + n, len(order)), 898 ','.join(["%5r" % i for i in order[k:k + n]]))) 899 return ret_list
900
901 - def format_integer_list(self, list, name, n=5):
902 """ Return an initialization of the python list in argument following 903 the fortran syntax using the data keyword assignment, filling an array 904 of name 'name'. It splits rows in chunks of size n.""" 905 906 ret_list = [] 907 for k in xrange(0, len(list), n): 908 ret_list.append("DATA (%s(i),i=%3r,%3r) /%s/" % \ 909 (name, k + 1, min(k + n, len(list)), 910 ','.join(["%5r" % i for i in list[k:k + n]]))) 911 return ret_list
912
913 - def get_color_data_lines(self, matrix_element, n=6):
914 """Return the color matrix definition lines for this matrix element. Split 915 rows in chunks of size n.""" 916 917 if not matrix_element.get('color_matrix'): 918 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"] 919 else: 920 ret_list = [] 921 my_cs = color.ColorString() 922 for index, denominator in \ 923 enumerate(matrix_element.get('color_matrix').\ 924 get_line_denominators()): 925 # First write the common denominator for this color matrix line 926 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator)) 927 # Then write the numerators for the matrix elements 928 num_list = matrix_element.get('color_matrix').\ 929 get_line_numerators(index, denominator) 930 931 for k in xrange(0, len(num_list), n): 932 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \ 933 (index + 1, k + 1, min(k + n, len(num_list)), 934 ','.join(["%5r" % i for i in num_list[k:k + n]]))) 935 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index]) 936 ret_list.append("C %s" % repr(my_cs)) 937 return ret_list
938 939
940 - def get_den_factor_line(self, matrix_element):
941 """Return the denominator factor line for this matrix element""" 942 943 return "DATA IDEN/%2r/" % \ 944 matrix_element.get_denominator_factor()
945
946 - def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
947 """Return the ICOLAMP matrix, showing which JAMPs contribute to 948 which configs (diagrams).""" 949 950 ret_list = [] 951 952 booldict = {False: ".false.", True: ".true."} 953 954 if not matrix_element.get('color_basis'): 955 # No color, so only one color factor. Simply write a ".true." 956 # for each config (i.e., each diagram with only 3 particle 957 # vertices 958 configs = len(mapconfigs) 959 ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \ 960 (num_matrix_element, configs, 961 ','.join([".true." for i in range(configs)]))) 962 return ret_list 963 964 # There is a color basis - create a list showing which JAMPs have 965 # contributions to which configs 966 967 # Only want to include leading color flows, so find max_Nc 968 color_basis = matrix_element.get('color_basis') 969 970 # We don't want to include the power of Nc's which come from the potential 971 # loop color trace (i.e. in the case of a closed fermion loop for example) 972 # so we subtract it here when computing max_Nc 973 max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in 974 color_basis.values()],[])) 975 976 # Crate dictionary between diagram number and JAMP number 977 diag_jamp = {} 978 for ijamp, col_basis_elem in \ 979 enumerate(sorted(matrix_element.get('color_basis').keys())): 980 for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]: 981 # Only use color flows with Nc == max_Nc. However, notice that 982 # we don't want to include the Nc power coming from the loop 983 # in this counting. 984 if (diag_tuple[4]-diag_tuple[5]) == max_Nc: 985 diag_num = diag_tuple[0] + 1 986 # Add this JAMP number to this diag_num 987 diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \ 988 [ijamp+1] 989 990 colamps = ijamp + 1 991 for iconfig, num_diag in enumerate(mapconfigs): 992 if num_diag == 0: 993 continue 994 995 # List of True or False 996 bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)] 997 # Add line 998 ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \ 999 (iconfig+1, num_matrix_element, colamps, 1000 ','.join(["%s" % booldict[b] for b in \ 1001 bool_list]))) 1002 1003 return ret_list
1004
1005 - def get_amp2_lines(self, matrix_element, config_map = []):
1006 """Return the amp2(i) = sum(amp for diag(i))^2 lines""" 1007 1008 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 1009 # Get minimum legs in a vertex 1010 vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \ 1011 matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]] 1012 minvert = min(vert_list) if vert_list!=[] else 0 1013 1014 ret_lines = [] 1015 if config_map: 1016 # In this case, we need to sum up all amplitudes that have 1017 # identical topologies, as given by the config_map (which 1018 # gives the topology/config for each of the diagrams 1019 diagrams = matrix_element.get('diagrams') 1020 # Combine the diagrams with identical topologies 1021 config_to_diag_dict = {} 1022 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1023 if config_map[idiag] == 0: 1024 continue 1025 try: 1026 config_to_diag_dict[config_map[idiag]].append(idiag) 1027 except KeyError: 1028 config_to_diag_dict[config_map[idiag]] = [idiag] 1029 # Write out the AMP2s summing squares of amplitudes belonging 1030 # to eiher the same diagram or different diagrams with 1031 # identical propagator properties. Note that we need to use 1032 # AMP2 number corresponding to the first diagram number used 1033 # for that AMP2. 1034 for config in sorted(config_to_diag_dict.keys()): 1035 1036 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \ 1037 {"num": (config_to_diag_dict[config][0] + 1)} 1038 1039 amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \ 1040 sum([diagrams[idiag].get('amplitudes') for \ 1041 idiag in config_to_diag_dict[config]], [])]) 1042 1043 # Not using \sum |M|^2 anymore since this creates troubles 1044 # when ckm is not diagonal due to the JIM mechanism. 1045 if '+' in amp: 1046 line += "(%s)*dconjg(%s)" % (amp, amp) 1047 else: 1048 line += "%s*dconjg(%s)" % (amp, amp) 1049 ret_lines.append(line) 1050 else: 1051 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1052 # Ignore any diagrams with 4-particle vertices. 1053 if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert: 1054 continue 1055 # Now write out the expression for AMP2, meaning the sum of 1056 # squared amplitudes belonging to the same diagram 1057 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)} 1058 line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \ 1059 {"num": a.get('number')} for a in \ 1060 diag.get('amplitudes')]) 1061 ret_lines.append(line) 1062 1063 return ret_lines
 1064  
 1065      #===========================================================================
 1066      # Returns the data statements initializing the coefficients for the JAMP
 1067      # decomposition. It is used when the JAMP initialization is done through
 1068      # big arrays containing the projection coefficients.
 1069      #===========================================================================
1070 - def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\ 1071 n=50, Nc_value=3):
1072 """This functions return the lines defining the DATA statement setting 1073 the coefficients building the JAMPS out of the AMPS. Split rows in 1074 bunches of size n. 1075 One can specify the color_basis from which the color amplitudes originates 1076 so that there are commentaries telling what color structure each JAMP 1077 corresponds to.""" 1078 1079 if(not isinstance(color_amplitudes,list) or 1080 not (color_amplitudes and isinstance(color_amplitudes[0],list))): 1081 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_coefs" 1082 1083 res_list = [] 1084 my_cs = color.ColorString() 1085 for index, coeff_list in enumerate(color_amplitudes): 1086 # Create the list of the complete numerical coefficient. 1087 coefs_list=[coefficient[0][0]*coefficient[0][1]*\ 1088 (fractions.Fraction(Nc_value)**coefficient[0][3]) for \ 1089 coefficient in coeff_list] 1090 # Create the list of the numbers of the contributing amplitudes. 1091 # Mutliply by -1 for those which have an imaginary coefficient. 1092 ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \ 1093 for coefficient in coeff_list] 1094 # Find the common denominator. 1095 commondenom=abs(reduce(fractions.gcd, coefs_list).denominator) 1096 num_list=[(coefficient*commondenom).numerator \ 1097 for coefficient in coefs_list] 1098 res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\ 1099 index+1,len(num_list))) 1100 res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\ 1101 index+1,commondenom)) 1102 if color_basis: 1103 my_cs.from_immutable(sorted(color_basis.keys())[index]) 1104 res_list.append("C %s" % repr(my_cs)) 1105 for k in xrange(0, len(num_list), n): 1106 res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \ 1107 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1108 ','.join(["%6r" % i for i in num_list[k:k + n]]))) 1109 res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \ 1110 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1111 ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]]))) 1112 pass 1113 return res_list
1114 1115
1116 - def get_JAMP_lines_split_order(self, col_amps, split_order_amps, 1117 split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
1118 """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps 1119 defined as a matrix element or directly as a color_amplitudes dictionary. 1120 The split_order_amps specifies the group of amplitudes sharing the same 1121 amplitude orders which should be put in together in a given set of JAMPS. 1122 The split_order_amps is supposed to have the format of the second output 1123 of the function get_split_orders_mapping function in helas_objects.py. 1124 The split_order_names is optional (it should correspond to the process 1125 'split_orders' attribute) and only present to provide comments in the 1126 JAMP definitions in the code.""" 1127 1128 # Let the user call get_JAMP_lines_split_order directly from a 1129 error_msg="Malformed '%s' argument passed to the "+\ 1130 "get_JAMP_lines_split_order function: %s"%str(split_order_amps) 1131 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 1132 color_amplitudes=col_amps.get_color_amplitudes() 1133 elif(isinstance(col_amps,list)): 1134 if(col_amps and isinstance(col_amps[0],list)): 1135 color_amplitudes=col_amps 1136 else: 1137 raise MadGraph5Error, error_msg%'col_amps' 1138 else: 1139 raise MadGraph5Error, error_msg%'col_amps' 1140 1141 # Verify the sanity of the split_order_amps and split_order_names args 1142 if isinstance(split_order_amps,list): 1143 for elem in split_order_amps: 1144 if len(elem)!=2: 1145 raise MadGraph5Error, error_msg%'split_order_amps' 1146 # Check the first element of the two lists to make sure they are 1147 # integers, although in principle they should all be integers. 1148 if not isinstance(elem[0],tuple) or \ 1149 not isinstance(elem[1],tuple) or \ 1150 not isinstance(elem[0][0],int) or \ 1151 not isinstance(elem[1][0],int): 1152 raise MadGraph5Error, error_msg%'split_order_amps' 1153 else: 1154 raise MadGraph5Error, error_msg%'split_order_amps' 1155 1156 if not split_order_names is None: 1157 if isinstance(split_order_names,list): 1158 # Should specify the same number of names as there are elements 1159 # in the key of the split_order_amps. 1160 if len(split_order_names)!=len(split_order_amps[0][0]): 1161 raise MadGraph5Error, error_msg%'split_order_names' 1162 # Check the first element of the list to be a string 1163 if not isinstance(split_order_names[0],str): 1164 raise MadGraph5Error, error_msg%'split_order_names' 1165 else: 1166 raise MadGraph5Error, error_msg%'split_order_names' 1167 1168 # Now scan all contributing orders to be individually computed and 1169 # construct the list of color_amplitudes for JAMP to be constructed 1170 # accordingly. 1171 res_list=[] 1172 for i, amp_order in enumerate(split_order_amps): 1173 col_amps_order = [] 1174 for jamp in color_amplitudes: 1175 col_amps_order.append(filter(lambda col_amp: 1176 col_amp[1] in amp_order[1],jamp)) 1177 if split_order_names: 1178 res_list.append('C JAMPs contributing to orders '+' '.join( 1179 ['%s=%i'%order for order in zip(split_order_names, 1180 amp_order[0])])) 1181 if self.opt['export_format'] in ['madloop_matchbox']: 1182 res_list.extend(self.get_JAMP_lines(col_amps_order, 1183 JAMP_format="JAMP(%s,{0})".format(str(i+1)), 1184 JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1)))) 1185 else: 1186 res_list.extend(self.get_JAMP_lines(col_amps_order, 1187 JAMP_format="JAMP(%s,{0})".format(str(i+1)))) 1188 1189 return res_list
1190 1191
1192 - def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", 1193 split=-1):
1194 """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps 1195 defined as a matrix element or directly as a color_amplitudes dictionary, 1196 Jamp_formatLC should be define to allow to add LeadingColor computation 1197 (usefull for MatchBox) 1198 The split argument defines how the JAMP lines should be split in order 1199 not to be too long.""" 1200 1201 # Let the user call get_JAMP_lines directly from a MatrixElement or from 1202 # the color amplitudes lists. 1203 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 1204 color_amplitudes=col_amps.get_color_amplitudes() 1205 elif(isinstance(col_amps,list)): 1206 if(col_amps and isinstance(col_amps[0],list)): 1207 color_amplitudes=col_amps 1208 else: 1209 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines" 1210 else: 1211 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines" 1212 1213 1214 res_list = [] 1215 for i, coeff_list in enumerate(color_amplitudes): 1216 # It might happen that coeff_list is empty if this function was 1217 # called from get_JAMP_lines_split_order (i.e. if some color flow 1218 # does not contribute at all for a given order). 1219 # In this case we simply set it to 0. 1220 if coeff_list==[]: 1221 res_list.append(((JAMP_format+"=0D0") % str(i + 1))) 1222 continue 1223 # Break the JAMP definition into 'n=split' pieces to avoid having 1224 # arbitrarly long lines. 1225 first=True 1226 n = (len(coeff_list)+1 if split<=0 else split) 1227 while coeff_list!=[]: 1228 coefs=coeff_list[:n] 1229 coeff_list=coeff_list[n:] 1230 res = ((JAMP_format+"=") % str(i + 1)) + \ 1231 ((JAMP_format % str(i + 1)) if not first and split>0 else '') 1232 1233 first=False 1234 # Optimization: if all contributions to that color basis element have 1235 # the same coefficient (up to a sign), put it in front 1236 list_fracs = [abs(coefficient[0][1]) for coefficient in coefs] 1237 common_factor = False 1238 diff_fracs = list(set(list_fracs)) 1239 if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1: 1240 common_factor = True 1241 global_factor = diff_fracs[0] 1242 res = res + '%s(' % self.coeff(1, global_factor, False, 0) 1243 1244 # loop for JAMP 1245 for (coefficient, amp_number) in coefs: 1246 if not coefficient: 1247 continue 1248 if common_factor: 1249 res = (res + "%s" + AMP_format) % \ 1250 (self.coeff(coefficient[0], 1251 coefficient[1] / abs(coefficient[1]), 1252 coefficient[2], 1253 coefficient[3]), 1254 str(amp_number)) 1255 else: 1256 res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0], 1257 coefficient[1], 1258 coefficient[2], 1259 coefficient[3]), 1260 str(amp_number)) 1261 1262 if common_factor: 1263 res = res + ')' 1264 1265 res_list.append(res) 1266 1267 return res_list
1268
1269 - def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
1270 """Generate the PDF lines for the auto_dsig.f file""" 1271 1272 processes = matrix_element.get('processes') 1273 model = processes[0].get('model') 1274 1275 pdf_definition_lines = "" 1276 pdf_data_lines = "" 1277 pdf_lines = "" 1278 1279 if ninitial == 1: 1280 pdf_lines = "PD(0) = 0d0\nIPROC = 0\n" 1281 for i, proc in enumerate(processes): 1282 process_line = proc.base_string() 1283 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 1284 pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n" 1285 pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n" 1286 else: 1287 # Pick out all initial state particles for the two beams 1288 initial_states = [sorted(list(set([p.get_initial_pdg(1) for \ 1289 p in processes]))), 1290 sorted(list(set([p.get_initial_pdg(2) for \ 1291 p in processes])))] 1292 1293 # Prepare all variable names 1294 pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \ 1295 sum(initial_states,[])]) 1296 for key,val in pdf_codes.items(): 1297 pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m') 1298 1299 # Set conversion from PDG code to number used in PDF calls 1300 pdgtopdf = {21: 0, 22: 7} 1301 1302 # Fill in missing entries of pdgtopdf 1303 for pdg in sum(initial_states,[]): 1304 if not pdg in pdgtopdf and not pdg in pdgtopdf.values(): 1305 pdgtopdf[pdg] = pdg 1306 elif pdg not in pdgtopdf and pdg in pdgtopdf.values(): 1307 # If any particle has pdg code 7, we need to use something else 1308 pdgtopdf[pdg] = 6000000 + pdg 1309 1310 # Get PDF variable declarations for all initial states 1311 for i in [0,1]: 1312 pdf_definition_lines += "DOUBLE PRECISION " + \ 1313 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 1314 for pdg in \ 1315 initial_states[i]]) + \ 1316 "\n" 1317 1318 # Get PDF data lines for all initial states 1319 for i in [0,1]: 1320 pdf_data_lines += "DATA " + \ 1321 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 1322 for pdg in initial_states[i]]) + \ 1323 "/%d*1D0/" % len(initial_states[i]) + \ 1324 "\n" 1325 1326 # Get PDF lines for all different initial states 1327 for i, init_states in enumerate(initial_states): 1328 if subproc_group: 1329 pdf_lines = pdf_lines + \ 1330 "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \ 1331 % (i + 1, i + 1) 1332 else: 1333 pdf_lines = pdf_lines + \ 1334 "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \ 1335 % (i + 1, i + 1) 1336 1337 for initial_state in init_states: 1338 if initial_state in pdf_codes.keys(): 1339 if subproc_group: 1340 pdf_lines = pdf_lines + \ 1341 ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP," + \ 1342 "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \ 1343 (pdf_codes[initial_state], 1344 i + 1, i + 1, pdgtopdf[initial_state], 1345 i + 1, i + 1) 1346 else: 1347 pdf_lines = pdf_lines + \ 1348 ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP," + \ 1349 "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \ 1350 (pdf_codes[initial_state], 1351 i + 1, i + 1, pdgtopdf[initial_state], 1352 i + 1, i + 1) 1353 pdf_lines = pdf_lines + "ENDIF\n" 1354 1355 # Add up PDFs for the different initial state particles 1356 pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n" 1357 for proc in processes: 1358 process_line = proc.base_string() 1359 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! 
" + process_line 1360 pdf_lines = pdf_lines + "\nPD(IPROC)=" 1361 for ibeam in [1, 2]: 1362 initial_state = proc.get_initial_pdg(ibeam) 1363 if initial_state in pdf_codes.keys(): 1364 pdf_lines = pdf_lines + "%s%d*" % \ 1365 (pdf_codes[initial_state], ibeam) 1366 else: 1367 pdf_lines = pdf_lines + "1d0*" 1368 # Remove last "*" from pdf_lines 1369 pdf_lines = pdf_lines[:-1] + "\n" 1370 pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n" 1371 1372 # Remove last line break from the return variables 1373 return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1374 1375 #=========================================================================== 1376 # write_props_file 1377 #===========================================================================
1378 - def write_props_file(self, writer, matrix_element, s_and_t_channels):
1379 """Write the props.inc file for MadEvent. Needs input from 1380 write_configs_file.""" 1381 1382 lines = [] 1383 1384 particle_dict = matrix_element.get('processes')[0].get('model').\ 1385 get('particle_dict') 1386 1387 for iconf, configs in enumerate(s_and_t_channels): 1388 for vertex in configs[0] + configs[1][:-1]: 1389 leg = vertex.get('legs')[-1] 1390 if leg.get('id') not in particle_dict: 1391 # Fake propagator used in multiparticle vertices 1392 mass = 'zero' 1393 width = 'zero' 1394 pow_part = 0 1395 else: 1396 particle = particle_dict[leg.get('id')] 1397 # Get mass 1398 if particle.get('mass').lower() == 'zero': 1399 mass = particle.get('mass') 1400 else: 1401 mass = "abs(%s)" % particle.get('mass') 1402 # Get width 1403 if particle.get('width').lower() == 'zero': 1404 width = particle.get('width') 1405 else: 1406 width = "abs(%s)" % particle.get('width') 1407 1408 pow_part = 1 + int(particle.is_boson()) 1409 1410 lines.append("prmass(%d,%d) = %s" % \ 1411 (leg.get('number'), iconf + 1, mass)) 1412 lines.append("prwidth(%d,%d) = %s" % \ 1413 (leg.get('number'), iconf + 1, width)) 1414 lines.append("pow(%d,%d) = %d" % \ 1415 (leg.get('number'), iconf + 1, pow_part)) 1416 1417 # Write the file 1418 writer.writelines(lines) 1419 1420 return True
1421 1422 #=========================================================================== 1423 # write_configs_file 1424 #===========================================================================
1425 - def write_configs_file(self, writer, matrix_element):
1426 """Write the configs.inc file for MadEvent""" 1427 1428 # Extract number of external particles 1429 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1430 1431 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 1432 mapconfigs = [c[0] for c in configs] 1433 model = matrix_element.get('processes')[0].get('model') 1434 return mapconfigs, self.write_configs_file_from_diagrams(writer, 1435 [[c[1]] for c in configs], 1436 mapconfigs, 1437 nexternal, ninitial, 1438 model)
1439 1440 #=========================================================================== 1441 # write_configs_file_from_diagrams 1442 #===========================================================================
1443 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 1444 nexternal, ninitial, model):
1445 """Write the actual configs.inc file. 1446 1447 configs is the diagrams corresponding to configs (each 1448 diagrams is a list of corresponding diagrams for all 1449 subprocesses, with None if there is no corresponding diagrams 1450 for a given process). 1451 mapconfigs gives the diagram number for each config. 1452 1453 For s-channels, we need to output one PDG for each subprocess in 1454 the subprocess group, in order to be able to pick the right 1455 one for multiprocesses.""" 1456 1457 lines = [] 1458 1459 s_and_t_channels = [] 1460 1461 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 1462 for config in configs if [d for d in config if d][0].\ 1463 get_vertex_leg_numbers()!=[]] 1464 minvert = min(vert_list) if vert_list!=[] else 0 1465 1466 # Number of subprocesses 1467 nsubprocs = len(configs[0]) 1468 1469 nconfigs = 0 1470 1471 new_pdg = model.get_first_non_pdg() 1472 1473 for iconfig, helas_diags in enumerate(configs): 1474 if any(vert > minvert for vert in [d for d in helas_diags if d]\ 1475 [0].get_vertex_leg_numbers()) : 1476 # Only 3-vertices allowed in configs.inc except for vertices 1477 # which originate from a shrunk loop. 1478 continue 1479 nconfigs += 1 1480 1481 # Need s- and t-channels for all subprocesses, including 1482 # those that don't contribute to this config 1483 empty_verts = [] 1484 stchannels = [] 1485 for h in helas_diags: 1486 if h: 1487 # get_s_and_t_channels gives vertices starting from 1488 # final state external particles and working inwards 1489 stchannels.append(h.get('amplitudes')[0].\ 1490 get_s_and_t_channels(ninitial, model, new_pdg)) 1491 else: 1492 stchannels.append((empty_verts, None)) 1493 1494 # For t-channels, just need the first non-empty one 1495 tchannels = [t for s,t in stchannels if t != None][0] 1496 1497 # For s_and_t_channels (to be used later) use only first config 1498 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 1499 tchannels]) 1500 1501 # Make sure empty_verts is same length as real vertices 1502 if any([s for s,t in stchannels]): 1503 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 1504 1505 # Reorganize s-channel vertices to get a list of all 1506 # subprocesses for each vertex 1507 schannels = zip(*[s for s,t in stchannels]) 1508 else: 1509 schannels = [] 1510 1511 allchannels = schannels 1512 if len(tchannels) > 1: 1513 # Write out tchannels only if there are any non-trivial ones 1514 allchannels = schannels + tchannels 1515 1516 # Write out propagators for s-channel and t-channel vertices 1517 1518 lines.append("# Diagram %d" % (mapconfigs[iconfig])) 1519 # Correspondance between the config and the diagram = amp2 1520 lines.append("data mapconfig(%d)/%d/" % (nconfigs, 1521 mapconfigs[iconfig])) 1522 1523 for verts in allchannels: 1524 if verts in schannels: 1525 vert = [v for v in verts if v][0] 1526 else: 1527 vert = verts 1528 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 1529 last_leg = vert.get('legs')[-1] 1530 lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 1531 (last_leg.get('number'), nconfigs, len(daughters), 1532 ",".join([str(d) for d in daughters]))) 1533 if verts in schannels: 1534 pdgs = [] 1535 for v in verts: 1536 if v: 1537 pdgs.append(v.get('legs')[-1].get('id')) 1538 else: 1539 pdgs.append(0) 1540 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 1541 (last_leg.get('number'), nconfigs, nsubprocs, 1542 ",".join([str(d) for d in pdgs]))) 1543 lines.append("data tprid(%d,%d)/0/" % \ 1544 (last_leg.get('number'), 
nconfigs)) 1545 elif verts in tchannels[:-1]: 1546 lines.append("data tprid(%d,%d)/%d/" % \ 1547 (last_leg.get('number'), nconfigs, 1548 abs(last_leg.get('id')))) 1549 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 1550 (last_leg.get('number'), nconfigs, nsubprocs, 1551 ",".join(['0'] * nsubprocs))) 1552 1553 # Write out number of configs 1554 lines.append("# Number of configs") 1555 lines.append("data mapconfig(0)/%d/" % nconfigs) 1556 1557 # Write the file 1558 writer.writelines(lines) 1559 1560 return s_and_t_channels
1561 1562 #=========================================================================== 1563 # Global helper methods 1564 #=========================================================================== 1565
1566 - def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1567 """Returns a nicely formatted string for the coefficients in JAMP lines""" 1568 1569 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power 1570 1571 if total_coeff == 1: 1572 if is_imaginary: 1573 return '+imag1*' 1574 else: 1575 return '+' 1576 elif total_coeff == -1: 1577 if is_imaginary: 1578 return '-imag1*' 1579 else: 1580 return '-' 1581 1582 res_str = '%+iD0' % total_coeff.numerator 1583 1584 if total_coeff.denominator != 1: 1585 # Check if total_coeff is an integer 1586 res_str = res_str + '/%iD0' % total_coeff.denominator 1587 1588 if is_imaginary: 1589 res_str = res_str + '*imag1' 1590 1591 return res_str + '*'
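For reference, the formatting rules above can be exercised with a standalone paraphrase; the helper name jamp_coeff and the sample inputs below are illustrative only, not part of the module:

import fractions

def jamp_coeff(ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
    """Illustrative paraphrase of ProcessExporterFortran.coeff."""
    total = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power
    if total == 1:
        return '+imag1*' if is_imaginary else '+'
    if total == -1:
        return '-imag1*' if is_imaginary else '-'
    res = '%+iD0' % total.numerator
    if total.denominator != 1:
        res += '/%iD0' % total.denominator
    if is_imaginary:
        res += '*imag1'
    return res + '*'

# Expected Fortran-style coefficient strings:
#   jamp_coeff(1, fractions.Fraction(1, 2), False, 0)   -> '+1D0/2D0*'
#   jamp_coeff(-1, fractions.Fraction(1, 3), True, 1)   -> '-imag1*'   (since -1/3 * 3 == -1)
#   jamp_coeff(2, fractions.Fraction(1, 1), False, -1)  -> '+2D0/3D0*'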
1592 1593
1594 - def set_fortran_compiler(self, default_compiler, force=False):
1595 """Set compiler based on what's available on the system""" 1596 1597 # Check for compiler 1598 if default_compiler['fortran'] and misc.which(default_compiler['fortran']): 1599 f77_compiler = default_compiler['fortran'] 1600 elif misc.which('gfortran'): 1601 f77_compiler = 'gfortran' 1602 elif misc.which('g77'): 1603 f77_compiler = 'g77' 1604 elif misc.which('f77'): 1605 f77_compiler = 'f77' 1606 elif default_compiler['fortran']: 1607 logger.warning('No Fortran Compiler detected! Please install one') 1608 f77_compiler = default_compiler['fortran'] # misc.which may have failed, so try it anyway 1609 else: 1610 raise MadGraph5Error, 'No Fortran Compiler detected! Please install one' 1611 logger.info('Use Fortran compiler ' + f77_compiler) 1612 1613 1614 # Check for the f2py compiler. 1. Set the default. 1615 if default_compiler['f2py']: 1616 f2py_compiler = default_compiler['f2py'] 1617 else: 1618 f2py_compiler = '' 1619 # 2. Try to find the correct one. 1620 if default_compiler['f2py'] and misc.which(default_compiler['f2py']): 1621 f2py_compiler = default_compiler['f2py'] 1622 elif misc.which('f2py'): 1623 f2py_compiler = 'f2py' 1624 elif sys.version_info[1] == 6: 1625 if misc.which('f2py-2.6'): 1626 f2py_compiler = 'f2py-2.6' 1627 elif misc.which('f2py2.6'): 1628 f2py_compiler = 'f2py2.6' 1629 elif sys.version_info[1] == 7: 1630 if misc.which('f2py-2.7'): 1631 f2py_compiler = 'f2py-2.7' 1632 elif misc.which('f2py2.7'): 1633 f2py_compiler = 'f2py2.7' 1634 1635 to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler} 1636 1637 1638 self.replace_make_opt_f_compiler(to_replace) 1639 # Replace also for Template but not for cluster 1640 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite: 1641 self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO')) 1642 1643 return f77_compiler
1644 1645 # an alias for backward compatibility 1646 set_compiler = set_fortran_compiler 1647 1648
1649 - def set_cpp_compiler(self, default_compiler, force=False):
1650 """Set compiler based on what's available on the system""" 1651 1652 # Check for compiler 1653 if default_compiler and misc.which(default_compiler): 1654 compiler = default_compiler 1655 elif misc.which('g++'): 1656 #check if clang version 1657 p = misc.Popen(['g++', '--version'], stdout=subprocess.PIPE, 1658 stderr=subprocess.PIPE) 1659 out, _ = p.communicate() 1660 if 'clang' in out and misc.which('clang'): 1661 compiler = 'clang' 1662 else: 1663 compiler = 'g++' 1664 elif misc.which('c++'): 1665 compiler = 'c++' 1666 elif misc.which('clang'): 1667 compiler = 'clang' 1668 elif default_compiler: 1669 logger.warning('No c++ Compiler detected! Please install one') 1670 compiler = default_compiler # maybe misc fail so try with it 1671 else: 1672 raise MadGraph5Error, 'No c++ Compiler detected! Please install one' 1673 logger.info('Use c++ compiler ' + compiler) 1674 self.replace_make_opt_c_compiler(compiler) 1675 # Replace also for Template but not for cluster 1676 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite: 1677 self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO')) 1678 1679 return compiler
1680 1681
1682 - def replace_make_opt_f_compiler(self, compilers, root_dir = ""):
1683 """Set FC=compiler in Source/make_opts""" 1684 1685 assert isinstance(compilers, dict) 1686 1687 mod = False #avoid to rewrite the file if not needed 1688 if not root_dir: 1689 root_dir = self.dir_path 1690 1691 compiler= compilers['fortran'] 1692 f2py_compiler = compilers['f2py'] 1693 if not f2py_compiler: 1694 f2py_compiler = 'f2py' 1695 for_update= {'DEFAULT_F_COMPILER':compiler, 1696 'DEFAULT_F2PY_COMPILER':f2py_compiler} 1697 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1698 1699 try: 1700 common_run_interface.CommonRunCmd.update_make_opts_full( 1701 make_opts, for_update) 1702 except IOError: 1703 if root_dir == self.dir_path: 1704 logger.info('Fail to set compiler. Trying to continue anyway.')
1705
1706 - def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
1707 """Set CXX=compiler in Source/make_opts. 1708 The version is also checked, in order to set some extra flags 1709 if the compiler is clang (on MACOS)""" 1710 1711 is_clang = misc.detect_if_cpp_compiler_is_clang(compiler) 1712 is_lc = misc.detect_cpp_std_lib_dependence(compiler) == '-lc++' 1713 1714 # list of the variable to set in the make_opts file 1715 for_update= {'DEFAULT_CPP_COMPILER':compiler, 1716 'MACFLAG':'-mmacosx-version-min=10.7' if is_clang and is_lc else '', 1717 'STDLIB': '-lc++' if is_lc else '-lstdc++', 1718 'STDLIB_FLAG': '-stdlib=libc++' if is_lc and is_clang else '' 1719 } 1720 1721 if not root_dir: 1722 root_dir = self.dir_path 1723 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1724 1725 try: 1726 common_run_interface.CommonRunCmd.update_make_opts_full( 1727 make_opts, for_update) 1728 except IOError: 1729 if root_dir == self.dir_path: 1730 logger.info('Fail to set compiler. Trying to continue anyway.') 1731 1732 return
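Both replace_make_opt_*_compiler methods delegate the actual file edit to common_run_interface.CommonRunCmd.update_make_opts_full. As a rough illustration of that kind of rewrite, here is a hypothetical stand-in that treats make_opts as simple KEY=value lines (not the project's real helper, whose behaviour may differ):

import os

def update_make_opts_sketch(make_opts_path, for_update):
    """Hypothetical KEY=value updater for a make_opts-style file."""
    with open(make_opts_path) as fsock:
        lines = fsock.readlines()
    seen = set()
    out = []
    for line in lines:
        key = line.split('=', 1)[0].strip()
        if key in for_update:
            out.append('%s=%s\n' % (key, for_update[key]))
            seen.add(key)
        else:
            out.append(line)
    # Append any requested variable that was not already present.
    for key, value in for_update.items():
        if key not in seen:
            out.append('%s=%s\n' % (key, value))
    with open(make_opts_path, 'w') as fsock:
        fsock.writelines(out)

# e.g. update_make_opts_sketch(os.path.join('Source', 'make_opts'),
#                              {'DEFAULT_F_COMPILER': 'gfortran',
#                               'DEFAULT_F2PY_COMPILER': 'f2py'})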
1733
1734 #=============================================================================== 1735 # ProcessExporterFortranSA 1736 #=============================================================================== 1737 -class ProcessExporterFortranSA(ProcessExporterFortran):
1738 """Class to take care of exporting a set of matrix elements to 1739 MadGraph v4 StandAlone format.""" 1740 1741 matrix_template = "matrix_standalone_v4.inc" 1742
1743 - def __init__(self, *args, **opts):
1744 """add the format information compare to standard init""" 1745 1746 if 'format' in opts: 1747 self.format = opts['format'] 1748 del opts['format'] 1749 else: 1750 self.format = 'standalone' 1751 ProcessExporterFortran.__init__(self, *args, **opts)
1752
1753 - def copy_v4template(self, modelname):
1754 """Additional actions needed for setup of Template 1755 """ 1756 1757 #First copy the full template tree if dir_path doesn't exit 1758 if os.path.isdir(self.dir_path): 1759 return 1760 1761 logger.info('initialize a new standalone directory: %s' % \ 1762 os.path.basename(self.dir_path)) 1763 temp_dir = pjoin(self.mgme_dir, 'Template/LO') 1764 1765 # Create the directory structure 1766 os.mkdir(self.dir_path) 1767 os.mkdir(pjoin(self.dir_path, 'Source')) 1768 os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL')) 1769 os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS')) 1770 os.mkdir(pjoin(self.dir_path, 'SubProcesses')) 1771 os.mkdir(pjoin(self.dir_path, 'bin')) 1772 os.mkdir(pjoin(self.dir_path, 'bin', 'internal')) 1773 os.mkdir(pjoin(self.dir_path, 'lib')) 1774 os.mkdir(pjoin(self.dir_path, 'Cards')) 1775 1776 # Information at top-level 1777 #Write version info 1778 shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path) 1779 try: 1780 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 1781 except IOError: 1782 MG5_version = misc.get_pkg_info() 1783 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \ 1784 "5." + MG5_version['version']) 1785 1786 1787 # Add file in SubProcesses 1788 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'), 1789 pjoin(self.dir_path, 'SubProcesses', 'makefile')) 1790 1791 if self.format == 'standalone': 1792 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'), 1793 pjoin(self.dir_path, 'SubProcesses', 'check_sa.f')) 1794 1795 # Add file in Source 1796 shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'), 1797 pjoin(self.dir_path, 'Source')) 1798 # add the makefile 1799 filename = pjoin(self.dir_path,'Source','makefile') 1800 self.write_source_makefile(writers.FileWriter(filename))
1801 1802 #=========================================================================== 1803 # export model files 1804 #===========================================================================
1805 - def export_model_files(self, model_path):
1806 """export the model dependent files for V4 model""" 1807 1808 super(ProcessExporterFortranSA,self).export_model_files(model_path) 1809 # Add the routine update_as_param in v4 model 1810 # This is a function created in the UFO 1811 text=""" 1812 subroutine update_as_param() 1813 call setpara('param_card.dat',.false.) 1814 return 1815 end 1816 """ 1817 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 1818 ff.write(text) 1819 ff.close() 1820 1821 text = open(pjoin(self.dir_path,'SubProcesses','check_sa.f')).read() 1822 text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)') 1823 fsock = open(pjoin(self.dir_path,'SubProcesses','check_sa.f'), 'w') 1824 fsock.write(text) 1825 fsock.close() 1826 1827 self.make_model_symbolic_link()
1828 1829 #=========================================================================== 1830 # Make the Helas and Model directories for Standalone directory 1831 #===========================================================================
1832 - def make(self):
1833 """Run make in the DHELAS and MODEL directories, to set up 1834 everything for running standalone 1835 """ 1836 1837 source_dir = pjoin(self.dir_path, "Source") 1838 logger.info("Running make for Helas") 1839 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 1840 logger.info("Running make for Model") 1841 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran')
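misc.compile is the project's wrapper around make, so the two calls above amount to building the two libraries from the Source directory; a minimal sketch under that assumption:

import subprocess

# Rough equivalent of the two misc.compile calls, run from the generated
# process directory (assumes misc.compile simply invokes make on the target):
for target in ['../lib/libdhelas.a', '../lib/libmodel.a']:
    subprocess.check_call(['make', target], cwd='Source')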
1842 1843 #=========================================================================== 1844 # Create proc_card_mg5.dat for Standalone directory 1845 #===========================================================================
1846 - def finalize_v4_directory(self, matrix_elements, history, makejpg = False, 1847 online = False, compiler=default_compiler):
1848 """Finalize Standalone MG4 directory by generating proc_card_mg5.dat""" 1849 1850 self.compiler_choice(compiler) 1851 self.make() 1852 1853 # Write command history as proc_card_mg5 1854 if history and os.path.isdir(pjoin(self.dir_path, 'Cards')): 1855 output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat') 1856 history.write(output_file) 1857 1858 ProcessExporterFortran.finalize_v4_directory(self, matrix_elements, 1859 history, makejpg, online, compiler) 1860 open(pjoin(self.dir_path,'__init__.py'),'w') 1861 open(pjoin(self.dir_path,'SubProcesses','__init__.py'),'w') 1862 1863 if 'mode' in self.opt and self.opt['mode'] == "reweight": 1864 # add the module to handle the NLO weight 1865 files.copytree(pjoin(MG5DIR, 'Template', 'RWGTNLO'), 1866 pjoin(self.dir_path, 'Source')) 1867 files.copytree(pjoin(MG5DIR, 'Template', 'NLO', 'Source', 'PDF'), 1868 pjoin(self.dir_path, 'Source', 'PDF')) 1869 self.write_pdf_opendata()
1870 1871 1872
1873 - def compiler_choice(self, compiler):
1874 """ Different daughter classes might want different compilers. 1875 So this function is meant to be overloaded if desired.""" 1876 1877 self.set_compiler(compiler)
1878 1879 #=========================================================================== 1880 # generate_subprocess_directory_v4 1881 #===========================================================================
1882 - def generate_subprocess_directory_v4(self, matrix_element, 1883 fortran_model):
1884 """Generate the Pxxxxx directory for a subprocess in MG4 standalone, 1885 including the necessary matrix.f and nexternal.inc files""" 1886 1887 cwd = os.getcwd() 1888 1889 # Create the directory PN_xx_xxxxx in the specified path 1890 dirpath = pjoin(self.dir_path, 'SubProcesses', \ 1891 "P%s" % matrix_element.get('processes')[0].shell_string()) 1892 1893 if self.opt['sa_symmetry']: 1894 # avoid symmetric output 1895 for i,proc in enumerate(matrix_element.get('processes')): 1896 1897 initial = [] #filled in the next line 1898 final = [l.get('id') for l in proc.get('legs')\ 1899 if l.get('state') or initial.append(l.get('id'))] 1900 decay_finals = proc.get_final_ids_after_decay() 1901 decay_finals.sort() 1902 tag = (tuple(initial), tuple(decay_finals)) 1903 legs = proc.get('legs')[:] 1904 leg0 = proc.get('legs')[0] 1905 leg1 = proc.get('legs')[1] 1906 if not leg1.get('state'): 1907 proc.get('legs')[0] = leg1 1908 proc.get('legs')[1] = leg0 1909 flegs = proc.get('legs')[2:] 1910 for perm in itertools.permutations(flegs): 1911 for i,p in enumerate(perm): 1912 proc.get('legs')[i+2] = p 1913 dirpath2 = pjoin(self.dir_path, 'SubProcesses', \ 1914 "P%s" % proc.shell_string()) 1915 #restore original order 1916 proc.get('legs')[2:] = legs[2:] 1917 if os.path.exists(dirpath2): 1918 proc.get('legs')[:] = legs 1919 return 0 1920 proc.get('legs')[:] = legs 1921 1922 try: 1923 os.mkdir(dirpath) 1924 except os.error as error: 1925 logger.warning(error.strerror + " " + dirpath) 1926 1927 #try: 1928 # os.chdir(dirpath) 1929 #except os.error: 1930 # logger.error('Could not cd to directory %s' % dirpath) 1931 # return 0 1932 1933 logger.info('Creating files in directory %s' % dirpath) 1934 1935 # Extract number of external particles 1936 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1937 1938 # Create the matrix.f file and the nexternal.inc file 1939 if self.opt['export_format']=='standalone_msP': 1940 filename = pjoin(dirpath, 'matrix_prod.f') 1941 else: 1942 filename = pjoin(dirpath, 'matrix.f') 1943 calls = self.write_matrix_element_v4( 1944 writers.FortranWriter(filename), 1945 matrix_element, 1946 fortran_model) 1947 1948 if self.opt['export_format'] == 'standalone_msP': 1949 filename = pjoin(dirpath,'configs_production.inc') 1950 mapconfigs, s_and_t_channels = self.write_configs_file(\ 1951 writers.FortranWriter(filename), 1952 matrix_element) 1953 1954 filename = pjoin(dirpath,'props_production.inc') 1955 self.write_props_file(writers.FortranWriter(filename), 1956 matrix_element, 1957 s_and_t_channels) 1958 1959 filename = pjoin(dirpath,'nexternal_prod.inc') 1960 self.write_nexternal_madspin(writers.FortranWriter(filename), 1961 nexternal, ninitial) 1962 1963 if self.opt['export_format']=='standalone_msF': 1964 filename = pjoin(dirpath, 'helamp.inc') 1965 ncomb=matrix_element.get_helicity_combinations() 1966 self.write_helamp_madspin(writers.FortranWriter(filename), 1967 ncomb) 1968 1969 filename = pjoin(dirpath, 'nexternal.inc') 1970 self.write_nexternal_file(writers.FortranWriter(filename), 1971 nexternal, ninitial) 1972 1973 filename = pjoin(dirpath, 'pmass.inc') 1974 self.write_pmass_file(writers.FortranWriter(filename), 1975 matrix_element) 1976 1977 filename = pjoin(dirpath, 'ngraphs.inc') 1978 self.write_ngraphs_file(writers.FortranWriter(filename), 1979 len(matrix_element.get_all_amplitudes())) 1980 1981 # Generate diagrams 1982 filename = pjoin(dirpath, "matrix.ps") 1983 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 1984 get('diagrams'), 1985 
filename, 1986 model=matrix_element.get('processes')[0].\ 1987 get('model'), 1988 amplitude=True) 1989 logger.info("Generating Feynman diagrams for " + \ 1990 matrix_element.get('processes')[0].nice_string()) 1991 plot.draw() 1992 1993 linkfiles = ['check_sa.f', 'coupl.inc', 'makefile'] 1994 1995 for file in linkfiles: 1996 ln('../%s' % file, cwd=dirpath) 1997 1998 # Return to original PWD 1999 #os.chdir(cwd) 2000 2001 if not calls: 2002 calls = 0 2003 return calls
2004 2005 2006 #=========================================================================== 2007 # write_source_makefile 2008 #===========================================================================
2009 - def write_source_makefile(self, writer):
2010 """Write the Source makefile for MG4 standalone""" 2011 2012 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 2013 set_of_lib = '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)' 2014 model_line='''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n''' 2015 text = open(path).read() % {'libraries': set_of_lib, 'model':model_line} 2016 writer.write(text) 2017 2018 return True
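The makefile is produced by plain %(name)s substitution into the shipped template; a self-contained illustration of the same pattern (the template text below is invented for the example, not the real madevent_makefile_source):

template = (
    "LIBRARIES = %(libraries)s\n"
    "\n"
    "%(model)s"
)
text = template % {
    'libraries': '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)',
    'model': '$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n',
}
print(text)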
2019 2020 #=========================================================================== 2021 # write_matrix_element_v4 2022 #===========================================================================
2023 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 2024 write=True, proc_prefix=''):
2025 """Export a matrix element to a matrix.f file in MG4 standalone format 2026 if write is on False, just return the replace_dict and not write anything.""" 2027 2028 2029 if not matrix_element.get('processes') or \ 2030 not matrix_element.get('diagrams'): 2031 return 0 2032 2033 if not isinstance(writer, writers.FortranWriter): 2034 raise writers.FortranWriter.FortranWriterError(\ 2035 "writer not FortranWriter but %s" % type(writer)) 2036 2037 if not self.opt.has_key('sa_symmetry'): 2038 self.opt['sa_symmetry']=False 2039 2040 # Set lowercase/uppercase Fortran code 2041 writers.FortranWriter.downcase = False 2042 2043 # The proc_id is for MadEvent grouping which is never used in SA. 2044 replace_dict = {'global_variable':'', 'amp2_lines':'', 2045 'proc_prefix':proc_prefix, 'proc_id':''} 2046 2047 # Extract helas calls 2048 helas_calls = fortran_model.get_matrix_element_calls(\ 2049 matrix_element) 2050 2051 replace_dict['helas_calls'] = "\n".join(helas_calls) 2052 2053 # Extract version number and date from VERSION file 2054 info_lines = self.get_mg5_info_lines() 2055 replace_dict['info_lines'] = info_lines 2056 2057 # Extract process info lines 2058 process_lines = self.get_process_info_lines(matrix_element) 2059 replace_dict['process_lines'] = process_lines 2060 2061 # Extract number of external particles 2062 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2063 replace_dict['nexternal'] = nexternal 2064 2065 # Extract ncomb 2066 ncomb = matrix_element.get_helicity_combinations() 2067 replace_dict['ncomb'] = ncomb 2068 2069 # Extract helicity lines 2070 helicity_lines = self.get_helicity_lines(matrix_element) 2071 replace_dict['helicity_lines'] = helicity_lines 2072 2073 # Extract overall denominator 2074 # Averaging initial state color, spin, and identical FS particles 2075 replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element) 2076 2077 # Extract ngraphs 2078 ngraphs = matrix_element.get_number_of_amplitudes() 2079 replace_dict['ngraphs'] = ngraphs 2080 2081 # Extract nwavefuncs 2082 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2083 replace_dict['nwavefuncs'] = nwavefuncs 2084 2085 # Extract ncolor 2086 ncolor = max(1, len(matrix_element.get('color_basis'))) 2087 replace_dict['ncolor'] = ncolor 2088 2089 replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor() 2090 2091 # Extract color data lines 2092 color_data_lines = self.get_color_data_lines(matrix_element) 2093 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2094 2095 if self.opt['export_format']=='standalone_msP': 2096 # For MadSpin need to return the AMP2 2097 amp2_lines = self.get_amp2_lines(matrix_element, [] ) 2098 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2099 replace_dict['global_variable'] = \ 2100 " Double Precision amp2(NGRAPHS)\n common/to_amps/ amp2\n" 2101 2102 # JAMP definition, depends on the number of independent split orders 2103 split_orders=matrix_element.get('processes')[0].get('split_orders') 2104 2105 if len(split_orders)==0: 2106 replace_dict['nSplitOrders']='' 2107 # Extract JAMP lines 2108 jamp_lines = self.get_JAMP_lines(matrix_element) 2109 # Consider the output of a dummy order 'ALL_ORDERS' for which we 2110 # set all amplitude order to weight 1 and only one squared order 2111 # contribution which is of course ALL_ORDERS=2. 2112 squared_orders = [(2,),] 2113 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 2114 replace_dict['chosen_so_configs'] = '.TRUE.' 
2115 replace_dict['nSqAmpSplitOrders']=1 2116 replace_dict['split_order_str_list']='' 2117 else: 2118 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 2119 replace_dict['nAmpSplitOrders']=len(amp_orders) 2120 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 2121 replace_dict['nSplitOrders']=len(split_orders) 2122 replace_dict['split_order_str_list']=str(split_orders) 2123 amp_so = self.get_split_orders_lines( 2124 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 2125 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 2126 replace_dict['ampsplitorders']='\n'.join(amp_so) 2127 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 2128 jamp_lines = self.get_JAMP_lines_split_order(\ 2129 matrix_element,amp_orders,split_order_names=split_orders) 2130 2131 # Now setup the array specifying what squared split order is chosen 2132 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 2133 matrix_element.get('processes')[0],squared_orders) 2134 2135 # For convenience we also write the driver check_sa_splitOrders.f 2136 # that explicitely writes out the contribution from each squared order. 2137 # The original driver still works and is compiled with 'make' while 2138 # the splitOrders one is compiled with 'make check_sa_born_splitOrders' 2139 check_sa_writer=writers.FortranWriter('check_sa_born_splitOrders.f') 2140 self.write_check_sa_splitOrders(squared_orders,split_orders, 2141 nexternal,ninitial,proc_prefix,check_sa_writer) 2142 2143 if write: 2144 writers.FortranWriter('nsqso_born.inc').writelines( 2145 """INTEGER NSQSO_BORN 2146 PARAMETER (NSQSO_BORN=%d)"""%replace_dict['nSqAmpSplitOrders']) 2147 2148 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2149 2150 matrix_template = self.matrix_template 2151 if self.opt['export_format']=='standalone_msP' : 2152 matrix_template = 'matrix_standalone_msP_v4.inc' 2153 elif self.opt['export_format']=='standalone_msF': 2154 matrix_template = 'matrix_standalone_msF_v4.inc' 2155 elif self.opt['export_format']=='matchbox': 2156 replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id') 2157 replace_dict["color_information"] = self.get_color_string_lines(matrix_element) 2158 2159 if len(split_orders)>0: 2160 if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']: 2161 logger.debug("Warning: The export format %s is not "+\ 2162 " available for individual ME evaluation of given coupl. orders."+\ 2163 " Only the total ME will be computed.", self.opt['export_format']) 2164 elif self.opt['export_format'] in ['madloop_matchbox']: 2165 replace_dict["color_information"] = self.get_color_string_lines(matrix_element) 2166 matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc" 2167 else: 2168 matrix_template = "matrix_standalone_splitOrders_v4.inc" 2169 2170 if write: 2171 path = pjoin(_file_path, 'iolibs', 'template_files', matrix_template) 2172 content = open(path).read() 2173 content = content % replace_dict 2174 # Write the file 2175 writer.writelines(content) 2176 # Add the helper functions. 2177 if len(split_orders)>0: 2178 content = '\n' + open(pjoin(_file_path, \ 2179 'iolibs/template_files/split_orders_helping_functions.inc'))\ 2180 .read()%replace_dict 2181 writer.writelines(content) 2182 return len(filter(lambda call: call.find('#') != 0, helas_calls)) 2183 else: 2184 replace_dict['return_value'] = len(filter(lambda call: call.find('#') != 0, helas_calls)) 2185 return replace_dict # for subclass update
2186
2187 - def write_check_sa_splitOrders(self,squared_orders, split_orders, nexternal, 2188 nincoming, proc_prefix, writer):
2189 """ Write out a more advanced version of the check_sa drivers that 2190 individually returns the matrix element for each contributing squared 2191 order.""" 2192 2193 check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \ 2194 'template_files', 'check_sa_splitOrders.f')).read() 2195 printout_sq_orders=[] 2196 for i, squared_order in enumerate(squared_orders): 2197 sq_orders=[] 2198 for j, sqo in enumerate(squared_order): 2199 sq_orders.append('%s=%d'%(split_orders[j],sqo)) 2200 printout_sq_orders.append(\ 2201 "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\ 2202 %(i+1,' '.join(sq_orders),i+1)) 2203 printout_sq_orders='\n'.join(printout_sq_orders) 2204 writer.writelines(check_sa_content%{\ 2205 'printout_sqorders':printout_sq_orders, 2206 'nSplitOrders':len(squared_orders), 2207 'nexternal':nexternal, 2208 'nincoming':nincoming, 2209 'proc_prefix':proc_prefix})
2210
2211 2212 -class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
2213 """class to take care of exporting a set of matrix element for the Matchbox 2214 code in the case of Born only routine""" 2215 2216 default_opt = {'clean': False, 'complex_mass':False, 2217 'export_format':'matchbox', 'mp': False, 2218 'sa_symmetry': True} 2219 2220 #specific template of the born 2221 2222 2223 matrix_template = "matrix_standalone_matchbox.inc" 2224 2225 @staticmethod
2226 - def get_color_string_lines(matrix_element):
2227 """Return the color matrix definition lines for this matrix element. Split 2228 rows in chunks of size n.""" 2229 2230 if not matrix_element.get('color_matrix'): 2231 return "\n".join(["out = 1"]) 2232 2233 #start the real work 2234 color_denominators = matrix_element.get('color_matrix').\ 2235 get_line_denominators() 2236 matrix_strings = [] 2237 my_cs = color.ColorString() 2238 for i_color in xrange(len(color_denominators)): 2239 # Then write the numerators for the matrix elements 2240 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color]) 2241 t_str=repr(my_cs) 2242 t_match=re.compile(r"(\w+)\(([\s\d+\,]*)\)") 2243 # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall: 2244 # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')] 2245 all_matches = t_match.findall(t_str) 2246 output = {} 2247 arg=[] 2248 for match in all_matches: 2249 ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')] 2250 if ctype in ['ColorOne' ]: 2251 continue 2252 if ctype not in ['T', 'Tr' ]: 2253 raise MadGraph5Error, 'Color Structure not handled by Matchbox: %s' % ctype 2254 tmparg += ['0'] 2255 arg +=tmparg 2256 for j, v in enumerate(arg): 2257 output[(i_color,j)] = v 2258 2259 for key in output: 2260 if matrix_strings == []: 2261 #first entry 2262 matrix_strings.append(""" 2263 if (in1.eq.%s.and.in2.eq.%s)then 2264 out = %s 2265 """ % (key[0], key[1], output[key])) 2266 else: 2267 #not first entry 2268 matrix_strings.append(""" 2269 elseif (in1.eq.%s.and.in2.eq.%s)then 2270 out = %s 2271 """ % (key[0], key[1], output[key])) 2272 if len(matrix_strings): 2273 matrix_strings.append(" else \n out = - 1 \n endif") 2274 else: 2275 return "\n out = - 1 \n " 2276 return "\n".join(matrix_strings)
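The regular expression above can be checked directly against the example quoted in the comment:

import re

t_match = re.compile(r"(\w+)\(([\s\d+\,]*)\)")
t_str = '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)'
print(t_match.findall(t_str))
# [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')]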
2277
2278 - def make(self,*args,**opts):
2279 pass
2280
2281 - def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1, 2282 JAMP_formatLC=None):
2283 2284 """Adding leading color part of the colorflow""" 2285 2286 if not JAMP_formatLC: 2287 JAMP_formatLC= "LN%s" % JAMP_format 2288 2289 error_msg="Malformed '%s' argument passed to the get_JAMP_lines" 2290 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 2291 col_amps=col_amps.get_color_amplitudes() 2292 elif(isinstance(col_amps,list)): 2293 if(col_amps and isinstance(col_amps[0],list)): 2294 col_amps=col_amps 2295 else: 2296 raise MadGraph5Error, error_msg % 'col_amps' 2297 else: 2298 raise MadGraph5Error, error_msg % 'col_amps' 2299 2300 text = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps, 2301 JAMP_format=JAMP_format, 2302 AMP_format=AMP_format, 2303 split=-1) 2304 2305 2306 # Filter the col_ampls to generate only those without any 1/NC terms 2307 2308 LC_col_amps = [] 2309 for coeff_list in col_amps: 2310 to_add = [] 2311 for (coefficient, amp_number) in coeff_list: 2312 if coefficient[3]==0: 2313 to_add.append( (coefficient, amp_number) ) 2314 LC_col_amps.append(to_add) 2315 2316 text += super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps, 2317 JAMP_format=JAMP_formatLC, 2318 AMP_format=AMP_format, 2319 split=-1) 2320 2321 return text
2322
2323 2324 2325 2326 #=============================================================================== 2327 # ProcessExporterFortranMW 2328 #=============================================================================== 2329 -class ProcessExporterFortranMW(ProcessExporterFortran):
2330 """Class to take care of exporting a set of matrix elements to 2331 MadGraph v4 - MadWeight format.""" 2332 2333 matrix_file="matrix_standalone_v4.inc" 2334
2335 - def copy_v4template(self, modelname):
2336 """Additional actions needed for setup of Template 2337 """ 2338 2339 super(ProcessExporterFortranMW, self).copy_v4template(modelname) 2340 2341 # Add the MW specific file 2342 shutil.copytree(pjoin(MG5DIR,'Template','MadWeight'), 2343 pjoin(self.dir_path, 'Source','MadWeight'), True) 2344 shutil.copytree(pjoin(MG5DIR,'madgraph','madweight'), 2345 pjoin(self.dir_path, 'bin','internal','madweight'), True) 2346 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','setrun.f'), 2347 pjoin(self.dir_path, 'Source','setrun.f')) 2348 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','run.inc'), 2349 pjoin(self.dir_path, 'Source','run.inc')) 2350 # File created from Template (Different in some child class) 2351 filename = os.path.join(self.dir_path,'Source','run_config.inc') 2352 self.write_run_config_file(writers.FortranWriter(filename)) 2353 2354 try: 2355 subprocess.call([os.path.join(self.dir_path, 'Source','MadWeight','bin','internal','pass_to_madweight')], 2356 stdout = os.open(os.devnull, os.O_RDWR), 2357 stderr = os.open(os.devnull, os.O_RDWR), 2358 cwd=self.dir_path) 2359 except OSError: 2360 # Probably madweight already called 2361 pass 2362 2363 # Copy the different python file in the Template 2364 self.copy_python_file() 2365 # create the appropriate cuts.f 2366 self.get_mw_cuts_version() 2367 2368 # add the makefile in Source directory 2369 filename = os.path.join(self.dir_path,'Source','makefile') 2370 self.write_source_makefile(writers.FortranWriter(filename))
2371 2372 2373 2374 2375 #=========================================================================== 2376 # convert_model_to_mg4 2377 #===========================================================================
2378 - def convert_model_to_mg4(self, model, wanted_lorentz = [], 2379 wanted_couplings = []):
2380 2381 super(ProcessExporterFortranMW,self).convert_model_to_mg4(model, 2382 wanted_lorentz, wanted_couplings) 2383 2384 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 2385 try: 2386 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 2387 except OSError as error: 2388 pass 2389 model_path = model.get('modelpath') 2390 # This is not safe if there is a '##' or '-' in the path. 2391 shutil.copytree(model_path, 2392 pjoin(self.dir_path,'bin','internal','ufomodel'), 2393 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 2394 if hasattr(model, 'restrict_card'): 2395 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 2396 'restrict_default.dat') 2397 if isinstance(model.restrict_card, check_param_card.ParamCard): 2398 model.restrict_card.write(out_path) 2399 else: 2400 files.cp(model.restrict_card, out_path)
2401 2402 #=========================================================================== 2403 # generate_subprocess_directory_v4 2404 #===========================================================================
2405 - def copy_python_file(self):
2406 """copy the python file require for the Template""" 2407 2408 # madevent interface 2409 cp(_file_path+'/interface/madweight_interface.py', 2410 self.dir_path+'/bin/internal/madweight_interface.py') 2411 cp(_file_path+'/interface/extended_cmd.py', 2412 self.dir_path+'/bin/internal/extended_cmd.py') 2413 cp(_file_path+'/interface/common_run_interface.py', 2414 self.dir_path+'/bin/internal/common_run_interface.py') 2415 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 2416 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 2417 #cp(_file_path+'/iolibs/save_load_object.py', 2418 # self.dir_path+'/bin/internal/save_load_object.py') 2419 cp(_file_path+'/iolibs/file_writers.py', 2420 self.dir_path+'/bin/internal/file_writers.py') 2421 #model file 2422 cp(_file_path+'../models/check_param_card.py', 2423 self.dir_path+'/bin/internal/check_param_card.py') 2424 2425 #madevent file 2426 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 2427 cp(_file_path+'/various/lhe_parser.py', 2428 self.dir_path+'/bin/internal/lhe_parser.py') 2429 2430 cp(_file_path+'/various/banner.py', 2431 self.dir_path+'/bin/internal/banner.py') 2432 cp(_file_path+'/various/shower_card.py', 2433 self.dir_path+'/bin/internal/shower_card.py') 2434 cp(_file_path+'/various/cluster.py', 2435 self.dir_path+'/bin/internal/cluster.py') 2436 2437 # logging configuration 2438 cp(_file_path+'/interface/.mg5_logging.conf', 2439 self.dir_path+'/bin/internal/me5_logging.conf') 2440 cp(_file_path+'/interface/coloring_logging.py', 2441 self.dir_path+'/bin/internal/coloring_logging.py')
2442 2443 2444 #=========================================================================== 2445 # Change the version of cuts.f to the one compatible with MW 2446 #===========================================================================
2447 - def get_mw_cuts_version(self, outpath=None):
2448 """create the appropriate cuts.f 2449 This is based on the one associated to ME output but: 2450 1) No clustering (=> remove initcluster/setclscales) 2451 2) Adding the definition of cut_bw at the file. 2452 """ 2453 2454 template = open(pjoin(MG5DIR,'Template','LO','SubProcesses','cuts.f')) 2455 2456 text = StringIO() 2457 #1) remove all dependencies in ickkw >1: 2458 nb_if = 0 2459 for line in template: 2460 if 'if(xqcut.gt.0d0' in line: 2461 nb_if = 1 2462 if nb_if == 0: 2463 text.write(line) 2464 continue 2465 if re.search(r'if\(.*\)\s*then', line): 2466 nb_if += 1 2467 elif 'endif' in line: 2468 nb_if -= 1 2469 2470 #2) add fake cut_bw (have to put the true one later) 2471 text.write(""" 2472 logical function cut_bw(p) 2473 include 'madweight_param.inc' 2474 double precision p(*) 2475 if (bw_cut) then 2476 cut_bw = .true. 2477 else 2478 stop 1 2479 endif 2480 return 2481 end 2482 """) 2483 2484 final = text.getvalue() 2485 #3) remove the call to initcluster: 2486 template = final.replace('call initcluster', '! Remove for MW!call initcluster') 2487 template = template.replace('genps.inc', 'maxparticles.inc') 2488 #Now we can write it 2489 if not outpath: 2490 fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w') 2491 elif isinstance(outpath, str): 2492 fsock = open(outpath, 'w') 2493 else: 2494 fsock = outpath 2495 fsock.write(template)
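The ickkw/xqcut block is dropped with a simple nesting counter: once the trigger line is seen, lines are skipped until the matching endif closes the block. A cleaned-up standalone sketch of that idea (the function and the sample Fortran lines are illustrative, not the module's code):

import re

def strip_block(lines, trigger):
    """Skip the if/endif block that starts at the line containing `trigger`."""
    out = []
    depth = 0
    for line in lines:
        if depth == 0 and trigger in line:
            depth = 1          # entering the block to be removed
            continue
        if depth == 0:
            out.append(line)
            continue
        if re.search(r'if\(.*\)\s*then', line):
            depth += 1         # nested if inside the removed block
        elif 'endif' in line:
            depth -= 1         # reaching 0 closes the removed block
    return out

sample = [
    "      xqcut = 10d0\n",
    "      if(xqcut.gt.0d0)then\n",
    "        if(icount.eq.0)then\n",
    "          icount = 1\n",
    "        endif\n",
    "      endif\n",
    "      passcuts = .true.\n",
]
print(''.join(strip_block(sample, 'if(xqcut.gt.0d0')))
# keeps only the first and last sample lines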
2496 2497 2498 2499 #=========================================================================== 2500 # Make the Helas and Model directories for Standalone directory 2501 #===========================================================================
2502 - def make(self):
2503 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 2504 everything for running madweight 2505 """ 2506 2507 source_dir = os.path.join(self.dir_path, "Source") 2508 logger.info("Running make for Helas") 2509 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2510 logger.info("Running make for Model") 2511 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 2512 logger.info("Running make for PDF") 2513 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 2514 logger.info("Running make for CERNLIB") 2515 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 2516 logger.info("Running make for GENERIC") 2517 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 2518 logger.info("Running make for blocks") 2519 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 2520 logger.info("Running make for tools") 2521 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
2522 2523 #=========================================================================== 2524 # Create proc_card_mg5.dat for MadWeight directory 2525 #===========================================================================
2526 - def finalize_v4_directory(self, matrix_elements, history, makejpg = False, 2527 online = False, compiler=default_compiler):
2528 """Finalize the MadWeight MG4 directory by generating proc_card_mg5.dat""" 2529 2530 # proc_characteristics 2531 self.create_proc_charac() 2532 2533 # Write maxparticles.inc based on max of ME's/subprocess groups 2534 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 2535 self.write_maxparticles_file(writers.FortranWriter(filename), 2536 matrix_elements) 2537 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 2538 pjoin(self.dir_path, 'Source','MadWeight','blocks')) 2539 ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'), 2540 pjoin(self.dir_path, 'Source','MadWeight','tools')) 2541 2542 self.set_compiler(compiler) 2543 self.make() 2544 2545 # Write command history as proc_card_mg5 2546 if os.path.isdir(os.path.join(self.dir_path, 'Cards')): 2547 output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat') 2548 history.write(output_file) 2549 2550 ProcessExporterFortran.finalize_v4_directory(self, matrix_elements, 2551 history, makejpg, online, compiler)
2552 2553 2554 #=========================================================================== 2555 # create the run_card for MW 2556 #===========================================================================
2557 - def create_run_card(self, matrix_elements, history):
2558 """Create the run_card and run_card_default for MadWeight""" 2559 2560 run_card = banner_mod.RunCard() 2561 2562 # set the MadWeight-specific defaults 2563 run_card["run_tag"] = "\'not_use\'" 2564 run_card["fixed_ren_scale"] = "T" 2565 run_card["fixed_fac_scale"] = "T" 2566 run_card.remove_all_cut() 2567 2568 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 2569 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2570 python_template=True) 2571 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'), 2572 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2573 python_template=True)
2574 2575 #=========================================================================== 2576 # export model files 2577 #===========================================================================
2578 - def export_model_files(self, model_path):
2579 """export the model dependent files for V4 model""" 2580 2581 super(ProcessExporterFortranMW,self).export_model_files(model_path) 2582 # Add the routine update_as_param in v4 model 2583 # This is a function created in the UFO 2584 text=""" 2585 subroutine update_as_param() 2586 call setpara('param_card.dat',.false.) 2587 return 2588 end 2589 """ 2590 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 2591 ff.write(text) 2592 ff.close() 2593 2594 # Modify setrun.f 2595 text = open(os.path.join(self.dir_path,'Source','setrun.f')).read() 2596 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 2597 fsock = open(os.path.join(self.dir_path,'Source','setrun.f'), 'w') 2598 fsock.write(text) 2599 fsock.close() 2600 2601 # Modify initialization.f 2602 text = open(os.path.join(self.dir_path,'SubProcesses','initialization.f')).read() 2603 text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)') 2604 fsock = open(os.path.join(self.dir_path,'SubProcesses','initialization.f'), 'w') 2605 fsock.write(text) 2606 fsock.close() 2607 2608 2609 self.make_model_symbolic_link()
2610 2611 #=========================================================================== 2612 # generate_subprocess_directory_v4 2613 #===========================================================================
2614 - def generate_subprocess_directory_v4(self, matrix_element, 2615 fortran_model,number):
2616 """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format, 2617 including the necessary matrix.f and nexternal.inc files""" 2618 2619 cwd = os.getcwd() 2620 2621 # Create the directory PN_xx_xxxxx in the specified path 2622 dirpath = os.path.join(self.dir_path, 'SubProcesses', \ 2623 "P%s" % matrix_element.get('processes')[0].shell_string()) 2624 2625 try: 2626 os.mkdir(dirpath) 2627 except os.error as error: 2628 logger.warning(error.strerror + " " + dirpath) 2629 2630 #try: 2631 # os.chdir(dirpath) 2632 #except os.error: 2633 # logger.error('Could not cd to directory %s' % dirpath) 2634 # return 0 2635 2636 logger.info('Creating files in directory %s' % dirpath) 2637 2638 # Extract number of external particles 2639 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2640 2641 # Create the matrix.f file and the nexternal.inc file 2642 filename = pjoin(dirpath,'matrix.f') 2643 calls,ncolor = self.write_matrix_element_v4( 2644 writers.FortranWriter(filename), 2645 matrix_element, 2646 fortran_model) 2647 2648 filename = pjoin(dirpath, 'auto_dsig.f') 2649 self.write_auto_dsig_file(writers.FortranWriter(filename), 2650 matrix_element) 2651 2652 filename = pjoin(dirpath, 'configs.inc') 2653 mapconfigs, s_and_t_channels = self.write_configs_file(\ 2654 writers.FortranWriter(filename), 2655 matrix_element) 2656 2657 filename = pjoin(dirpath, 'nexternal.inc') 2658 self.write_nexternal_file(writers.FortranWriter(filename), 2659 nexternal, ninitial) 2660 2661 filename = pjoin(dirpath, 'leshouche.inc') 2662 self.write_leshouche_file(writers.FortranWriter(filename), 2663 matrix_element) 2664 2665 filename = pjoin(dirpath, 'props.inc') 2666 self.write_props_file(writers.FortranWriter(filename), 2667 matrix_element, 2668 s_and_t_channels) 2669 2670 filename = pjoin(dirpath, 'pmass.inc') 2671 self.write_pmass_file(writers.FortranWriter(filename), 2672 matrix_element) 2673 2674 filename = pjoin(dirpath, 'ngraphs.inc') 2675 self.write_ngraphs_file(writers.FortranWriter(filename), 2676 len(matrix_element.get_all_amplitudes())) 2677 2678 filename = pjoin(dirpath, 'maxamps.inc') 2679 self.write_maxamps_file(writers.FortranWriter(filename), 2680 len(matrix_element.get('diagrams')), 2681 ncolor, 2682 len(matrix_element.get('processes')), 2683 1) 2684 2685 filename = pjoin(dirpath, 'phasespace.inc') 2686 self.write_phasespace_file(writers.FortranWriter(filename), 2687 len(matrix_element.get('diagrams')), 2688 ) 2689 2690 # Generate diagrams 2691 filename = pjoin(dirpath, "matrix.ps") 2692 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 2693 get('diagrams'), 2694 filename, 2695 model=matrix_element.get('processes')[0].\ 2696 get('model'), 2697 amplitude='') 2698 logger.info("Generating Feynman diagrams for " + \ 2699 matrix_element.get('processes')[0].nice_string()) 2700 plot.draw() 2701 2702 #import genps.inc and maxconfigs.inc into Subprocesses 2703 ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False) 2704 #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False) 2705 2706 linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f', 'genps.inc'] 2707 2708 for file in linkfiles: 2709 ln('../%s' % file, starting_dir=cwd) 2710 2711 ln('nexternal.inc', '../../Source', log=False, cwd=dirpath) 2712 ln('leshouche.inc', '../../Source', log=False, cwd=dirpath) 2713 ln('maxamps.inc', '../../Source', log=False, cwd=dirpath) 
2714 ln('phasespace.inc', '../', log=True, cwd=dirpath) 2715 # Return to original PWD 2716 #os.chdir(cwd) 2717 2718 if not calls: 2719 calls = 0 2720 return calls
2721 2722 #=========================================================================== 2723 # write_matrix_element_v4 2724 #===========================================================================
2725 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
2726 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 2727 2728 if not matrix_element.get('processes') or \ 2729 not matrix_element.get('diagrams'): 2730 return 0 2731 2732 if not isinstance(writer, writers.FortranWriter): 2733 raise writers.FortranWriter.FortranWriterError(\ 2734 "writer not FortranWriter") 2735 2736 # Set lowercase/uppercase Fortran code 2737 writers.FortranWriter.downcase = False 2738 2739 replace_dict = {} 2740 2741 # Extract version number and date from VERSION file 2742 info_lines = self.get_mg5_info_lines() 2743 replace_dict['info_lines'] = info_lines 2744 2745 # Extract process info lines 2746 process_lines = self.get_process_info_lines(matrix_element) 2747 replace_dict['process_lines'] = process_lines 2748 2749 # Set proc_id 2750 replace_dict['proc_id'] = proc_id 2751 2752 # Extract number of external particles 2753 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2754 replace_dict['nexternal'] = nexternal 2755 2756 # Extract ncomb 2757 ncomb = matrix_element.get_helicity_combinations() 2758 replace_dict['ncomb'] = ncomb 2759 2760 # Extract helicity lines 2761 helicity_lines = self.get_helicity_lines(matrix_element) 2762 replace_dict['helicity_lines'] = helicity_lines 2763 2764 # Extract overall denominator 2765 # Averaging initial state color, spin, and identical FS particles 2766 den_factor_line = self.get_den_factor_line(matrix_element) 2767 replace_dict['den_factor_line'] = den_factor_line 2768 2769 # Extract ngraphs 2770 ngraphs = matrix_element.get_number_of_amplitudes() 2771 replace_dict['ngraphs'] = ngraphs 2772 2773 # Extract nwavefuncs 2774 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2775 replace_dict['nwavefuncs'] = nwavefuncs 2776 2777 # Extract ncolor 2778 ncolor = max(1, len(matrix_element.get('color_basis'))) 2779 replace_dict['ncolor'] = ncolor 2780 2781 # Extract color data lines 2782 color_data_lines = self.get_color_data_lines(matrix_element) 2783 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2784 2785 # Extract helas calls 2786 helas_calls = fortran_model.get_matrix_element_calls(\ 2787 matrix_element) 2788 2789 replace_dict['helas_calls'] = "\n".join(helas_calls) 2790 2791 # Extract JAMP lines 2792 jamp_lines = self.get_JAMP_lines(matrix_element) 2793 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2794 2795 file = open(os.path.join(_file_path, \ 2796 'iolibs/template_files/%s' % self.matrix_file)).read() 2797 file = file % replace_dict 2798 2799 2800 # Write the file 2801 writer.writelines(file) 2802 2803 return len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor
2804 2805 #=========================================================================== 2806 # write_source_makefile 2807 #===========================================================================
2808 - def write_source_makefile(self, writer):
2809 """Write the Source makefile for MadWeight""" 2810 2811 2812 path = os.path.join(_file_path,'iolibs','template_files','madweight_makefile_source') 2813 set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)' 2814 text = open(path).read() % {'libraries': set_of_lib} 2815 writer.write(text) 2816 2817 return True
2818
2819 - def write_phasespace_file(self, writer, nb_diag):
2820 """Write the phasespace.inc file for MadWeight""" 2821 2822 template = """ include 'maxparticles.inc' 2823 integer max_branches 2824 parameter (max_branches=max_particles-1) 2825 integer max_configs 2826 parameter (max_configs=%(nb_diag)s) 2827 2828 c channel position 2829 integer config_pos,perm_pos 2830 common /to_config/config_pos,perm_pos 2831 2832 """ 2833 2834 writer.write(template % {'nb_diag': nb_diag})
2835 2836 2837 #=========================================================================== 2838 # write_auto_dsig_file 2839 #===========================================================================
2840 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
2841 """Write the auto_dsig.f file for the differential cross section 2842 calculation, includes pdf call information (MadWeight format)""" 2843 2844 if not matrix_element.get('processes') or \ 2845 not matrix_element.get('diagrams'): 2846 return 0 2847 2848 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 2849 2850 if ninitial < 1 or ninitial > 2: 2851 raise writers.FortranWriter.FortranWriterError, \ 2852 """Need ninitial = 1 or 2 to write auto_dsig file""" 2853 2854 replace_dict = {} 2855 2856 # Extract version number and date from VERSION file 2857 info_lines = self.get_mg5_info_lines() 2858 replace_dict['info_lines'] = info_lines 2859 2860 # Extract process info lines 2861 process_lines = self.get_process_info_lines(matrix_element) 2862 replace_dict['process_lines'] = process_lines 2863 2864 # Set proc_id 2865 replace_dict['proc_id'] = proc_id 2866 replace_dict['numproc'] = 1 2867 2868 # Set dsig_line 2869 if ninitial == 1: 2870 # No conversion, since result of decay should be given in GeV 2871 dsig_line = "pd(0)*dsiguu" 2872 else: 2873 # Convert result (in GeV) to pb 2874 dsig_line = "pd(0)*conv*dsiguu" 2875 2876 replace_dict['dsig_line'] = dsig_line 2877 2878 # Extract pdf lines 2879 pdf_vars, pdf_data, pdf_lines = \ 2880 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 2881 replace_dict['pdf_vars'] = pdf_vars 2882 replace_dict['pdf_data'] = pdf_data 2883 replace_dict['pdf_lines'] = pdf_lines 2884 2885 # Lines that differ between subprocess group and regular 2886 if proc_id: 2887 replace_dict['numproc'] = int(proc_id) 2888 replace_dict['passcuts_begin'] = "" 2889 replace_dict['passcuts_end'] = "" 2890 # Set lines for subprocess group version 2891 # Set define_iconfigs_lines 2892 replace_dict['define_subdiag_lines'] = \ 2893 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 2894 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 2895 else: 2896 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 2897 replace_dict['passcuts_end'] = "ENDIF" 2898 replace_dict['define_subdiag_lines'] = "" 2899 2900 file = open(os.path.join(_file_path, \ 2901 'iolibs/template_files/auto_dsig_mw.inc')).read() 2902 2903 file = file % replace_dict 2904 2905 2906 # Write the file 2907 writer.writelines(file)
2908 2909 #=========================================================================== 2910 # write_configs_file 2911 #===========================================================================
2912 - def write_configs_file(self, writer, matrix_element):
2913 """Write the configs.inc file for MadWeight""" 2914 2915 # Extract number of external particles 2916 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2917 2918 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 2919 mapconfigs = [c[0] for c in configs] 2920 model = matrix_element.get('processes')[0].get('model') 2921 return mapconfigs, self.write_configs_file_from_diagrams(writer, 2922 [[c[1]] for c in configs], 2923 mapconfigs, 2924 nexternal, ninitial,matrix_element, model)
2925 2926 #=========================================================================== 2927 # write_run_configs_file 2928 #===========================================================================
2929 - def write_run_config_file(self, writer):
2930 """Write the run_configs.inc file for MadWeight""" 2931 2932 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 2933 text = open(path).read() % {'chanperjob':'5'} 2934 writer.write(text) 2935 return True
2936 2937 #=========================================================================== 2938 # write_configs_file_from_diagrams 2939 #===========================================================================
2940 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 2941 nexternal, ninitial, matrix_element, model):
2942 """Write the actual configs.inc file. 2943 2944 configs is the diagrams corresponding to configs (each 2945 diagrams is a list of corresponding diagrams for all 2946 subprocesses, with None if there is no corresponding diagrams 2947 for a given process). 2948 mapconfigs gives the diagram number for each config. 2949 2950 For s-channels, we need to output one PDG for each subprocess in 2951 the subprocess group, in order to be able to pick the right 2952 one for multiprocesses.""" 2953 2954 lines = [] 2955 2956 particle_dict = matrix_element.get('processes')[0].get('model').\ 2957 get('particle_dict') 2958 2959 s_and_t_channels = [] 2960 2961 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 2962 for config in configs if [d for d in config if d][0].\ 2963 get_vertex_leg_numbers()!=[]] 2964 2965 minvert = min(vert_list) if vert_list!=[] else 0 2966 # Number of subprocesses 2967 nsubprocs = len(configs[0]) 2968 2969 nconfigs = 0 2970 2971 new_pdg = model.get_first_non_pdg() 2972 2973 for iconfig, helas_diags in enumerate(configs): 2974 if any([vert > minvert for vert in 2975 [d for d in helas_diags if d][0].get_vertex_leg_numbers()]): 2976 # Only 3-vertices allowed in configs.inc 2977 continue 2978 nconfigs += 1 2979 2980 # Need s- and t-channels for all subprocesses, including 2981 # those that don't contribute to this config 2982 empty_verts = [] 2983 stchannels = [] 2984 for h in helas_diags: 2985 if h: 2986 # get_s_and_t_channels gives vertices starting from 2987 # final state external particles and working inwards 2988 stchannels.append(h.get('amplitudes')[0].\ 2989 get_s_and_t_channels(ninitial,model,new_pdg)) 2990 else: 2991 stchannels.append((empty_verts, None)) 2992 2993 # For t-channels, just need the first non-empty one 2994 tchannels = [t for s,t in stchannels if t != None][0] 2995 2996 # For s_and_t_channels (to be used later) use only first config 2997 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 2998 tchannels]) 2999 3000 # Make sure empty_verts is same length as real vertices 3001 if any([s for s,t in stchannels]): 3002 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 3003 3004 # Reorganize s-channel vertices to get a list of all 3005 # subprocesses for each vertex 3006 schannels = zip(*[s for s,t in stchannels]) 3007 else: 3008 schannels = [] 3009 3010 allchannels = schannels 3011 if len(tchannels) > 1: 3012 # Write out tchannels only if there are any non-trivial ones 3013 allchannels = schannels + tchannels 3014 3015 # Write out propagators for s-channel and t-channel vertices 3016 3017 #lines.append("# Diagram %d" % (mapconfigs[iconfig])) 3018 # Correspondance between the config and the diagram = amp2 3019 lines.append("* %d %d " % (nconfigs, 3020 mapconfigs[iconfig])) 3021 3022 for verts in allchannels: 3023 if verts in schannels: 3024 vert = [v for v in verts if v][0] 3025 else: 3026 vert = verts 3027 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 3028 last_leg = vert.get('legs')[-1] 3029 line=str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1]) 3030 # lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 3031 # (last_leg.get('number'), nconfigs, len(daughters), 3032 # ",".join([str(d) for d in daughters]))) 3033 3034 if last_leg.get('id') == 21 and 21 not in particle_dict: 3035 # Fake propagator used in multiparticle vertices 3036 mass = 'zero' 3037 width = 'zero' 3038 pow_part = 0 3039 else: 3040 if (last_leg.get('id')!=7): 3041 particle = particle_dict[last_leg.get('id')] 
3042 # Get mass 3043 mass = particle.get('mass') 3044 # Get width 3045 width = particle.get('width') 3046 else : # fake propagator used in multiparticle vertices 3047 mass= 'zero' 3048 width= 'zero' 3049 3050 line=line+" "+mass+" "+width+" " 3051 3052 if verts in schannels: 3053 pdgs = [] 3054 for v in verts: 3055 if v: 3056 pdgs.append(v.get('legs')[-1].get('id')) 3057 else: 3058 pdgs.append(0) 3059 lines.append(line+" S "+str(last_leg.get('id'))) 3060 # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 3061 # (last_leg.get('number'), nconfigs, nsubprocs, 3062 # ",".join([str(d) for d in pdgs]))) 3063 # lines.append("data tprid(%d,%d)/0/" % \ 3064 # (last_leg.get('number'), nconfigs)) 3065 elif verts in tchannels[:-1]: 3066 lines.append(line+" T "+str(last_leg.get('id'))) 3067 # lines.append("data tprid(%d,%d)/%d/" % \ 3068 # (last_leg.get('number'), nconfigs, 3069 # abs(last_leg.get('id')))) 3070 # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 3071 # (last_leg.get('number'), nconfigs, nsubprocs, 3072 # ",".join(['0'] * nsubprocs))) 3073 3074 # Write out number of configs 3075 # lines.append("# Number of configs") 3076 # lines.append("data mapconfig(0)/%d/" % nconfigs) 3077 lines.append(" * ") # a line with just a star indicates this is the end of file 3078 # Write the file 3079 writer.writelines(lines) 3080 3081 return s_and_t_channels
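Unlike the Fortran data statements written for MadEvent, this MadWeight variant emits a plain text table: one header line "* <config> <diagram>" per config, one line per propagator, and a closing " * " line. A small illustration of how a propagator line is assembled (the leg numbers, mass/width names and PDG id below are invented for the example):

# Each propagator line has the form
#   "<propagator leg number> <daughter 1> <daughter 2> <mass> <width>  S|T <PDG id>"
line = "%s %s %s" % (-1, 3, 4)        # propagator -1 built from daughters 3 and 4
line = line + " " + 'wmass' + " " + 'wwidth' + " "
line = line + " S " + str(24)         # s-channel propagator, illustrative PDG id 24
print(line)                           # prints: -1 3 4 wmass wwidth  S 24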
3082
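For orientation, the routine above writes configs.inc as plain text: one "* <config> <diagram>" header per retained configuration, one line per propagator of the form "<new leg> <daughter 1> <daughter 2> <mass> <width> <S|T> <pdg>", and a lone " * " as end-of-file marker. The sketch below only illustrates that layout; the toy channel data is invented, whereas the real method extracts it from HelasDiagram objects.

# Illustrative sketch of the configs.inc layout (toy data, not real HelasDiagrams).
def sketch_configs_lines(channels):
    """channels: list of (diagram_number, [(leg, d1, d2, mass, width, kind, pdg), ...])."""
    lines = []
    for iconf, (idiag, props) in enumerate(channels):
        lines.append("* %d %d " % (iconf + 1, idiag))
        for leg, d1, d2, mass, width, kind, pdg in props:
            lines.append("%d %d %d %s %s %s %d" % (leg, d1, d2, mass, width, kind, pdg))
    lines.append(" * ")   # a line with just a star closes the file
    return lines

# one configuration with a single s-channel propagator built from legs 3 and 4
print("\n".join(sketch_configs_lines([(1, [(-1, 3, 4, 'zmass', 'zwidth', 'S', 23)])])))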
3083 3084 #=============================================================================== 3085 # ProcessExporterFortranME 3086 #=============================================================================== 3087 -class ProcessExporterFortranME(ProcessExporterFortran):
3088 """Class to take care of exporting a set of matrix elements to 3089 MadEvent format.""" 3090 3091 matrix_file = "matrix_madevent_v4.inc" 3092
3093 - def copy_v4template(self, modelname):
3094 """Additional actions needed for setup of Template 3095 """ 3096 3097 super(ProcessExporterFortranME, self).copy_v4template(modelname) 3098 3099 # File created from Template (Different in some child class) 3100 filename = pjoin(self.dir_path,'Source','run_config.inc') 3101 self.write_run_config_file(writers.FortranWriter(filename)) 3102 3103 # The next file are model dependant (due to SLAH convention) 3104 self.model_name = modelname 3105 # Add the symmetry.f 3106 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3107 self.write_symmetry(writers.FortranWriter(filename)) 3108 # 3109 filename = pjoin(self.dir_path,'SubProcesses','addmothers.f') 3110 self.write_addmothers(writers.FortranWriter(filename)) 3111 # Copy the different python file in the Template 3112 self.copy_python_file()
3113 3114 3115 3116 3117 3118 #=========================================================================== 3119 # copy_python_file 3120 #===========================================================================
3121 - def copy_python_file(self):
3122 """copy the python file require for the Template""" 3123 3124 # madevent interface 3125 cp(_file_path+'/interface/madevent_interface.py', 3126 self.dir_path+'/bin/internal/madevent_interface.py') 3127 cp(_file_path+'/interface/extended_cmd.py', 3128 self.dir_path+'/bin/internal/extended_cmd.py') 3129 cp(_file_path+'/interface/common_run_interface.py', 3130 self.dir_path+'/bin/internal/common_run_interface.py') 3131 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3132 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3133 cp(_file_path+'/iolibs/save_load_object.py', 3134 self.dir_path+'/bin/internal/save_load_object.py') 3135 cp(_file_path+'/iolibs/file_writers.py', 3136 self.dir_path+'/bin/internal/file_writers.py') 3137 #model file 3138 cp(_file_path+'../models/check_param_card.py', 3139 self.dir_path+'/bin/internal/check_param_card.py') 3140 3141 #copy all the file present in madevent directory 3142 for name in os.listdir(pjoin(_file_path, 'madevent')): 3143 if name not in ['__init__.py'] and name.endswith('.py'): 3144 cp(_file_path+'/madevent/'+name, self.dir_path+'/bin/internal/') 3145 3146 #madevent file 3147 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3148 cp(_file_path+'/various/lhe_parser.py', 3149 self.dir_path+'/bin/internal/lhe_parser.py') 3150 cp(_file_path+'/various/banner.py', 3151 self.dir_path+'/bin/internal/banner.py') 3152 cp(_file_path+'/various/cluster.py', 3153 self.dir_path+'/bin/internal/cluster.py') 3154 cp(_file_path+'/madevent/combine_runs.py', 3155 self.dir_path+'/bin/internal/combine_runs.py') 3156 # logging configuration 3157 cp(_file_path+'/interface/.mg5_logging.conf', 3158 self.dir_path+'/bin/internal/me5_logging.conf') 3159 cp(_file_path+'/interface/coloring_logging.py', 3160 self.dir_path+'/bin/internal/coloring_logging.py') 3161 # shower card and FO_analyse_card. 3162 # Although not needed, it is imported by banner.py 3163 cp(_file_path+'/various/shower_card.py', 3164 self.dir_path+'/bin/internal/shower_card.py') 3165 cp(_file_path+'/various/FO_analyse_card.py', 3166 self.dir_path+'/bin/internal/FO_analyse_card.py')
3167 3168
3169 - def convert_model_to_mg4(self, model, wanted_lorentz = [], 3170 wanted_couplings = []):
3171 3172 super(ProcessExporterFortranME,self).convert_model_to_mg4(model, 3173 wanted_lorentz, wanted_couplings) 3174 3175 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 3176 try: 3177 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 3178 except OSError as error: 3179 pass 3180 model_path = model.get('modelpath') 3181 # This is not safe if there is a '##' or '-' in the path. 3182 shutil.copytree(model_path, 3183 pjoin(self.dir_path,'bin','internal','ufomodel'), 3184 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 3185 if hasattr(model, 'restrict_card'): 3186 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 3187 'restrict_default.dat') 3188 if isinstance(model.restrict_card, check_param_card.ParamCard): 3189 model.restrict_card.write(out_path) 3190 else: 3191 files.cp(model.restrict_card, out_path)
3192 3193 #=========================================================================== 3194 # export model files 3195 #===========================================================================
3196 - def export_model_files(self, model_path):
3197 """export the model dependent files""" 3198 3199 super(ProcessExporterFortranME,self).export_model_files(model_path) 3200 3201 # Add the routine update_as_param in v4 model 3202 # This is a function created in the UFO 3203 text=""" 3204 subroutine update_as_param() 3205 call setpara('param_card.dat',.false.) 3206 return 3207 end 3208 """ 3209 ff = open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3210 ff.write(text) 3211 ff.close() 3212 3213 # Add the symmetry.f 3214 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3215 self.write_symmetry(writers.FortranWriter(filename), v5=False) 3216 3217 # Modify setrun.f 3218 text = open(pjoin(self.dir_path,'Source','setrun.f')).read() 3219 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3220 fsock = open(pjoin(self.dir_path,'Source','setrun.f'), 'w') 3221 fsock.write(text) 3222 fsock.close() 3223 3224 self.make_model_symbolic_link()
3225 3226 3227 #=========================================================================== 3228 # generate_subprocess_directory_v4 3229 #===========================================================================
3230 - def generate_subprocess_directory_v4(self, matrix_element, 3231 fortran_model, 3232 me_number):
3233 """Generate the Pxxxxx directory for a subprocess in MG4 madevent, 3234 including the necessary matrix.f and various helper files""" 3235 3236 cwd = os.getcwd() 3237 path = pjoin(self.dir_path, 'SubProcesses') 3238 3239 3240 if not self.model: 3241 self.model = matrix_element.get('processes')[0].get('model') 3242 3243 3244 3245 #os.chdir(path) 3246 # Create the directory PN_xx_xxxxx in the specified path 3247 subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string() 3248 try: 3249 os.mkdir(pjoin(path,subprocdir)) 3250 except os.error as error: 3251 logger.warning(error.strerror + " " + subprocdir) 3252 3253 #try: 3254 # os.chdir(subprocdir) 3255 #except os.error: 3256 # logger.error('Could not cd to directory %s' % subprocdir) 3257 # return 0 3258 3259 logger.info('Creating files in directory %s' % subprocdir) 3260 Ppath = pjoin(path, subprocdir) 3261 3262 # Extract number of external particles 3263 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3264 3265 # Add the driver.f 3266 ncomb = matrix_element.get_helicity_combinations() 3267 filename = pjoin(Ppath,'driver.f') 3268 self.write_driver(writers.FortranWriter(filename),ncomb,n_grouped_proc=1, 3269 v5=self.opt['v5_model']) 3270 3271 # Create the matrix.f file, auto_dsig.f file and all inc files 3272 filename = pjoin(Ppath, 'matrix.f') 3273 calls, ncolor = \ 3274 self.write_matrix_element_v4(writers.FortranWriter(filename), 3275 matrix_element, fortran_model, subproc_number = me_number) 3276 3277 filename = pjoin(Ppath, 'auto_dsig.f') 3278 self.write_auto_dsig_file(writers.FortranWriter(filename), 3279 matrix_element) 3280 3281 filename = pjoin(Ppath, 'configs.inc') 3282 mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\ 3283 writers.FortranWriter(filename), 3284 matrix_element) 3285 3286 filename = pjoin(Ppath, 'config_nqcd.inc') 3287 self.write_config_nqcd_file(writers.FortranWriter(filename), 3288 nqcd_list) 3289 3290 filename = pjoin(Ppath, 'config_subproc_map.inc') 3291 self.write_config_subproc_map_file(writers.FortranWriter(filename), 3292 s_and_t_channels) 3293 3294 filename = pjoin(Ppath, 'coloramps.inc') 3295 self.write_coloramps_file(writers.FortranWriter(filename), 3296 mapconfigs, 3297 matrix_element) 3298 3299 filename = pjoin(Ppath, 'get_color.f') 3300 self.write_colors_file(writers.FortranWriter(filename), 3301 matrix_element) 3302 3303 filename = pjoin(Ppath, 'decayBW.inc') 3304 self.write_decayBW_file(writers.FortranWriter(filename), 3305 s_and_t_channels) 3306 3307 filename = pjoin(Ppath, 'dname.mg') 3308 self.write_dname_file(writers.FileWriter(filename), 3309 "P"+matrix_element.get('processes')[0].shell_string()) 3310 3311 filename = pjoin(Ppath, 'iproc.dat') 3312 self.write_iproc_file(writers.FortranWriter(filename), 3313 me_number) 3314 3315 filename = pjoin(Ppath, 'leshouche.inc') 3316 self.write_leshouche_file(writers.FortranWriter(filename), 3317 matrix_element) 3318 3319 filename = pjoin(Ppath, 'maxamps.inc') 3320 self.write_maxamps_file(writers.FortranWriter(filename), 3321 len(matrix_element.get('diagrams')), 3322 ncolor, 3323 len(matrix_element.get('processes')), 3324 1) 3325 3326 filename = pjoin(Ppath, 'mg.sym') 3327 self.write_mg_sym_file(writers.FortranWriter(filename), 3328 matrix_element) 3329 3330 filename = pjoin(Ppath, 'ncombs.inc') 3331 self.write_ncombs_file(writers.FortranWriter(filename), 3332 nexternal) 3333 3334 filename = pjoin(Ppath, 'nexternal.inc') 3335 self.write_nexternal_file(writers.FortranWriter(filename), 3336 nexternal, ninitial) 
3337 3338 filename = pjoin(Ppath, 'ngraphs.inc') 3339 self.write_ngraphs_file(writers.FortranWriter(filename), 3340 len(mapconfigs)) 3341 3342 3343 filename = pjoin(Ppath, 'pmass.inc') 3344 self.write_pmass_file(writers.FortranWriter(filename), 3345 matrix_element) 3346 3347 filename = pjoin(Ppath, 'props.inc') 3348 self.write_props_file(writers.FortranWriter(filename), 3349 matrix_element, 3350 s_and_t_channels) 3351 3352 # Find config symmetries and permutations 3353 symmetry, perms, ident_perms = \ 3354 diagram_symmetry.find_symmetry(matrix_element) 3355 3356 filename = pjoin(Ppath, 'symswap.inc') 3357 self.write_symswap_file(writers.FortranWriter(filename), 3358 ident_perms) 3359 3360 filename = pjoin(Ppath, 'symfact_orig.dat') 3361 self.write_symfact_file(open(filename, 'w'), symmetry) 3362 3363 # Generate diagrams 3364 filename = pjoin(Ppath, "matrix.ps") 3365 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 3366 get('diagrams'), 3367 filename, 3368 model=matrix_element.get('processes')[0].\ 3369 get('model'), 3370 amplitude=True) 3371 logger.info("Generating Feynman diagrams for " + \ 3372 matrix_element.get('processes')[0].nice_string()) 3373 plot.draw() 3374 3375 self.link_files_in_SubProcess(Ppath) 3376 3377 #import nexternal/leshouche in Source 3378 ln(pjoin(Ppath,'nexternal.inc'), pjoin(self.dir_path,'Source'), log=False) 3379 ln(pjoin(Ppath,'leshouche.inc'), pjoin(self.dir_path,'Source'), log=False) 3380 ln(pjoin(Ppath,'maxamps.inc'), pjoin(self.dir_path,'Source'), log=False) 3381 # Return to SubProcesses dir 3382 #os.chdir(os.path.pardir) 3383 3384 # Add subprocess to subproc.mg 3385 filename = pjoin(path, 'subproc.mg') 3386 files.append_to_file(filename, 3387 self.write_subproc, 3388 subprocdir) 3389 3390 # Return to original dir 3391 #os.chdir(cwd) 3392 3393 # Generate info page 3394 gen_infohtml.make_info_html(self.dir_path) 3395 3396 3397 if not calls: 3398 calls = 0 3399 return calls
3400 3436
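A schematic, standard-library-only skeleton of the per-subprocess setup performed above; the real code writes Fortran through writers.FortranWriter and the files helpers, so the empty placeholder files and the print-based warning here are only stand-ins.

import os

def sketch_setup_subproc_dir(subprocesses_path, shell_string):
    # create P<shell_string> next to the other subprocess directories
    subprocdir = "P%s" % shell_string
    ppath = os.path.join(subprocesses_path, subprocdir)
    try:
        os.mkdir(ppath)
    except OSError as error:
        print("warning: %s %s" % (error.strerror, subprocdir))
    # the real exporter writes matrix.f, auto_dsig.f, configs.inc, ... here
    for name in ['matrix.f', 'auto_dsig.f', 'configs.inc', 'nexternal.inc']:
        open(os.path.join(ppath, name), 'w').close()
    # register the directory in subproc.mg (done above via files.append_to_file)
    with open(os.path.join(subprocesses_path, 'subproc.mg'), 'a') as fsock:
        fsock.write(subprocdir + "\n")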
3437 - def finalize_v4_directory(self, matrix_elements, history, makejpg = False, 3438 online = False, compiler=default_compiler):
3439 """Finalize ME v4 directory by creating jpeg diagrams, html 3440 pages,proc_card_mg5.dat and madevent.tar.gz.""" 3441 3442 # indicate that the output type is not grouped 3443 if not isinstance(self, ProcessExporterFortranMEGroup): 3444 self.proc_characteristic['grouped_matrix'] = False 3445 3446 modelname = self.opt['model'] 3447 if modelname == 'mssm' or modelname.startswith('mssm-'): 3448 param_card = pjoin(self.dir_path, 'Cards','param_card.dat') 3449 mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 3450 check_param_card.convert_to_mg5card(param_card, mg5_param) 3451 check_param_card.check_valid_param_card(mg5_param) 3452 3453 # Add the combine_events.f modify param_card path/number of @X 3454 filename = pjoin(self.dir_path,'Source','combine_events.f') 3455 try: 3456 nb_proc =[p.get('id') for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')] 3457 except AttributeError: 3458 nb_proc =[p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')] 3459 nb_proc = len(set(nb_proc)) 3460 self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted 3461 # Write maxconfigs.inc based on max of ME's/subprocess groups 3462 filename = pjoin(self.dir_path,'Source','maxconfigs.inc') 3463 self.write_maxconfigs_file(writers.FortranWriter(filename), 3464 matrix_elements) 3465 3466 # Write maxparticles.inc based on max of ME's/subprocess groups 3467 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 3468 self.write_maxparticles_file(writers.FortranWriter(filename), 3469 matrix_elements) 3470 3471 # Touch "done" file 3472 os.system('touch %s/done' % pjoin(self.dir_path,'SubProcesses')) 3473 3474 # Check for compiler 3475 self.set_compiler(compiler) 3476 self.set_cpp_compiler(compiler['cpp']) 3477 3478 3479 old_pos = os.getcwd() 3480 subpath = pjoin(self.dir_path, 'SubProcesses') 3481 3482 P_dir_list = [proc for proc in os.listdir(subpath) 3483 if os.path.isdir(pjoin(subpath,proc)) and proc[0] == 'P'] 3484 3485 devnull = os.open(os.devnull, os.O_RDWR) 3486 # Convert the poscript in jpg files (if authorize) 3487 if makejpg: 3488 try: 3489 os.remove(pjoin(self.dir_path,'HTML','card.jpg')) 3490 except Exception, error: 3491 pass 3492 logger.info("Generate jpeg diagrams") 3493 for Pdir in P_dir_list: 3494 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 3495 stdout = devnull, cwd=pjoin(subpath, Pdir)) 3496 3497 logger.info("Generate web pages") 3498 # Create the WebPage using perl script 3499 3500 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \ 3501 stdout = devnull,cwd=pjoin(self.dir_path)) 3502 3503 #os.chdir(os.path.pardir) 3504 3505 obj = gen_infohtml.make_info_html(self.dir_path) 3506 3507 if online: 3508 nb_channel = obj.rep_rule['nb_gen_diag'] 3509 open(pjoin(self.dir_path, 'Online'),'w').write(str(nb_channel)) 3510 #add the information to proc_charac 3511 self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag'] 3512 3513 # Write command history as proc_card_mg5 3514 if os.path.isdir(pjoin(self.dir_path,'Cards')): 3515 output_file = pjoin(self.dir_path,'Cards', 'proc_card_mg5.dat') 3516 history.write(output_file) 3517 3518 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3519 stdout = devnull) 3520 3521 #crate the proc_characteristic file 3522 self.create_proc_charac(matrix_elements, history) 3523 3524 # create the run_card 3525 ProcessExporterFortran.finalize_v4_directory(self, matrix_elements, 3526 
history, makejpg, online, compiler) 3527 3528 # Run "make" to generate madevent.tar.gz file 3529 if os.path.exists(pjoin(self.dir_path,'SubProcesses', 'subproc.mg')): 3530 if os.path.exists(pjoin(self.dir_path,'madevent.tar.gz')): 3531 os.remove(pjoin(self.dir_path,'madevent.tar.gz')) 3532 misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')], 3533 stdout = devnull, cwd=self.dir_path) 3534 3535 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3536 stdout = devnull, cwd=self.dir_path)
3537 3538 3539 3540 3541 3542 3543 #return to the initial dir 3544 #os.chdir(old_pos) 3545 3546 #=========================================================================== 3547 # write_matrix_element_v4 3548 #===========================================================================
3549 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 3550 proc_id = "", config_map = [], subproc_number = ""):
3551 """Export a matrix element to a matrix.f file in MG4 madevent format""" 3552 3553 if not matrix_element.get('processes') or \ 3554 not matrix_element.get('diagrams'): 3555 return 0 3556 3557 if not isinstance(writer, writers.FortranWriter): 3558 raise writers.FortranWriter.FortranWriterError(\ 3559 "writer not FortranWriter") 3560 3561 3562 # Set lowercase/uppercase Fortran code 3563 writers.FortranWriter.downcase = False 3564 3565 # The proc prefix is not used for MadEvent output so it can safely be set 3566 # to an empty string. 3567 replace_dict = {'proc_prefix':''} 3568 3569 # Extract helas calls 3570 helas_calls = fortran_model.get_matrix_element_calls(\ 3571 matrix_element) 3572 3573 replace_dict['helas_calls'] = "\n".join(helas_calls) 3574 3575 3576 # Extract version number and date from VERSION file 3577 info_lines = self.get_mg5_info_lines() 3578 replace_dict['info_lines'] = info_lines 3579 3580 # Extract process info lines 3581 process_lines = self.get_process_info_lines(matrix_element) 3582 replace_dict['process_lines'] = process_lines 3583 3584 # Set proc_id 3585 replace_dict['proc_id'] = proc_id 3586 3587 # Extract ncomb 3588 ncomb = matrix_element.get_helicity_combinations() 3589 replace_dict['ncomb'] = ncomb 3590 3591 # Extract helicity lines 3592 helicity_lines = self.get_helicity_lines(matrix_element) 3593 replace_dict['helicity_lines'] = helicity_lines 3594 3595 # Extract IC line 3596 ic_line = self.get_ic_line(matrix_element) 3597 replace_dict['ic_line'] = ic_line 3598 3599 # Extract overall denominator 3600 # Averaging initial state color, spin, and identical FS particles 3601 den_factor_line = self.get_den_factor_line(matrix_element) 3602 replace_dict['den_factor_line'] = den_factor_line 3603 3604 # Extract ngraphs 3605 ngraphs = matrix_element.get_number_of_amplitudes() 3606 replace_dict['ngraphs'] = ngraphs 3607 3608 # Extract ndiags 3609 ndiags = len(matrix_element.get('diagrams')) 3610 replace_dict['ndiags'] = ndiags 3611 3612 # Set define_iconfigs_lines 3613 replace_dict['define_iconfigs_lines'] = \ 3614 """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG 3615 COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG""" 3616 3617 if proc_id: 3618 # Set lines for subprocess group version 3619 # Set define_iconfigs_lines 3620 replace_dict['define_iconfigs_lines'] += \ 3621 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 3622 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 3623 # Set set_amp2_line 3624 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \ 3625 proc_id 3626 else: 3627 # Standard running 3628 # Set set_amp2_line 3629 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT" 3630 3631 # Extract nwavefuncs 3632 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3633 replace_dict['nwavefuncs'] = nwavefuncs 3634 3635 # Extract ncolor 3636 ncolor = max(1, len(matrix_element.get('color_basis'))) 3637 replace_dict['ncolor'] = ncolor 3638 3639 # Extract color data lines 3640 color_data_lines = self.get_color_data_lines(matrix_element) 3641 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3642 3643 3644 # Set the size of Wavefunction 3645 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 3646 replace_dict['wavefunctionsize'] = 18 3647 else: 3648 replace_dict['wavefunctionsize'] = 6 3649 3650 # Extract amp2 lines 3651 amp2_lines = self.get_amp2_lines(matrix_element, config_map) 3652 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 3653 3654 # The JAMP definition depends on the splitting order 3655 
split_orders=matrix_element.get('processes')[0].get('split_orders') 3656 if len(split_orders)>0: 3657 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 3658 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 3659 matrix_element.get('processes')[0],squared_orders) 3660 else: 3661 # Consider the output of a dummy order 'ALL_ORDERS' for which we 3662 # set all amplitude order to weight 1 and only one squared order 3663 # contribution which is of course ALL_ORDERS=2. 3664 squared_orders = [(2,),] 3665 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 3666 replace_dict['chosen_so_configs'] = '.TRUE.' 3667 3668 replace_dict['nAmpSplitOrders']=len(amp_orders) 3669 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 3670 replace_dict['split_order_str_list']=str(split_orders) 3671 replace_dict['nSplitOrders']=max(len(split_orders),1) 3672 amp_so = self.get_split_orders_lines( 3673 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 3674 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 3675 replace_dict['ampsplitorders']='\n'.join(amp_so) 3676 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 3677 3678 3679 # Extract JAMP lines 3680 # If no split_orders then artificiall add one entry called 'ALL_ORDERS' 3681 jamp_lines = self.get_JAMP_lines_split_order(\ 3682 matrix_element,amp_orders,split_order_names= 3683 split_orders if len(split_orders)>0 else ['ALL_ORDERS']) 3684 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 3685 3686 file = open(pjoin(_file_path, \ 3687 'iolibs/template_files/%s' % self.matrix_file)).read() 3688 3689 file = file % replace_dict 3690 3691 # Add the split orders helper functions. 3692 file = file + '\n' + open(pjoin(_file_path, \ 3693 'iolibs/template_files/split_orders_helping_functions.inc'))\ 3694 .read()%replace_dict 3695 # Write the file 3696 writer.writelines(file) 3697 3698 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
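The matrix.f output above is produced by a single '%' interpolation of a Fortran template against replace_dict, and the routine returns the number of non-comment helas calls together with ncolor. A self-contained illustration, where the template text is a toy stand-in for matrix_madevent_v4.inc:

toy_template = """C %(info_lines)s
      INTEGER NGRAPHS
      PARAMETER (NGRAPHS=%(ngraphs)d)
%(helas_calls)s
"""
helas_calls = ['# Amplitude(s) for diagram number 1',
               'CALL IXXXXX(P(0,1),ZERO,NHEL(1),+1*IC(1),W(1,1))']
replace_dict = {'info_lines': 'Generated by MadGraph5_aMC@NLO',
                'ngraphs': 1,
                'helas_calls': '\n'.join(helas_calls)}
print(toy_template % replace_dict)
# the first return value counts only genuine calls, skipping '#' comment lines
print(len([call for call in helas_calls if not call.startswith('#')]))   # -> 1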
3699 3700 #=========================================================================== 3701 # write_auto_dsig_file 3702 #===========================================================================
3703 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
3704 """Write the auto_dsig.f file for the differential cross section 3705 calculation, includes pdf call information""" 3706 3707 if not matrix_element.get('processes') or \ 3708 not matrix_element.get('diagrams'): 3709 return 0 3710 3711 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 3712 self.proc_characteristic['ninitial'] = ninitial 3713 self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal) 3714 3715 if ninitial < 1 or ninitial > 2: 3716 raise writers.FortranWriter.FortranWriterError, \ 3717 """Need ninitial = 1 or 2 to write auto_dsig file""" 3718 3719 replace_dict = {} 3720 3721 # Extract version number and date from VERSION file 3722 info_lines = self.get_mg5_info_lines() 3723 replace_dict['info_lines'] = info_lines 3724 3725 # Extract process info lines 3726 process_lines = self.get_process_info_lines(matrix_element) 3727 replace_dict['process_lines'] = process_lines 3728 3729 # Set proc_id 3730 replace_dict['proc_id'] = proc_id 3731 replace_dict['numproc'] = 1 3732 3733 # Set dsig_line 3734 if ninitial == 1: 3735 # No conversion, since result of decay should be given in GeV 3736 dsig_line = "pd(0)*dsiguu" 3737 else: 3738 # Convert result (in GeV) to pb 3739 dsig_line = "pd(0)*conv*dsiguu" 3740 3741 replace_dict['dsig_line'] = dsig_line 3742 3743 # Extract pdf lines 3744 pdf_vars, pdf_data, pdf_lines = \ 3745 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 3746 replace_dict['pdf_vars'] = pdf_vars 3747 replace_dict['pdf_data'] = pdf_data 3748 replace_dict['pdf_lines'] = pdf_lines 3749 3750 # Lines that differ between subprocess group and regular 3751 if proc_id: 3752 replace_dict['numproc'] = int(proc_id) 3753 replace_dict['passcuts_begin'] = "" 3754 replace_dict['passcuts_end'] = "" 3755 # Set lines for subprocess group version 3756 # Set define_iconfigs_lines 3757 replace_dict['define_subdiag_lines'] = \ 3758 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 3759 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 3760 replace_dict['cutsdone'] = "" 3761 else: 3762 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 3763 replace_dict['passcuts_end'] = "ENDIF" 3764 replace_dict['define_subdiag_lines'] = "" 3765 replace_dict['cutsdone'] = " cutsdone=.false.\n cutspassed=.false." 3766 3767 if not isinstance(self, ProcessExporterFortranMEGroup): 3768 ncomb=matrix_element.get_helicity_combinations() 3769 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 3770 else: 3771 replace_dict['read_write_good_hel'] = "" 3772 3773 3774 3775 file = open(pjoin(_file_path, \ 3776 'iolibs/template_files/auto_dsig_v4.inc')).read() 3777 file = file % replace_dict 3778 3779 # Write the file 3780 writer.writelines(file, context={'read_write_good_hel':True})
3781 3782 #=========================================================================== 3783 # write_coloramps_file 3784 #===========================================================================
3785 - def write_coloramps_file(self, writer, mapconfigs, matrix_element):
3786 """Write the coloramps.inc file for MadEvent""" 3787 3788 lines = self.get_icolamp_lines(mapconfigs, matrix_element, 1) 3789 lines.insert(0, "logical icolamp(%d,%d,1)" % \ 3790 (max(len(matrix_element.get('color_basis').keys()), 1), 3791 len(mapconfigs))) 3792 3793 3794 # Write the file 3795 writer.writelines(lines) 3796 3797 return True
3798 3799 #=========================================================================== 3800 # write_colors_file 3801 #===========================================================================
3802 - def write_colors_file(self, writer, matrix_elements):
3803 """Write the get_color.f file for MadEvent, which returns color 3804 for all particles used in the matrix element.""" 3805 3806 if isinstance(matrix_elements, helas_objects.HelasMatrixElement): 3807 matrix_elements = [matrix_elements] 3808 3809 model = matrix_elements[0].get('processes')[0].get('model') 3810 3811 # We need the both particle and antiparticle wf_ids, since the identity 3812 # depends on the direction of the wf. 3813 wf_ids = set(sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 3814 for wf in d.get('wavefunctions')],[]) \ 3815 for d in me.get('diagrams')], []) \ 3816 for me in matrix_elements], [])) 3817 3818 leg_ids = set(sum([sum([sum([[l.get('id'), 3819 model.get_particle(l.get('id')).get_anti_pdg_code()] \ 3820 for l in p.get_legs_with_decays()], []) \ 3821 for p in me.get('processes')], []) \ 3822 for me in matrix_elements], [])) 3823 particle_ids = sorted(list(wf_ids.union(leg_ids))) 3824 3825 lines = """function get_color(ipdg) 3826 implicit none 3827 integer get_color, ipdg 3828 3829 if(ipdg.eq.%d)then 3830 get_color=%d 3831 return 3832 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color()) 3833 3834 for part_id in particle_ids[1:]: 3835 lines += """else if(ipdg.eq.%d)then 3836 get_color=%d 3837 return 3838 """ % (part_id, model.get_particle(part_id).get_color()) 3839 # Dummy particle for multiparticle vertices with pdg given by 3840 # first code not in the model 3841 lines += """else if(ipdg.eq.%d)then 3842 c This is dummy particle used in multiparticle vertices 3843 get_color=2 3844 return 3845 """ % model.get_first_non_pdg() 3846 lines += """else 3847 write(*,*)'Error: No color given for pdg ',ipdg 3848 get_color=0 3849 return 3850 endif 3851 end 3852 """ 3853 3854 # Write the file 3855 writer.writelines(lines) 3856 3857 return True
3858 3859 #=========================================================================== 3860 # write_config_nqcd_file 3861 #===========================================================================
3862 - def write_config_nqcd_file(self, writer, nqcd_list):
3863 """Write the config_nqcd.inc with the number of QCD couplings 3864 for each config""" 3865 3866 lines = [] 3867 for iconf, n in enumerate(nqcd_list): 3868 lines.append("data nqcd(%d)/%d/" % (iconf+1, n)) 3869 3870 # Write the file 3871 writer.writelines(lines) 3872 3873 return True
3874 3875 #=========================================================================== 3876 # write_maxconfigs_file 3877 #===========================================================================
3878 - def write_maxconfigs_file(self, writer, matrix_elements):
3879 """Write the maxconfigs.inc file for MadEvent""" 3880 3881 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 3882 maxconfigs = max([me.get_num_configs() for me in \ 3883 matrix_elements.get('matrix_elements')]) 3884 else: 3885 maxconfigs = max([me.get_num_configs() for me in matrix_elements]) 3886 3887 lines = "integer lmaxconfigs\n" 3888 lines += "parameter(lmaxconfigs=%d)" % maxconfigs 3889 3890 # Write the file 3891 writer.writelines(lines) 3892 3893 return True
3894 3895 #=========================================================================== 3896 # read_write_good_hel 3897 #===========================================================================
3898 - def read_write_good_hel(self, ncomb):
3899 """return the code to read/write the good_hel common_block""" 3900 3901 convert = {'ncomb' : ncomb} 3902 output = """ 3903 subroutine write_good_hel(stream_id) 3904 implicit none 3905 integer stream_id 3906 INTEGER NCOMB 3907 PARAMETER ( NCOMB=%(ncomb)d) 3908 LOGICAL GOODHEL(NCOMB) 3909 INTEGER NTRY 3910 common/BLOCK_GOODHEL/NTRY,GOODHEL 3911 write(stream_id,*) GOODHEL 3912 return 3913 end 3914 3915 3916 subroutine read_good_hel(stream_id) 3917 implicit none 3918 include 'genps.inc' 3919 integer stream_id 3920 INTEGER NCOMB 3921 PARAMETER ( NCOMB=%(ncomb)d) 3922 LOGICAL GOODHEL(NCOMB) 3923 INTEGER NTRY 3924 common/BLOCK_GOODHEL/NTRY,GOODHEL 3925 read(stream_id,*) GOODHEL 3926 NTRY = MAXTRIES + 1 3927 return 3928 end 3929 3930 subroutine init_good_hel() 3931 implicit none 3932 INTEGER NCOMB 3933 PARAMETER ( NCOMB=%(ncomb)d) 3934 LOGICAL GOODHEL(NCOMB) 3935 INTEGER NTRY 3936 INTEGER I 3937 3938 do i=1,NCOMB 3939 GOODHEL(I) = .false. 3940 enddo 3941 NTRY = 0 3942 end 3943 3944 integer function get_maxsproc() 3945 implicit none 3946 get_maxsproc = 1 3947 return 3948 end 3949 3950 """ % convert 3951 3952 return output
3953 3954 #=========================================================================== 3955 # write_config_subproc_map_file 3956 #===========================================================================
3957 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
3958 """Write a dummy config_subproc.inc file for MadEvent""" 3959 3960 lines = [] 3961 3962 for iconfig in range(len(s_and_t_channels)): 3963 lines.append("DATA CONFSUB(1,%d)/1/" % \ 3964 (iconfig + 1)) 3965 3966 # Write the file 3967 writer.writelines(lines) 3968 3969 return True
3970 3971 #=========================================================================== 3972 # write_configs_file 3973 #===========================================================================
3974 - def write_configs_file(self, writer, matrix_element):
3975 """Write the configs.inc file for MadEvent""" 3976 3977 # Extract number of external particles 3978 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3979 3980 model = matrix_element.get('processes')[0].get('model') 3981 configs = [(i+1, d) for (i, d) in \ 3982 enumerate(matrix_element.get('diagrams'))] 3983 mapconfigs = [c[0] for c in configs] 3984 return mapconfigs, self.write_configs_file_from_diagrams(writer, 3985 [[c[1]] for c in configs], 3986 mapconfigs, 3987 nexternal, ninitial, 3988 model)
3989 3990 #=========================================================================== 3991 # write_run_config_file 3992 #===========================================================================
3993 - def write_run_config_file(self, writer):
3994 """Write the run_configs.inc file for MadEvent""" 3995 3996 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 3997 3998 if self.proc_characteristic['loop_induced']: 3999 job_per_chan = 1 4000 else: 4001 job_per_chan = 5 4002 text = open(path).read() % {'chanperjob': job_per_chan} 4003 writer.write(text) 4004 return True
4005 4006 4007 #=========================================================================== 4008 # write_configs_file_from_diagrams 4009 #===========================================================================
4010 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 4011 nexternal, ninitial, model):
4012 """Write the actual configs.inc file. 4013 4014 configs is the diagrams corresponding to configs (each 4015 diagrams is a list of corresponding diagrams for all 4016 subprocesses, with None if there is no corresponding diagrams 4017 for a given process). 4018 mapconfigs gives the diagram number for each config. 4019 4020 For s-channels, we need to output one PDG for each subprocess in 4021 the subprocess group, in order to be able to pick the right 4022 one for multiprocesses.""" 4023 4024 lines = [] 4025 4026 s_and_t_channels = [] 4027 4028 nqcd_list = [] 4029 4030 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 4031 for config in configs if [d for d in config if d][0].\ 4032 get_vertex_leg_numbers()!=[]] 4033 minvert = min(vert_list) if vert_list!=[] else 0 4034 4035 # Number of subprocesses 4036 nsubprocs = len(configs[0]) 4037 4038 nconfigs = 0 4039 4040 new_pdg = model.get_first_non_pdg() 4041 4042 for iconfig, helas_diags in enumerate(configs): 4043 if any([vert > minvert for vert in 4044 [d for d in helas_diags if d][0].get_vertex_leg_numbers()]): 4045 # Only 3-vertices allowed in configs.inc 4046 continue 4047 nconfigs += 1 4048 4049 # Need s- and t-channels for all subprocesses, including 4050 # those that don't contribute to this config 4051 empty_verts = [] 4052 stchannels = [] 4053 for h in helas_diags: 4054 if h: 4055 # get_s_and_t_channels gives vertices starting from 4056 # final state external particles and working inwards 4057 stchannels.append(h.get('amplitudes')[0].\ 4058 get_s_and_t_channels(ninitial, model, 4059 new_pdg)) 4060 else: 4061 stchannels.append((empty_verts, None)) 4062 4063 # For t-channels, just need the first non-empty one 4064 tchannels = [t for s,t in stchannels if t != None][0] 4065 4066 # For s_and_t_channels (to be used later) use only first config 4067 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 4068 tchannels]) 4069 4070 # Make sure empty_verts is same length as real vertices 4071 if any([s for s,t in stchannels]): 4072 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 4073 4074 # Reorganize s-channel vertices to get a list of all 4075 # subprocesses for each vertex 4076 schannels = zip(*[s for s,t in stchannels]) 4077 else: 4078 schannels = [] 4079 4080 allchannels = schannels 4081 if len(tchannels) > 1: 4082 # Write out tchannels only if there are any non-trivial ones 4083 allchannels = schannels + tchannels 4084 4085 # Write out propagators for s-channel and t-channel vertices 4086 4087 lines.append("# Diagram %d" % (mapconfigs[iconfig])) 4088 # Correspondance between the config and the diagram = amp2 4089 lines.append("data mapconfig(%d)/%d/" % (nconfigs, 4090 mapconfigs[iconfig])) 4091 # Number of QCD couplings in this diagram 4092 nqcd = 0 4093 for h in helas_diags: 4094 if h: 4095 try: 4096 nqcd = h.calculate_orders()['QCD'] 4097 except KeyError: 4098 pass 4099 break 4100 else: 4101 continue 4102 4103 nqcd_list.append(nqcd) 4104 4105 for verts in allchannels: 4106 if verts in schannels: 4107 vert = [v for v in verts if v][0] 4108 else: 4109 vert = verts 4110 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 4111 last_leg = vert.get('legs')[-1] 4112 lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 4113 (last_leg.get('number'), nconfigs, len(daughters), 4114 ",".join([str(d) for d in daughters]))) 4115 if verts in schannels: 4116 pdgs = [] 4117 for v in verts: 4118 if v: 4119 pdgs.append(v.get('legs')[-1].get('id')) 4120 else: 4121 pdgs.append(0) 4122 
lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 4123 (last_leg.get('number'), nconfigs, nsubprocs, 4124 ",".join([str(d) for d in pdgs]))) 4125 lines.append("data tprid(%d,%d)/0/" % \ 4126 (last_leg.get('number'), nconfigs)) 4127 elif verts in tchannels[:-1]: 4128 lines.append("data tprid(%d,%d)/%d/" % \ 4129 (last_leg.get('number'), nconfigs, 4130 abs(last_leg.get('id')))) 4131 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 4132 (last_leg.get('number'), nconfigs, nsubprocs, 4133 ",".join(['0'] * nsubprocs))) 4134 4135 # Write out number of configs 4136 lines.append("# Number of configs") 4137 lines.append("data mapconfig(0)/%d/" % nconfigs) 4138 4139 # Write the file 4140 writer.writelines(lines) 4141 4142 return s_and_t_channels, nqcd_list
4143 4144 #=========================================================================== 4145 # write_decayBW_file 4146 #===========================================================================
4147 - def write_decayBW_file(self, writer, s_and_t_channels):
4148 """Write the decayBW.inc file for MadEvent""" 4149 4150 lines = [] 4151 4152 booldict = {None: "0", True: "1", False: "2"} 4153 4154 for iconf, config in enumerate(s_and_t_channels): 4155 schannels = config[0] 4156 for vertex in schannels: 4157 # For the resulting leg, pick out whether it comes from 4158 # decay or not, as given by the onshell flag 4159 leg = vertex.get('legs')[-1] 4160 lines.append("data gForceBW(%d,%d)/%s/" % \ 4161 (leg.get('number'), iconf + 1, 4162 booldict[leg.get('onshell')])) 4163 4164 # Write the file 4165 writer.writelines(lines) 4166 4167 return True
4168 4169 #=========================================================================== 4170 # write_dname_file 4171 #===========================================================================
4172 - def write_dname_file(self, writer, dir_name):
4173 """Write the dname.mg file for MG4""" 4174 4175 line = "DIRNAME=%s" % dir_name 4176 4177 # Write the file 4178 writer.write(line + "\n") 4179 4180 return True
4181 4182 #=========================================================================== 4183 # write_driver 4184 #===========================================================================
4185 - def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
4186 """Write the SubProcess/driver.f file for MG4""" 4187 4188 path = pjoin(_file_path,'iolibs','template_files','madevent_driver.f') 4189 4190 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4191 card = 'Source/MODEL/MG5_param.dat' 4192 else: 4193 card = 'param_card.dat' 4194 # Requiring each helicity configuration to be probed by 10 points for 4195 # matrix element before using the resulting grid for MC over helicity 4196 # sampling. 4197 # We multiply this by 2 because each grouped subprocess is called at most 4198 # twice for each IMIRROR. 4199 replace_dict = {'param_card_name':card, 4200 'ncomb':ncomb, 4201 'hel_init_points':n_grouped_proc*10*2} 4202 if not v5: 4203 replace_dict['secondparam']=',.true.' 4204 else: 4205 replace_dict['secondparam']='' 4206 4207 text = open(path).read() % replace_dict 4208 4209 writer.write(text) 4210 4211 return True
4212 4213 #=========================================================================== 4214 # write_addmothers 4215 #===========================================================================
4216 - def write_addmothers(self, writer):
4217 """Write the SubProcess/addmothers.f""" 4218 4219 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 4220 4221 text = open(path).read() % {'iconfig': 'diag_number'} 4222 writer.write(text) 4223 4224 return True
4225 4226 4227 #=========================================================================== 4228 # write_combine_events 4229 #===========================================================================
4230 - def write_combine_events(self, writer, nb_proc=100):
4231 """Write the SubProcess/driver.f file for MG4""" 4232 4233 path = pjoin(_file_path,'iolibs','template_files','madevent_combine_events.f') 4234 4235 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4236 card = 'Source/MODEL/MG5_param.dat' 4237 else: 4238 card = 'param_card.dat' 4239 4240 #set maxpup (number of @X in the process card) 4241 4242 text = open(path).read() % {'param_card_name':card, 'maxpup':nb_proc+1} 4243 #the +1 is just a security. This is not needed but I feel(OM) safer with it. 4244 writer.write(text) 4245 4246 return True
4247 4248 4249 #=========================================================================== 4250 # write_symmetry 4251 #===========================================================================
4252 - def write_symmetry(self, writer, v5=True):
4253 """Write the SubProcess/driver.f file for ME""" 4254 4255 path = pjoin(_file_path,'iolibs','template_files','madevent_symmetry.f') 4256 4257 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4258 card = 'Source/MODEL/MG5_param.dat' 4259 else: 4260 card = 'param_card.dat' 4261 text = open(path).read() 4262 4263 if v5: 4264 text = text % {'param_card_name':card, 'setparasecondarg':''} 4265 else: 4266 text = text % {'param_card_name':card, 'setparasecondarg':',.true.'} 4267 writer.write(text) 4268 4269 return True
4270 4271 4272 4273 4274 #=========================================================================== 4275 # write_iproc_file 4276 #===========================================================================
4277 - def write_iproc_file(self, writer, me_number):
4278 """Write the iproc.dat file for MG4""" 4279 line = "%d" % (me_number + 1) 4280 4281 # Write the file 4282 for line_to_write in writer.write_line(line): 4283 writer.write(line_to_write) 4284 return True
4285 4286 #=========================================================================== 4287 # write_mg_sym_file 4288 #===========================================================================
4289 - def write_mg_sym_file(self, writer, matrix_element):
4290 """Write the mg.sym file for MadEvent.""" 4291 4292 lines = [] 4293 4294 # Extract process with all decays included 4295 final_legs = filter(lambda leg: leg.get('state') == True, 4296 matrix_element.get('processes')[0].get_legs_with_decays()) 4297 4298 ninitial = len(filter(lambda leg: leg.get('state') == False, 4299 matrix_element.get('processes')[0].get('legs'))) 4300 4301 identical_indices = {} 4302 4303 # Extract identical particle info 4304 for i, leg in enumerate(final_legs): 4305 if leg.get('id') in identical_indices: 4306 identical_indices[leg.get('id')].append(\ 4307 i + ninitial + 1) 4308 else: 4309 identical_indices[leg.get('id')] = [i + ninitial + 1] 4310 4311 # Remove keys which have only one particle 4312 for key in identical_indices.keys(): 4313 if len(identical_indices[key]) < 2: 4314 del identical_indices[key] 4315 4316 # Write mg.sym file 4317 lines.append(str(len(identical_indices.keys()))) 4318 for key in identical_indices.keys(): 4319 lines.append(str(len(identical_indices[key]))) 4320 for number in identical_indices[key]: 4321 lines.append(str(number)) 4322 4323 # Write the file 4324 writer.writelines(lines) 4325 4326 return True
4327 4328 #=========================================================================== 4329 # write_mg_sym_file 4330 #===========================================================================
4331 - def write_default_mg_sym_file(self, writer):
4332 """Write the mg.sym file for MadEvent.""" 4333 4334 lines = "0" 4335 4336 # Write the file 4337 writer.writelines(lines) 4338 4339 return True
4340 4341 #=========================================================================== 4342 # write_ncombs_file 4343 #===========================================================================
4344 - def write_ncombs_file(self, writer, nexternal):
4345 """Write the ncombs.inc file for MadEvent.""" 4346 4347 # ncomb (used for clustering) is 2^nexternal 4348 file = " integer n_max_cl\n" 4349 file = file + "parameter (n_max_cl=%d)" % (2 ** nexternal) 4350 4351 # Write the file 4352 writer.writelines(file) 4353 4354 return True
4355 4356 #=========================================================================== 4357 # write_processes_file 4358 #===========================================================================
4359 - def write_processes_file(self, writer, subproc_group):
4360 """Write the processes.dat file with info about the subprocesses 4361 in this group.""" 4362 4363 lines = [] 4364 4365 for ime, me in \ 4366 enumerate(subproc_group.get('matrix_elements')): 4367 lines.append("%s %s" % (str(ime+1) + " " * (7-len(str(ime+1))), 4368 ",".join(p.base_string() for p in \ 4369 me.get('processes')))) 4370 if me.get('has_mirror_process'): 4371 mirror_procs = [copy.copy(p) for p in me.get('processes')] 4372 for proc in mirror_procs: 4373 legs = copy.copy(proc.get('legs_with_decays')) 4374 legs.insert(0, legs.pop(1)) 4375 proc.set("legs_with_decays", legs) 4376 lines.append("mirror %s" % ",".join(p.base_string() for p in \ 4377 mirror_procs)) 4378 else: 4379 lines.append("mirror none") 4380 4381 # Write the file 4382 writer.write("\n".join(lines)) 4383 4384 return True
4385 4386 #=========================================================================== 4387 # write_symswap_file 4388 #===========================================================================
4389 - def write_symswap_file(self, writer, ident_perms):
4390 """Write the file symswap.inc for MG4 by comparing diagrams using 4391 the internal matrix element value functionality.""" 4392 4393 lines = [] 4394 4395 # Write out lines for symswap.inc file (used to permute the 4396 # external leg momenta 4397 for iperm, perm in enumerate(ident_perms): 4398 lines.append("data (isym(i,%d),i=1,nexternal)/%s/" % \ 4399 (iperm+1, ",".join([str(i+1) for i in perm]))) 4400 lines.append("data nsym/%d/" % len(ident_perms)) 4401 4402 # Write the file 4403 writer.writelines(lines) 4404 4405 return True
4406 4407 #=========================================================================== 4408 # write_symfact_file 4409 #===========================================================================
4410 - def write_symfact_file(self, writer, symmetry):
4411 """Write the files symfact.dat for MG4 by comparing diagrams using 4412 the internal matrix element value functionality.""" 4413 4414 pos = max(2, int(math.ceil(math.log10(len(symmetry))))) 4415 form = "%"+str(pos)+"r %"+str(pos+1)+"r" 4416 # Write out lines for symswap.inc file (used to permute the 4417 # external leg momenta 4418 lines = [ form %(i+1, s) for i,s in enumerate(symmetry) if s != 0] 4419 # Write the file 4420 writer.write('\n'.join(lines)) 4421 writer.write('\n') 4422 4423 return True
4424 4425 #=========================================================================== 4426 # write_symperms_file 4427 #===========================================================================
4428 - def write_symperms_file(self, writer, perms):
4429 """Write the symperms.inc file for subprocess group, used for 4430 symmetric configurations""" 4431 4432 lines = [] 4433 for iperm, perm in enumerate(perms): 4434 lines.append("data (perms(i,%d),i=1,nexternal)/%s/" % \ 4435 (iperm+1, ",".join([str(i+1) for i in perm]))) 4436 4437 # Write the file 4438 writer.writelines(lines) 4439 4440 return True
4441 4442 #=========================================================================== 4443 # write_subproc 4444 #===========================================================================
4445 - def write_subproc(self, writer, subprocdir):
4446 """Append this subprocess to the subproc.mg file for MG4""" 4447 4448 # Write line to file 4449 writer.write(subprocdir + "\n") 4450 4451 return True
4452
4453 #=============================================================================== 4454 # ProcessExporterFortranMEGroup 4455 #=============================================================================== 4456 -class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
4457 """Class to take care of exporting a set of matrix elements to 4458 MadEvent subprocess group format.""" 4459 4460 matrix_file = "matrix_madevent_group_v4.inc" 4461 4462 #=========================================================================== 4463 # generate_subprocess_directory_v4 4464 #===========================================================================
4465 - def generate_subprocess_directory_v4(self, subproc_group, 4466 fortran_model, 4467 group_number):
4468 """Generate the Pn directory for a subprocess group in MadEvent, 4469 including the necessary matrix_N.f files, configs.inc and various 4470 other helper files""" 4471 4472 assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \ 4473 "subproc_group object not SubProcessGroup" 4474 4475 if not self.model: 4476 self.model = subproc_group.get('matrix_elements')[0].\ 4477 get('processes')[0].get('model') 4478 4479 cwd = os.getcwd() 4480 path = pjoin(self.dir_path, 'SubProcesses') 4481 4482 os.chdir(path) 4483 pathdir = os.getcwd() 4484 4485 # Create the directory PN in the specified path 4486 subprocdir = "P%d_%s" % (subproc_group.get('number'), 4487 subproc_group.get('name')) 4488 try: 4489 os.mkdir(subprocdir) 4490 except os.error as error: 4491 logger.warning(error.strerror + " " + subprocdir) 4492 4493 try: 4494 os.chdir(subprocdir) 4495 except os.error: 4496 logger.error('Could not cd to directory %s' % subprocdir) 4497 return 0 4498 4499 logger.info('Creating files in directory %s' % subprocdir) 4500 4501 # Create the matrix.f files, auto_dsig.f files and all inc files 4502 # for all subprocesses in the group 4503 4504 maxamps = 0 4505 maxflows = 0 4506 tot_calls = 0 4507 4508 matrix_elements = subproc_group.get('matrix_elements') 4509 4510 # Add the driver.f, all grouped ME's must share the same number of 4511 # helicity configuration 4512 ncomb = matrix_elements[0].get_helicity_combinations() 4513 for me in matrix_elements[1:]: 4514 if ncomb!=me.get_helicity_combinations(): 4515 raise MadGraph5Error, "All grouped processes must share the "+\ 4516 "same number of helicity configurations." 4517 4518 filename = 'driver.f' 4519 self.write_driver(writers.FortranWriter(filename),ncomb, 4520 n_grouped_proc=len(matrix_elements), v5=self.opt['v5_model']) 4521 4522 for ime, matrix_element in \ 4523 enumerate(matrix_elements): 4524 filename = 'matrix%d.f' % (ime+1) 4525 calls, ncolor = \ 4526 self.write_matrix_element_v4(writers.FortranWriter(filename), 4527 matrix_element, 4528 fortran_model, 4529 proc_id=str(ime+1), 4530 config_map=subproc_group.get('diagram_maps')[ime], 4531 subproc_number=group_number) 4532 4533 filename = 'auto_dsig%d.f' % (ime+1) 4534 self.write_auto_dsig_file(writers.FortranWriter(filename), 4535 matrix_element, 4536 str(ime+1)) 4537 4538 # Keep track of needed quantities 4539 tot_calls += int(calls) 4540 maxflows = max(maxflows, ncolor) 4541 maxamps = max(maxamps, len(matrix_element.get('diagrams'))) 4542 4543 # Draw diagrams 4544 filename = "matrix%d.ps" % (ime+1) 4545 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 4546 get('diagrams'), 4547 filename, 4548 model = \ 4549 matrix_element.get('processes')[0].\ 4550 get('model'), 4551 amplitude=True) 4552 logger.info("Generating Feynman diagrams for " + \ 4553 matrix_element.get('processes')[0].nice_string()) 4554 plot.draw() 4555 4556 # Extract number of external particles 4557 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 4558 4559 # Generate a list of diagrams corresponding to each configuration 4560 # [[d1, d2, ...,dn],...] 
where 1,2,...,n is the subprocess number 4561 # If a subprocess has no diagrams for this config, the number is 0 4562 subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs') 4563 4564 filename = 'auto_dsig.f' 4565 self.write_super_auto_dsig_file(writers.FortranWriter(filename), 4566 subproc_group) 4567 4568 filename = 'coloramps.inc' 4569 self.write_coloramps_file(writers.FortranWriter(filename), 4570 subproc_diagrams_for_config, 4571 maxflows, 4572 matrix_elements) 4573 4574 filename = 'get_color.f' 4575 self.write_colors_file(writers.FortranWriter(filename), 4576 matrix_elements) 4577 4578 filename = 'config_subproc_map.inc' 4579 self.write_config_subproc_map_file(writers.FortranWriter(filename), 4580 subproc_diagrams_for_config) 4581 4582 filename = 'configs.inc' 4583 nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\ 4584 writers.FortranWriter(filename), 4585 subproc_group, 4586 subproc_diagrams_for_config) 4587 4588 filename = 'config_nqcd.inc' 4589 self.write_config_nqcd_file(writers.FortranWriter(filename), 4590 nqcd_list) 4591 4592 filename = 'decayBW.inc' 4593 self.write_decayBW_file(writers.FortranWriter(filename), 4594 s_and_t_channels) 4595 4596 filename = 'dname.mg' 4597 self.write_dname_file(writers.FortranWriter(filename), 4598 subprocdir) 4599 4600 filename = 'iproc.dat' 4601 self.write_iproc_file(writers.FortranWriter(filename), 4602 group_number) 4603 4604 filename = 'leshouche.inc' 4605 self.write_leshouche_file(writers.FortranWriter(filename), 4606 subproc_group) 4607 4608 filename = 'maxamps.inc' 4609 self.write_maxamps_file(writers.FortranWriter(filename), 4610 maxamps, 4611 maxflows, 4612 max([len(me.get('processes')) for me in \ 4613 matrix_elements]), 4614 len(matrix_elements)) 4615 4616 # Note that mg.sym is not relevant for this case 4617 filename = 'mg.sym' 4618 self.write_default_mg_sym_file(writers.FortranWriter(filename)) 4619 4620 filename = 'mirrorprocs.inc' 4621 self.write_mirrorprocs(writers.FortranWriter(filename), 4622 subproc_group) 4623 4624 filename = 'ncombs.inc' 4625 self.write_ncombs_file(writers.FortranWriter(filename), 4626 nexternal) 4627 4628 filename = 'nexternal.inc' 4629 self.write_nexternal_file(writers.FortranWriter(filename), 4630 nexternal, ninitial) 4631 4632 filename = 'ngraphs.inc' 4633 self.write_ngraphs_file(writers.FortranWriter(filename), 4634 nconfigs) 4635 4636 filename = 'pmass.inc' 4637 self.write_pmass_file(writers.FortranWriter(filename), 4638 matrix_element) 4639 4640 filename = 'props.inc' 4641 self.write_props_file(writers.FortranWriter(filename), 4642 matrix_element, 4643 s_and_t_channels) 4644 4645 filename = 'processes.dat' 4646 files.write_to_file(filename, 4647 self.write_processes_file, 4648 subproc_group) 4649 4650 # Find config symmetries and permutations 4651 symmetry, perms, ident_perms = \ 4652 diagram_symmetry.find_symmetry(subproc_group) 4653 4654 filename = 'symswap.inc' 4655 self.write_symswap_file(writers.FortranWriter(filename), 4656 ident_perms) 4657 4658 filename = 'symfact_orig.dat' 4659 self.write_symfact_file(open(filename, 'w'), symmetry) 4660 4661 filename = 'symperms.inc' 4662 self.write_symperms_file(writers.FortranWriter(filename), 4663 perms) 4664 4665 # Generate jpgs -> pass in make_html 4666 #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl')) 4667 4668 self.link_files_in_SubProcess(pjoin(pathdir,subprocdir)) 4669 4670 #import nexternal/leshouch in Source 4671 ln('nexternal.inc', '../../Source', log=False) 4672 ln('leshouche.inc', '../../Source', log=False) 
4673 ln('maxamps.inc', '../../Source', log=False) 4674 4675 # Return to SubProcesses dir) 4676 os.chdir(pathdir) 4677 4678 # Add subprocess to subproc.mg 4679 filename = 'subproc.mg' 4680 files.append_to_file(filename, 4681 self.write_subproc, 4682 subprocdir) 4683 4684 # Return to original dir 4685 os.chdir(cwd) 4686 4687 if not tot_calls: 4688 tot_calls = 0 4689 return tot_calls
4690 4691 #=========================================================================== 4692 # write_super_auto_dsig_file 4693 #===========================================================================
4694 - def write_super_auto_dsig_file(self, writer, subproc_group):
4695 """Write the auto_dsig.f file selecting between the subprocesses 4696 in subprocess group mode""" 4697 4698 replace_dict = {} 4699 4700 # Extract version number and date from VERSION file 4701 info_lines = self.get_mg5_info_lines() 4702 replace_dict['info_lines'] = info_lines 4703 4704 matrix_elements = subproc_group.get('matrix_elements') 4705 4706 # Extract process info lines 4707 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 4708 matrix_elements]) 4709 replace_dict['process_lines'] = process_lines 4710 4711 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 4712 replace_dict['nexternal'] = nexternal 4713 4714 replace_dict['nsprocs'] = 2*len(matrix_elements) 4715 4716 # Generate dsig definition line 4717 dsig_def_line = "DOUBLE PRECISION " + \ 4718 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 4719 range(len(matrix_elements))]) 4720 replace_dict["dsig_def_line"] = dsig_def_line 4721 4722 # Generate dsig process lines 4723 call_dsig_proc_lines = [] 4724 for iproc in range(len(matrix_elements)): 4725 call_dsig_proc_lines.append(\ 4726 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 4727 {"num": iproc + 1, 4728 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 4729 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 4730 4731 ncomb=matrix_elements[0].get_helicity_combinations() 4732 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 4733 4734 file = open(pjoin(_file_path, \ 4735 'iolibs/template_files/super_auto_dsig_group_v4.inc')).read() 4736 file = file % replace_dict 4737 4738 # Write the file 4739 writer.writelines(file)
4740 4741 #=========================================================================== 4742 # write_mirrorprocs 4743 #===========================================================================
4744 - def write_mirrorprocs(self, writer, subproc_group):
4745 """Write the mirrorprocs.inc file determining which processes have 4746 IS mirror process in subprocess group mode.""" 4747 4748 lines = [] 4749 bool_dict = {True: '.true.', False: '.false.'} 4750 matrix_elements = subproc_group.get('matrix_elements') 4751 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 4752 (len(matrix_elements), 4753 ",".join([bool_dict[me.get('has_mirror_process')] for \ 4754 me in matrix_elements]))) 4755 # Write the file 4756 writer.writelines(lines)
4757 4758 #=========================================================================== 4759 # write_addmothers 4760 #===========================================================================
4761 - def write_addmothers(self, writer):
4762 """Write the SubProcess/addmothers.f""" 4763 4764 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 4765 4766 text = open(path).read() % {'iconfig': 'lconfig'} 4767 writer.write(text) 4768 4769 return True
4770 4771 4772 #=========================================================================== 4773 # write_coloramps_file 4774 #===========================================================================
4775 - def write_coloramps_file(self, writer, diagrams_for_config, maxflows, 4776 matrix_elements):
4777 """Write the coloramps.inc file for MadEvent in Subprocess group mode""" 4778 4779 # Create a map from subprocess (matrix element) to a list of 4780 # the diagrams corresponding to each config 4781 4782 lines = [] 4783 4784 subproc_to_confdiag = {} 4785 for config in diagrams_for_config: 4786 for subproc, diag in enumerate(config): 4787 try: 4788 subproc_to_confdiag[subproc].append(diag) 4789 except KeyError: 4790 subproc_to_confdiag[subproc] = [diag] 4791 4792 for subproc in sorted(subproc_to_confdiag.keys()): 4793 lines.extend(self.get_icolamp_lines(subproc_to_confdiag[subproc], 4794 matrix_elements[subproc], 4795 subproc + 1)) 4796 4797 lines.insert(0, "logical icolamp(%d,%d,%d)" % \ 4798 (maxflows, 4799 len(diagrams_for_config), 4800 len(matrix_elements))) 4801 4802 # Write the file 4803 writer.writelines(lines) 4804 4805 return True
4806 4807 #=========================================================================== 4808 # write_config_subproc_map_file 4809 #===========================================================================
4810 - def write_config_subproc_map_file(self, writer, config_subproc_map):
4811 """Write the config_subproc_map.inc file for subprocess groups""" 4812 4813 lines = [] 4814 # Output only configs that have some corresponding diagrams 4815 iconfig = 0 4816 for config in config_subproc_map: 4817 if set(config) == set([0]): 4818 continue 4819 lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" % \ 4820 (iconfig + 1, len(config), 4821 ",".join([str(i) for i in config]))) 4822 iconfig += 1 4823 # Write the file 4824 writer.writelines(lines) 4825 4826 return True
4827 4828 #=========================================================================== 4829 # read_write_good_hel 4830 #===========================================================================
4831 - def read_write_good_hel(self, ncomb):
4832 """return the code to read/write the good_hel common_block""" 4833 4834 convert = {'ncomb' : ncomb} 4835 4836 output = """ 4837 subroutine write_good_hel(stream_id) 4838 implicit none 4839 integer stream_id 4840 INTEGER NCOMB 4841 PARAMETER ( NCOMB=%(ncomb)d) 4842 LOGICAL GOODHEL(NCOMB, 2) 4843 INTEGER NTRY(2) 4844 common/BLOCK_GOODHEL/NTRY,GOODHEL 4845 write(stream_id,*) GOODHEL 4846 return 4847 end 4848 4849 4850 subroutine read_good_hel(stream_id) 4851 implicit none 4852 include 'genps.inc' 4853 integer stream_id 4854 INTEGER NCOMB 4855 PARAMETER ( NCOMB=%(ncomb)d) 4856 LOGICAL GOODHEL(NCOMB, 2) 4857 INTEGER NTRY(2) 4858 common/BLOCK_GOODHEL/NTRY,GOODHEL 4859 read(stream_id,*) GOODHEL 4860 NTRY(1) = MAXTRIES + 1 4861 NTRY(2) = MAXTRIES + 1 4862 return 4863 end 4864 4865 subroutine init_good_hel() 4866 implicit none 4867 INTEGER NCOMB 4868 PARAMETER ( NCOMB=%(ncomb)d) 4869 LOGICAL GOODHEL(NCOMB, 2) 4870 INTEGER NTRY(2) 4871 INTEGER I 4872 4873 do i=1,NCOMB 4874 GOODHEL(I,1) = .false. 4875 GOODHEL(I,2) = .false. 4876 enddo 4877 NTRY(1) = 0 4878 NTRY(2) = 0 4879 end 4880 4881 integer function get_maxsproc() 4882 implicit none 4883 include 'maxamps.inc' 4884 4885 get_maxsproc = maxsproc 4886 return 4887 end 4888 4889 """ % convert 4890 4891 return output
4892 4893 4894 4895 #=========================================================================== 4896 # write_configs_file 4897 #===========================================================================
4898 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
4899 """Write the configs.inc file with topology information for a 4900 subprocess group. Use the first subprocess with a diagram for each 4901 configuration.""" 4902 4903 matrix_elements = subproc_group.get('matrix_elements') 4904 model = matrix_elements[0].get('processes')[0].get('model') 4905 4906 diagrams = [] 4907 config_numbers = [] 4908 for iconfig, config in enumerate(diagrams_for_config): 4909 # Check if any diagrams correspond to this config 4910 if set(config) == set([0]): 4911 continue 4912 subproc_diags = [] 4913 for s,d in enumerate(config): 4914 if d: 4915 subproc_diags.append(matrix_elements[s].\ 4916 get('diagrams')[d-1]) 4917 else: 4918 subproc_diags.append(None) 4919 diagrams.append(subproc_diags) 4920 config_numbers.append(iconfig + 1) 4921 4922 # Extract number of external particles 4923 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 4924 4925 return len(diagrams), \ 4926 self.write_configs_file_from_diagrams(writer, diagrams, 4927 config_numbers, 4928 nexternal, ninitial, 4929 model)
4930 4931 #=========================================================================== 4932 # write_run_configs_file 4933 #===========================================================================
4934 - def write_run_config_file(self, writer):
4935 """Write the run_configs.inc file for MadEvent""" 4936 4937 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 4938 if self.proc_characteristic['loop_induced']: 4939 job_per_chan = 1 4940 else: 4941 job_per_chan = 2 4942 text = open(path).read() % {'chanperjob':job_per_chan} 4943 writer.write(text) 4944 return True
4945 4946 4947 #=========================================================================== 4948 # write_leshouche_file 4949 #===========================================================================
4950 - def write_leshouche_file(self, writer, subproc_group):
4951 """Write the leshouche.inc file for MG4""" 4952 4953 all_lines = [] 4954 4955 for iproc, matrix_element in \ 4956 enumerate(subproc_group.get('matrix_elements')): 4957 all_lines.extend(self.get_leshouche_lines(matrix_element, 4958 iproc)) 4959 4960 # Write the file 4961 writer.writelines(all_lines) 4962 4963 return True
4964 4965 4966
4967 - def finalize_v4_directory(self,*args, **opts):
4968 
4969 
4970 
4971 super(ProcessExporterFortranMEGroup, self).finalize_v4_directory(*args, 
4972 **opts) 
4973 # ensure that the grouping information is set to the correct value 
4974 self.proc_characteristic['grouped_matrix'] = True
4975 4976 4977 #=============================================================================== 4978 # UFO_model_to_mg4 4979 #=============================================================================== 4980 4981 python_to_fortran = lambda x: parsers.UFOExpressionParserFortran().parse(x)
4982 4983 -class UFO_model_to_mg4(object):
4984 """ A converter of the UFO-MG5 Model to the MG4 format """ 4985 4986 # The list below shows the only variables the user is allowed to change by 4987 # himself for each PS point. If he changes any other, then calling 4988 # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not 4989 # correctly account for the change. 4990 PS_dependent_key = ['aS','MU_R'] 4991 mp_complex_format = 'complex*32' 4992 mp_real_format = 'real*16' 4993 # Warning, it is crucial none of the couplings/parameters of the model 4994 # starts with this prefix. I should add a check for this. 4995 # You can change it as the global variable to check_param_card.ParamCard 4996 mp_prefix = check_param_card.ParamCard.mp_prefix 4997
4998 - def __init__(self, model, output_path, opt=None):
4999 """ initialization of the objects """ 5000 5001 self.model = model 5002 self.model_name = model['name'] 5003 self.dir_path = output_path 5004 if opt: 5005 self.opt = opt 5006 else: 5007 self.opt = {'complex_mass': False, 'export_format': 'madevent', 'mp':True, 5008 'loop_induced': False} 5009 5010 self.coups_dep = [] # (name, expression, type) 5011 self.coups_indep = [] # (name, expression, type) 5012 self.params_dep = [] # (name, expression, type) 5013 self.params_indep = [] # (name, expression, type) 5014 self.params_ext = [] # external parameter 5015 self.p_to_f = parsers.UFOExpressionParserFortran() 5016 self.mp_p_to_f = parsers.UFOExpressionParserMPFortran()
5017 
5018 - def pass_parameter_to_case_insensitive(self):
5019 """modify the parameter if some of them are identical up to the case""" 5020 5021 lower_dict={} 5022 duplicate = set() 5023 keys = self.model['parameters'].keys() 5024 for key in keys: 5025 for param in self.model['parameters'][key]: 5026 lower_name = param.name.lower() 5027 if not lower_name: 5028 continue 5029 try: 5030 lower_dict[lower_name].append(param) 5031 except KeyError,error: 5032 lower_dict[lower_name] = [param] 5033 else: 5034 duplicate.add(lower_name) 5035 logger.debug('%s is define both as lower case and upper case.' 5036 % lower_name) 5037 if not duplicate: 5038 return 5039 5040 re_expr = r'''\b(%s)\b''' 5041 to_change = [] 5042 change={} 5043 for value in duplicate: 5044 for i, var in enumerate(lower_dict[value]): 5045 to_change.append(var.name) 5046 new_name = '%s%s' % (var.name.lower(), 5047 ('__%d'%(i+1) if i>0 else '')) 5048 change[var.name] = new_name 5049 var.name = new_name 5050 5051 # Apply the modification to the map_CTcoup_CTparam of the model 5052 # if it has one (giving for each coupling the CT parameters whcih 5053 # are necessary and which should be exported to the model. 5054 if hasattr(self.model,'map_CTcoup_CTparam'): 5055 for coup, ctparams in self.model.map_CTcoup_CTparam: 5056 for i, ctparam in enumerate(ctparams): 5057 try: 5058 self.model.map_CTcoup_CTparam[coup][i] = change[ctparam] 5059 except KeyError: 5060 pass 5061 5062 replace = lambda match_pattern: change[match_pattern.groups()[0]] 5063 rep_pattern = re.compile(re_expr % '|'.join(to_change)) 5064 5065 # change parameters 5066 for key in keys: 5067 if key == ('external',): 5068 continue 5069 for param in self.model['parameters'][key]: 5070 param.expr = rep_pattern.sub(replace, param.expr) 5071 5072 # change couplings 5073 for key in self.model['couplings'].keys(): 5074 for coup in self.model['couplings'][key]: 5075 coup.expr = rep_pattern.sub(replace, coup.expr) 5076 5077 # change mass/width 5078 for part in self.model['particles']: 5079 if str(part.get('mass')) in to_change: 5080 part.set('mass', rep_pattern.sub(replace, str(part.get('mass')))) 5081 if str(part.get('width')) in to_change: 5082 part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
5083
5084 - def refactorize(self, wanted_couplings = []):
5085 """modify the couplings to fit with MG4 convention """ 5086 5087 # Keep only separation in alphaS 5088 keys = self.model['parameters'].keys() 5089 keys.sort(key=len) 5090 for key in keys: 5091 to_add = [o for o in self.model['parameters'][key] if o.name] 5092 5093 if key == ('external',): 5094 self.params_ext += to_add 5095 elif any([(k in key) for k in self.PS_dependent_key]): 5096 self.params_dep += to_add 5097 else: 5098 self.params_indep += to_add 5099 # same for couplings 5100 keys = self.model['couplings'].keys() 5101 keys.sort(key=len) 5102 for key, coup_list in self.model['couplings'].items(): 5103 if any([(k in key) for k in self.PS_dependent_key]): 5104 self.coups_dep += [c for c in coup_list if 5105 (not wanted_couplings or c.name in \ 5106 wanted_couplings)] 5107 else: 5108 self.coups_indep += [c for c in coup_list if 5109 (not wanted_couplings or c.name in \ 5110 wanted_couplings)] 5111 5112 # MG4 use G and not aS as it basic object for alphas related computation 5113 #Pass G in the independant list 5114 if 'G' in self.params_dep: 5115 index = self.params_dep.index('G') 5116 G = self.params_dep.pop(index) 5117 # G.expr = '2*cmath.sqrt(as*pi)' 5118 # self.params_indep.insert(0, self.params_dep.pop(index)) 5119 # No need to add it if not defined 5120 5121 if 'aS' not in self.params_ext: 5122 logger.critical('aS not define as external parameter adding it!') 5123 #self.model['parameters']['aS'] = base_objects.ParamCardVariable('aS', 0.138,'DUMMY',(1,)) 5124 self.params_indep.append( base_objects. ModelVariable('aS', '0.138','real')) 5125 self.params_indep.append( base_objects. ModelVariable('G', '4.1643','real'))
5126 - def build(self, wanted_couplings = [], full=True):
5127 """modify the couplings to fit with MG4 convention and creates all the 5128 different files""" 5129 5130 self.pass_parameter_to_case_insensitive() 5131 self.refactorize(wanted_couplings) 5132 5133 # write the files 5134 if full: 5135 if wanted_couplings: 5136 # extract the wanted ct parameters 5137 self.extract_needed_CTparam(wanted_couplings=wanted_couplings) 5138 self.write_all()
5139 5140
5141 - def open(self, name, comment='c', format='default'):
5142 """ Open the file name in the correct directory and with a valid 5143 header.""" 5144 5145 file_path = pjoin(self.dir_path, name) 5146 5147 if format == 'fortran': 5148 fsock = writers.FortranWriter(file_path, 'w') 5149 else: 5150 fsock = open(file_path, 'w') 5151 5152 file.writelines(fsock, comment * 77 + '\n') 5153 file.writelines(fsock,'%(comment)s written by the UFO converter\n' % \ 5154 {'comment': comment + (6 - len(comment)) * ' '}) 5155 file.writelines(fsock, comment * 77 + '\n\n') 5156 return fsock
5157 5158
5159 - def write_all(self):
5160 """ write all the files """ 5161 #write the part related to the external parameter 5162 self.create_ident_card() 5163 self.create_param_read() 5164 5165 #write the definition of the parameter 5166 self.create_input() 5167 self.create_intparam_def(dp=True,mp=False) 5168 if self.opt['mp']: 5169 self.create_intparam_def(dp=False,mp=True) 5170 5171 # definition of the coupling. 5172 self.create_actualize_mp_ext_param_inc() 5173 self.create_coupl_inc() 5174 self.create_write_couplings() 5175 self.create_couplings() 5176 5177 # the makefile 5178 self.create_makeinc() 5179 self.create_param_write() 5180 5181 # The model functions 5182 self.create_model_functions_inc() 5183 self.create_model_functions_def() 5184 5185 # The param_card.dat 5186 self.create_param_card() 5187 5188 5189 # All the standard files 5190 self.copy_standard_file()
5191 5192 ############################################################################ 5193 ## ROUTINE CREATING THE FILES ############################################ 5194 ############################################################################ 5195
5196 - def copy_standard_file(self):
5197 """Copy the standard files for the fortran model.""" 5198 5199 #copy the library files 5200 file_to_link = ['formats.inc','printout.f', \ 5201 'rw_para.f', 'testprog.f'] 5202 5203 for filename in file_to_link: 5204 cp( MG5DIR + '/models/template_files/fortran/' + filename, \ 5205 self.dir_path) 5206 5207 file = open(os.path.join(MG5DIR,\ 5208 'models/template_files/fortran/rw_para.f')).read() 5209 5210 includes=["include \'coupl.inc\'","include \'input.inc\'", 5211 "include \'model_functions.inc\'"] 5212 if self.opt['mp']: 5213 includes.extend(["include \'mp_coupl.inc\'","include \'mp_input.inc\'"]) 5214 # In standalone and madloop we do no use the compiled param card but 5215 # still parse the .dat one so we must load it. 5216 if self.opt['loop_induced']: 5217 #loop induced follow MadEvent way to handle the card. 5218 load_card = '' 5219 lha_read_filename='lha_read.f' 5220 elif self.opt['export_format'] in ['madloop','madloop_optimized', 'madloop_matchbox']: 5221 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5222 lha_read_filename='lha_read_mp.f' 5223 elif self.opt['export_format'].startswith('standalone') or self.opt['export_format'] in ['madweight']\ 5224 or self.opt['export_format'].startswith('matchbox'): 5225 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5226 lha_read_filename='lha_read.f' 5227 else: 5228 load_card = '' 5229 lha_read_filename='lha_read.f' 5230 cp( MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \ 5231 os.path.join(self.dir_path,'lha_read.f')) 5232 5233 file=file%{'includes':'\n '.join(includes), 5234 'load_card':load_card} 5235 writer=open(os.path.join(self.dir_path,'rw_para.f'),'w') 5236 writer.writelines(file) 5237 writer.close() 5238 5239 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 5240 or self.opt['loop_induced']: 5241 cp( MG5DIR + '/models/template_files/fortran/makefile_madevent', 5242 self.dir_path + '/makefile') 5243 if self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']: 5244 path = pjoin(self.dir_path, 'makefile') 5245 text = open(path).read() 5246 text = text.replace('madevent','aMCatNLO') 5247 open(path, 'w').writelines(text) 5248 elif self.opt['export_format'] in ['standalone', 'standalone_msP','standalone_msF', 5249 'madloop','madloop_optimized', 'standalone_rw', 'madweight','matchbox','madloop_matchbox']: 5250 cp( MG5DIR + '/models/template_files/fortran/makefile_standalone', 5251 self.dir_path + '/makefile') 5252 #elif self.opt['export_format'] in []: 5253 #pass 5254 else: 5255 raise MadGraph5Error('Unknown format')
5256
5257 - def create_coupl_inc(self):
5258 """ write coupling.inc """ 5259 5260 fsock = self.open('coupl.inc', format='fortran') 5261 if self.opt['mp']: 5262 mp_fsock = self.open('mp_coupl.inc', format='fortran') 5263 mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\ 5264 format='fortran') 5265 5266 # Write header 5267 header = """double precision G 5268 common/strong/ G 5269 5270 double complex gal(2) 5271 common/weak/ gal 5272 5273 double precision MU_R 5274 common/rscale/ MU_R 5275 5276 double precision Nf 5277 parameter(Nf=%d) 5278 """ % self.model.get_nflav() 5279 5280 fsock.writelines(header) 5281 5282 if self.opt['mp']: 5283 header = """%(real_mp_format)s %(mp_prefix)sG 5284 common/MP_strong/ %(mp_prefix)sG 5285 5286 %(complex_mp_format)s %(mp_prefix)sgal(2) 5287 common/MP_weak/ %(mp_prefix)sgal 5288 5289 %(complex_mp_format)s %(mp_prefix)sMU_R 5290 common/MP_rscale/ %(mp_prefix)sMU_R 5291 5292 """ 5293 5294 5295 5296 5297 mp_fsock.writelines(header%{'real_mp_format':self.mp_real_format, 5298 'complex_mp_format':self.mp_complex_format, 5299 'mp_prefix':self.mp_prefix}) 5300 mp_fsock_same_name.writelines(header%{'real_mp_format':self.mp_real_format, 5301 'complex_mp_format':self.mp_complex_format, 5302 'mp_prefix':''}) 5303 5304 # Write the Mass definition/ common block 5305 masses = set() 5306 widths = set() 5307 if self.opt['complex_mass']: 5308 complex_mass = set() 5309 5310 for particle in self.model.get('particles'): 5311 #find masses 5312 one_mass = particle.get('mass') 5313 if one_mass.lower() != 'zero': 5314 masses.add(one_mass) 5315 5316 # find width 5317 one_width = particle.get('width') 5318 if one_width.lower() != 'zero': 5319 widths.add(one_width) 5320 if self.opt['complex_mass'] and one_mass.lower() != 'zero': 5321 complex_mass.add('CMASS_%s' % one_mass) 5322 5323 if masses: 5324 fsock.writelines('double precision '+','.join(masses)+'\n') 5325 fsock.writelines('common/masses/ '+','.join(masses)+'\n\n') 5326 if self.opt['mp']: 5327 mp_fsock_same_name.writelines(self.mp_real_format+' '+\ 5328 ','.join(masses)+'\n') 5329 mp_fsock_same_name.writelines('common/MP_masses/ '+\ 5330 ','.join(masses)+'\n\n') 5331 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5332 self.mp_prefix+m for m in masses])+'\n') 5333 mp_fsock.writelines('common/MP_masses/ '+\ 5334 ','.join([self.mp_prefix+m for m in masses])+'\n\n') 5335 5336 if widths: 5337 fsock.writelines('double precision '+','.join(widths)+'\n') 5338 fsock.writelines('common/widths/ '+','.join(widths)+'\n\n') 5339 if self.opt['mp']: 5340 mp_fsock_same_name.writelines(self.mp_real_format+' '+\ 5341 ','.join(widths)+'\n') 5342 mp_fsock_same_name.writelines('common/MP_widths/ '+\ 5343 ','.join(widths)+'\n\n') 5344 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5345 self.mp_prefix+w for w in widths])+'\n') 5346 mp_fsock.writelines('common/MP_widths/ '+\ 5347 ','.join([self.mp_prefix+w for w in widths])+'\n\n') 5348 5349 # Write the Couplings 5350 coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep] 5351 fsock.writelines('double complex '+', '.join(coupling_list)+'\n') 5352 fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n') 5353 if self.opt['mp']: 5354 mp_fsock_same_name.writelines(self.mp_complex_format+' '+\ 5355 ','.join(coupling_list)+'\n') 5356 mp_fsock_same_name.writelines('common/MP_couplings/ '+\ 5357 ','.join(coupling_list)+'\n\n') 5358 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5359 self.mp_prefix+c for c in coupling_list])+'\n') 5360 mp_fsock.writelines('common/MP_couplings/ '+\ 
5361 ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n') 5362 5363 # Write complex mass for complex mass scheme (if activated) 5364 if self.opt['complex_mass'] and complex_mass: 5365 fsock.writelines('double complex '+', '.join(complex_mass)+'\n') 5366 fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n') 5367 if self.opt['mp']: 5368 mp_fsock_same_name.writelines(self.mp_complex_format+' '+\ 5369 ','.join(complex_mass)+'\n') 5370 mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\ 5371 ','.join(complex_mass)+'\n\n') 5372 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5373 self.mp_prefix+cm for cm in complex_mass])+'\n') 5374 mp_fsock.writelines('common/MP_complex_mass/ '+\ 5375 ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
5376
5377 - def create_write_couplings(self):
5378 """ write the file coupl_write.inc """ 5379 5380 fsock = self.open('coupl_write.inc', format='fortran') 5381 5382 fsock.writelines("""write(*,*) ' Couplings of %s' 5383 write(*,*) ' ---------------------------------' 5384 write(*,*) ' '""" % self.model_name) 5385 def format(coupl): 5386 return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}
5387 5388 # Write the Couplings 5389 lines = [format(coupl) for coupl in self.coups_dep + self.coups_indep] 5390 fsock.writelines('\n'.join(lines)) 5391 5392
5393 - def create_input(self):
5394 """create input.inc containing the definition of the parameters""" 5395 5396 fsock = self.open('input.inc', format='fortran') 5397 if self.opt['mp']: 5398 mp_fsock = self.open('mp_input.inc', format='fortran') 5399 5400 #find mass/ width since they are already define 5401 already_def = set() 5402 for particle in self.model.get('particles'): 5403 already_def.add(particle.get('mass').lower()) 5404 already_def.add(particle.get('width').lower()) 5405 if self.opt['complex_mass']: 5406 already_def.add('cmass_%s' % particle.get('mass').lower()) 5407 5408 is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \ 5409 name.lower() not in already_def 5410 5411 real_parameters = [param.name for param in self.params_dep + 5412 self.params_indep if param.type == 'real' 5413 and is_valid(param.name)] 5414 5415 real_parameters += [param.name for param in self.params_ext 5416 if param.type == 'real'and 5417 is_valid(param.name)] 5418 5419 # check the parameter is a CT parameter or not 5420 # if yes, just use the needed ones 5421 real_parameters = [param for param in real_parameters \ 5422 if self.check_needed_param(param)] 5423 5424 fsock.writelines('double precision '+','.join(real_parameters)+'\n') 5425 fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n') 5426 if self.opt['mp']: 5427 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5428 self.mp_prefix+p for p in real_parameters])+'\n') 5429 mp_fsock.writelines('common/MP_params_R/ '+','.join([\ 5430 self.mp_prefix+p for p in real_parameters])+'\n\n') 5431 5432 complex_parameters = [param.name for param in self.params_dep + 5433 self.params_indep if param.type == 'complex' and 5434 is_valid(param.name)] 5435 5436 # check the parameter is a CT parameter or not 5437 # if yes, just use the needed ones 5438 complex_parameters = [param for param in complex_parameters \ 5439 if self.check_needed_param(param)] 5440 5441 if complex_parameters: 5442 fsock.writelines('double complex '+','.join(complex_parameters)+'\n') 5443 fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n') 5444 if self.opt['mp']: 5445 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5446 self.mp_prefix+p for p in complex_parameters])+'\n') 5447 mp_fsock.writelines('common/MP_params_C/ '+','.join([\ 5448 self.mp_prefix+p for p in complex_parameters])+'\n\n')
5449
5450 - def check_needed_param(self, param):
5451 """ Returns whether the parameter in argument is needed for this 5452 specific computation or not.""" 5453 5454 # If this is a leading order model or if there was no CT parameter 5455 # employed in this NLO model, one can directly return that the 5456 # parameter is needed since only CTParameters are filtered. 5457 if not hasattr(self, 'allCTparameters') or \ 5458 self.allCTparameters is None or self.usedCTparameters is None or \ 5459 len(self.allCTparameters)==0: 5460 return True 5461 5462 # We must allow the conjugate shorthand for the complex parameter as 5463 # well so we check wether either the parameter name or its name with 5464 # 'conjg__' substituted with '' is present in the list. 5465 # This is acceptable even if some parameter had an original name 5466 # including 'conjg__' in it, because at worst we export a parameter 5467 # was not needed. 5468 param = param.lower() 5469 cjg_param = param.replace('conjg__','',1) 5470 5471 # First make sure it is a CTparameter 5472 if param not in self.allCTparameters and \ 5473 cjg_param not in self.allCTparameters: 5474 return True 5475 5476 # Now check if it is in the list of CTparameters actually used 5477 return (param in self.usedCTparameters or \ 5478 cjg_param in self.usedCTparameters)
5479
5480 - def extract_needed_CTparam(self,wanted_couplings=[]):
5481 """ Extract what are the needed CT parameters given the wanted_couplings""" 5482 5483 if not hasattr(self.model,'map_CTcoup_CTparam') or not wanted_couplings: 5484 # Setting these lists to none wil disable the filtering in 5485 # check_needed_param 5486 self.allCTparameters = None 5487 self.usedCTparameters = None 5488 return 5489 5490 # All CTparameters appearin in all CT couplings 5491 allCTparameters=self.model.map_CTcoup_CTparam.values() 5492 # Define in this class the list of all CT parameters 5493 self.allCTparameters=list(\ 5494 set(itertools.chain.from_iterable(allCTparameters))) 5495 5496 # All used CT couplings 5497 w_coupls = [coupl.lower() for coupl in wanted_couplings] 5498 allUsedCTCouplings = [coupl for coupl in 5499 self.model.map_CTcoup_CTparam.keys() if coupl.lower() in w_coupls] 5500 5501 # Now define the list of all CT parameters that are actually used 5502 self.usedCTparameters=list(\ 5503 set(itertools.chain.from_iterable([ 5504 self.model.map_CTcoup_CTparam[coupl] for coupl in allUsedCTCouplings 5505 ]))) 5506 5507 # Now at last, make these list case insensitive 5508 self.allCTparameters = [ct.lower() for ct in self.allCTparameters] 5509 self.usedCTparameters = [ct.lower() for ct in self.usedCTparameters]
5510
5511 - def create_intparam_def(self, dp=True, mp=False):
5512 """ create intparam_definition.inc setting the internal parameters. 5513 Output the double precision and/or the multiple precision parameters 5514 depending on the parameters dp and mp. If mp only, then the file names 5515 get the 'mp_' prefix. 5516 """ 5517 5518 fsock = self.open('%sintparam_definition.inc'% 5519 ('mp_' if mp and not dp else ''), format='fortran') 5520 5521 fsock.write_comments(\ 5522 "Parameters that should not be recomputed event by event.\n") 5523 fsock.writelines("if(readlha) then\n") 5524 if dp: 5525 fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n") 5526 if mp: 5527 fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n") 5528 5529 for param in self.params_indep: 5530 if param.name == 'ZERO': 5531 continue 5532 # check whether the parameter is a CT parameter 5533 # if yes,just used the needed ones 5534 if not self.check_needed_param(param.name): 5535 continue 5536 if dp: 5537 fsock.writelines("%s = %s\n" % (param.name, 5538 self.p_to_f.parse(param.expr))) 5539 if mp: 5540 fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name, 5541 self.mp_p_to_f.parse(param.expr))) 5542 5543 fsock.writelines('endif') 5544 5545 fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n') 5546 if dp: 5547 fsock.writelines("aS = G**2/4/pi\n") 5548 if mp: 5549 fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n") 5550 for param in self.params_dep: 5551 # check whether the parameter is a CT parameter 5552 # if yes,just used the needed ones 5553 if not self.check_needed_param(param.name): 5554 continue 5555 if dp: 5556 fsock.writelines("%s = %s\n" % (param.name, 5557 self.p_to_f.parse(param.expr))) 5558 elif mp: 5559 fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name, 5560 self.mp_p_to_f.parse(param.expr))) 5561 5562 fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n") 5563 if ('aEWM1',) in self.model['parameters']: 5564 if dp: 5565 fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1) 5566 gal(2) = 1d0 5567 """) 5568 elif mp: 5569 fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1) 5570 %(mp_prefix)sgal(2) = 1d0 5571 """ %{'mp_prefix':self.mp_prefix}) 5572 pass 5573 # in Gmu scheme, aEWM1 is not external but Gf is an exteranl variable 5574 elif ('Gf',) in self.model['parameters']: 5575 if dp: 5576 fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*DSQRT(1D0-MDL_MW**2/MDL_MZ**2)*DSQRT(MDL_Gf) 5577 gal(2) = 1d0 5578 """) 5579 elif mp: 5580 fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*SQRT(1e0_16-MP__MDL_MW**2/MP__MDL_MZ**2)*SQRT(SQRT(2e0_16)*MP__MDL_Gf) 5581 %(mp_prefix)sgal(2) = 1d0 5582 """ %{'mp_prefix':self.mp_prefix}) 5583 pass 5584 else: 5585 if dp: 5586 logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE') 5587 fsock.writelines(""" gal(1) = 1d0 5588 gal(2) = 1d0 5589 """) 5590 elif mp: 5591 fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16 5592 %(mp_prefix)sgal(2) = 1e0_16 5593 """%{'mp_prefix':self.mp_prefix})
5594 5595
5596 - def create_couplings(self):
5597 """ create couplings.f and all couplingsX.f """ 5598 5599 nb_def_by_file = 25 5600 5601 self.create_couplings_main(nb_def_by_file) 5602 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 5603 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 5604 5605 for i in range(nb_coup_indep): 5606 # For the independent couplings, we compute the double and multiple 5607 # precision ones together 5608 data = self.coups_indep[nb_def_by_file * i: 5609 min(len(self.coups_indep), nb_def_by_file * (i+1))] 5610 self.create_couplings_part(i + 1, data, dp=True, mp=self.opt['mp']) 5611 5612 for i in range(nb_coup_dep): 5613 # For the dependent couplings, we compute the double and multiple 5614 # precision ones in separate subroutines. 5615 data = self.coups_dep[nb_def_by_file * i: 5616 min(len(self.coups_dep), nb_def_by_file * (i+1))] 5617 self.create_couplings_part( i + 1 + nb_coup_indep , data, 5618 dp=True,mp=False) 5619 if self.opt['mp']: 5620 self.create_couplings_part( i + 1 + nb_coup_indep , data, 5621 dp=False,mp=True)
5622 5623
5624 - def create_couplings_main(self, nb_def_by_file=25):
5625 """ create couplings.f """ 5626 5627 fsock = self.open('couplings.f', format='fortran') 5628 5629 fsock.writelines("""subroutine coup() 5630 5631 implicit none 5632 double precision PI, ZERO 5633 logical READLHA 5634 parameter (PI=3.141592653589793d0) 5635 parameter (ZERO=0d0) 5636 include \'model_functions.inc\'""") 5637 if self.opt['mp']: 5638 fsock.writelines("""%s MP__PI, MP__ZERO 5639 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 5640 parameter (MP__ZERO=0e0_16) 5641 include \'mp_input.inc\' 5642 include \'mp_coupl.inc\' 5643 """%self.mp_real_format) 5644 fsock.writelines("""include \'input.inc\' 5645 include \'coupl.inc\' 5646 READLHA = .true. 5647 include \'intparam_definition.inc\'""") 5648 if self.opt['mp']: 5649 fsock.writelines("""include \'mp_intparam_definition.inc\'\n""") 5650 5651 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 5652 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 5653 5654 fsock.writelines('\n'.join(\ 5655 ['call coup%s()' % (i + 1) for i in range(nb_coup_indep)])) 5656 5657 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 5658 5659 fsock.writelines('\n'.join(\ 5660 ['call coup%s()' % (nb_coup_indep + i + 1) \ 5661 for i in range(nb_coup_dep)])) 5662 if self.opt['mp']: 5663 fsock.writelines('\n'.join(\ 5664 ['call mp_coup%s()' % (nb_coup_indep + i + 1) \ 5665 for i in range(nb_coup_dep)])) 5666 fsock.writelines('''\n return \n end\n''') 5667 5668 fsock.writelines("""subroutine update_as_param() 5669 5670 implicit none 5671 double precision PI, ZERO 5672 logical READLHA 5673 parameter (PI=3.141592653589793d0) 5674 parameter (ZERO=0d0) 5675 include \'model_functions.inc\'""") 5676 fsock.writelines("""include \'input.inc\' 5677 include \'coupl.inc\' 5678 READLHA = .false.""") 5679 fsock.writelines(""" 5680 include \'intparam_definition.inc\'\n 5681 """) 5682 5683 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 5684 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 5685 5686 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 5687 5688 fsock.writelines('\n'.join(\ 5689 ['call coup%s()' % (nb_coup_indep + i + 1) \ 5690 for i in range(nb_coup_dep)])) 5691 fsock.writelines('''\n return \n end\n''') 5692 5693 fsock.writelines("""subroutine update_as_param2(mu_r2,as2) 5694 5695 implicit none 5696 double precision PI 5697 parameter (PI=3.141592653589793d0) 5698 double precision mu_r2, as2 5699 include \'model_functions.inc\'""") 5700 fsock.writelines("""include \'input.inc\' 5701 include \'coupl.inc\'""") 5702 fsock.writelines(""" 5703 if (mu_r2.gt.0d0) MU_R = mu_r2 5704 G = SQRT(4.0d0*PI*AS2) 5705 AS = as2 5706 5707 CALL UPDATE_AS_PARAM() 5708 """) 5709 fsock.writelines('''\n return \n end\n''') 5710 5711 if self.opt['mp']: 5712 fsock.writelines("""subroutine mp_update_as_param() 5713 5714 implicit none 5715 logical READLHA 5716 include \'model_functions.inc\'""") 5717 fsock.writelines("""%s MP__PI, MP__ZERO 5718 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 5719 parameter (MP__ZERO=0e0_16) 5720 include \'mp_input.inc\' 5721 include \'mp_coupl.inc\' 5722 """%self.mp_real_format) 5723 fsock.writelines("""include \'input.inc\' 5724 include \'coupl.inc\' 5725 include \'actualize_mp_ext_params.inc\' 5726 READLHA = .false. 
5727 include \'mp_intparam_definition.inc\'\n 5728 """) 5729 5730 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 5731 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 5732 5733 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 5734 5735 fsock.writelines('\n'.join(\ 5736 ['call mp_coup%s()' % (nb_coup_indep + i + 1) \ 5737 for i in range(nb_coup_dep)])) 5738 fsock.writelines('''\n return \n end\n''')
5739
5740 - def create_couplings_part(self, nb_file, data, dp=True, mp=False):
5741 """ create couplings[nb_file].f containing information coming from data. 5742 Outputs the computation of the double precision and/or the multiple 5743 precision couplings depending on the parameters dp and mp. 5744 If mp is True and dp is False, then the prefix 'MP_' is appended to the 5745 filename and subroutine name. 5746 """ 5747 5748 fsock = self.open('%scouplings%s.f' %('mp_' if mp and not dp else '', 5749 nb_file), format='fortran') 5750 fsock.writelines("""subroutine %scoup%s() 5751 5752 implicit none 5753 include \'model_functions.inc\'"""%('mp_' if mp and not dp else '',nb_file)) 5754 if dp: 5755 fsock.writelines(""" 5756 double precision PI, ZERO 5757 parameter (PI=3.141592653589793d0) 5758 parameter (ZERO=0d0) 5759 include 'input.inc' 5760 include 'coupl.inc'""") 5761 if mp: 5762 fsock.writelines("""%s MP__PI, MP__ZERO 5763 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 5764 parameter (MP__ZERO=0e0_16) 5765 include \'mp_input.inc\' 5766 include \'mp_coupl.inc\' 5767 """%self.mp_real_format) 5768 5769 for coupling in data: 5770 if dp: 5771 fsock.writelines('%s = %s' % (coupling.name, 5772 self.p_to_f.parse(coupling.expr))) 5773 if mp: 5774 fsock.writelines('%s%s = %s' % (self.mp_prefix,coupling.name, 5775 self.mp_p_to_f.parse(coupling.expr))) 5776 fsock.writelines('end')
5777
5778 - def create_model_functions_inc(self):
5779 """ Create model_functions.inc which contains the various declarations 5780 of auxiliary functions which might be used in the couplings expressions 5781 """ 5782 5783 additional_fct = [] 5784 # check for functions define in the UFO model 5785 ufo_fct = self.model.get('functions') 5786 if ufo_fct: 5787 for fct in ufo_fct: 5788 # already handle by default 5789 if fct.name not in ["complexconjugate", "re", "im", "sec", 5790 "csc", "asec", "acsc", "theta_function", "cond", 5791 "condif", "reglogp", "reglogm", "reglog", "recms", "arg", "cot"]: 5792 additional_fct.append(fct.name) 5793 5794 5795 fsock = self.open('model_functions.inc', format='fortran') 5796 fsock.writelines("""double complex cond 5797 double complex condif 5798 double complex reglog 5799 double complex reglogp 5800 double complex reglogm 5801 double complex recms 5802 double complex arg 5803 %s 5804 """ % "\n".join([" double complex %s" % i for i in additional_fct])) 5805 5806 5807 if self.opt['mp']: 5808 fsock.writelines("""%(complex_mp_format)s mp_cond 5809 %(complex_mp_format)s mp_condif 5810 %(complex_mp_format)s mp_reglog 5811 %(complex_mp_format)s mp_reglogp 5812 %(complex_mp_format)s mp_reglogm 5813 %(complex_mp_format)s mp_recms 5814 %(complex_mp_format)s mp_arg 5815 %(additional)s 5816 """ %\ 5817 {"additional": "\n".join([" %s %s" % (self.mp_complex_format, i) for i in additional_fct]), 5818 'complex_mp_format':self.mp_complex_format 5819 })
5820
5821 - def create_model_functions_def(self):
5822 """ Create model_functions.f which contains the various definitions 5823 of auxiliary functions which might be used in the couplings expressions 5824 Add the functions.f functions for formfactors support 5825 """ 5826 5827 fsock = self.open('model_functions.f', format='fortran') 5828 fsock.writelines("""double complex function cond(condition,truecase,falsecase) 5829 implicit none 5830 double complex condition,truecase,falsecase 5831 if(condition.eq.(0.0d0,0.0d0)) then 5832 cond=truecase 5833 else 5834 cond=falsecase 5835 endif 5836 end 5837 5838 double complex function condif(condition,truecase,falsecase) 5839 implicit none 5840 logical condition 5841 double complex truecase,falsecase 5842 if(condition) then 5843 condif=truecase 5844 else 5845 condif=falsecase 5846 endif 5847 end 5848 5849 double complex function recms(condition,expr) 5850 implicit none 5851 logical condition 5852 double complex expr 5853 if(condition)then 5854 recms=expr 5855 else 5856 recms=dcmplx(dble(expr)) 5857 endif 5858 end 5859 5860 double complex function reglog(arg) 5861 implicit none 5862 double complex TWOPII 5863 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 5864 double complex arg 5865 if(arg.eq.(0.0d0,0.0d0)) then 5866 reglog=(0.0d0,0.0d0) 5867 else 5868 reglog=log(arg) 5869 endif 5870 end 5871 5872 double complex function reglogp(arg) 5873 implicit none 5874 double complex TWOPII 5875 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 5876 double complex arg 5877 if(arg.eq.(0.0d0,0.0d0))then 5878 reglogp=(0.0d0,0.0d0) 5879 else 5880 if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then 5881 reglogp=log(arg) + TWOPII 5882 else 5883 reglogp=log(arg) 5884 endif 5885 endif 5886 end 5887 5888 double complex function reglogm(arg) 5889 implicit none 5890 double complex TWOPII 5891 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 5892 double complex arg 5893 if(arg.eq.(0.0d0,0.0d0))then 5894 reglogm=(0.0d0,0.0d0) 5895 else 5896 if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then 5897 reglogm=log(arg) - TWOPII 5898 else 5899 reglogm=log(arg) 5900 endif 5901 endif 5902 end 5903 5904 double complex function arg(comnum) 5905 implicit none 5906 double complex comnum 5907 double complex iim 5908 iim = (0.0d0,1.0d0) 5909 if(comnum.eq.(0.0d0,0.0d0)) then 5910 arg=(0.0d0,0.0d0) 5911 else 5912 arg=log(comnum/abs(comnum))/iim 5913 endif 5914 end""") 5915 if self.opt['mp']: 5916 fsock.writelines(""" 5917 5918 %(complex_mp_format)s function mp_cond(condition,truecase,falsecase) 5919 implicit none 5920 %(complex_mp_format)s condition,truecase,falsecase 5921 if(condition.eq.(0.0e0_16,0.0e0_16)) then 5922 mp_cond=truecase 5923 else 5924 mp_cond=falsecase 5925 endif 5926 end 5927 5928 %(complex_mp_format)s function mp_condif(condition,truecase,falsecase) 5929 implicit none 5930 logical condition 5931 %(complex_mp_format)s truecase,falsecase 5932 if(condition) then 5933 mp_condif=truecase 5934 else 5935 mp_condif=falsecase 5936 endif 5937 end 5938 5939 %(complex_mp_format)s function mp_recms(condition,expr) 5940 implicit none 5941 logical condition 5942 %(complex_mp_format)s expr 5943 if(condition)then 5944 mp_recms=expr 5945 else 5946 mp_recms=cmplx(real(expr),kind=16) 5947 endif 5948 end 5949 5950 %(complex_mp_format)s function mp_reglog(arg) 5951 implicit none 5952 %(complex_mp_format)s TWOPII 5953 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 5954 %(complex_mp_format)s arg 5955 if(arg.eq.(0.0e0_16,0.0e0_16)) then 5956 mp_reglog=(0.0e0_16,0.0e0_16) 
5957 else 5958 mp_reglog=log(arg) 5959 endif 5960 end 5961 5962 %(complex_mp_format)s function mp_reglogp(arg) 5963 implicit none 5964 %(complex_mp_format)s TWOPII 5965 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 5966 %(complex_mp_format)s arg 5967 if(arg.eq.(0.0e0_16,0.0e0_16))then 5968 mp_reglogp=(0.0e0_16,0.0e0_16) 5969 else 5970 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then 5971 mp_reglogp=log(arg) + TWOPII 5972 else 5973 mp_reglogp=log(arg) 5974 endif 5975 endif 5976 end 5977 5978 %(complex_mp_format)s function mp_reglogm(arg) 5979 implicit none 5980 %(complex_mp_format)s TWOPII 5981 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 5982 %(complex_mp_format)s arg 5983 if(arg.eq.(0.0e0_16,0.0e0_16))then 5984 mp_reglogm=(0.0e0_16,0.0e0_16) 5985 else 5986 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then 5987 mp_reglogm=log(arg) - TWOPII 5988 else 5989 mp_reglogm=log(arg) 5990 endif 5991 endif 5992 end 5993 5994 %(complex_mp_format)s function mp_arg(comnum) 5995 implicit none 5996 %(complex_mp_format)s comnum 5997 %(complex_mp_format)s imm 5998 imm = (0.0e0_16,1.0e0_16) 5999 if(comnum.eq.(0.0e0_16,0.0e0_16)) then 6000 mp_arg=(0.0e0_16,0.0e0_16) 6001 else 6002 mp_arg=log(comnum/abs(comnum))/imm 6003 endif 6004 end"""%{'complex_mp_format':self.mp_complex_format}) 6005 6006 6007 #check for the file functions.f 6008 model_path = self.model.get('modelpath') 6009 if os.path.exists(pjoin(model_path,'Fortran','functions.f')): 6010 fsock.write_comment_line(' USER DEFINE FUNCTIONS ') 6011 input = pjoin(model_path,'Fortran','functions.f') 6012 file.writelines(fsock, open(input).read()) 6013 fsock.write_comment_line(' END USER DEFINE FUNCTIONS ') 6014 6015 # check for functions define in the UFO model 6016 ufo_fct = self.model.get('functions') 6017 if ufo_fct: 6018 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ') 6019 for fct in ufo_fct: 6020 # already handle by default 6021 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif", 6022 "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms","arg"]: 6023 ufo_fct_template = """ 6024 double complex function %(name)s(%(args)s) 6025 implicit none 6026 double complex %(args)s 6027 %(name)s = %(fct)s 6028 6029 return 6030 end 6031 """ 6032 text = ufo_fct_template % { 6033 'name': fct.name, 6034 'args': ", ".join(fct.arguments), 6035 'fct': self.p_to_f.parse(fct.expr) 6036 } 6037 fsock.writelines(text) 6038 if self.opt['mp']: 6039 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP') 6040 for fct in ufo_fct: 6041 # already handle by default 6042 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc","condif", 6043 "theta_function", "cond", "reglog", "reglogp","reglogm", "recms","arg"]: 6044 ufo_fct_template = """ 6045 %(complex_mp_format)s function mp__%(name)s(mp__%(args)s) 6046 implicit none 6047 %(complex_mp_format)s mp__%(args)s 6048 mp__%(name)s = %(fct)s 6049 6050 return 6051 end 6052 """ 6053 text = ufo_fct_template % { 6054 'name': fct.name, 6055 'args': ", mp__".join(fct.arguments), 6056 'fct': self.mp_p_to_f.parse(fct.expr), 6057 'complex_mp_format': self.mp_complex_format 6058 } 6059 fsock.writelines(text) 6060 6061 6062 6063 fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
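# Illustrative sketch of the template filling above, with a hypothetical UFO function and an
# already-parsed Fortran expression standing in for self.p_to_f.parse(fct.expr).
ufo_fct_template = """
          double complex function %(name)s(%(args)s)
          implicit none
          double complex %(args)s
          %(name)s = %(fct)s

          return
          end
          """
text = ufo_fct_template % {'name': 'myFF',                   # hypothetical form factor
                           'args': ", ".join(['Q2', 'M2']),
                           'fct': '1d0/(1d0+Q2/M2)'}         # assumed parsed expression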
6064 6065 6066
6067 - def create_makeinc(self):
6068 """create makeinc.inc containing the file to compile """ 6069 6070 fsock = self.open('makeinc.inc', comment='#') 6071 text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o' 6072 text += ' model_functions.o ' 6073 6074 nb_coup_indep = 1 + len(self.coups_dep) // 25 6075 nb_coup_dep = 1 + len(self.coups_indep) // 25 6076 couplings_files=['couplings%s.o' % (i+1) \ 6077 for i in range(nb_coup_dep + nb_coup_indep) ] 6078 if self.opt['mp']: 6079 couplings_files+=['mp_couplings%s.o' % (i+1) for i in \ 6080 range(nb_coup_dep,nb_coup_dep + nb_coup_indep) ] 6081 text += ' '.join(couplings_files) 6082 fsock.writelines(text)
6083
6084 - def create_param_write(self):
6085 """ create param_write """ 6086 6087 fsock = self.open('param_write.inc', format='fortran') 6088 6089 fsock.writelines("""write(*,*) ' External Params' 6090 write(*,*) ' ---------------------------------' 6091 write(*,*) ' '""") 6092 def format(name): 6093 return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
6094 6095 # Write the external parameter 6096 lines = [format(param.name) for param in self.params_ext] 6097 fsock.writelines('\n'.join(lines)) 6098 6099 fsock.writelines("""write(*,*) ' Internal Params' 6100 write(*,*) ' ---------------------------------' 6101 write(*,*) ' '""") 6102 lines = [format(data.name) for data in self.params_indep 6103 if data.name != 'ZERO' and self.check_needed_param(data.name)] 6104 fsock.writelines('\n'.join(lines)) 6105 fsock.writelines("""write(*,*) ' Internal Params evaluated point by point' 6106 write(*,*) ' ----------------------------------------' 6107 write(*,*) ' '""") 6108 lines = [format(data.name) for data in self.params_dep \ 6109 if self.check_needed_param(data.name)] 6110 6111 fsock.writelines('\n'.join(lines)) 6112 6113 6114
6115 - def create_ident_card(self):
6116 """ create the ident_card.dat """ 6117 6118 def format(parameter): 6119 """return the line for the ident_card corresponding to this parameter""" 6120 colum = [parameter.lhablock.lower()] + \ 6121 [str(value) for value in parameter.lhacode] + \ 6122 [parameter.name] 6123 if not parameter.name: 6124 return '' 6125 return ' '.join(colum)+'\n'
6126 6127 fsock = self.open('ident_card.dat') 6128 6129 external_param = [format(param) for param in self.params_ext] 6130 fsock.writelines('\n'.join(external_param)) 6131
6132 - def create_actualize_mp_ext_param_inc(self):
6133 """ create the actualize_mp_ext_params.inc code """ 6134 6135 # In principle one should actualize all external, but for now, it is 6136 # hardcoded that only AS and MU_R can by dynamically changed by the user 6137 # so that we only update those ones. 6138 # Of course, to be on the safe side, one could decide to update all 6139 # external parameters. 6140 update_params_list=[p for p in self.params_ext if p.name in 6141 self.PS_dependent_key] 6142 6143 res_strings = ["%(mp_prefix)s%(name)s=%(name)s"\ 6144 %{'mp_prefix':self.mp_prefix,'name':param.name}\ 6145 for param in update_params_list] 6146 # When read_lha is false, it is G which is taken in input and not AS, so 6147 # this is what should be reset here too. 6148 if 'aS' in [param.name for param in update_params_list]: 6149 res_strings.append("%(mp_prefix)sG=G"%{'mp_prefix':self.mp_prefix}) 6150 6151 fsock = self.open('actualize_mp_ext_params.inc', format='fortran') 6152 fsock.writelines('\n'.join(res_strings))
6153
6154 - def create_param_read(self):
6155 """create param_read""" 6156 6157 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 6158 or self.opt['loop_induced']: 6159 fsock = self.open('param_read.inc', format='fortran') 6160 fsock.writelines(' include \'../param_card.inc\'') 6161 return 6162 6163 def format_line(parameter): 6164 """return the line for the ident_card corresponding to this 6165 parameter""" 6166 template = \ 6167 """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \ 6168 % {'name': parameter.name, 6169 'value': self.p_to_f.parse(str(parameter.value.real))} 6170 if self.opt['mp']: 6171 template = template+ \ 6172 ("\n call MP_LHA_get_real(npara,param,value,'%(name)s',"+ 6173 "%(mp_prefix)s%(name)s,%(value)s)") \ 6174 % {'name': parameter.name,'mp_prefix': self.mp_prefix, 6175 'value': self.mp_p_to_f.parse(str(parameter.value.real))} 6176 return template 6177 6178 fsock = self.open('param_read.inc', format='fortran') 6179 res_strings = [format_line(param) \ 6180 for param in self.params_ext] 6181 6182 # Correct width sign for Majorana particles (where the width 6183 # and mass need to have the same sign) 6184 for particle in self.model.get('particles'): 6185 if particle.is_fermion() and particle.get('self_antipart') and \ 6186 particle.get('width').lower() != 'zero': 6187 6188 res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \ 6189 {'width': particle.get('width'), 'mass': particle.get('mass')}) 6190 if self.opt['mp']: 6191 res_strings.append(\ 6192 ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,'+\ 6193 '%(mp_pref)s%(mass)s)')%{'width': particle.get('width'),\ 6194 'mass': particle.get('mass'),'mp_pref':self.mp_prefix}) 6195 6196 fsock.writelines('\n'.join(res_strings)) 6197 6198 6199 @staticmethod
6200 - def create_param_card_static(model, output_path, rule_card_path=False, 6201 mssm_convert=True):
6202 """ create the param_card.dat for a givent model --static method-- """ 6203 #1. Check if a default param_card is present: 6204 done = False 6205 if hasattr(model, 'restrict_card') and isinstance(model.restrict_card, str): 6206 restrict_name = os.path.basename(model.restrict_card)[9:-4] 6207 model_path = model.get('modelpath') 6208 if os.path.exists(pjoin(model_path,'paramcard_%s.dat' % restrict_name)): 6209 done = True 6210 files.cp(pjoin(model_path,'paramcard_%s.dat' % restrict_name), 6211 output_path) 6212 if not done: 6213 param_writer.ParamCardWriter(model, output_path) 6214 6215 if rule_card_path: 6216 if hasattr(model, 'rule_card'): 6217 model.rule_card.write_file(rule_card_path) 6218 6219 if mssm_convert: 6220 model_name = model.get('name') 6221 # IF MSSM convert the card to SLAH1 6222 if model_name == 'mssm' or model_name.startswith('mssm-'): 6223 import models.check_param_card as translator 6224 # Check the format of the param_card for Pythia and make it correct 6225 if rule_card_path: 6226 translator.make_valid_param_card(output_path, rule_card_path) 6227 translator.convert_to_slha1(output_path)
6228
6229 - def create_param_card(self):
6230 """ create the param_card.dat """ 6231 6232 rule_card = pjoin(self.dir_path, 'param_card_rule.dat') 6233 if not os.path.exists(rule_card): 6234 rule_card=False 6235 self.create_param_card_static(self.model, 6236 output_path=pjoin(self.dir_path, 'param_card.dat'), 6237 rule_card_path=rule_card, 6238 mssm_convert=True)
6239
6240 -def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True):
6241 """ Determine which Export_v4 class is required. cmd is the command 6242 interface containing all potential usefull information. 6243 The output_type argument specifies from which context the output 6244 is called. It is 'madloop' for MadLoop5, 'amcatnlo' for FKS5 output 6245 and 'default' for tree-level outputs.""" 6246 6247 opt = cmd.options 6248 6249 # ========================================================================== 6250 # First check whether Ninja must be installed. 6251 # Ninja would only be required if: 6252 # a) Loop optimized output is selected 6253 # b) the process gathered from the amplitude generated use loops 6254 6255 if len(cmd._curr_amps)>0: 6256 try: 6257 curr_proc = cmd._curr_amps[0].get('process') 6258 except base_objects.PhysicsObject.PhysicsObjectError: 6259 curr_proc = None 6260 elif hasattr(cmd,'_fks_multi_proc') and \ 6261 len(cmd._fks_multi_proc.get('process_definitions'))>0: 6262 curr_proc = cmd._fks_multi_proc.get('process_definitions')[0] 6263 else: 6264 curr_proc = None 6265 6266 requires_ninja = opt['loop_optimized_output'] and (not curr_proc is None) and \ 6267 (curr_proc.get('perturbation_couplings') != [] and \ 6268 not curr_proc.get('NLO_mode') in [None,'real','tree','LO','LOonly']) 6269 # An installation is required then, but only if the specified path is the 6270 # default local one and that the Ninja library appears missing. 6271 if requires_ninja and (not opt['ninja'] is None) and\ 6272 os.path.abspath(opt['ninja'])==pjoin(MG5DIR,'HEPTools','lib') and\ 6273 not os.path.isfile(pjoin(MG5DIR,'HEPTools','lib','libninja.a')): 6274 # Then install Ninja here from the tarballs in the vendor 6275 # directory so that it would work offline too. 6276 logger.info( 6277 """MG5aMC will now install the loop reduction tool 'Ninja' from the local offline installer. 6278 Use the command 'install ninja' if you want to update to the latest online version. 6279 This installation can take some time but only needs to be performed once.""",'$MG:color:GREEN') 6280 try: 6281 cmd.do_install('ninja',paths={'HEPToolsInstaller': 6282 pjoin(MG5DIR,'vendor','OfflineHEPToolsInstaller.tar.gz')}, 6283 additional_options=[ 6284 '--ninja_tarball=%s'%pjoin(MG5DIR,'vendor','ninja.tar.gz'), 6285 '--oneloop_tarball=%s'%pjoin(MG5DIR,'vendor','oneloop.tar.gz')]) 6286 except InvalidCmd: 6287 logger.warning( 6288 """The offline installation of Ninja was unsuccessful, and MG5aMC disabled it. 
6289 In the future, if you want to reactivate Ninja, you can do so by re-attempting 6290 its online installation with the command 'install ninja' or install it on your 6291 own and set the path to its library in the MG5aMC option 'ninja'.""") 6292 cmd.exec_cmd("set ninja ''") 6293 cmd.exec_cmd('save options') 6294 6295 6296 # ========================================================================== 6297 # First treat the MadLoop5 standalone case 6298 MadLoop_SA_options = {'clean': not noclean, 6299 'complex_mass':cmd.options['complex_mass_scheme'], 6300 'export_format':'madloop', 6301 'mp':True, 6302 'loop_dir': os.path.join(cmd._mgme_dir,'Template','loop_material'), 6303 'cuttools_dir': cmd._cuttools_dir, 6304 'iregi_dir':cmd._iregi_dir, 6305 'pjfry_dir':cmd.options['pjfry'], 6306 'golem_dir':cmd.options['golem'], 6307 'samurai_dir':cmd.options['samurai'], 6308 'ninja_dir':cmd.options['ninja'], 6309 'fortran_compiler':cmd.options['fortran_compiler'], 6310 'f2py_compiler':cmd.options['f2py_compiler'], 6311 'output_dependencies':cmd.options['output_dependencies'], 6312 'SubProc_prefix':'P', 6313 'compute_color_flows':cmd.options['loop_color_flows'], 6314 'mode': 'reweight' if cmd._export_format == "standalone_rw" else '', 6315 'cluster_local_path': cmd.options['cluster_local_path'] 6316 } 6317 6318 if output_type.startswith('madloop'): 6319 import madgraph.loop.loop_exporters as loop_exporters 6320 if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')): 6321 ExporterClass=None 6322 if not cmd.options['loop_optimized_output']: 6323 ExporterClass=loop_exporters.LoopProcessExporterFortranSA 6324 else: 6325 if output_type == "madloop": 6326 ExporterClass=loop_exporters.LoopProcessOptimizedExporterFortranSA 6327 MadLoop_SA_options['export_format'] = 'madloop_optimized' 6328 elif output_type == "madloop_matchbox": 6329 ExporterClass=loop_exporters.LoopProcessExporterFortranMatchBox 6330 MadLoop_SA_options['export_format'] = 'madloop_matchbox' 6331 else: 6332 raise Exception, "output_type not recognize %s" % output_type 6333 return ExporterClass(cmd._mgme_dir, cmd._export_dir, MadLoop_SA_options) 6334 else: 6335 raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory'+\ 6336 ' in %s'%str(cmd._mgme_dir)) 6337 6338 # Then treat the aMC@NLO output 6339 elif output_type=='amcatnlo': 6340 import madgraph.iolibs.export_fks as export_fks 6341 ExporterClass=None 6342 amcatnlo_options = dict(opt) 6343 amcatnlo_options.update(MadLoop_SA_options) 6344 amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0 6345 if not cmd.options['loop_optimized_output']: 6346 logger.info("Writing out the aMC@NLO code") 6347 ExporterClass = export_fks.ProcessExporterFortranFKS 6348 amcatnlo_options['export_format']='FKS5_default' 6349 else: 6350 logger.info("Writing out the aMC@NLO code, using optimized Loops") 6351 ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS 6352 amcatnlo_options['export_format']='FKS5_optimized' 6353 return ExporterClass(cmd._mgme_dir, cmd._export_dir, amcatnlo_options) 6354 6355 # Then the default tree-level output 6356 elif output_type=='default': 6357 assert group_subprocesses in [True, False] 6358 6359 opt = dict(opt) 6360 opt.update({'clean': not noclean, 6361 'complex_mass': cmd.options['complex_mass_scheme'], 6362 'export_format':cmd._export_format, 6363 'mp': False, 6364 'sa_symmetry':False, 6365 'model': cmd._curr_model.get('name'), 6366 'v5_model': False if cmd._model_v4_path else True }) 6367 6368 format = 
6369   
6370          if format in ['standalone_msP', 'standalone_msF', 'standalone_rw']: 
6371              opt['sa_symmetry'] = True 
6372   
6373          loop_induced_opt = dict(opt) 
6374          loop_induced_opt.update(MadLoop_SA_options) 
6375          loop_induced_opt['export_format'] = 'madloop_optimized' 
6376          loop_induced_opt['SubProc_prefix'] = 'PV' 
6377          # For loop_induced output with MadEvent, we must have access to the 
6378          # color flows. 
6379          loop_induced_opt['compute_color_flows'] = True 
6380          for key in opt: 
6381              if key not in loop_induced_opt: 
6382                  loop_induced_opt[key] = opt[key] 
6383   
6384          if format == 'matrix' or format.startswith('standalone'): 
6385              return ProcessExporterFortranSA(cmd._mgme_dir, cmd._export_dir, opt, 
6386                                              format=format) 
6387   
6388          elif format in ['madevent'] and group_subprocesses: 
6389              if isinstance(cmd._curr_amps[0], 
6390                            loop_diagram_generation.LoopAmplitude): 
6391                  import madgraph.loop.loop_exporters as loop_exporters 
6392                  return loop_exporters.LoopInducedExporterMEGroup(cmd._mgme_dir, 
6393                                               cmd._export_dir,loop_induced_opt) 
6394              else: 
6395                  return ProcessExporterFortranMEGroup(cmd._mgme_dir, 
6396                                               cmd._export_dir,opt) 
6397          elif format in ['madevent']: 
6398              if isinstance(cmd._curr_amps[0], 
6399                            loop_diagram_generation.LoopAmplitude): 
6400                  import madgraph.loop.loop_exporters as loop_exporters 
6401                  return loop_exporters.LoopInducedExporterMENoGroup(cmd._mgme_dir, 
6402                                               cmd._export_dir,loop_induced_opt) 
6403              else: 
6404                  return ProcessExporterFortranME(cmd._mgme_dir, 
6405                                               cmd._export_dir,opt) 
6406          elif format in ['matchbox']: 
6407              return ProcessExporterFortranMatchBox(cmd._mgme_dir, cmd._export_dir,opt) 
6408          elif cmd._export_format in ['madweight'] and group_subprocesses: 
6409   
6410              return ProcessExporterFortranMWGroup(cmd._mgme_dir, cmd._export_dir, 
6411                                                   opt) 
6412          elif cmd._export_format in ['madweight']: 
6413              return ProcessExporterFortranMW(cmd._mgme_dir, cmd._export_dir, opt) 
6414          else: 
6415              raise Exception, 'Wrong export_v4 format' 
6416      else: 
6417          raise MadGraph5Error, 'Output type %s not recognized in ExportV4Factory.' % output_type 
6418
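#------------------------------------------------------------------------------
# Illustrative usage sketch (added for clarity, not part of the original
# module). The factory above picks the exporter class from the command
# interface; the name and signature assumed here,
# ExportV4Factory(cmd, noclean, output_type, group_subprocesses), are inferred
# from the error message and may differ from the actual definition. A grouped
# tree-level MadEvent output would then be obtained roughly as:
#
#     exporter = ExportV4Factory(cmd, noclean=False, output_type='default',
#                                group_subprocesses=True)
#     # 'exporter' would then be e.g. a ProcessExporterFortranMEGroup instance,
#     # ready to copy the template tree and fill the SubProcesses/P* content.
#------------------------------------------------------------------------------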
6419   
6420   
6421   
6422  #=============================================================================== 
6423  # ProcessExporterFortranMWGroup 
6424  #=============================================================================== 
6425  class ProcessExporterFortranMWGroup(ProcessExporterFortranMW): 
6426      """Class to take care of exporting a set of matrix elements to 
6427      MadWeight subprocess group format.""" 
6428   
6429      matrix_file = "matrix_madweight_group_v4.inc" 
6430   
6431      #=========================================================================== 
6432      # generate_subprocess_directory_v4 
6433      #=========================================================================== 
6434      def generate_subprocess_directory_v4(self, subproc_group, 
6435                                           fortran_model, 
6436                                           group_number): 
6437 """Generate the Pn directory for a subprocess group in MadEvent, 6438 including the necessary matrix_N.f files, configs.inc and various 6439 other helper files""" 6440 6441 if not isinstance(subproc_group, group_subprocs.SubProcessGroup): 6442 raise base_objects.PhysicsObject.PhysicsObjectError,\ 6443 "subproc_group object not SubProcessGroup" 6444 6445 if not self.model: 6446 self.model = subproc_group.get('matrix_elements')[0].\ 6447 get('processes')[0].get('model') 6448 6449 pathdir = os.path.join(self.dir_path, 'SubProcesses') 6450 6451 # Create the directory PN in the specified path 6452 subprocdir = "P%d_%s" % (subproc_group.get('number'), 6453 subproc_group.get('name')) 6454 try: 6455 os.mkdir(pjoin(pathdir, subprocdir)) 6456 except os.error as error: 6457 logger.warning(error.strerror + " " + subprocdir) 6458 6459 6460 logger.info('Creating files in directory %s' % subprocdir) 6461 Ppath = pjoin(pathdir, subprocdir) 6462 6463 # Create the matrix.f files, auto_dsig.f files and all inc files 6464 # for all subprocesses in the group 6465 6466 maxamps = 0 6467 maxflows = 0 6468 tot_calls = 0 6469 6470 matrix_elements = subproc_group.get('matrix_elements') 6471 6472 for ime, matrix_element in \ 6473 enumerate(matrix_elements): 6474 filename = pjoin(Ppath, 'matrix%d.f' % (ime+1)) 6475 calls, ncolor = \ 6476 self.write_matrix_element_v4(writers.FortranWriter(filename), 6477 matrix_element, 6478 fortran_model, 6479 str(ime+1), 6480 subproc_group.get('diagram_maps')[\ 6481 ime]) 6482 6483 filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime+1)) 6484 self.write_auto_dsig_file(writers.FortranWriter(filename), 6485 matrix_element, 6486 str(ime+1)) 6487 6488 # Keep track of needed quantities 6489 tot_calls += int(calls) 6490 maxflows = max(maxflows, ncolor) 6491 maxamps = max(maxamps, len(matrix_element.get('diagrams'))) 6492 6493 # Draw diagrams 6494 filename = pjoin(Ppath, "matrix%d.ps" % (ime+1)) 6495 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 6496 get('diagrams'), 6497 filename, 6498 model = \ 6499 matrix_element.get('processes')[0].\ 6500 get('model'), 6501 amplitude=True) 6502 logger.info("Generating Feynman diagrams for " + \ 6503 matrix_element.get('processes')[0].nice_string()) 6504 plot.draw() 6505 6506 # Extract number of external particles 6507 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 6508 6509 # Generate a list of diagrams corresponding to each configuration 6510 # [[d1, d2, ...,dn],...] 
where 1,2,...,n is the subprocess number 6511 # If a subprocess has no diagrams for this config, the number is 0 6512 6513 subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs') 6514 6515 filename = pjoin(Ppath, 'auto_dsig.f') 6516 self.write_super_auto_dsig_file(writers.FortranWriter(filename), 6517 subproc_group) 6518 6519 filename = pjoin(Ppath,'configs.inc') 6520 nconfigs, s_and_t_channels = self.write_configs_file(\ 6521 writers.FortranWriter(filename), 6522 subproc_group, 6523 subproc_diagrams_for_config) 6524 6525 filename = pjoin(Ppath, 'leshouche.inc') 6526 self.write_leshouche_file(writers.FortranWriter(filename), 6527 subproc_group) 6528 6529 filename = pjoin(Ppath, 'phasespace.inc') 6530 self.write_phasespace_file(writers.FortranWriter(filename), 6531 nconfigs) 6532 6533 6534 filename = pjoin(Ppath, 'maxamps.inc') 6535 self.write_maxamps_file(writers.FortranWriter(filename), 6536 maxamps, 6537 maxflows, 6538 max([len(me.get('processes')) for me in \ 6539 matrix_elements]), 6540 len(matrix_elements)) 6541 6542 filename = pjoin(Ppath, 'mirrorprocs.inc') 6543 self.write_mirrorprocs(writers.FortranWriter(filename), 6544 subproc_group) 6545 6546 filename = pjoin(Ppath, 'nexternal.inc') 6547 self.write_nexternal_file(writers.FortranWriter(filename), 6548 nexternal, ninitial) 6549 6550 filename = pjoin(Ppath, 'pmass.inc') 6551 self.write_pmass_file(writers.FortranWriter(filename), 6552 matrix_element) 6553 6554 filename = pjoin(Ppath, 'props.inc') 6555 self.write_props_file(writers.FortranWriter(filename), 6556 matrix_element, 6557 s_and_t_channels) 6558 6559 # filename = pjoin(Ppath, 'processes.dat') 6560 # files.write_to_file(filename, 6561 # self.write_processes_file, 6562 # subproc_group) 6563 6564 # Generate jpgs -> pass in make_html 6565 #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl')) 6566 6567 linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f'] 6568 6569 for file in linkfiles: 6570 ln('../%s' % file, cwd=Ppath) 6571 6572 ln('nexternal.inc', '../../Source', cwd=Ppath, log=False) 6573 ln('leshouche.inc', '../../Source', cwd=Ppath, log=False) 6574 ln('maxamps.inc', '../../Source', cwd=Ppath, log=False) 6575 ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath) 6576 ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath) 6577 ln('phasespace.inc', '../', log=True, cwd=Ppath) 6578 if not tot_calls: 6579 tot_calls = 0 6580 return tot_calls
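    # Note added for clarity (not in the original source): for a subprocess
    # group with number 1 and name 'qq_ttx' (hypothetical), the loop above
    # creates SubProcesses/P1_qq_ttx/ containing matrix1.f, auto_dsig1.f and a
    # diagram plot matrix1.ps per matrix element, plus the shared .inc files
    # written afterwards. The returned tot_calls is the call count reported by
    # write_matrix_element_v4, summed over all matrix elements in the group.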
6581 6582 #=========================================================================== 6583 # write_super_auto_dsig_file 6584 #===========================================================================
6585      def write_super_auto_dsig_file(self, writer, subproc_group): 
6586 """Write the auto_dsig.f file selecting between the subprocesses 6587 in subprocess group mode""" 6588 6589 replace_dict = {} 6590 6591 # Extract version number and date from VERSION file 6592 info_lines = self.get_mg5_info_lines() 6593 replace_dict['info_lines'] = info_lines 6594 6595 matrix_elements = subproc_group.get('matrix_elements') 6596 6597 # Extract process info lines 6598 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 6599 matrix_elements]) 6600 replace_dict['process_lines'] = process_lines 6601 6602 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 6603 replace_dict['nexternal'] = nexternal 6604 6605 replace_dict['nsprocs'] = 2*len(matrix_elements) 6606 6607 # Generate dsig definition line 6608 dsig_def_line = "DOUBLE PRECISION " + \ 6609 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 6610 range(len(matrix_elements))]) 6611 replace_dict["dsig_def_line"] = dsig_def_line 6612 6613 # Generate dsig process lines 6614 call_dsig_proc_lines = [] 6615 for iproc in range(len(matrix_elements)): 6616 call_dsig_proc_lines.append(\ 6617 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 6618 {"num": iproc + 1, 6619 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 6620 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 6621 6622 file = open(os.path.join(_file_path, \ 6623 'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')).read() 6624 file = file % replace_dict 6625 6626 # Write the file 6627 writer.writelines(file)
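    # Illustrative example (added, not in the original source) of the pieces
    # substituted into super_auto_dsig_mw_group_v4.inc for a hypothetical group
    # with two matrix elements:
    #
    #     DOUBLE PRECISION DSIG1,DSIG2
    #     IF(IPROC.EQ.1) DSIGPROC=DSIG1(P1,WGT,IMODE) ! u u~ > t t~
    #     IF(IPROC.EQ.2) DSIGPROC=DSIG2(P1,WGT,IMODE) ! d d~ > t t~
    #
    # The trailing comments come from base_string() of the first process of
    # each matrix element, and 'nsprocs' is set to twice the number of matrix
    # elements in the group.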
6628 6629 #=========================================================================== 6630 # write_mirrorprocs 6631 #===========================================================================
6632      def write_mirrorprocs(self, writer, subproc_group): 
6633          """Write the mirrorprocs.inc file determining which processes have 
6634          an initial-state (IS) mirror process in subprocess group mode.""" 
6635   
6636          lines = [] 
6637          bool_dict = {True: '.true.', False: '.false.'} 
6638          matrix_elements = subproc_group.get('matrix_elements') 
6639          lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 
6640              (len(matrix_elements), 
6641               ",".join([bool_dict[me.get('has_mirror_process')] for \ 
6642                         me in matrix_elements]))) 
6643          # Write the file 
6644          writer.writelines(lines) 
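    # Illustrative example (added, not in the original source) of the single
    # line written to mirrorprocs.inc, for a hypothetical group of three matrix
    # elements in which only the second one has an initial-state mirror process:
    #
    #     DATA (MIRRORPROCS(I),I=1,3)/.false.,.true.,.false./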
6645 6646 #=========================================================================== 6647 # write_configs_file 6648 #===========================================================================
6649      def write_configs_file(self, writer, subproc_group, diagrams_for_config): 
6650 """Write the configs.inc file with topology information for a 6651 subprocess group. Use the first subprocess with a diagram for each 6652 configuration.""" 6653 6654 matrix_elements = subproc_group.get('matrix_elements') 6655 model = matrix_elements[0].get('processes')[0].get('model') 6656 6657 diagrams = [] 6658 config_numbers = [] 6659 for iconfig, config in enumerate(diagrams_for_config): 6660 # Check if any diagrams correspond to this config 6661 if set(config) == set([0]): 6662 continue 6663 subproc_diags = [] 6664 for s,d in enumerate(config): 6665 if d: 6666 subproc_diags.append(matrix_elements[s].\ 6667 get('diagrams')[d-1]) 6668 else: 6669 subproc_diags.append(None) 6670 diagrams.append(subproc_diags) 6671 config_numbers.append(iconfig + 1) 6672 6673 # Extract number of external particles 6674 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 6675 6676 return len(diagrams), \ 6677 self.write_configs_file_from_diagrams(writer, diagrams, 6678 config_numbers, 6679 nexternal, ninitial, 6680 matrix_elements[0],model)
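    # Illustrative example (added, not in the original source) of the
    # diagrams_for_config argument for a group of two subprocesses:
    #
    #     diagrams_for_config = [[1, 0], [3, 2]]
    #
    # Configuration 1 is built from diagram 1 of subprocess 1 (subprocess 2
    # contributes no diagram, hence 0); configuration 2 uses diagram 3 of
    # subprocess 1 and diagram 2 of subprocess 2. Configurations whose entries
    # are all zero are skipped in the loop above.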
6681   
6682      #=========================================================================== 
6683      # write_run_config_file 
6684      #=========================================================================== 
6685      def write_run_config_file(self, writer): 
6686 """Write the run_configs.inc file for MadEvent""" 6687 6688 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 6689 text = open(path).read() % {'chanperjob':'2'} 6690 writer.write(text) 6691 return True
6692 6693 6694 #=========================================================================== 6695 # write_leshouche_file 6696 #===========================================================================
6697      def write_leshouche_file(self, writer, subproc_group): 
6698 """Write the leshouche.inc file for MG4""" 6699 6700 all_lines = [] 6701 6702 for iproc, matrix_element in \ 6703 enumerate(subproc_group.get('matrix_elements')): 6704 all_lines.extend(self.get_leshouche_lines(matrix_element, 6705 iproc)) 6706 6707 # Write the file 6708 writer.writelines(all_lines) 6709 6710 return True
6711