1
2
3
4
5
6
7
8
9
10
11
12
13
14
15 """A user friendly command line interface to access MadGraph5_aMC@NLO features.
16 Uses the cmd package for command interpretation and tab completion.
17 """
18 from __future__ import division
19
20 import atexit
21 import glob
22 import logging
23 import math
24 import optparse
25 import os
26 import pydoc
27 import random
28 import re
29 import shutil
30 import subprocess
31 import sys
32 import traceback
33 import time
34 import signal
35 import tarfile
36 import copy
37 import datetime
38 import tarfile
39 import traceback
40 import StringIO
41
42 try:
43 import readline
44 GNU_SPLITTING = ('GNU' in readline.__doc__)
45 except:
46 GNU_SPLITTING = True
47
48 root_path = os.path.split(os.path.dirname(os.path.realpath( __file__ )))[0]
49 root_path = os.path.split(root_path)[0]
50 sys.path.insert(0, os.path.join(root_path,'bin'))
51
52
53 pjoin = os.path.join
54
55 logger = logging.getLogger('madgraph.stdout')
56 logger_stderr = logging.getLogger('madgraph.stderr')
57
58 try:
59 import madgraph
60 except ImportError:
61 aMCatNLO = True
62 import internal.extended_cmd as cmd
63 import internal.common_run_interface as common_run
64 import internal.banner as banner_mod
65 import internal.misc as misc
66 from internal import InvalidCmd, MadGraph5Error
67 import internal.files as files
68 import internal.cluster as cluster
69 import internal.save_load_object as save_load_object
70 import internal.gen_crossxhtml as gen_crossxhtml
71 import internal.sum_html as sum_html
72 import internal.shower_card as shower_card
73 import internal.FO_analyse_card as analyse_card
74 import internal.histograms as histograms
75 else:
76
77 aMCatNLO = False
78 import madgraph.interface.extended_cmd as cmd
79 import madgraph.interface.common_run_interface as common_run
80 import madgraph.iolibs.files as files
81 import madgraph.iolibs.save_load_object as save_load_object
82 import madgraph.madevent.gen_crossxhtml as gen_crossxhtml
83 import madgraph.madevent.sum_html as sum_html
84 import madgraph.various.banner as banner_mod
85 import madgraph.various.cluster as cluster
86 import madgraph.various.misc as misc
87 import madgraph.various.shower_card as shower_card
88 import madgraph.various.FO_analyse_card as analyse_card
89 import madgraph.various.histograms as histograms
90 from madgraph import InvalidCmd, aMCatNLOError, MadGraph5Error
91
94
95
def compile_dir(*arguments):
    """Compile the directory p_dir.

    arguments is the tuple (me_dir, p_dir, mode, options, tests, exe, run_mode),
    passed either packed as a single tuple or as seven positional arguments.
    This needs to be a module-level function (not a class method) so that it
    can be dispatched to worker processes for multicore compilation.

    Returns 0 on success, or the MadGraph5Error message on failure.
    """
    if len(arguments) == 1:
        (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments[0]
    elif len(arguments) == 7:
        (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments
    else:
        # instance-call form: valid in both python2 and python3
        # (the original py2-only "raise E, msg" form is a py3 syntax error)
        raise aMCatNLOError('not correct number of argument')
    logger.info(' Compiling %s...' % p_dir)

    this_dir = pjoin(me_dir, 'SubProcesses', p_dir)

    try:
        # build and run each requested sanity test executable
        for test in tests:
            # skip check_poles for loop-induced processes (no real-emission
            # luminosity file parton_lum_0.f present)
            if test == 'check_poles' and os.path.exists(pjoin(this_dir, 'parton_lum_0.f')):
                continue
            misc.compile([test], cwd=this_dir, job_specs=False)
            input = pjoin(me_dir, '%s_input.txt' % test)

            misc.call(['./%s' % (test)], cwd=this_dir,
                      stdin=open(input), stdout=open(pjoin(this_dir, '%s.log' % test), 'w'),
                      close_fds=True)
            # archive the MadLoop resources produced by check_poles
            if test == 'check_poles' and os.path.exists(pjoin(this_dir, 'MadLoop5_resources')):
                tf = tarfile.open(pjoin(this_dir, 'MadLoop5_resources.tar.gz'), 'w:gz',
                                  dereference=True)
                tf.add(pjoin(this_dir, 'MadLoop5_resources'), arcname='MadLoop5_resources')
                tf.close()

        if not options['reweightonly']:
            misc.compile(['gensym'], cwd=this_dir, job_specs=False)
            open(pjoin(this_dir, 'gensym_input.txt'), 'w').write('%s\n' % run_mode)
            misc.call(['./gensym'], cwd=this_dir,
                      stdin=open(pjoin(this_dir, 'gensym_input.txt')),
                      stdout=open(pjoin(this_dir, 'gensym.log'), 'w'),
                      close_fds=True)

        # compile the main integration/generation executable
        misc.compile([exe], cwd=this_dir, job_specs=False)
        if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            misc.compile(['reweight_xsec_events'], cwd=this_dir, job_specs=False)

        logger.info(' %s done.' % p_dir)
        return 0
    except MadGraph5Error as msg:
        # return (not raise): the multicore dispatcher inspects the result
        return msg
147
148
def check_compiler(options, block=False):
    """Check that the current fortran compiler is gfortran 4.6 or later.

    options -- options dictionary; only 'fortran_compiler' is read.
    block   -- if True, raise aMCatNLOError on failure; otherwise only
               print a warning.
    """
    msg = 'In order to be able to run at NLO MadGraph5_aMC@NLO, you need to have ' + \
          'gfortran 4.6 or later installed.\n%s has been detected\n'+\
          'Note that You can still run all MadEvent run without any problem!'

    if options['fortran_compiler']:
        compiler = options['fortran_compiler']
    elif misc.which('gfortran'):
        compiler = 'gfortran'
    else:
        compiler = ''

    if 'gfortran' not in compiler:
        if block:
            raise aMCatNLOError(msg % compiler)
        else:
            logger.warning(msg % compiler)
    else:
        curr_version = misc.get_gfortran_version(compiler)
        try:
            # Compare numerically. The original string comparison
            # ''.join(version.split('.')) >= '46' wrongly rejects modern
            # compilers, e.g. gfortran 10.2.0 -> '1020' < '46' as strings.
            version_ok = [int(x) for x in curr_version.split('.')[:2]] >= [4, 6]
        except ValueError:
            # unparsable version string: treat as too old and warn/raise
            version_ok = False
        if not version_ok:
            if block:
                raise aMCatNLOError(msg % (compiler + ' ' + curr_version))
            else:
                logger.warning(msg % (compiler + ' ' + curr_version))
176
177
178
179
180
181
183 """Particularisation of the cmd command for aMCatNLO"""
184
185
186 next_possibility = {
187 'start': [],
188 }
189
190 debug_output = 'ME5_debug'
191 error_debug = 'Please report this bug on https://bugs.launchpad.net/mg5amcnlo\n'
192 error_debug += 'More information is found in \'%(debug)s\'.\n'
193 error_debug += 'Please attach this file to your report.'
194
195 config_debug = 'If you need help with this issue please contact us on https://answers.launchpad.net/mg5amcnlo\n'
196
197
198 keyboard_stop_msg = """stopping all operation
199 in order to quit MadGraph5_aMC@NLO please enter exit"""
200
201
202 InvalidCmd = InvalidCmd
203 ConfigurationError = aMCatNLOError
204
205 - def __init__(self, me_dir, options, *arg, **opt):
206 """Init history and line continuation"""
# NOTE(review): this source is an extraction artifact -- original line numbers
# are baked into each line and indentation was lost.  Comments below annotate
# the visible logic without altering any code.
207
208
# one-shot flag: when True, interactive questions are answered with defaults
209 self.force = False
210
211
212
# Build the "VERSION ..." banner line from the package metadata when
# available; fall back to reading MGMEVersion.txt at the root path.
# (info.has_key(...) is python2-only -- kept byte-identical here.)
213 info = misc.get_pkg_info()
214 info_line = ""
215 if info and info.has_key('version') and info.has_key('date'):
216 len_version = len(info['version'])
217 len_date = len(info['date'])
218 if len_version + len_date < 30:
219 info_line = "#* VERSION %s %s %s *\n" % \
220 (info['version'],
221 (30 - len_version - len_date) * ' ',
222 info['date'])
223 else:
224 version = open(pjoin(root_path,'MGMEVersion.txt')).readline().strip()
225 info_line = "#* VERSION %s %s *\n" % \
226 (version, (24 - len(version)) * ' ')
227
228
229
# Header written at the top of saved command-history files.
# NOTE(review): the banner art padding was collapsed by the extraction;
# the literals below reflect the extracted text, not the original spacing.
230 self.history_header = \
231 '#************************************************************\n' + \
232 '#* MadGraph5_aMC@NLO *\n' + \
233 '#* *\n' + \
234 "#* * * *\n" + \
235 "#* * * * * *\n" + \
236 "#* * * * * 5 * * * * *\n" + \
237 "#* * * * * *\n" + \
238 "#* * * *\n" + \
239 "#* *\n" + \
240 "#* *\n" + \
241 info_line + \
242 "#* *\n" + \
243 "#* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \
244 "#* https://server06.fynu.ucl.ac.be/projects/madgraph *\n" + \
245 "#* and *\n" + \
246 "#* http://amcatnlo.cern.ch *\n" + \
247 '#* *\n' + \
248 '#************************************************************\n' + \
249 '#* *\n' + \
250 '#* Command File for aMCatNLO *\n' + \
251 '#* *\n' + \
252 '#* run as ./bin/aMCatNLO.py filename *\n' + \
253 '#* *\n' + \
254 '#************************************************************\n'
255
# drop the leading '#' so the same line fits the welcome banner below
256 if info_line:
257 info_line = info_line[1:]
258
# interactive welcome banner, printed once at startup
259 logger.info(\
260 "************************************************************\n" + \
261 "* *\n" + \
262 "* W E L C O M E to M A D G R A P H 5 *\n" + \
263 "* a M C @ N L O *\n" + \
264 "* *\n" + \
265 "* * * *\n" + \
266 "* * * * * *\n" + \
267 "* * * * * 5 * * * * *\n" + \
268 "* * * * * *\n" + \
269 "* * * *\n" + \
270 "* *\n" + \
271 info_line + \
272 "* *\n" + \
273 "* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \
274 "* http://amcatnlo.cern.ch *\n" + \
275 "* *\n" + \
276 "* Type 'help' for in-line help. *\n" + \
277 "* *\n" + \
278 "************************************************************")
# delegate the rest of the initialisation (history, options, ...) upward
279 super(CmdExtended, self).__init__(me_dir, options, *arg, **opt)
280
281
283 """return the history header"""
284 return self.history_header % misc.get_time_info()
285
287 """action to perform to close nicely on a keyboard interupt"""
288 try:
289 if hasattr(self, 'cluster'):
290 logger.info('rm jobs on queue')
291 self.cluster.remove()
292 if hasattr(self, 'results'):
293 self.update_status('Stop by the user', level=None, makehtml=True, error=True)
294 self.add_error_log_in_html(KeyboardInterrupt)
295 except:
296 pass
297
def postcmd(self, stop, line):
    """ Update the status of the run for finishing interactive command """
    # every command resets the one-shot 'force' flag
    self.force = False

    # nothing to report when commands come from a script instead of a user
    if not self.use_rawinput:
        return stop

    tokens = line.split()
    if not tokens:
        return stop
    if str(tokens[0]) in ['exit', 'quit', 'EOF']:
        return stop

    try:
        self.update_status('Command \'%s\' done.<br> Waiting for instruction.' % tokens[0],
                           level=None, error=True)
    except Exception:
        # status update is best-effort only; never break the command loop
        misc.sprint('self.update_status fails', log=logger)
320
326
332
338
339
340
341
342
343
345 """ The Series of help routine for the aMCatNLOCmd"""
346
350
352 logger.info("syntax: banner_run Path|RUN [--run_options]")
353 logger.info("-- Reproduce a run following a given banner")
354 logger.info(" One of the following argument is require:")
355 logger.info(" Path should be the path of a valid banner.")
356 logger.info(" RUN should be the name of a run of the current directory")
357 self.run_options_help([('-f','answer all question by default'),
358 ('--name=X', 'Define the name associated with the new run')])
359
360
364
369
370
374
378
379
381 logger.info("syntax: open FILE ")
382 logger.info("-- open a file with the appropriate editor.")
383 logger.info(' If FILE belongs to index.html, param_card.dat, run_card.dat')
384 logger.info(' the path to the last created/used directory is used')
385
387 if data:
388 logger.info('-- local options:')
389 for name, info in data:
390 logger.info(' %s : %s' % (name, info))
391
392 logger.info("-- session options:")
393 logger.info(" Note that those options will be kept for the current session")
394 logger.info(" --cluster : Submit to the cluster. Current cluster: %s" % self.options['cluster_type'])
395 logger.info(" --multicore : Run in multi-core configuration")
396 logger.info(" --nb_core=X : limit the number of core to use to X.")
397
398
399
400
401
402
403
405 """ The Series of check routine for the aMCatNLOCmd"""
406
def check_shower(self, args, options):
    """Check the validity of the line. args[0] is the run_directory.

    On success the run name is set and args[0] is replaced in place by the
    absolute path of the corresponding Events sub-directory.
    Raises InvalidCmd if no run name is given or the directory is missing.
    """
    if options['force']:
        self.force = True

    if len(args) == 0:
        self.help_shower()
        # callable raise form: valid for both python2 and python3
        raise self.InvalidCmd('Invalid syntax, please specify the run name')
    if not os.path.isdir(pjoin(self.me_dir, 'Events', args[0])):
        raise self.InvalidCmd('Directory %s does not exists' % \
                        pjoin(os.getcwd(), 'Events', args[0]))

    self.set_run_name(args[0], level='shower')
    # hand back the absolute event-directory path to the caller
    args[0] = pjoin(self.me_dir, 'Events', args[0])
422
424 """Check the argument for the plot command
425 plot run_name modes"""
426
427
428 madir = self.options['madanalysis_path']
429 td = self.options['td_path']
430
431 if not madir or not td:
432 logger.info('Retry to read configuration file to find madanalysis/td')
433 self.set_configuration()
434
435 madir = self.options['madanalysis_path']
436 td = self.options['td_path']
437
438 if not madir:
439 error_msg = 'No Madanalysis path correctly set.'
440 error_msg += 'Please use the set command to define the path and retry.'
441 error_msg += 'You can also define it in the configuration file.'
442 raise self.InvalidCmd(error_msg)
443 if not td:
444 error_msg = 'No path to td directory correctly set.'
445 error_msg += 'Please use the set command to define the path and retry.'
446 error_msg += 'You can also define it in the configuration file.'
447 raise self.InvalidCmd(error_msg)
448
449 if len(args) == 0:
450 if not hasattr(self, 'run_name') or not self.run_name:
451 self.help_plot()
452 raise self.InvalidCmd('No run name currently define. Please add this information.')
453 args.append('all')
454 return
455
456
457 if args[0] not in self._plot_mode:
458 self.set_run_name(args[0], level='plot')
459 del args[0]
460 if len(args) == 0:
461 args.append('all')
462 elif not self.run_name:
463 self.help_plot()
464 raise self.InvalidCmd('No run name currently define. Please add this information.')
465
466 for arg in args:
467 if arg not in self._plot_mode and arg != self.run_name:
468 self.help_plot()
469 raise self.InvalidCmd('unknown options %s' % arg)
470
472 """Check the argument for pythia command
473 syntax: pgs [NAME]
474 Note that other option are already remove at this point
475 """
476
477
478 if not self.options['pythia-pgs_path']:
479 logger.info('Retry to read configuration file to find pythia-pgs path')
480 self.set_configuration()
481
482 if not self.options['pythia-pgs_path'] or not \
483 os.path.exists(pjoin(self.options['pythia-pgs_path'],'src')):
484 error_msg = 'No pythia-pgs path correctly set.'
485 error_msg += 'Please use the set command to define the path and retry.'
486 error_msg += 'You can also define it in the configuration file.'
487 raise self.InvalidCmd(error_msg)
488
489 tag = [a for a in arg if a.startswith('--tag=')]
490 if tag:
491 arg.remove(tag[0])
492 tag = tag[0][6:]
493
494
495 if len(arg) == 0 and not self.run_name:
496 if self.results.lastrun:
497 arg.insert(0, self.results.lastrun)
498 else:
499 raise self.InvalidCmd('No run name currently define. Please add this information.')
500
501 if len(arg) == 1 and self.run_name == arg[0]:
502 arg.pop(0)
503
504 if not len(arg) and \
505 not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')):
506 self.help_pgs()
507 raise self.InvalidCmd('''No file file pythia_events.hep currently available
508 Please specify a valid run_name''')
509
510 lock = None
511 if len(arg) == 1:
512 prev_tag = self.set_run_name(arg[0], tag, 'pgs')
513 filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name))
514
515 if not filenames:
516 raise self.InvalidCmd('No events file corresponding to %s run with tag %s. '% (self.run_name, prev_tag))
517 else:
518 input_file = filenames[0]
519 output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
520 lock = cluster.asyncrone_launch('gunzip',stdout=open(output_file,'w'),
521 argument=['-c', input_file],
522 close_fds=True)
523 else:
524 if tag:
525 self.run_card['run_tag'] = tag
526 self.set_run_name(self.run_name, tag, 'pgs')
527
528 return lock
529
530
532 """Check the argument for pythia command
533 syntax: delphes [NAME]
534 Note that other option are already remove at this point
535 """
536
537
538 if not self.options['delphes_path']:
539 logger.info('Retry to read configuration file to find delphes path')
540 self.set_configuration()
541
542 if not self.options['delphes_path']:
543 error_msg = 'No delphes path correctly set.'
544 error_msg += 'Please use the set command to define the path and retry.'
545 error_msg += 'You can also define it in the configuration file.'
546 raise self.InvalidCmd(error_msg)
547
548 tag = [a for a in arg if a.startswith('--tag=')]
549 if tag:
550 arg.remove(tag[0])
551 tag = tag[0][6:]
552
553
554 if len(arg) == 0 and not self.run_name:
555 if self.results.lastrun:
556 arg.insert(0, self.results.lastrun)
557 else:
558 raise self.InvalidCmd('No run name currently define. Please add this information.')
559
560 if len(arg) == 1 and self.run_name == arg[0]:
561 arg.pop(0)
562
563 if not len(arg) and \
564 not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')):
565 self.help_pgs()
566 raise self.InvalidCmd('''No file file pythia_events.hep currently available
567 Please specify a valid run_name''')
568
569 if len(arg) == 1:
570 prev_tag = self.set_run_name(arg[0], tag, 'delphes')
571 filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events'))
572
573
574 if not filenames:
575 raise self.InvalidCmd('No events file corresponding to %s run with tag %s.:%s '\
576 % (self.run_name, prev_tag,
577 pjoin(self.me_dir,'Events',self.run_name, '%s_pythia_events.hep.gz' % prev_tag)))
578 else:
579 input_file = filenames[0]
580 output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
581 lock = cluster.asyncrone_launch('gunzip',stdout=open(output_file,'w'),
582 argument=['-c', input_file],
583 close_fds=True)
584 else:
585 if tag:
586 self.run_card['run_tag'] = tag
587 self.set_run_name(self.run_name, tag, 'delphes')
588
def check_calculate_xsect(self, args, options):
    """check the validity of the line. args is ORDER,
    ORDER being LO or NLO. If no mode is passed, NLO is used"""

    if options['force']:
        self.force = True

    # default to NLO when no order is given
    if not args:
        args.append('NLO')
        return

    if len(args) > 1:
        self.help_calculate_xsect()
        raise self.InvalidCmd('Invalid Syntax: Too many argument')

    elif len(args) == 1:
        if not args[0] in ['NLO', 'LO']:
            # bugfix: the original formatted args[1] here, which raises
            # IndexError in this len(args)==1 branch; args[0] is the mode
            raise self.InvalidCmd('%s is not a valid mode, please use "LO" or "NLO"' % args[0])
        mode = args[0]

    # check that the cluster/multicore options are consistent
    if options['multicore'] and options['cluster']:
        raise self.InvalidCmd('options -m (--multicore) and -c (--cluster)' + \
                        ' are not compatible. Please choose one.')
615
616
def check_generate_events(self, args, options):
    """check the validity of the line. args is ORDER,
    ORDER being LO or NLO. If no mode is passed, NLO is used"""

    # default to NLO when no order is given
    if not args:
        args.append('NLO')
        return

    if len(args) > 1:
        self.help_generate_events()
        raise self.InvalidCmd('Invalid Syntax: Too many argument')

    elif len(args) == 1:
        if not args[0] in ['NLO', 'LO']:
            # bugfix: the original formatted args[1] here, which raises
            # IndexError in this len(args)==1 branch; args[0] is the mode
            raise self.InvalidCmd('%s is not a valid mode, please use "LO" or "NLO"' % args[0])
        mode = args[0]

    # check that the cluster/multicore options are consistent
    if options['multicore'] and options['cluster']:
        raise self.InvalidCmd('options -m (--multicore) and -c (--cluster)' + \
                        ' are not compatible. Please choose one.')
640
642 """check the validity of line"""
643
644 if len(args) == 0:
645 self.help_banner_run()
646 raise self.InvalidCmd('banner_run requires at least one argument.')
647
648 tag = [a[6:] for a in args if a.startswith('--tag=')]
649
650
651 if os.path.exists(args[0]):
652 type ='banner'
653 format = self.detect_card_type(args[0])
654 if format != 'banner':
655 raise self.InvalidCmd('The file is not a valid banner.')
656 elif tag:
657 args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \
658 (args[0], tag))
659 if not os.path.exists(args[0]):
660 raise self.InvalidCmd('No banner associates to this name and tag.')
661 else:
662 name = args[0]
663 type = 'run'
664 banners = misc.glob('*_banner.txt', pjoin(self.me_dir,'Events', args[0]))
665 if not banners:
666 raise self.InvalidCmd('No banner associates to this name.')
667 elif len(banners) == 1:
668 args[0] = banners[0]
669 else:
670
671 tags = [os.path.basename(p)[len(args[0])+1:-11] for p in banners]
672 tag = self.ask('which tag do you want to use?', tags[0], tags)
673 args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \
674 (args[0], tag))
675
676 run_name = [arg[7:] for arg in args if arg.startswith('--name=')]
677 if run_name:
678 try:
679 self.exec_cmd('remove %s all banner -f' % run_name)
680 except Exception:
681 pass
682 self.set_run_name(args[0], tag=None, level='parton', reload_card=True)
683 elif type == 'banner':
684 self.set_run_name(self.find_available_run_name(self.me_dir))
685 elif type == 'run':
686 if not self.results[name].is_empty():
687 run_name = self.find_available_run_name(self.me_dir)
688 logger.info('Run %s is not empty so will use run_name: %s' % \
689 (name, run_name))
690 self.set_run_name(run_name)
691 else:
692 try:
693 self.exec_cmd('remove %s all banner -f' % run_name)
694 except Exception:
695 pass
696 self.set_run_name(name)
697
698
699
def check_launch(self, args, options):
    """check the validity of the line. args is MODE
    MODE being LO, NLO, aMC@NLO or aMC@LO. If no mode is passed, auto is used"""

    if options['force']:
        self.force = True

    # default to automatic mode selection when no mode is given
    if not args:
        args.append('auto')
        return

    if len(args) > 1:
        self.help_launch()
        raise self.InvalidCmd('Invalid Syntax: Too many argument')

    elif len(args) == 1:
        if not args[0] in ['LO', 'NLO', 'aMC@NLO', 'aMC@LO','auto']:
            raise self.InvalidCmd('%s is not a valid mode, please use "LO", "NLO", "aMC@NLO" or "aMC@LO"' % args[0])
        mode = args[0]

    # check that the additional options are consistent with the mode
    if options['multicore'] and options['cluster']:
        raise self.InvalidCmd('options -m (--multicore) and -c (--cluster)' + \
                        ' are not compatible. Please choose one.')
    if mode == 'NLO' and options['reweightonly']:
        raise self.InvalidCmd('option -r (--reweightonly) needs mode "aMC@NLO" or "aMC@LO"')
729
730
def check_compile(self, args, options):
    """check the validity of the line. args is MODE
    MODE being FO or MC. If no mode is passed, MC is used"""

    if options['force']:
        self.force = True

    # default to MC (event generation) compilation when no mode is given
    if not args:
        args.append('MC')
        return

    if len(args) > 1:
        self.help_compile()
        raise self.InvalidCmd('Invalid Syntax: Too many argument')

    elif len(args) == 1:
        if not args[0] in ['MC', 'FO']:
            raise self.InvalidCmd('%s is not a valid mode, please use "FO" or "MC"' % args[0])
        mode = args[0]
752
753
754
755
756
757
758
760 """ The Series of help routine for the MadGraphCmd"""
761
763 """auto-completion for launch command"""
764
765 args = self.split_arg(line[0:begidx])
766 if len(args) == 1:
767
768 return self.list_completion(text,['LO','NLO','aMC@NLO','aMC@LO'],line)
769 elif len(args) == 2 and line[begidx-1] == '@':
770 return self.list_completion(text,['LO','NLO'],line)
771 else:
772 opts = []
773 for opt in _launch_parser.option_list:
774 opts += opt._long_opts + opt._short_opts
775 return self.list_completion(text, opts, line)
776
def complete_banner_run(self, text, line, begidx, endidx):
    "Complete the banner run command"
    try:
        args = self.split_arg(line[0:begidx], error=False)

        # completing inside a filesystem path
        if args[-1].endswith(os.path.sep):
            return self.path_completion(text,
                                os.path.join('.',*[a for a in args \
                                                    if a.endswith(os.path.sep)]))

        # a run name was already given: offer its banner tags and options
        if len(args) > 1:
            tags = misc.glob('%s_*_banner.txt' % args[1], pjoin(self.me_dir, 'Events', args[1]))
            tags = ['%s' % os.path.basename(t)[len(args[1])+1:-11] for t in tags]

            if args[-1] != '--tag=':
                tags = ['--tag=%s' % t for t in tags]
            else:
                return self.list_completion(text, tags)
            return self.list_completion(text, tags + ['--name=','-f'], line)

        # first argument: either a path to a banner or a run name
        possibilites = {}

        comp = self.path_completion(text, os.path.join('.',*[a for a in args \
                                                    if a.endswith(os.path.sep)]))
        if os.path.sep in line:
            return comp
        else:
            possibilites['Path from ./'] = comp

        run_list = misc.glob(pjoin('*','*_banner.txt'), pjoin(self.me_dir, 'Events'))
        run_list = [n.rsplit('/',2)[1] for n in run_list]
        possibilites['RUN Name'] = self.list_completion(text, run_list)

        return self.deal_multiple_categories(possibilites)

    except Exception as error:
        # completion must never crash the interpreter: report and give up.
        # ("except ... as" / print() replace the python2-only forms and are
        # valid on both python2.6+ and python3)
        print(error)
820
821
834
847
849 """auto-completion for generate_events command
850 call the compeltion for launch"""
851 self.complete_launch(text, line, begidx, endidx)
852
853
863
879
881 "Complete the pgs command"
882 args = self.split_arg(line[0:begidx], error=False)
883 if len(args) == 1:
884
885 data = misc.glob(pjoin('*', 'events_*.hep.gz'),
886 pjoin(self.me_dir, 'Events'))
887 data = [n.rsplit('/',2)[1] for n in data]
888 tmp1 = self.list_completion(text, data)
889 if not self.run_name:
890 return tmp1
891 else:
892 tmp2 = self.list_completion(text, self._run_options + ['-f',
893 '--tag=' ,'--no_default'], line)
894 return tmp1 + tmp2
895 else:
896 return self.list_completion(text, self._run_options + ['-f',
897 '--tag=','--no_default'], line)
898
899 complete_delphes = complete_pgs
900
903
904
905
906
907 -class aMCatNLOCmd(CmdExtended, HelpToCmd, CompleteForCmd, common_run.CommonRunCmd):
908 """The command line processor of MadGraph"""
909
910
911 true = ['T','.true.',True,'true']
912
913 _run_options = ['--cluster','--multicore','--nb_core=','--nb_core=2', '-c', '-m']
914 _generate_options = ['-f', '--laststep=parton', '--laststep=pythia', '--laststep=pgs', '--laststep=delphes']
915 _calculate_decay_options = ['-f', '--accuracy=0.']
916 _set_options = ['stdout_level','fortran_compiler','cpp_compiler','timeout']
917 _plot_mode = ['all', 'parton','shower','pgs','delphes']
918 _clean_mode = _plot_mode + ['channel', 'banner']
919 _display_opts = ['run_name', 'options', 'variable']
920
921
922 web = False
923 cluster_mode = 0
924 queue = 'madgraph'
925 nb_core = None
926 make_opts_var = {}
927
928 next_possibility = {
929 'start': ['generate_events [OPTIONS]', 'calculate_crossx [OPTIONS]', 'launch [OPTIONS]',
930 'help generate_events'],
931 'generate_events': ['generate_events [OPTIONS]', 'shower'],
932 'launch': ['launch [OPTIONS]', 'shower'],
933 'shower' : ['generate_events [OPTIONS]']
934 }
935
936
937
def __init__(self, me_dir = None, options = {}, *completekey, **stdin):
    """ add information to the cmd

    me_dir  -- the process output directory.
    options -- run options passed through to CmdExtended (read-only here,
               so the shared mutable default is harmless).
    """
    self.start_time = 0
    CmdExtended.__init__(self, me_dir, options, *completekey, **stdin)

    # interface state
    self.mode = 'aMCatNLO'
    self.nb_core = 0
    self.prompt = "%s>"%os.path.basename(pjoin(self.me_dir))

    # load stored run results and configure html/web reporting
    self.load_results_db()
    self.results.def_web_mode(self.web)

    # Close the proc_card handle explicitly instead of relying on garbage
    # collection (the original leaked an open file object).
    proc_file = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat'))
    try:
        proc_card = proc_file.read()
    finally:
        proc_file.close()

    # the gfortran version only matters for processes with QCD real
    # corrections; block the run if the compiler is too old
    if '[real=QCD]' not in proc_card:
        check_compiler(self.options, block=True)
957
958
959
961 """ run the shower on a given parton level file """
962 argss = self.split_arg(line)
963 (options, argss) = _launch_parser.parse_args(argss)
964
965 options = options.__dict__
966 options['reweightonly'] = False
967 self.check_shower(argss, options)
968 evt_file = pjoin(os.getcwd(), argss[0], 'events.lhe')
969 self.ask_run_configuration('onlyshower', options)
970 self.run_mcatnlo(evt_file, options)
971
972 self.update_status('', level='all', update_results=True)
973
974
976 """Create the plot for a given run"""
977
978
979 args = self.split_arg(line)
980
981 self.check_plot(args)
982 logger.info('plot for run %s' % self.run_name)
983
984 if not self.force:
985 self.ask_edit_cards([], args, plot=True)
986
987 if any([arg in ['parton'] for arg in args]):
988 filename = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')
989 if os.path.exists(filename+'.gz'):
990 misc.gunzip(filename)
991 if os.path.exists(filename):
992 logger.info('Found events.lhe file for run %s' % self.run_name)
993 shutil.move(filename, pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'))
994 self.create_plot('parton')
995 shutil.move(pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'), filename)
996 misc.gzip(filename)
997
998 if any([arg in ['all','parton'] for arg in args]):
999 filename = pjoin(self.me_dir, 'Events', self.run_name, 'MADatNLO.top')
1000 if os.path.exists(filename):
1001 logger.info('Found MADatNLO.top file for run %s' % \
1002 self.run_name)
1003 output = pjoin(self.me_dir, 'HTML',self.run_name, 'plots_parton.html')
1004 plot_dir = pjoin(self.me_dir, 'HTML', self.run_name, 'plots_parton')
1005
1006 if not os.path.isdir(plot_dir):
1007 os.makedirs(plot_dir)
1008 top_file = pjoin(plot_dir, 'plots.top')
1009 files.cp(filename, top_file)
1010 madir = self.options['madanalysis_path']
1011 tag = self.run_card['run_tag']
1012 td = self.options['td_path']
1013 misc.call(['%s/plot' % self.dirbin, madir, td],
1014 stdout = open(pjoin(plot_dir, 'plot.log'),'a'),
1015 stderr = subprocess.STDOUT,
1016 cwd=plot_dir)
1017
1018 misc.call(['%s/plot_page-pl' % self.dirbin,
1019 os.path.basename(plot_dir),
1020 'parton'],
1021 stdout = open(pjoin(plot_dir, 'plot.log'),'a'),
1022 stderr = subprocess.STDOUT,
1023 cwd=pjoin(self.me_dir, 'HTML', self.run_name))
1024 shutil.move(pjoin(self.me_dir, 'HTML',self.run_name ,'plots.html'),
1025 output)
1026
1027 os.remove(pjoin(self.me_dir, 'Events', 'plots.top'))
1028
1029 if any([arg in ['all','shower'] for arg in args]):
1030 filenames = misc.glob('events_*.lhe.gz', pjoin(self.me_dir, 'Events', self.run_name))
1031 if len(filenames) != 1:
1032 filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name))
1033 if len(filenames) != 1:
1034 logger.info('No shower level file found for run %s' % \
1035 self.run_name)
1036 return
1037 filename = filenames[0]
1038 misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep'))
1039
1040 if not os.path.exists(pjoin(self.me_dir, 'Cards', 'pythia_card.dat')):
1041 if aMCatNLO and not self.options['mg5_path']:
1042 raise "plotting NLO HEP file needs MG5 utilities"
1043
1044 files.cp(pjoin(self.options['mg5_path'], 'Template','LO', 'Cards', 'pythia_card_default.dat'),
1045 pjoin(self.me_dir, 'Cards', 'pythia_card.dat'))
1046 self.run_hep2lhe()
1047 else:
1048 filename = filenames[0]
1049 misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep'))
1050
1051 self.create_plot('shower')
1052 lhe_file_name = filename.replace('.hep.gz', '.lhe')
1053 shutil.move(pjoin(self.me_dir, 'Events','pythia_events.lhe'),
1054 lhe_file_name)
1055 misc.gzip(lhe_file_name)
1056
1057 if any([arg in ['all','pgs'] for arg in args]):
1058 filename = pjoin(self.me_dir, 'Events', self.run_name,
1059 '%s_pgs_events.lhco' % self.run_tag)
1060 if os.path.exists(filename+'.gz'):
1061 misc.gunzip(filename)
1062 if os.path.exists(filename):
1063 self.create_plot('PGS')
1064 misc.gzip(filename)
1065 else:
1066 logger.info('No valid files for pgs plot')
1067
1068 if any([arg in ['all','delphes'] for arg in args]):
1069 filename = pjoin(self.me_dir, 'Events', self.run_name,
1070 '%s_delphes_events.lhco' % self.run_tag)
1071 if os.path.exists(filename+'.gz'):
1072 misc.gunzip(filename)
1073 if os.path.exists(filename):
1074
1075 self.create_plot('Delphes')
1076
1077 misc.gzip(filename)
1078 else:
1079 logger.info('No valid files for delphes plot')
1080
1081
1082
1084 """Main commands: calculates LO/NLO cross-section, using madevent_mintFO
1085 this function wraps the do_launch one"""
1086
1087 self.start_time = time.time()
1088 argss = self.split_arg(line)
1089
1090 (options, argss) = _calculate_xsect_parser.parse_args(argss)
1091 options = options.__dict__
1092 options['reweightonly'] = False
1093 options['parton'] = True
1094 self.check_calculate_xsect(argss, options)
1095 self.do_launch(line, options, argss)
1096
1097
1099 """Make a run from the banner file"""
1100
1101 args = self.split_arg(line)
1102
1103 self.check_banner_run(args)
1104
1105
1106 for name in ['shower_card.dat', 'madspin_card.dat']:
1107 try:
1108 os.remove(pjoin(self.me_dir, 'Cards', name))
1109 except Exception:
1110 pass
1111
1112 banner_mod.split_banner(args[0], self.me_dir, proc_card=False)
1113
1114
1115 if not self.force:
1116 ans = self.ask('Do you want to modify the Cards/Run Type?', 'n', ['y','n'])
1117 if ans == 'n':
1118 self.force = True
1119
1120
1121 if self.force:
1122 mode_status = {'order': 'NLO', 'fixed_order': False, 'madspin':False, 'shower':True}
1123 banner = banner_mod.Banner(args[0])
1124 for line in banner['run_settings']:
1125 if '=' in line:
1126 mode, value = [t.strip() for t in line.split('=')]
1127 mode_status[mode] = value
1128 else:
1129 mode_status = {}
1130
1131
1132 self.do_launch('-n %s %s' % (self.run_name, '-f' if self.force else ''),
1133 switch=mode_status)
1134
1135
1137 """Main commands: generate events
1138 this function just wraps the do_launch one"""
1139 self.do_launch(line)
1140
1141
1142
1144 """Advanced commands: this is for creating the correct run_card.inc from the nlo format"""
1145
1146 self.check_param_card(pjoin(self.me_dir, 'Cards','param_card.dat'))
1147 return super(aMCatNLOCmd,self).do_treatcards(line, amcatnlo)
1148
1149
1151 """assign all configuration variable from file
1152 loop over the different config file if config_file not define """
1153 return super(aMCatNLOCmd,self).set_configuration(amcatnlo=amcatnlo, **opt)
1154
1155
1156 - def do_launch(self, line, options={}, argss=[], switch={}):
1157 """Main commands: launch the full chain
1158 options and args are relevant if the function is called from other
1159 functions, such as generate_events or calculate_xsect
1160 mode gives the list of switch needed for the computation (usefull for banner_run)
1161 """
1162
1163 if not argss and not options:
1164 self.start_time = time.time()
1165 argss = self.split_arg(line)
1166
1167 (options, argss) = _launch_parser.parse_args(argss)
1168 options = options.__dict__
1169 self.check_launch(argss, options)
1170
1171
1172 if 'run_name' in options.keys() and options['run_name']:
1173 self.run_name = options['run_name']
1174
1175
1176 if os.path.isdir(pjoin(self.me_dir, 'Events', self.run_name)):
1177 logger.warning('Removing old run information in \n'+
1178 pjoin(self.me_dir, 'Events', self.run_name))
1179 files.rm(pjoin(self.me_dir, 'Events', self.run_name))
1180 self.results.delete_run(self.run_name)
1181 else:
1182 self.run_name = ''
1183
1184 if options['multicore']:
1185 self.cluster_mode = 2
1186 elif options['cluster']:
1187 self.cluster_mode = 1
1188
1189 if not switch:
1190 mode = argss[0]
1191
1192 if mode in ['LO', 'NLO']:
1193 options['parton'] = True
1194 mode = self.ask_run_configuration(mode, options)
1195 else:
1196 mode = self.ask_run_configuration('auto', options, switch)
1197
1198 self.results.add_detail('run_mode', mode)
1199
1200 self.update_status('Starting run', level=None, update_results=True)
1201
1202 if self.options['automatic_html_opening']:
1203 misc.open_file(os.path.join(self.me_dir, 'crossx.html'))
1204 self.options['automatic_html_opening'] = False
1205
1206 if '+' in mode:
1207 mode = mode.split('+')[0]
1208 self.compile(mode, options)
1209 evt_file = self.run(mode, options)
1210
1211 if self.run_card['nevents'] == 0 and not mode in ['LO', 'NLO']:
1212 logger.info('No event file generated: grids have been set-up with a '\
1213 'relative precision of %s' % self.run_card['req_acc'])
1214 return
1215
1216 if not mode in ['LO', 'NLO']:
1217 assert evt_file == pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'), '%s != %s' %(evt_file, pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz'))
1218 self.exec_cmd('reweight -from_cards', postcmd=False)
1219 self.exec_cmd('decay_events -from_cards', postcmd=False)
1220 evt_file = pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')
1221
1222 if not mode in ['LO', 'NLO', 'noshower', 'noshowerLO'] \
1223 and not options['parton']:
1224 self.run_mcatnlo(evt_file, options)
1225 elif mode == 'noshower':
1226 logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical.
1227 Please, shower the Les Houches events before using them for physics analyses.""")
1228
1229
1230 self.update_status('', level='all', update_results=True)
1231 if self.run_card['ickkw'] == 3 and \
1232 (mode in ['noshower'] or \
1233 (('PYTHIA8' not in self.run_card['parton_shower'].upper()) and (mode in ['aMC@NLO']))):
1234 logger.warning("""You are running with FxFx merging enabled.
1235 To be able to merge samples of various multiplicities without double counting,
1236 you have to remove some events after showering 'by hand'.
1237 Please read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")
1238
1239 self.store_result()
1240
1241 if self.param_card_iterator:
1242 param_card_iterator = self.param_card_iterator
1243 self.param_card_iterator = []
1244 param_card_iterator.store_entry(self.run_name, self.results.current['cross'])
1245 orig_name = self.run_name
1246
1247 with misc.TMP_variable(self, 'allow_notification_center', False):
1248 for i,card in enumerate(param_card_iterator):
1249 card.write(pjoin(self.me_dir,'Cards','param_card.dat'))
1250 if not options['force']:
1251 options['force'] = True
1252 if options['run_name']:
1253 options['run_name'] = '%s_%s' % (orig_name, i+1)
1254 if not argss:
1255 argss = [mode, "-f"]
1256 elif argss[0] == "auto":
1257 argss[0] = mode
1258 self.do_launch("", options=options, argss=argss, switch=switch)
1259
1260 param_card_iterator.store_entry(self.run_name, self.results.current['cross'])
1261
1262 param_card_iterator.write(pjoin(self.me_dir,'Cards','param_card.dat'))
1263 name = misc.get_scan_name(orig_name, self.run_name)
1264 path = pjoin(self.me_dir, 'Events','scan_%s.txt' % name)
1265 logger.info("write all cross-section results in %s" % path, '$MG:color:BLACK')
1266 param_card_iterator.write_summary(path)
1267
1268 if self.allow_notification_center:
1269 misc.apple_notify('Run %s finished' % os.path.basename(self.me_dir),
1270 '%s: %s +- %s ' % (self.results.current['run_name'],
1271 self.results.current['cross'],
1272 self.results.current['error']))
1273
1274
1275
1277 """Advanced commands: just compile the executables """
1278 argss = self.split_arg(line)
1279
1280 (options, argss) = _compile_parser.parse_args(argss)
1281 options = options.__dict__
1282 options['reweightonly'] = False
1283 options['nocompile'] = False
1284 self.check_compile(argss, options)
1285
1286 mode = {'FO': 'NLO', 'MC': 'aMC@NLO'}[argss[0]]
1287 self.ask_run_configuration(mode, options)
1288 self.compile(mode, options)
1289
1290
1291 self.update_status('', level='all', update_results=True)
1292
1293
1295 """Update random number seed with the value from the run_card.
1296 If this is 0, update the number according to a fresh one"""
1297 iseed = self.run_card['iseed']
1298 if iseed == 0:
1299 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'))
1300 iseed = int(randinit.read()[2:]) + 1
1301 randinit.close()
1302 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'), 'w')
1303 randinit.write('r=%d' % iseed)
1304 randinit.close()
1305
1306
    def run(self, mode, options):
        """runs aMC@NLO. Returns the name of the event file created
        (via reweight_and_collect_events) for the event-generation modes;
        returns None for the fixed-order modes 'LO'/'NLO'.

        mode: one of 'LO', 'NLO', 'aMC@LO', 'aMC@NLO', 'noshower', 'noshowerLO'
        options: dict of run options; 'only_generation' and 'reweightonly'
            are read here (the former is defaulted to False if absent).
        """
        logger.info('Starting run')

        if not 'only_generation' in options.keys():
            options['only_generation'] = False

        # For the second APPLgrid step (iappl == 2) the existing grids must be
        # reused, so force 'only_generation' for fixed-order runs.
        if mode in ['LO', 'NLO'] and self.run_card['iappl'] == 2 and not options['only_generation']:
            options['only_generation'] = True
        self.get_characteristics(pjoin(self.me_dir, 'SubProcesses', 'proc_characteristics'))
        self.setup_cluster_or_multicore()
        self.update_random_seed()
        # Glob patterns of the channel directories produced by each run mode.
        folder_names = {'LO': ['born_G*'], 'NLO': ['all_G*'],
                    'aMC@LO': ['GB*'], 'aMC@NLO': ['GF*']}
        folder_names['noshower'] = folder_names['aMC@NLO']
        folder_names['noshowerLO'] = folder_names['aMC@LO']
        # List of P* subprocess directories, one per line in subproc.mg.
        p_dirs = [d for d in \
                open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d]
        # Remove stale results from a previous run of the same name.
        self.clean_previous_results(options,p_dirs,folder_names[mode])

        # Status messages for the three MINT steps (indexed by mint_step below).
        mcatnlo_status = ['Setting up grids', 'Computing upper envelope', 'Generating events']

        if options['reweightonly']:
            # Skip the integration entirely: only redo the event
            # reweighting/collection from existing results.
            event_norm=self.run_card['event_norm']
            nevents=self.run_card['nevents']
            return self.reweight_and_collect_events(options, mode, nevents, event_norm)

        if mode in ['LO', 'NLO']:
            # --- fixed-order running ---
            mode_dict = {'NLO': 'all', 'LO': 'born'}
            logger.info('Doing fixed order %s' % mode)
            req_acc = self.run_card['req_acc_FO']

            # Distribute the starting APPLgrids before running when in
            # grid-filling mode.
            if self.run_card['iappl'] == 2:
                self.applgrid_distribute(options,mode_dict[mode],p_dirs)

            # Build the list of job dictionaries to run; integration_step is
            # updated so that existing res_*.dat files are not overwritten.
            integration_step=-1
            jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \
                                req_acc,mode_dict[mode],integration_step,mode,fixed_order=True)
            self.prepare_directories(jobs_to_run,mode)

            # Iterate: run all jobs, collect their results and keep refining
            # until collect_the_results returns no more jobs to run.
            while True:
                integration_step=integration_step+1
                self.run_all_jobs(jobs_to_run,integration_step)
                self.collect_log_files(jobs_to_run,integration_step)
                jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
                                jobs_to_collect,integration_step,mode,mode_dict[mode])
                if not jobs_to_run:
                    # requested accuracy reached
                    break

            # Finalise (plots, grids, ...) and declare the run complete.
            self.finalise_run_FO(folder_names[mode],jobs_to_collect)
            self.update_status('Run complete', level='parton', update_results=True)
            return

        elif mode in ['aMC@NLO','aMC@LO','noshower','noshowerLO']:
            # --- event-generation running ---
            if self.ninitial == 1:
                raise aMCatNLOError('Decay processes can only be run at fixed order.')
            mode_dict = {'aMC@NLO': 'all', 'aMC@LO': 'born',\
                         'noshower': 'all', 'noshowerLO': 'born'}
            shower = self.run_card['parton_shower'].upper()
            nevents = self.run_card['nevents']
            req_acc = self.run_card['req_acc']
            # Sanity checks on the nevents/req_acc combination.
            if nevents == 0 and req_acc < 0 :
                raise aMCatNLOError('Cannot determine the required accuracy from the number '\
                                        'of events, because 0 events requested. Please set '\
                                        'the "req_acc" parameter in the run_card to a value '\
                                        'between 0 and 1')
            elif req_acc >1 or req_acc == 0 :
                raise aMCatNLOError('Required accuracy ("req_acc" in the run_card) should '\
                                        'be between larger than 0 and smaller than 1, '\
                                        'or set to -1 for automatic determination. Current '\
                                        'value is %f' % req_acc)
            # If very many events are requested with automatic accuracy,
            # cap the target accuracy at 0.1%.
            elif req_acc < 0 and nevents > 1000000 :
                req_acc=0.001

            shower_list = ['HERWIG6', 'HERWIGPP', 'PYTHIA6Q', 'PYTHIA6PT', 'PYTHIA8']

            if not shower in shower_list:
                raise aMCatNLOError('%s is not a valid parton shower. '\
                                    'Please use one of the following: %s' \
                                    % (shower, ', '.join(shower_list)))

            # PYTHIA6PT cannot handle processes with final-state radiation.
            if shower == 'PYTHIA6PT' and self.proc_characteristics['has_fsr']:
                raise aMCatNLOError('PYTHIA6PT does not support processes with FSR')

            if mode in ['aMC@NLO', 'aMC@LO']:
                logger.info('Doing %s matched to parton shower' % mode[4:])
            elif mode in ['noshower','noshowerLO']:
                logger.info('Generating events without running the shower.')
            # NOTE(review): the branch below looks unreachable — every mode
            # accepted by this elif-arm matches one of the two tests above.
            elif options['only_generation']:
                logger.info('Generating events starting from existing results')

            jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \
                                    req_acc,mode_dict[mode],1,mode,fixed_order=False)

            # With 'only_generation' the grids exist already: collect them
            # instead of preparing fresh directories.
            if options['only_generation']:
                jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
                            jobs_to_collect,1,mode,mode_dict[mode],fixed_order=False)
            else:
                self.prepare_directories(jobs_to_run,mode,fixed_order=False)

            # Loop over the three MINT steps: grid setup, upper-envelope
            # computation, and event generation.
            for mint_step, status in enumerate(mcatnlo_status):
                if options['only_generation'] and mint_step < 2:
                    continue
                self.update_status(status, level='parton')
                self.run_all_jobs(jobs_to_run,mint_step,fixed_order=False)
                self.collect_log_files(jobs_to_run,mint_step)
                if mint_step+1==2 and nevents==0:
                    # No events requested: done after the upper-envelope step.
                    self.print_summary(options,2,mode)
                    return
                jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
                            jobs_to_collect,mint_step,mode,mode_dict[mode],fixed_order=False)
            # Sanity check on the generated event files.
            self.check_event_files(jobs_to_collect)

            if self.cluster_mode == 1:
                # Give the cluster time to transfer the files back to the
                # submission host before collecting the events.
                self.update_status(
                    'Waiting while files are transferred back from the cluster nodes',
                    level='parton')
                time.sleep(10)

            event_norm=self.run_card['event_norm']
            return self.reweight_and_collect_events(options, mode, nevents, event_norm)
1448
    def create_jobs_to_run(self,options,p_dirs,req_acc,run_mode,\
                           integration_step,mode,fixed_order=True):
        """Creates a list of dictionaries with all the jobs to be run.

        Returns (jobs_to_run, jobs_to_collect, integration_step).  Each job
        dict carries at least 'p_dir', 'channel', 'split', 'run_mode' and
        'wgt_frac'; for fresh runs also 'accuracy'/'niters'/'npoints' and
        'mint_mode'.  'run_mode' is 'born' or 'all'; for event generation
        (fixed_order=False) the on-disk directories are named
        G<B|F><channel>[_<split>], for fixed order <run_mode>_G<channel>[_<split>].
        """
        jobs_to_run=[]
        if not options['only_generation']:
            # Fresh run: take the channel list of every P* directory from
            # its channels.txt and create one job per channel.
            npoints = self.run_card['npoints_FO_grid']
            niters = self.run_card['niters_FO_grid']
            for p_dir in p_dirs:
                try:
                    with open(pjoin(self.me_dir,'SubProcesses',p_dir,'channels.txt')) as chan_file:
                        channels=chan_file.readline().split()
                except IOError:
                    # contribution without channels: skip it (best effort)
                    logger.warning('No integration channels found for contribution %s' % p_dir)
                    continue
                for channel in channels:
                    job={}
                    job['p_dir']=p_dir
                    job['channel']=channel
                    job['split']=0
                    # Initial grid-setup accuracy/iterations/points depend on
                    # whether req_acc_FO is automatic (-1) or explicit.
                    if fixed_order and req_acc == -1:
                        job['accuracy']=0
                        job['niters']=niters
                        job['npoints']=npoints
                    elif fixed_order and req_acc > 0:
                        job['accuracy']=0.10
                        job['niters']=6
                        job['npoints']=-1
                    elif not fixed_order:
                        job['accuracy']=0.03
                        job['niters']=12
                        job['npoints']=-1
                    else:
                        raise aMCatNLOError('No consistent "req_acc_FO" set. Use a value '+
                                            'between 0 and 1 or set it equal to -1.')
                    job['mint_mode']=0
                    job['run_mode']=run_mode
                    job['wgt_frac']=1.0
                    jobs_to_run.append(job)
            jobs_to_collect=copy.copy(jobs_to_run)
        else:
            # only_generation: reconstruct the job list from the channel
            # directories left on disk by a previous run.
            name_suffix={'born' :'B', 'all':'F'}
            for p_dir in p_dirs:
                for chan_dir in os.listdir(pjoin(self.me_dir,'SubProcesses',p_dir)):
                    if ((chan_dir.startswith(run_mode+'_G') and fixed_order) or\
                        (chan_dir.startswith('G'+name_suffix[run_mode]) and (not fixed_order))) and \
                       (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', p_dir, chan_dir)) or \
                        os.path.exists(pjoin(self.me_dir, 'SubProcesses', p_dir, chan_dir))):
                        job={}
                        job['p_dir']=p_dir
                        if fixed_order:
                            # directory name: <run_mode>_G<channel>[_<split>]
                            channel=chan_dir.split('_')[1]
                            job['channel']=channel[1:]  # strip the leading 'G'
                            if len(chan_dir.split('_')) == 3:
                                split=int(chan_dir.split('_')[2])
                            else:
                                split=0
                        else:
                            # directory name: G<B|F><channel>[_<split>]
                            if len(chan_dir.split('_')) == 2:
                                split=int(chan_dir.split('_')[1])
                                channel=chan_dir.split('_')[0]
                                job['channel']=channel[2:]  # strip 'GB'/'GF'
                            else:
                                job['channel']=chan_dir[2:]  # strip 'GB'/'GF'
                                split=0
                        job['split']=split
                        job['run_mode']=run_mode
                        job['dirname']=pjoin(self.me_dir, 'SubProcesses', p_dir, chan_dir)
                        job['wgt_frac']=1.0
                        if not fixed_order: job['mint_mode']=1
                        jobs_to_run.append(job)
            jobs_to_collect=copy.copy(jobs_to_run)
            if fixed_order:
                # Collect the existing results and bump integration_step past
                # any res_<n>.dat already present, so nothing is overwritten.
                jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run,
                                  jobs_to_collect,integration_step,mode,run_mode)
                integration_step=1
                for job in jobs_to_run:
                    while os.path.exists(pjoin(job['dirname'],'res_%s.dat' % integration_step)):
                        integration_step=integration_step+1
                integration_step=integration_step-1
            else:
                self.append_the_results(jobs_to_collect,integration_step)
        return jobs_to_run,jobs_to_collect,integration_step
1538
1540 """Set-up the G* directories for running"""
1541 name_suffix={'born' :'B' , 'all':'F'}
1542 for job in jobs_to_run:
1543 if job['split'] == 0:
1544 if fixed_order :
1545 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'],
1546 job['run_mode']+'_G'+job['channel'])
1547 else:
1548 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'],
1549 'G'+name_suffix[job['run_mode']]+job['channel'])
1550 else:
1551 if fixed_order :
1552 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'],
1553 job['run_mode']+'_G'+job['channel']+'_'+str(job['split']))
1554 else:
1555 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'],
1556 'G'+name_suffix[job['run_mode']]+job['channel']+'_'+str(job['split']))
1557 job['dirname']=dirname
1558 if not os.path.isdir(dirname):
1559 os.makedirs(dirname)
1560 self.write_input_file(job,fixed_order)
1561 if not fixed_order:
1562
1563 if job['split'] != 0:
1564 for f in ['grid.MC_integer','mint_grids','res_1']:
1565 if not os.path.isfile(pjoin(job['dirname'],f)):
1566 files.ln(pjoin(job['dirname'].rsplit("_",1)[0],f),job['dirname'])
1567
1568
1601
1602
1603 - def run_all_jobs(self,jobs_to_run,integration_step,fixed_order=True):
1604 """Loops over the jobs_to_run and executes them using the function 'run_exe'"""
1605 if fixed_order:
1606 if integration_step == 0:
1607 self.update_status('Setting up grids', level=None)
1608 else:
1609 self.update_status('Refining results, step %i' % integration_step, level=None)
1610 self.ijob = 0
1611 name_suffix={'born' :'B', 'all':'F'}
1612 if fixed_order:
1613 run_type="Fixed order integration step %s" % integration_step
1614 else:
1615 run_type="MINT step %s" % integration_step
1616 self.njobs=len(jobs_to_run)
1617 for job in jobs_to_run:
1618 executable='ajob1'
1619 if fixed_order:
1620 arguments=[job['channel'],job['run_mode'], \
1621 str(job['split']),str(integration_step)]
1622 else:
1623 arguments=[job['channel'],name_suffix[job['run_mode']], \
1624 str(job['split']),str(integration_step)]
1625 self.run_exe(executable,arguments,run_type,
1626 cwd=pjoin(self.me_dir,'SubProcesses',job['p_dir']))
1627
1628 if self.cluster_mode == 2:
1629 time.sleep(1)
1630 self.wait_for_complete(run_type)
1631
1632
    def collect_the_results(self,options,req_acc,jobs_to_run,jobs_to_collect,\
                            integration_step,mode,run_mode,fixed_order=True):
        """Collect the results, make HTML pages, print the summary and
        determine if there are more jobs to run. Returns the list
        of the jobs that still need to be run, as well as the
        complete list of jobs that need to be collected to get the
        final answer.
        """
        # Read the res_*.dat files of the step just finished and write the
        # per-channel cross-section summary.
        self.append_the_results(jobs_to_run,integration_step)
        self.cross_sect_dict = self.write_res_txt_file(jobs_to_collect,integration_step)
        # Regenerate the HTML result pages and record the current totals.
        if fixed_order:
            cross, error = sum_html.make_all_html_results(self, ['%s*' % run_mode])
        else:
            name_suffix={'born' :'B' , 'all':'F'}
            cross, error = sum_html.make_all_html_results(self, ['G%s*' % name_suffix[run_mode]])
        self.results.add_detail('cross', cross)
        self.results.add_detail('error', error)
        # Decide which jobs need another iteration (or, for event generation,
        # assign the per-channel accuracies/event numbers for the next step).
        jobs_to_run_new=self.update_jobs_to_run(req_acc,integration_step,jobs_to_run,fixed_order)
        if (not jobs_to_run_new) and fixed_order:
            # Fixed order and nothing left to run: print the final summary
            # (with scale/PDF uncertainties) and return.
            scale_pdf_info=self.collect_scale_pdf_info(options,jobs_to_collect)
            self.print_summary(options,integration_step,mode,scale_pdf_info,done=True)
            return jobs_to_run_new,jobs_to_collect
        elif jobs_to_run_new:
            # More jobs to run: print an intermediate summary only.
            scale_pdf_info=[]
            self.print_summary(options,integration_step,mode,scale_pdf_info,done=False)
        else:
            # Event generation finished: the summary is printed later, after
            # the events have been reweighted and collected.
            scale_pdf_info=[]
        # Prepare the directories for the next step.
        if (not fixed_order) and integration_step+1 == 2 :
            # Next MINT step is event generation: possibly split the jobs and
            # write the bookkeeping files the event collection needs.
            jobs_to_run_new,jobs_to_collect_new= \
                self.check_the_need_to_split(jobs_to_run_new,jobs_to_collect)
            self.prepare_directories(jobs_to_run_new,mode,fixed_order)
            self.write_nevents_unweighted_file(jobs_to_collect_new,jobs_to_collect)
            self.write_nevts_files(jobs_to_run_new)
        else:
            self.prepare_directories(jobs_to_run_new,mode,fixed_order)
            jobs_to_collect_new=jobs_to_collect
        return jobs_to_run_new,jobs_to_collect_new
1682
1683
1685 """writes the nevents_unweighted file in the SubProcesses directory.
1686 We also need to write the jobs that will generate 0 events,
1687 because that makes sure that the cross section from those channels
1688 is taken into account in the event weights (by collect_events.f).
1689 """
1690 content=[]
1691 for job in jobs:
1692 path=pjoin(job['dirname'].split('/')[-2],job['dirname'].split('/')[-1])
1693 lhefile=pjoin(path,'events.lhe')
1694 content.append(' %s %d %9e %9e' % \
1695 (lhefile.ljust(40),job['nevents'],job['resultABS']*job['wgt_frac'],job['wgt_frac']))
1696 for job in jobs0events:
1697 if job['nevents']==0:
1698 path=pjoin(job['dirname'].split('/')[-2],job['dirname'].split('/')[-1])
1699 lhefile=pjoin(path,'events.lhe')
1700 content.append(' %s %d %9e %9e' % \
1701 (lhefile.ljust(40),job['nevents'],job['resultABS'],1.))
1702 with open(pjoin(self.me_dir,'SubProcesses',"nevents_unweighted"),'w') as f:
1703 f.write('\n'.join(content)+'\n')
1704
1706 """write the nevts files in the SubProcesses/P*/G*/ directories"""
1707 for job in jobs:
1708 with open(pjoin(job['dirname'],'nevts'),'w') as f:
1709 f.write('%i\n' % job['nevents'])
1710
1712 """Looks in the jobs_to_run to see if there is the need to split the
1713 event generation step. Updates jobs_to_run and
1714 jobs_to_collect to replace the split-job by its
1715 splits. Also removes jobs that do not need any events.
1716 """
1717 nevt_job=self.run_card['nevt_job']
1718 if nevt_job > 0:
1719 jobs_to_collect_new=copy.copy(jobs_to_collect)
1720 for job in jobs_to_run:
1721 nevents=job['nevents']
1722 if nevents == 0:
1723 jobs_to_collect_new.remove(job)
1724 elif nevents > nevt_job:
1725 jobs_to_collect_new.remove(job)
1726 if nevents % nevt_job != 0 :
1727 nsplit=int(nevents/nevt_job)+1
1728 else:
1729 nsplit=int(nevents/nevt_job)
1730 for i in range(1,nsplit+1):
1731 job_new=copy.copy(job)
1732 left_over=nevents % nsplit
1733 if i <= left_over:
1734 job_new['nevents']=int(nevents/nsplit)+1
1735 job_new['wgt_frac']=float(job_new['nevents'])/float(nevents)
1736 else:
1737 job_new['nevents']=int(nevents/nsplit)
1738 job_new['wgt_frac']=float(job_new['nevents'])/float(nevents)
1739 job_new['split']=i
1740 job_new['dirname']=job['dirname']+'_%i' % job_new['split']
1741 jobs_to_collect_new.append(job_new)
1742 jobs_to_run_new=copy.copy(jobs_to_collect_new)
1743 else:
1744 jobs_to_run_new=copy.copy(jobs_to_collect)
1745 for job in jobs_to_collect:
1746 if job['nevents'] == 0:
1747 jobs_to_run_new.remove(job)
1748 jobs_to_collect_new=copy.copy(jobs_to_run_new)
1749
1750 return jobs_to_run_new,jobs_to_collect_new
1751
1752
1754 """
1755 For (N)LO+PS: determines the number of events and/or the required
1756 accuracy per job.
1757 For fixed order: determines which jobs need higher precision and
1758 returns those with the newly requested precision.
1759 """
1760 err=self.cross_sect_dict['errt']
1761 tot=self.cross_sect_dict['xsect']
1762 errABS=self.cross_sect_dict['erra']
1763 totABS=self.cross_sect_dict['xseca']
1764 jobs_new=[]
1765 if fixed_order:
1766 if req_acc == -1:
1767 if step+1 == 1:
1768 npoints = self.run_card['npoints_FO']
1769 niters = self.run_card['niters_FO']
1770 for job in jobs:
1771 job['mint_mode']=-1
1772 job['niters']=niters
1773 job['npoints']=npoints
1774 jobs_new.append(job)
1775 elif step+1 == 2:
1776 pass
1777 elif step+1 > 2:
1778 raise aMCatNLOError('Cannot determine number of iterations and PS points '+
1779 'for integration step %i' % step )
1780 elif ( req_acc > 0 and err/tot > req_acc*1.2 ) or step <= 0:
1781 req_accABS=req_acc*abs(tot)/totABS
1782 for job in jobs:
1783 job['mint_mode']=-1
1784
1785 job['accuracy']=req_accABS*math.sqrt(totABS/job['resultABS'])
1786
1787
1788 if (job['accuracy'] > job['errorABS']/job['resultABS'] and step != 0) \
1789 and not (step==-1 and self.run_card['iappl'] == 2):
1790 continue
1791
1792 itmax_fl=job['niters_done']*math.pow(job['errorABS']/
1793 (job['accuracy']*job['resultABS']),2)
1794 if itmax_fl <= 4.0 :
1795 job['niters']=max(int(round(itmax_fl)),2)
1796 job['npoints']=job['npoints_done']*2
1797 elif itmax_fl > 4.0 and itmax_fl <= 16.0 :
1798 job['niters']=4
1799 job['npoints']=int(round(job['npoints_done']*itmax_fl/4.0))*2
1800 else:
1801 if itmax_fl > 100.0 : itmax_fl=50.0
1802 job['niters']=int(round(math.sqrt(itmax_fl)))
1803 job['npoints']=int(round(job['npoints_done']*itmax_fl/
1804 round(math.sqrt(itmax_fl))))*2
1805
1806 jobs_new.append(job)
1807 return jobs_new
1808 elif step+1 <= 2:
1809 nevents=self.run_card['nevents']
1810
1811 if req_acc<0:
1812 req_acc2_inv=nevents
1813 else:
1814 req_acc2_inv=1/(req_acc*req_acc)
1815 if step+1 == 1 or step+1 == 2 :
1816
1817 for job in jobs:
1818 accuracy=min(math.sqrt(totABS/(req_acc2_inv*job['resultABS'])),0.2)
1819 job['accuracy']=accuracy
1820 if step+1 == 2:
1821
1822
1823 r=self.get_randinit_seed()
1824 random.seed(r)
1825 totevts=nevents
1826 for job in jobs:
1827 job['nevents'] = 0
1828 while totevts :
1829 target = random.random() * totABS
1830 crosssum = 0.
1831 i = 0
1832 while i<len(jobs) and crosssum < target:
1833 job = jobs[i]
1834 crosssum += job['resultABS']
1835 i += 1
1836 totevts -= 1
1837 i -= 1
1838 jobs[i]['nevents'] += 1
1839 for job in jobs:
1840 job['mint_mode']=step+1
1841 return jobs
1842 else:
1843 return []
1844
1845
1847 """ Get the random number seed from the randinit file """
1848 with open(pjoin(self.me_dir,"SubProcesses","randinit")) as randinit:
1849
1850 iseed = int(randinit.read()[2:])
1851 return iseed
1852
1853
1855 """Appends the results for each of the jobs in the job list"""
1856 error_found=False
1857 for job in jobs:
1858 try:
1859 if integration_step >= 0 :
1860 with open(pjoin(job['dirname'],'res_%s.dat' % integration_step)) as res_file:
1861 results=res_file.readline().split()
1862 else:
1863
1864
1865 with open(pjoin(job['dirname'],'res.dat')) as res_file:
1866 results=res_file.readline().split()
1867 except IOError:
1868 if not error_found:
1869 error_found=True
1870 error_log=[]
1871 error_log.append(pjoin(job['dirname'],'log.txt'))
1872 continue
1873 job['resultABS']=float(results[0])
1874 job['errorABS']=float(results[1])
1875 job['result']=float(results[2])
1876 job['error']=float(results[3])
1877 job['niters_done']=int(results[4])
1878 job['npoints_done']=int(results[5])
1879 job['time_spend']=float(results[6])
1880 job['err_percABS'] = job['errorABS']/job['resultABS']*100.
1881 job['err_perc'] = job['error']/job['result']*100.
1882 if error_found:
1883 raise aMCatNLOError('An error occurred during the collection of results.\n' +
1884 'Please check the .log files inside the directories which failed:\n' +
1885 '\n'.join(error_log)+'\n')
1886
1887
1888
1890 """writes the res.txt files in the SubProcess dir"""
1891 jobs.sort(key = lambda job: -job['errorABS'])
1892 content=[]
1893 content.append('\n\nCross section per integration channel:')
1894 for job in jobs:
1895 content.append('%(p_dir)20s %(channel)15s %(result)10.8e %(error)6.4e %(err_perc)6.4f%% ' % job)
1896 content.append('\n\nABS cross section per integration channel:')
1897 for job in jobs:
1898 content.append('%(p_dir)20s %(channel)15s %(resultABS)10.8e %(errorABS)6.4e %(err_percABS)6.4f%% ' % job)
1899 totABS=0
1900 errABS=0
1901 tot=0
1902 err=0
1903 for job in jobs:
1904 totABS+= job['resultABS']*job['wgt_frac']
1905 errABS+= math.pow(job['errorABS'],2)*job['wgt_frac']
1906 tot+= job['result']*job['wgt_frac']
1907 err+= math.pow(job['error'],2)*job['wgt_frac']
1908 if jobs:
1909 content.append('\nTotal ABS and \nTotal: \n %10.8e +- %6.4e (%6.4e%%)\n %10.8e +- %6.4e (%6.4e%%) \n' %\
1910 (totABS, math.sqrt(errABS), math.sqrt(errABS)/totABS *100.,\
1911 tot, math.sqrt(err), math.sqrt(err)/tot *100.))
1912 with open(pjoin(self.me_dir,'SubProcesses','res_%s.txt' % integration_step),'w') as res_file:
1913 res_file.write('\n'.join(content))
1914 randinit=self.get_randinit_seed()
1915 return {'xsect':tot,'xseca':totABS,'errt':math.sqrt(err),\
1916 'erra':math.sqrt(errABS),'randinit':randinit}
1917
1918
1920 """read the scale_pdf_dependence.dat files and collects there results"""
1921 scale_pdf_info=[]
1922 if any(self.run_card['reweight_scale']) or any(self.run_card['reweight_PDF']) or \
1923 len(self.run_card['dynamical_scale_choice']) > 1 or len(self.run_card['lhaid']) > 1:
1924 evt_files=[]
1925 evt_wghts=[]
1926 for job in jobs:
1927 evt_files.append(pjoin(job['dirname'],'scale_pdf_dependence.dat'))
1928 evt_wghts.append(job['wgt_frac'])
1929 scale_pdf_info = self.pdf_scale_from_reweighting(evt_files,evt_wghts)
1930 return scale_pdf_info
1931
1932
1934 """combines the plots and puts then in the Events/run* directory"""
1935 devnull = open(os.devnull, 'w')
1936
1937 if self.analyse_card['fo_analysis_format'].lower() == 'topdrawer':
1938 misc.call(['./combine_plots_FO.sh'] + folder_name, \
1939 stdout=devnull,
1940 cwd=pjoin(self.me_dir, 'SubProcesses'))
1941 files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.top'),
1942 pjoin(self.me_dir, 'Events', self.run_name))
1943 logger.info('The results of this run and the TopDrawer file with the plots' + \
1944 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
1945 elif self.analyse_card['fo_analysis_format'].lower() == 'hwu':
1946 out=pjoin(self.me_dir,'Events',self.run_name,'MADatNLO')
1947 self.combine_plots_HwU(jobs,out)
1948 try:
1949 misc.call(['gnuplot','MADatNLO.gnuplot'],\
1950 stdout=devnull,stderr=devnull,\
1951 cwd=pjoin(self.me_dir, 'Events', self.run_name))
1952 except Exception:
1953 pass
1954 logger.info('The results of this run and the HwU and GnuPlot files with the plots' + \
1955 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
1956 elif self.analyse_card['fo_analysis_format'].lower() == 'root':
1957 misc.call(['./combine_root.sh'] + folder_name, \
1958 stdout=devnull,
1959 cwd=pjoin(self.me_dir, 'SubProcesses'))
1960 files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.root'),
1961 pjoin(self.me_dir, 'Events', self.run_name))
1962 logger.info('The results of this run and the ROOT file with the plots' + \
1963 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
1964 else:
1965 logger.info('The results of this run' + \
1966 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
1967 devnull.close()
1968
1970 """Sums all the plots in the HwU format."""
1971 logger.debug('Combining HwU plots.')
1972
1973 command = []
1974 command.append(pjoin(self.me_dir, 'bin', 'internal','histograms.py'))
1975 for job in jobs:
1976 if job['dirname'].endswith('.HwU'):
1977 command.append(job['dirname'])
1978 else:
1979 command.append(pjoin(job['dirname'],'MADatNLO.HwU'))
1980 command.append("--out="+out)
1981 command.append("--gnuplot")
1982 command.append("--band=[]")
1983 command.append("--lhapdf-config="+self.options['lhapdf'])
1984 if normalisation:
1985 command.append("--multiply="+(','.join([str(n) for n in normalisation])))
1986 command.append("--sum")
1987 command.append("--keep_all_weights")
1988 command.append("--no_open")
1989
1990 p = misc.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, cwd=self.me_dir)
1991
1992 while p.poll() is None:
1993 line = p.stdout.readline()
1994 if any(t in line for t in ['INFO:','WARNING:','CRITICAL:','ERROR:','KEEP:']):
1995 print line[:-1]
1996 elif __debug__ and line:
1997 logger.debug(line[:-1])
1998
1999
2001 """Combines the APPLgrids in all the SubProcess/P*/all_G*/ directories"""
2002 logger.debug('Combining APPLgrids \n')
2003 applcomb=pjoin(self.options['applgrid'].rstrip('applgrid-config'),
2004 'applgrid-combine')
2005 all_jobs=[]
2006 for job in jobs:
2007 all_jobs.append(job['dirname'])
2008 ngrids=len(all_jobs)
2009 nobs =len([name for name in os.listdir(all_jobs[0]) if name.endswith("_out.root")])
2010 for obs in range(0,nobs):
2011 gdir = [pjoin(job,"grid_obs_"+str(obs)+"_out.root") for job in all_jobs]
2012
2013 if self.run_card["iappl"] == 1:
2014 misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",self.run_name,
2015 "aMCfast_obs_"+str(obs)+"_starting_grid.root"), '--optimise']+ gdir)
2016 elif self.run_card["iappl"] == 2:
2017 unc2_inv=pow(cross/error,2)
2018 unc2_inv_ngrids=pow(cross/error,2)*ngrids
2019 misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",
2020 self.run_name,"aMCfast_obs_"+str(obs)+".root"),'-s',
2021 str(unc2_inv),'--weight',str(unc2_inv)]+ gdir)
2022 for job in all_jobs:
2023 os.remove(pjoin(job,"grid_obs_"+str(obs)+"_in.root"))
2024 else:
2025 raise aMCatNLOError('iappl parameter can only be 0, 1 or 2')
2026
2027 for ggdir in gdir:
2028 os.remove(ggdir)
2029
2030
2032 """Distributes the APPLgrids ready to be filled by a second run of the code"""
2033
2034
2035 if not('appl_start_grid' in options.keys() and options['appl_start_grid']):
2036 gfiles = misc.glob(pjoin('*', 'aMCfast_obs_0_starting_grid.root'),
2037 pjoin(self.me_dir,'Events'))
2038
2039 time_stamps={}
2040 for root_file in gfiles:
2041 time_stamps[root_file]=os.path.getmtime(root_file)
2042 options['appl_start_grid']= \
2043 max(time_stamps.iterkeys(), key=(lambda key:
2044 time_stamps[key])).split('/')[-2]
2045 logger.info('No --appl_start_grid option given. '+\
2046 'Guessing that start grid from run "%s" should be used.' \
2047 % options['appl_start_grid'])
2048
2049 if 'appl_start_grid' in options.keys() and options['appl_start_grid']:
2050 self.appl_start_grid = options['appl_start_grid']
2051 start_grid_dir=pjoin(self.me_dir, 'Events', self.appl_start_grid)
2052
2053 if not os.path.exists(pjoin(start_grid_dir,
2054 'aMCfast_obs_0_starting_grid.root')):
2055 raise self.InvalidCmd('APPLgrid file not found: %s' % \
2056 pjoin(start_grid_dir,'aMCfast_obs_0_starting_grid.root'))
2057 else:
2058 all_grids=[pjoin(start_grid_dir,name) for name in os.listdir( \
2059 start_grid_dir) if name.endswith("_starting_grid.root")]
2060 nobs =len(all_grids)
2061 gstring=" ".join(all_grids)
2062 if not hasattr(self, 'appl_start_grid') or not self.appl_start_grid:
2063 raise self.InvalidCmd('No APPLgrid name currently defined.'+
2064 'Please provide this information.')
2065
2066 for pdir in p_dirs:
2067 g_dirs = [file for file in os.listdir(pjoin(self.me_dir,
2068 "SubProcesses",pdir)) if file.startswith(mode+'_G') and
2069 os.path.isdir(pjoin(self.me_dir,"SubProcesses",pdir, file))]
2070 for g_dir in g_dirs:
2071 for grid in all_grids:
2072 obs=grid.split('_')[-3]
2073 files.cp(grid,pjoin(self.me_dir,"SubProcesses",pdir,g_dir,
2074 'grid_obs_'+obs+'_in.root'))
2075
2076
2077
2078
2080 """collect the log files and put them in a single, html-friendly file
2081 inside the Events/run_.../ directory"""
2082 log_file = pjoin(self.me_dir, 'Events', self.run_name,
2083 'alllogs_%d.html' % integration_step)
2084 outfile = open(log_file, 'w')
2085
2086 content = ''
2087 content += '<HTML><BODY>\n<font face="courier" size=2>'
2088 for job in jobs:
2089
2090 log=pjoin(job['dirname'],'log_MINT%s.txt' % integration_step)
2091 content += '<a name=%s></a>\n' % (os.path.dirname(log).replace(
2092 pjoin(self.me_dir,'SubProcesses'),''))
2093
2094 content += '<font color="red">\n'
2095 content += '<br>LOG file for integration channel %s, %s <br>' % \
2096 (os.path.dirname(log).replace(pjoin(self.me_dir,
2097 'SubProcesses'), ''),
2098 integration_step)
2099 content += '</font>\n'
2100
2101
2102 content += '<PRE>\n' + open(log).read() + '\n</PRE>'
2103 content +='<br>\n'
2104 outfile.write(content)
2105 content=''
2106
2107 outfile.write('</font>\n</BODY></HTML>\n')
2108 outfile.close()
2109
2110
2112 """Combine the plots and put the res*.txt files in the Events/run.../ folder."""
2113
2114 res_files = misc.glob('res_*.txt', pjoin(self.me_dir, 'SubProcesses'))
2115 for res_file in res_files:
2116 files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name))
2117
2118 self.combine_plots_FO(folder_name,jobs)
2119
2120
2121 if self.run_card['iappl'] != 0:
2122 cross=self.cross_sect_dict['xsect']
2123 error=self.cross_sect_dict['errt']
2124 self.applgrid_combine(cross,error,jobs)
2125
2126
2128 """setup the number of cores for multicore, and the cluster-type for cluster runs"""
2129 if self.cluster_mode == 1:
2130 cluster_name = self.options['cluster_type']
2131 self.cluster = cluster.from_name[cluster_name](**self.options)
2132 if self.cluster_mode == 2:
2133 try:
2134 import multiprocessing
2135 if not self.nb_core:
2136 try:
2137 self.nb_core = int(self.options['nb_core'])
2138 except TypeError:
2139 self.nb_core = multiprocessing.cpu_count()
2140 logger.info('Using %d cores' % self.nb_core)
2141 except ImportError:
2142 self.nb_core = 1
2143 logger.warning('Impossible to detect the number of cores => Using One.\n'+
2144 'Use set nb_core X in order to set this number and be able to'+
2145 'run in multicore.')
2146
2147 self.cluster = cluster.MultiCore(**self.options)
2148
2149
2151 """Clean previous results.
2152 o. If doing only the reweighting step, do not delete anything and return directlty.
2153 o. Always remove all the G*_* files (from split event generation).
2154 o. Remove the G* (or born_G* or all_G*) only when NOT doing only_generation or reweight_only."""
2155 if options['reweightonly']:
2156 return
2157 if not options['only_generation']:
2158 self.update_status('Cleaning previous results', level=None)
2159 for dir in p_dirs:
2160
2161 for obj in folder_name:
2162
2163 to_rm = [file for file in \
2164 os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
2165 if file.startswith(obj[:-1]) and \
2166 (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
2167 os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]
2168
2169 to_always_rm = [file for file in \
2170 os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
2171 if file.startswith(obj[:-1]) and
2172 '_' in file and not '_G' in file and \
2173 (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
2174 os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]
2175
2176 if not options['only_generation']:
2177 to_always_rm.extend(to_rm)
2178 if os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz')):
2179 to_always_rm.append(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz'))
2180 files.rm([pjoin(self.me_dir, 'SubProcesses', dir, d) for d in to_always_rm])
2181 return
2182
2183
2184 - def print_summary(self, options, step, mode, scale_pdf_info=[], done=True):
2185 """print a summary of the results contained in self.cross_sect_dict.
2186 step corresponds to the mintMC step, if =2 (i.e. after event generation)
2187 some additional infos are printed"""
2188
2189 proc_card_lines = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read().split('\n')
2190 process = ''
2191 for line in proc_card_lines:
2192 if line.startswith('generate') or line.startswith('add process'):
2193 process = process+(line.replace('generate ', '')).replace('add process ','')+' ; '
2194 lpp = {0:'l', 1:'p', -1:'pbar'}
2195 if self.ninitial == 1:
2196 proc_info = '\n Process %s' % process[:-3]
2197 else:
2198 proc_info = '\n Process %s\n Run at %s-%s collider (%s + %s GeV)' % \
2199 (process[:-3], lpp[self.run_card['lpp1']], lpp[self.run_card['lpp2']],
2200 self.run_card['ebeam1'], self.run_card['ebeam2'])
2201
2202 if self.ninitial == 1:
2203 self.cross_sect_dict['unit']='GeV'
2204 self.cross_sect_dict['xsec_string']='(Partial) decay width'
2205 self.cross_sect_dict['axsec_string']='(Partial) abs(decay width)'
2206 else:
2207 self.cross_sect_dict['unit']='pb'
2208 self.cross_sect_dict['xsec_string']='Total cross section'
2209 self.cross_sect_dict['axsec_string']='Total abs(cross section)'
2210
2211 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
2212 status = ['Determining the number of unweighted events per channel',
2213 'Updating the number of unweighted events per channel',
2214 'Summary:']
2215 computed='(computed from LHE events)'
2216 elif mode in ['NLO', 'LO']:
2217 status = ['Results after grid setup:','Current results:',
2218 'Final results and run summary:']
2219 computed='(computed from histogram information)'
2220
2221 if step != 2 and mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
2222 message = status[step] + '\n\n Intermediate results:' + \
2223 ('\n Random seed: %(randinit)d' + \
2224 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' + \
2225 '\n %(axsec_string)s: %(xseca)8.3e +- %(erra)6.1e %(unit)s \n') \
2226 % self.cross_sect_dict
2227 elif mode in ['NLO','LO'] and not done:
2228 if step == 0:
2229 message = '\n ' + status[0] + \
2230 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
2231 self.cross_sect_dict
2232 else:
2233 message = '\n ' + status[1] + \
2234 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
2235 self.cross_sect_dict
2236
2237 else:
2238 message = '\n --------------------------------------------------------------'
2239 message = message + \
2240 '\n ' + status[2] + proc_info
2241 if mode not in ['LO', 'NLO']:
2242 message = message + \
2243 '\n Number of events generated: %s' % self.run_card['nevents']
2244 message = message + \
2245 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
2246 self.cross_sect_dict
2247 message = message + \
2248 '\n --------------------------------------------------------------'
2249 if scale_pdf_info and (self.run_card['nevents']>=10000 or mode in ['NLO', 'LO']):
2250 if scale_pdf_info[0]:
2251
2252 message = message + '\n Scale variation %s:' % computed
2253 for s in scale_pdf_info[0]:
2254 if s['unc']:
2255 if self.run_card['ickkw'] != -1:
2256 message = message + \
2257 ('\n Dynamical_scale_choice %(label)i (envelope of %(size)s values): '\
2258 '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % s
2259 else:
2260 message = message + \
2261 ('\n Soft and hard scale dependence (added in quadrature): '\
2262 '\n %(cen)8.3e pb +%(max_q)0.1f%% -%(min_q)0.1f%%') % s
2263
2264 else:
2265 message = message + \
2266 ('\n Dynamical_scale_choice %(label)i: '\
2267 '\n %(cen)8.3e pb') % s
2268
2269 if scale_pdf_info[1]:
2270 message = message + '\n PDF variation %s:' % computed
2271 for p in scale_pdf_info[1]:
2272 if p['unc']=='none':
2273 message = message + \
2274 ('\n %(name)s (central value only): '\
2275 '\n %(cen)8.3e pb') % p
2276
2277 elif p['unc']=='unknown':
2278 message = message + \
2279 ('\n %(name)s (%(size)s members; combination method unknown): '\
2280 '\n %(cen)8.3e pb') % p
2281 else:
2282 message = message + \
2283 ('\n %(name)s (%(size)s members; using %(unc)s method): '\
2284 '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % p
2285
2286 message = message + \
2287 '\n --------------------------------------------------------------'
2288
2289
2290 if (mode in ['NLO', 'LO'] and not done) or \
2291 (mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO'] and step!=2):
2292 logger.info(message+'\n')
2293 return
2294
2295
2296
2297
2298
2299 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
2300 log_GV_files = misc.glob(pjoin('P*','G*','log_MINT*.txt'),
2301 pjoin(self.me_dir, 'SubProcesses'))
2302 all_log_files = log_GV_files
2303 elif mode == 'NLO':
2304 log_GV_files = misc.glob(pjoin('P*','all_G*','log_MINT*.txt'),
2305 pjoin(self.me_dir, 'SubProcesses'))
2306 all_log_files = log_GV_files
2307
2308 elif mode == 'LO':
2309 log_GV_files = ''
2310 all_log_files = misc.glob(pjoin('P*','born_G*','log_MINT*.txt'),
2311 pjoin(self.me_dir, 'SubProcesses'))
2312 else:
2313 raise aMCatNLOError, 'Running mode %s not supported.'%mode
2314
2315 try:
2316 message, debug_msg = \
2317 self.compile_advanced_stats(log_GV_files, all_log_files, message)
2318 except Exception as e:
2319 debug_msg = 'Advanced statistics collection failed with error "%s"\n'%str(e)
2320 err_string = StringIO.StringIO()
2321 traceback.print_exc(limit=4, file=err_string)
2322 debug_msg += 'Please report this backtrace to a MadGraph developer:\n%s'\
2323 %err_string.getvalue()
2324
2325 logger.debug(debug_msg+'\n')
2326 logger.info(message+'\n')
2327
2328
2329 evt_path = pjoin(self.me_dir, 'Events', self.run_name)
2330 open(pjoin(evt_path, 'summary.txt'),'w').write(message+'\n')
2331 open(pjoin(evt_path, '.full_summary.txt'),
2332 'w').write(message+'\n\n'+debug_msg+'\n')
2333
2334 self.archive_files(evt_path,mode)
2335
2337 """ Copies in the Events/Run_<xxx> directory relevant files characterizing
2338 the run."""
2339
2340 files_to_arxiv = [pjoin('Cards','param_card.dat'),
2341 pjoin('Cards','MadLoopParams.dat'),
2342 pjoin('Cards','FKS_params.dat'),
2343 pjoin('Cards','run_card.dat'),
2344 pjoin('Subprocesses','setscales.f'),
2345 pjoin('Subprocesses','cuts.f')]
2346
2347 if mode in ['NLO', 'LO']:
2348 files_to_arxiv.append(pjoin('Cards','FO_analyse_card.dat'))
2349
2350 if not os.path.exists(pjoin(evt_path,'RunMaterial')):
2351 os.mkdir(pjoin(evt_path,'RunMaterial'))
2352
2353 for path in files_to_arxiv:
2354 if os.path.isfile(pjoin(self.me_dir,path)):
2355 files.cp(pjoin(self.me_dir,path),pjoin(evt_path,'RunMaterial'))
2356 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'],cwd=evt_path)
2357 shutil.rmtree(pjoin(evt_path,'RunMaterial'))
2358
2360 """ This functions goes through the log files given in arguments and
2361 compiles statistics about MadLoop stability, virtual integration
2362 optimization and detection of potential error messages into a nice
2363 debug message to printed at the end of the run """
2364
2365 def safe_float(str_float):
2366 try:
2367 return float(str_float)
2368 except ValueError:
2369 logger.debug('Could not convert the following float during'+
2370 ' advanced statistics printout: %s'%str(str_float))
2371 return -1.0
2372
2373
2374
2375
2376 stats = {'UPS':{}, 'Errors':[], 'virt_stats':{}, 'timings':{}}
2377 mint_search = re.compile(r"MINT(?P<ID>\d*).txt")
2378
2379
2380
2381
2382
2383
2384
2385 UPS_stat_finder = re.compile(
2386 r"Satistics from MadLoop:.*"+\
2387 r"Total points tried\:\s+(?P<ntot>\d+).*"+\
2388 r"Stability unknown\:\s+(?P<nsun>\d+).*"+\
2389 r"Stable PS point\:\s+(?P<nsps>\d+).*"+\
2390 r"Unstable PS point \(and rescued\)\:\s+(?P<nups>\d+).*"+\
2391 r"Exceptional PS point \(unstable and not rescued\)\:\s+(?P<neps>\d+).*"+\
2392 r"Double precision used\:\s+(?P<nddp>\d+).*"+\
2393 r"Quadruple precision used\:\s+(?P<nqdp>\d+).*"+\
2394 r"Initialization phase\-space points\:\s+(?P<nini>\d+).*"+\
2395 r"Unknown return code \(100\)\:\s+(?P<n100>\d+).*"+\
2396 r"Unknown return code \(10\)\:\s+(?P<n10>\d+).*",re.DOTALL)
2397
2398 unit_code_meaning = { 0 : 'Not identified (CTModeRun != -1)',
2399 1 : 'CutTools (double precision)',
2400 2 : 'PJFry++',
2401 3 : 'IREGI',
2402 4 : 'Golem95',
2403 5 : 'Samurai',
2404 6 : 'Ninja (double precision)',
2405 8 : 'Ninja (quadruple precision)',
2406 9 : 'CutTools (quadruple precision)'}
2407 RetUnit_finder =re.compile(
2408 r"#Unit\s*(?P<unit>\d+)\s*=\s*(?P<n_occurences>\d+)")
2409
2410
2411 for gv_log in log_GV_files:
2412 channel_name = '/'.join(gv_log.split('/')[-5:-1])
2413 log=open(gv_log,'r').read()
2414 UPS_stats = re.search(UPS_stat_finder,log)
2415 for retunit_stats in re.finditer(RetUnit_finder, log):
2416 if channel_name not in stats['UPS'].keys():
2417 stats['UPS'][channel_name] = [0]*10+[[0]*10]
2418 stats['UPS'][channel_name][10][int(retunit_stats.group('unit'))] \
2419 += int(retunit_stats.group('n_occurences'))
2420 if not UPS_stats is None:
2421 try:
2422 stats['UPS'][channel_name][0] += int(UPS_stats.group('ntot'))
2423 stats['UPS'][channel_name][1] += int(UPS_stats.group('nsun'))
2424 stats['UPS'][channel_name][2] += int(UPS_stats.group('nsps'))
2425 stats['UPS'][channel_name][3] += int(UPS_stats.group('nups'))
2426 stats['UPS'][channel_name][4] += int(UPS_stats.group('neps'))
2427 stats['UPS'][channel_name][5] += int(UPS_stats.group('nddp'))
2428 stats['UPS'][channel_name][6] += int(UPS_stats.group('nqdp'))
2429 stats['UPS'][channel_name][7] += int(UPS_stats.group('nini'))
2430 stats['UPS'][channel_name][8] += int(UPS_stats.group('n100'))
2431 stats['UPS'][channel_name][9] += int(UPS_stats.group('n10'))
2432 except KeyError:
2433 stats['UPS'][channel_name] = [int(UPS_stats.group('ntot')),
2434 int(UPS_stats.group('nsun')),int(UPS_stats.group('nsps')),
2435 int(UPS_stats.group('nups')),int(UPS_stats.group('neps')),
2436 int(UPS_stats.group('nddp')),int(UPS_stats.group('nqdp')),
2437 int(UPS_stats.group('nini')),int(UPS_stats.group('n100')),
2438 int(UPS_stats.group('n10')),[0]*10]
2439 debug_msg = ""
2440 if len(stats['UPS'].keys())>0:
2441 nTotPS = sum([chan[0] for chan in stats['UPS'].values()],0)
2442 nTotsun = sum([chan[1] for chan in stats['UPS'].values()],0)
2443 nTotsps = sum([chan[2] for chan in stats['UPS'].values()],0)
2444 nTotups = sum([chan[3] for chan in stats['UPS'].values()],0)
2445 nToteps = sum([chan[4] for chan in stats['UPS'].values()],0)
2446 nTotddp = sum([chan[5] for chan in stats['UPS'].values()],0)
2447 nTotqdp = sum([chan[6] for chan in stats['UPS'].values()],0)
2448 nTotini = sum([chan[7] for chan in stats['UPS'].values()],0)
2449 nTot100 = sum([chan[8] for chan in stats['UPS'].values()],0)
2450 nTot10 = sum([chan[9] for chan in stats['UPS'].values()],0)
2451 nTot1 = [sum([chan[10][i] for chan in stats['UPS'].values()],0) \
2452 for i in range(10)]
2453 UPSfracs = [(chan[0] , 0.0 if chan[1][0]==0 else \
2454 safe_float(chan[1][4]*100)/chan[1][0]) for chan in stats['UPS'].items()]
2455 maxUPS = max(UPSfracs, key = lambda w: w[1])
2456
2457 tmpStr = ""
2458 tmpStr += '\n Number of loop ME evaluations (by MadLoop): %d'%nTotPS
2459 tmpStr += '\n Stability unknown: %d'%nTotsun
2460 tmpStr += '\n Stable PS point: %d'%nTotsps
2461 tmpStr += '\n Unstable PS point (and rescued): %d'%nTotups
2462 tmpStr += '\n Unstable PS point (and not rescued): %d'%nToteps
2463 tmpStr += '\n Only double precision used: %d'%nTotddp
2464 tmpStr += '\n Quadruple precision used: %d'%nTotqdp
2465 tmpStr += '\n Initialization phase-space points: %d'%nTotini
2466 tmpStr += '\n Reduction methods used:'
2467 red_methods = [(unit_code_meaning[i],nTot1[i]) for i in \
2468 unit_code_meaning.keys() if nTot1[i]>0]
2469 for method, n in sorted(red_methods, key= lambda l: l[1], reverse=True):
2470 tmpStr += '\n > %s%s%s'%(method,' '*(33-len(method)),n)
2471 if nTot100 != 0:
2472 debug_msg += '\n Unknown return code (100): %d'%nTot100
2473 if nTot10 != 0:
2474 debug_msg += '\n Unknown return code (10): %d'%nTot10
2475 nUnknownUnit = sum(nTot1[u] for u in range(10) if u \
2476 not in unit_code_meaning.keys())
2477 if nUnknownUnit != 0:
2478 debug_msg += '\n Unknown return code (1): %d'\
2479 %nUnknownUnit
2480
2481 if maxUPS[1]>0.001:
2482 message += tmpStr
2483 message += '\n Total number of unstable PS point detected:'+\
2484 ' %d (%4.2f%%)'%(nToteps,safe_float(100*nToteps)/nTotPS)
2485 message += '\n Maximum fraction of UPS points in '+\
2486 'channel %s (%4.2f%%)'%maxUPS
2487 message += '\n Please report this to the authors while '+\
2488 'providing the file'
2489 message += '\n %s'%str(pjoin(os.path.dirname(self.me_dir),
2490 maxUPS[0],'UPS.log'))
2491 else:
2492 debug_msg += tmpStr
2493
2494
2495
2496
2497
2498
2499 virt_tricks_finder = re.compile(
2500 r"accumulated results Virtual ratio\s*=\s*-?(?P<v_ratio>[\d\+-Eed\.]*)"+\
2501 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_ratio_err>[\d\+-Eed\.]*)\s*\%\)\s*\n"+\
2502 r"accumulated results ABS virtual\s*=\s*-?(?P<v_abs_contr>[\d\+-Eed\.]*)"+\
2503 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_abs_contr_err>[\d\+-Eed\.]*)\s*\%\)")
2504
2505 virt_frac_finder = re.compile(r"update virtual fraction to\s*:\s*"+\
2506 "-?(?P<v_frac>[\d\+-Eed\.]*)\s*-?(?P<v_average>[\d\+-Eed\.]*)")
2507
2508 channel_contr_finder = re.compile(r"Final result \[ABS\]\s*:\s*-?(?P<v_contr>[\d\+-Eed\.]*)")
2509
2510 channel_contr_list = {}
2511 for gv_log in log_GV_files:
2512 logfile=open(gv_log,'r')
2513 log = logfile.read()
2514 logfile.close()
2515 channel_name = '/'.join(gv_log.split('/')[-3:-1])
2516 vf_stats = None
2517 for vf_stats in re.finditer(virt_frac_finder, log):
2518 pass
2519 if not vf_stats is None:
2520 v_frac = safe_float(vf_stats.group('v_frac'))
2521 v_average = safe_float(vf_stats.group('v_average'))
2522 try:
2523 if v_frac < stats['virt_stats']['v_frac_min'][0]:
2524 stats['virt_stats']['v_frac_min']=(v_frac,channel_name)
2525 if v_frac > stats['virt_stats']['v_frac_max'][0]:
2526 stats['virt_stats']['v_frac_max']=(v_frac,channel_name)
2527 stats['virt_stats']['v_frac_avg'][0] += v_frac
2528 stats['virt_stats']['v_frac_avg'][1] += 1
2529 except KeyError:
2530 stats['virt_stats']['v_frac_min']=[v_frac,channel_name]
2531 stats['virt_stats']['v_frac_max']=[v_frac,channel_name]
2532 stats['virt_stats']['v_frac_avg']=[v_frac,1]
2533
2534
2535 ccontr_stats = None
2536 for ccontr_stats in re.finditer(channel_contr_finder, log):
2537 pass
2538 if not ccontr_stats is None:
2539 contrib = safe_float(ccontr_stats.group('v_contr'))
2540 try:
2541 if contrib>channel_contr_list[channel_name]:
2542 channel_contr_list[channel_name]=contrib
2543 except KeyError:
2544 channel_contr_list[channel_name]=contrib
2545
2546
2547
2548
2549 average_contrib = 0.0
2550 for value in channel_contr_list.values():
2551 average_contrib += value
2552 if len(channel_contr_list.values()) !=0:
2553 average_contrib = average_contrib / len(channel_contr_list.values())
2554
2555 relevant_log_GV_files = []
2556 excluded_channels = set([])
2557 all_channels = set([])
2558 for log_file in log_GV_files:
2559 channel_name = '/'.join(log_file.split('/')[-3:-1])
2560 all_channels.add(channel_name)
2561 try:
2562 if channel_contr_list[channel_name] > (0.1*average_contrib):
2563 relevant_log_GV_files.append(log_file)
2564 else:
2565 excluded_channels.add(channel_name)
2566 except KeyError:
2567 relevant_log_GV_files.append(log_file)
2568
2569
2570 for gv_log in relevant_log_GV_files:
2571 logfile=open(gv_log,'r')
2572 log = logfile.read()
2573 logfile.close()
2574 channel_name = '/'.join(gv_log.split('/')[-3:-1])
2575
2576 vt_stats = None
2577 for vt_stats in re.finditer(virt_tricks_finder, log):
2578 pass
2579 if not vt_stats is None:
2580 vt_stats_group = vt_stats.groupdict()
2581 v_ratio = safe_float(vt_stats.group('v_ratio'))
2582 v_ratio_err = safe_float(vt_stats.group('v_ratio_err'))
2583 v_contr = safe_float(vt_stats.group('v_abs_contr'))
2584 v_contr_err = safe_float(vt_stats.group('v_abs_contr_err'))
2585 try:
2586 if v_ratio < stats['virt_stats']['v_ratio_min'][0]:
2587 stats['virt_stats']['v_ratio_min']=(v_ratio,channel_name)
2588 if v_ratio > stats['virt_stats']['v_ratio_max'][0]:
2589 stats['virt_stats']['v_ratio_max']=(v_ratio,channel_name)
2590 if v_ratio < stats['virt_stats']['v_ratio_err_min'][0]:
2591 stats['virt_stats']['v_ratio_err_min']=(v_ratio_err,channel_name)
2592 if v_ratio > stats['virt_stats']['v_ratio_err_max'][0]:
2593 stats['virt_stats']['v_ratio_err_max']=(v_ratio_err,channel_name)
2594 if v_contr < stats['virt_stats']['v_contr_min'][0]:
2595 stats['virt_stats']['v_contr_min']=(v_contr,channel_name)
2596 if v_contr > stats['virt_stats']['v_contr_max'][0]:
2597 stats['virt_stats']['v_contr_max']=(v_contr,channel_name)
2598 if v_contr_err < stats['virt_stats']['v_contr_err_min'][0]:
2599 stats['virt_stats']['v_contr_err_min']=(v_contr_err,channel_name)
2600 if v_contr_err > stats['virt_stats']['v_contr_err_max'][0]:
2601 stats['virt_stats']['v_contr_err_max']=(v_contr_err,channel_name)
2602 except KeyError:
2603 stats['virt_stats']['v_ratio_min']=[v_ratio,channel_name]
2604 stats['virt_stats']['v_ratio_max']=[v_ratio,channel_name]
2605 stats['virt_stats']['v_ratio_err_min']=[v_ratio_err,channel_name]
2606 stats['virt_stats']['v_ratio_err_max']=[v_ratio_err,channel_name]
2607 stats['virt_stats']['v_contr_min']=[v_contr,channel_name]
2608 stats['virt_stats']['v_contr_max']=[v_contr,channel_name]
2609 stats['virt_stats']['v_contr_err_min']=[v_contr_err,channel_name]
2610 stats['virt_stats']['v_contr_err_max']=[v_contr_err,channel_name]
2611
2612 vf_stats = None
2613 for vf_stats in re.finditer(virt_frac_finder, log):
2614 pass
2615 if not vf_stats is None:
2616 v_frac = safe_float(vf_stats.group('v_frac'))
2617 v_average = safe_float(vf_stats.group('v_average'))
2618 try:
2619 if v_average < stats['virt_stats']['v_average_min'][0]:
2620 stats['virt_stats']['v_average_min']=(v_average,channel_name)
2621 if v_average > stats['virt_stats']['v_average_max'][0]:
2622 stats['virt_stats']['v_average_max']=(v_average,channel_name)
2623 stats['virt_stats']['v_average_avg'][0] += v_average
2624 stats['virt_stats']['v_average_avg'][1] += 1
2625 except KeyError:
2626 stats['virt_stats']['v_average_min']=[v_average,channel_name]
2627 stats['virt_stats']['v_average_max']=[v_average,channel_name]
2628 stats['virt_stats']['v_average_avg']=[v_average,1]
2629
2630 try:
2631 debug_msg += '\n\n Statistics on virtual integration optimization : '
2632
2633 debug_msg += '\n Maximum virt fraction computed %.3f (%s)'\
2634 %tuple(stats['virt_stats']['v_frac_max'])
2635 debug_msg += '\n Minimum virt fraction computed %.3f (%s)'\
2636 %tuple(stats['virt_stats']['v_frac_min'])
2637 debug_msg += '\n Average virt fraction computed %.3f'\
2638 %safe_float(stats['virt_stats']['v_frac_avg'][0]/safe_float(stats['virt_stats']['v_frac_avg'][1]))
2639 debug_msg += '\n Stats below exclude negligible channels (%d excluded out of %d)'%\
2640 (len(excluded_channels),len(all_channels))
2641 debug_msg += '\n Maximum virt ratio used %.2f (%s)'\
2642 %tuple(stats['virt_stats']['v_average_max'])
2643 debug_msg += '\n Maximum virt ratio found from grids %.2f (%s)'\
2644 %tuple(stats['virt_stats']['v_ratio_max'])
2645 tmpStr = '\n Max. MC err. on virt ratio from grids %.1f %% (%s)'\
2646 %tuple(stats['virt_stats']['v_ratio_err_max'])
2647 debug_msg += tmpStr
2648
2649
2650
2651
2652
2653
2654
2655
2656 tmpStr = '\n Maximum MC error on abs virt %.1f %% (%s)'\
2657 %tuple(stats['virt_stats']['v_contr_err_max'])
2658 debug_msg += tmpStr
2659
2660
2661
2662
2663 except KeyError:
2664 debug_msg += '\n Could not find statistics on the integration optimization. '
2665
2666
2667
2668
2669
2670 timing_stat_finder = re.compile(r"\s*Time spent in\s*(?P<name>\w*)\s*:\s*"+\
2671 "(?P<time>[\d\+-Eed\.]*)\s*")
2672
2673 for logf in log_GV_files:
2674 logfile=open(logf,'r')
2675 log = logfile.read()
2676 logfile.close()
2677 channel_name = '/'.join(logf.split('/')[-3:-1])
2678 mint = re.search(mint_search,logf)
2679 if not mint is None:
2680 channel_name = channel_name+' [step %s]'%mint.group('ID')
2681
2682 for time_stats in re.finditer(timing_stat_finder, log):
2683 try:
2684 stats['timings'][time_stats.group('name')][channel_name]+=\
2685 safe_float(time_stats.group('time'))
2686 except KeyError:
2687 if time_stats.group('name') not in stats['timings'].keys():
2688 stats['timings'][time_stats.group('name')] = {}
2689 stats['timings'][time_stats.group('name')][channel_name]=\
2690 safe_float(time_stats.group('time'))
2691
2692
2693 Tstr = lambda secs: str(datetime.timedelta(seconds=int(secs)))
2694 try:
2695 totTimeList = [(time, chan) for chan, time in \
2696 stats['timings']['Total'].items()]
2697 except KeyError:
2698 totTimeList = []
2699
2700 totTimeList.sort()
2701 if len(totTimeList)>0:
2702 debug_msg += '\n\n Inclusive timing profile :'
2703 debug_msg += '\n Overall slowest channel %s (%s)'%\
2704 (Tstr(totTimeList[-1][0]),totTimeList[-1][1])
2705 debug_msg += '\n Average channel running time %s'%\
2706 Tstr(sum([el[0] for el in totTimeList])/len(totTimeList))
2707 debug_msg += '\n Aggregated total running time %s'%\
2708 Tstr(sum([el[0] for el in totTimeList]))
2709 else:
2710 debug_msg += '\n\n Inclusive timing profile non available.'
2711
2712 sorted_keys = sorted(stats['timings'].keys(), key= lambda stat: \
2713 sum(stats['timings'][stat].values()), reverse=True)
2714 for name in sorted_keys:
2715 if name=='Total':
2716 continue
2717 if sum(stats['timings'][name].values())<=0.0:
2718 debug_msg += '\n Zero time record for %s.'%name
2719 continue
2720 try:
2721 TimeList = [((100.0*time/stats['timings']['Total'][chan]),
2722 chan) for chan, time in stats['timings'][name].items()]
2723 except KeyError, ZeroDivisionError:
2724 debug_msg += '\n\n Timing profile for %s unavailable.'%name
2725 continue
2726 TimeList.sort()
2727 debug_msg += '\n Timing profile for <%s> :'%name
2728 try:
2729 debug_msg += '\n Overall fraction of time %.3f %%'%\
2730 safe_float((100.0*(sum(stats['timings'][name].values())/
2731 sum(stats['timings']['Total'].values()))))
2732 except KeyError, ZeroDivisionError:
2733 debug_msg += '\n Overall fraction of time unavailable.'
2734 debug_msg += '\n Largest fraction of time %.3f %% (%s)'%\
2735 (TimeList[-1][0],TimeList[-1][1])
2736 debug_msg += '\n Smallest fraction of time %.3f %% (%s)'%\
2737 (TimeList[0][0],TimeList[0][1])
2738
2739
2740
2741
2742
2743
2744
2745
2746
2747
2748 err_finder = re.compile(\
2749 r"(?<!of\spaper\sfor\s)\bERROR\b(?!\scalculation\.)",re.IGNORECASE)
2750 for log in all_log_files:
2751 logfile=open(log,'r')
2752 nErrors = len(re.findall(err_finder, logfile.read()))
2753 logfile.close()
2754 if nErrors != 0:
2755 stats['Errors'].append((str(log),nErrors))
2756
2757 nErrors = sum([err[1] for err in stats['Errors']],0)
2758 if nErrors != 0:
2759 debug_msg += '\n WARNING:: A total of %d error%s ha%s been '\
2760 %(nErrors,'s' if nErrors>1 else '','ve' if nErrors>1 else 's')+\
2761 'found in the following log file%s:'%('s' if \
2762 len(stats['Errors'])>1 else '')
2763 for error in stats['Errors'][:3]:
2764 log_name = '/'.join(error[0].split('/')[-5:])
2765 debug_msg += '\n > %d error%s in %s'%\
2766 (error[1],'s' if error[1]>1 else '',log_name)
2767 if len(stats['Errors'])>3:
2768 nRemainingErrors = sum([err[1] for err in stats['Errors']][3:],0)
2769 nRemainingLogs = len(stats['Errors'])-3
2770 debug_msg += '\n And another %d error%s in %d other log file%s'%\
2771 (nRemainingErrors, 's' if nRemainingErrors>1 else '',
2772 nRemainingLogs, 's ' if nRemainingLogs>1 else '')
2773
2774 return message, debug_msg
2775
2776
2778 """this function calls the reweighting routines and creates the event file in the
2779 Event dir. Return the name of the event file created
2780 """
2781 scale_pdf_info=[]
2782 if any(self.run_card['reweight_scale']) or any(self.run_card['reweight_PDF']) or \
2783 len(self.run_card['dynamical_scale_choice']) > 1 or len(self.run_card['lhaid']) > 1:
2784 scale_pdf_info = self.run_reweight(options['reweightonly'])
2785 self.update_status('Collecting events', level='parton', update_results=True)
2786 misc.compile(['collect_events'],
2787 cwd=pjoin(self.me_dir, 'SubProcesses'), nocompile=options['nocompile'])
2788 p = misc.Popen(['./collect_events'], cwd=pjoin(self.me_dir, 'SubProcesses'),
2789 stdin=subprocess.PIPE,
2790 stdout=open(pjoin(self.me_dir, 'collect_events.log'), 'w'))
2791 if event_norm.lower() == 'sum':
2792 p.communicate(input = '1\n')
2793 elif event_norm.lower() == 'unity':
2794 p.communicate(input = '3\n')
2795 else:
2796 p.communicate(input = '2\n')
2797
2798
2799 filename = open(pjoin(self.me_dir, 'collect_events.log')).read().split()[-1]
2800
2801 if not os.path.exists(pjoin(self.me_dir, 'SubProcesses', filename)):
2802 raise aMCatNLOError('An error occurred during event generation. ' + \
2803 'The event file has not been created. Check collect_events.log')
2804 evt_file = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')
2805 misc.gzip(pjoin(self.me_dir, 'SubProcesses', filename), stdout=evt_file)
2806 if not options['reweightonly']:
2807 self.print_summary(options, 2, mode, scale_pdf_info)
2808 res_files = misc.glob('res*.txt', pjoin(self.me_dir, 'SubProcesses'))
2809 for res_file in res_files:
2810 files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name))
2811
2812 logger.info('The %s file has been generated.\n' % (evt_file))
2813 self.results.add_detail('nb_event', nevents)
2814 self.update_status('Events generated', level='parton', update_results=True)
2815 return evt_file[:-3]
2816
2817
2819 """runs mcatnlo on the generated event file, to produce showered-events
2820 """
2821 logger.info('Preparing MCatNLO run')
2822 try:
2823 misc.gunzip(evt_file)
2824 except Exception:
2825 pass
2826
2827 self.banner = banner_mod.Banner(evt_file)
2828 shower = self.banner.get_detail('run_card', 'parton_shower').upper()
2829
2830
2831
2832 if int(self.banner.get_detail('run_card', 'nevents') / \
2833 self.shower_card['nsplit_jobs']) * self.shower_card['nsplit_jobs'] \
2834 != self.banner.get_detail('run_card', 'nevents'):
2835 logger.warning(\
2836 'nsplit_jobs in the shower card is not a divisor of the number of events.\n' + \
2837 'Setting it to 1.')
2838 self.shower_card['nsplit_jobs'] = 1
2839
2840
2841 if self.shower_card['nevents'] > 0 and \
2842 self.shower_card['nevents'] < self.banner.get_detail('run_card', 'nevents') and \
2843 self.shower_card['nsplit_jobs'] != 1:
2844 logger.warning(\
2845 'Only a part of the events will be showered.\n' + \
2846 'Setting nsplit_jobs in the shower_card to 1.')
2847 self.shower_card['nsplit_jobs'] = 1
2848
2849 self.banner_to_mcatnlo(evt_file)
2850
2851
2852
2853
2854 if 'fastjet' in self.shower_card['extralibs']:
2855
2856 if not 'stdc++' in self.shower_card['extralibs']:
2857 logger.warning('Linking FastJet: adding stdc++ to EXTRALIBS')
2858 self.shower_card['extralibs'] += ' stdc++'
2859
2860 try:
2861
2862 p = subprocess.Popen([self.options['fastjet'], '--prefix'], \
2863 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
2864 output, error = p.communicate()
2865
2866 output = output[:-1]
2867
2868 if not pjoin(output, 'lib') in self.shower_card['extrapaths']:
2869 logger.warning('Linking FastJet: updating EXTRAPATHS')
2870 self.shower_card['extrapaths'] += ' ' + pjoin(output, 'lib')
2871 if not pjoin(output, 'include') in self.shower_card['includepaths']:
2872 logger.warning('Linking FastJet: updating INCLUDEPATHS')
2873 self.shower_card['includepaths'] += ' ' + pjoin(output, 'include')
2874
2875 include_line = '#include "fastjet/ClusterSequence.hh"//INCLUDE_FJ'
2876 namespace_line = 'namespace fj = fastjet;//NAMESPACE_FJ'
2877 except Exception:
2878 logger.warning('Linking FastJet: using fjcore')
2879
2880 self.shower_card['extralibs'] = self.shower_card['extralibs'].replace('fastjet', '')
2881 if not 'fjcore.o' in self.shower_card['analyse']:
2882 self.shower_card['analyse'] += ' fjcore.o'
2883
2884 include_line = '#include "fjcore.hh"//INCLUDE_FJ'
2885 namespace_line = 'namespace fj = fjcore;//NAMESPACE_FJ'
2886
2887 fjwrapper_lines = open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc')).read().split('\n')
2888 for line in fjwrapper_lines:
2889 if '//INCLUDE_FJ' in line:
2890 fjwrapper_lines[fjwrapper_lines.index(line)] = include_line
2891 if '//NAMESPACE_FJ' in line:
2892 fjwrapper_lines[fjwrapper_lines.index(line)] = namespace_line
2893 with open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc'), 'w') as fsock:
2894 fsock.write('\n'.join(fjwrapper_lines) + '\n')
2895
2896 extrapaths = self.shower_card['extrapaths'].split()
2897
2898
2899 if shower in ['HERWIGPP', 'PYTHIA8']:
2900 path_dict = {'HERWIGPP': ['hepmc_path',
2901 'thepeg_path',
2902 'hwpp_path'],
2903 'PYTHIA8': ['pythia8_path']}
2904
2905 if not all([self.options[ppath] for ppath in path_dict[shower]]):
2906 raise aMCatNLOError('Some paths are missing in the configuration file.\n' + \
2907 ('Please make sure you have set these variables: %s' % ', '.join(path_dict[shower])))
2908
2909 if shower == 'HERWIGPP':
2910 extrapaths.append(pjoin(self.options['hepmc_path'], 'lib'))
2911
2912 if shower == 'PYTHIA8' and not os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
2913 extrapaths.append(pjoin(self.options['pythia8_path'], 'lib'))
2914
2915 if 'LD_LIBRARY_PATH' in os.environ.keys():
2916 ldlibrarypath = os.environ['LD_LIBRARY_PATH']
2917 else:
2918 ldlibrarypath = ''
2919 ldlibrarypath += ':' + ':'.join(extrapaths)
2920 os.putenv('LD_LIBRARY_PATH', ldlibrarypath)
2921
2922 shower_card_path = pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat')
2923 self.shower_card.write_card(shower, shower_card_path)
2924
2925
2926 if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat')):
2927 files.mv(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat'),
2928 pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat'))
2929
2930 mcatnlo_log = pjoin(self.me_dir, 'mcatnlo.log')
2931 self.update_status('Compiling MCatNLO for %s...' % shower, level='shower')
2932
2933
2934
2935 if shower == 'PYTHIA8' and not \
2936 os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')) and \
2937 'dl' not in self.shower_card['extralibs'].split():
2938
2939 self.shower_card['extralibs'] += ' dl'
2940 logger.warning("'dl' was added to extralibs from the shower_card.dat.\n" + \
2941 "It is needed for the correct running of PY8.2xx.\n" + \
2942 "If this library cannot be found on your system, a crash will occur.")
2943
2944 misc.call(['./MCatNLO_MadFKS.inputs'], stdout=open(mcatnlo_log, 'w'),
2945 stderr=open(mcatnlo_log, 'w'),
2946 cwd=pjoin(self.me_dir, 'MCatNLO'),
2947 close_fds=True)
2948
2949 exe = 'MCATNLO_%s_EXE' % shower
2950 if not os.path.exists(pjoin(self.me_dir, 'MCatNLO', exe)) and \
2951 not os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe')):
2952 print open(mcatnlo_log).read()
2953 raise aMCatNLOError('Compilation failed, check %s for details' % mcatnlo_log)
2954 logger.info(' ... done')
2955
2956
2957 count = 1
2958 while os.path.isdir(pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \
2959 (shower, count))):
2960 count += 1
2961 rundir = pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \
2962 (shower, count))
2963 os.mkdir(rundir)
2964 files.cp(shower_card_path, rundir)
2965
2966
2967
2968 event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name))
2969 if max(len(event_files), 1) != self.shower_card['nsplit_jobs']:
2970 logger.info('Cleaning old files and splitting the event file...')
2971
2972 files.rm([f for f in event_files if 'events.lhe' not in f])
2973 if self.shower_card['nsplit_jobs'] > 1:
2974 misc.compile(['split_events'], cwd = pjoin(self.me_dir, 'Utilities'), nocompile=options['nocompile'])
2975 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'split_events')],
2976 stdin=subprocess.PIPE,
2977 stdout=open(pjoin(self.me_dir, 'Events', self.run_name, 'split_events.log'), 'w'),
2978 cwd=pjoin(self.me_dir, 'Events', self.run_name))
2979 p.communicate(input = 'events.lhe\n%d\n' % self.shower_card['nsplit_jobs'])
2980 logger.info('Splitting done.')
2981 event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name))
2982
2983 event_files.sort()
2984
2985 self.update_status('Showering events...', level='shower')
2986 logger.info('(Running in %s)' % rundir)
2987 if shower != 'PYTHIA8':
2988 files.mv(pjoin(self.me_dir, 'MCatNLO', exe), rundir)
2989 files.mv(pjoin(self.me_dir, 'MCatNLO', 'MCATNLO_%s_input' % shower), rundir)
2990 else:
2991
2992 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.cmd'), rundir)
2993 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe'), rundir)
2994 if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
2995 files.ln(pjoin(self.options['pythia8_path'], 'examples', 'config.sh'), rundir)
2996 files.ln(pjoin(self.options['pythia8_path'], 'xmldoc'), rundir)
2997 else:
2998 files.ln(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'), rundir)
2999
3000 if shower == 'HERWIGPP':
3001 try:
3002 files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++'), rundir)
3003 except Exception:
3004 raise aMCatNLOError('The Herwig++ path set in the configuration file is not valid.')
3005
3006 if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so')):
3007 files.cp(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so'), rundir)
3008
3009 files.ln(evt_file, rundir, 'events.lhe')
3010 for i, f in enumerate(event_files):
3011 files.ln(f, rundir,'events_%d.lhe' % (i + 1))
3012
3013 if not self.shower_card['analyse']:
3014
3015 out_id = 'HEP'
3016 else:
3017
3018 if "HwU" in self.shower_card['analyse']:
3019 out_id = 'HWU'
3020 else:
3021 out_id = 'TOP'
3022
3023
3024 with open(pjoin(rundir, 'shower.sh'), 'w') as fsock:
3025 fsock.write(open(pjoin(self.me_dir, 'MCatNLO', 'shower_template.sh')).read() \
3026 % {'extralibs': ':'.join(extrapaths)})
3027 subprocess.call(['chmod', '+x', pjoin(rundir, 'shower.sh')])
3028
3029 if event_files:
3030 arg_list = [[shower, out_id, self.run_name, '%d' % (i + 1)] \
3031 for i in range(len(event_files))]
3032 else:
3033 arg_list = [[shower, out_id, self.run_name]]
3034
3035 self.run_all({rundir: 'shower.sh'}, arg_list, 'shower')
3036 self.njobs = 1
3037 self.wait_for_complete('shower')
3038
3039
3040 message = ''
3041 warning = ''
3042 to_gzip = [evt_file]
3043 if out_id == 'HEP':
3044
3045 if shower in ['PYTHIA8', 'HERWIGPP']:
3046 hep_format = 'HEPMC'
3047 ext = 'hepmc'
3048 else:
3049 hep_format = 'StdHEP'
3050 ext = 'hep'
3051
3052 hep_file = '%s_%s_0.%s.gz' % \
3053 (pjoin(os.path.dirname(evt_file), 'events'), shower, ext)
3054 count = 0
3055
3056
3057
3058 while os.path.exists(hep_file) or \
3059 os.path.exists(hep_file.replace('.%s.gz' % ext, '__1.%s.gz' % ext)) :
3060 count +=1
3061 hep_file = '%s_%s_%d.%s.gz' % \
3062 (pjoin(os.path.dirname(evt_file), 'events'), shower, count, ext)
3063
3064 try:
3065 if self.shower_card['nsplit_jobs'] == 1:
3066 files.mv(os.path.join(rundir, 'events.%s.gz' % ext), hep_file)
3067 message = ('The file %s has been generated. \nIt contains showered' + \
3068 ' and hadronized events in the %s format obtained' + \
3069 ' showering the parton-level event file %s.gz with %s') % \
3070 (hep_file, hep_format, evt_file, shower)
3071 else:
3072 hep_list = []
3073 for i in range(self.shower_card['nsplit_jobs']):
3074 hep_list.append(hep_file.replace('.%s.gz' % ext, '__%d.%s.gz' % (i + 1, ext)))
3075 files.mv(os.path.join(rundir, 'events_%d.%s.gz' % (i + 1, ext)), hep_list[-1])
3076 message = ('The following files have been generated:\n %s\nThey contain showered' + \
3077 ' and hadronized events in the %s format obtained' + \
3078 ' showering the (split) parton-level event file %s.gz with %s') % \
3079 ('\n '.join(hep_list), hep_format, evt_file, shower)
3080
3081 except OSError, IOError:
3082 raise aMCatNLOError('No file has been generated, an error occurred.'+\
3083 ' More information in %s' % pjoin(os.getcwd(), 'amcatnlo_run.log'))
3084
3085
3086 if hep_format == 'StdHEP':
3087 try:
3088 self.do_plot('%s -f' % self.run_name)
3089 except Exception, error:
3090 logger.info("Fail to make the plot. Continue...")
3091 pass
3092
3093 elif out_id == 'TOP' or out_id == 'HWU':
3094
3095 if out_id=='TOP':
3096 ext='top'
3097 elif out_id=='HWU':
3098 ext='HwU'
3099 topfiles = []
3100 top_tars = [tarfile.TarFile(f) for f in misc.glob('histfile*.tar', rundir)]
3101 for top_tar in top_tars:
3102 topfiles.extend(top_tar.getnames())
3103
3104
3105 if len(top_tars) != self.shower_card['nsplit_jobs']:
3106 raise aMCatNLOError('%d job(s) expected, %d file(s) found' % \
3107 (self.shower_card['nsplit_jobs'], len(top_tars)))
3108
3109
3110
3111 filename = 'plot_%s_%d_' % (shower, 1)
3112 count = 1
3113 while os.path.exists(pjoin(self.me_dir, 'Events',
3114 self.run_name, '%s0.%s' % (filename,ext))) or \
3115 os.path.exists(pjoin(self.me_dir, 'Events',
3116 self.run_name, '%s0__1.%s' % (filename,ext))):
3117 count += 1
3118 filename = 'plot_%s_%d_' % (shower, count)
3119
3120 if out_id=='TOP':
3121 hist_format='TopDrawer format'
3122 elif out_id=='HWU':
3123 hist_format='HwU and GnuPlot formats'
3124
3125 if not topfiles:
3126
3127 warning = 'No .top file has been generated. For the results of your ' +\
3128 'run, please check inside %s' % rundir
3129 elif self.shower_card['nsplit_jobs'] == 1:
3130
3131 top_tars[0].extractall(path = rundir)
3132 plotfiles = []
3133 for i, file in enumerate(topfiles):
3134 if out_id=='TOP':
3135 plotfile = pjoin(self.me_dir, 'Events', self.run_name,
3136 '%s%d.top' % (filename, i))
3137 files.mv(pjoin(rundir, file), plotfile)
3138 elif out_id=='HWU':
3139 out=pjoin(self.me_dir,'Events',
3140 self.run_name,'%s%d'% (filename,i))
3141 histos=[{'dirname':pjoin(rundir,file)}]
3142 self.combine_plots_HwU(histos,out)
3143 try:
3144 misc.call(['gnuplot','%s%d.gnuplot' % (filename,i)],\
3145 stdout=os.open(os.devnull, os.O_RDWR),\
3146 stderr=os.open(os.devnull, os.O_RDWR),\
3147 cwd=pjoin(self.me_dir, 'Events', self.run_name))
3148 except Exception:
3149 pass
3150 plotfile=pjoin(self.me_dir,'Events',self.run_name,
3151 '%s%d.HwU'% (filename,i))
3152 plotfiles.append(plotfile)
3153
3154 ffiles = 'files'
3155 have = 'have'
3156 if len(plotfiles) == 1:
3157 ffiles = 'file'
3158 have = 'has'
3159
3160 message = ('The %s %s %s been generated, with histograms in the' + \
3161 ' %s, obtained by showering the parton-level' + \
3162 ' file %s.gz with %s.') % (ffiles, ', '.join(plotfiles), have, \
3163 hist_format, evt_file, shower)
3164 else:
3165
3166 topfiles_set = set(topfiles)
3167 plotfiles = []
3168 for j, top_tar in enumerate(top_tars):
3169 top_tar.extractall(path = rundir)
3170 for i, file in enumerate(topfiles_set):
3171 plotfile = pjoin(self.me_dir, 'Events', self.run_name,
3172 '%s%d__%d.%s' % (filename, i, j + 1,ext))
3173 files.mv(pjoin(rundir, file), plotfile)
3174 plotfiles.append(plotfile)
3175
3176
3177 if self.shower_card['combine_td']:
3178 misc.compile(['sum_plots'], cwd = pjoin(self.me_dir, 'Utilities'))
3179
3180 if self.banner.get('run_card', 'event_norm').lower() == 'sum':
3181 norm = 1.
3182 elif self.banner.get('run_card', 'event_norm').lower() == 'average':
3183 norm = 1./float(self.shower_card['nsplit_jobs'])
3184
3185 plotfiles2 = []
3186 for i, file in enumerate(topfiles_set):
3187 filelist = ['%s%d__%d.%s' % (filename, i, j + 1,ext) \
3188 for j in range(self.shower_card['nsplit_jobs'])]
3189 if out_id=='TOP':
3190 infile="%d\n%s\n%s\n" % \
3191 (self.shower_card['nsplit_jobs'],
3192 '\n'.join(filelist),
3193 '\n'.join([str(norm)] * self.shower_card['nsplit_jobs']))
3194 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'sum_plots')],
3195 stdin=subprocess.PIPE,
3196 stdout=os.open(os.devnull, os.O_RDWR),
3197 cwd=pjoin(self.me_dir, 'Events', self.run_name))
3198 p.communicate(input = infile)
3199 files.mv(pjoin(self.me_dir, 'Events', self.run_name, 'sum.top'),
3200 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.top' % (filename, i)))
3201 elif out_id=='HWU':
3202 out=pjoin(self.me_dir,'Events',
3203 self.run_name,'%s%d'% (filename,i))
3204 histos=[]
3205 norms=[]
3206 for plotfile in plotfiles:
3207 histos.append({'dirname':plotfile})
3208 norms.append(norm)
3209 self.combine_plots_HwU(histos,out,normalisation=norms)
3210 try:
3211 misc.call(['gnuplot','%s%d.gnuplot' % (filename, i)],\
3212 stdout=os.open(os.devnull, os.O_RDWR),\
3213 stderr=os.open(os.devnull, os.O_RDWR),\
3214 cwd=pjoin(self.me_dir, 'Events',self.run_name))
3215 except Exception:
3216 pass
3217
3218 plotfiles2.append(pjoin(self.me_dir, 'Events', self.run_name, '%s%d.%s' % (filename, i,ext)))
3219 tar = tarfile.open(
3220 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.tar.gz' % (filename, i)), 'w:gz')
3221 for f in filelist:
3222 tar.add(pjoin(self.me_dir, 'Events', self.run_name, f), arcname=f)
3223 files.rm([pjoin(self.me_dir, 'Events', self.run_name, f) for f in filelist])
3224
3225 tar.close()
3226
3227 ffiles = 'files'
3228 have = 'have'
3229 if len(plotfiles2) == 1:
3230 ffiles = 'file'
3231 have = 'has'
3232
3233 message = ('The %s %s %s been generated, with histograms in the' + \
3234 ' %s, obtained by showering the parton-level' + \
3235 ' file %s.gz with %s.\n' + \
3236 'The files from the different shower ' + \
3237 'jobs (before combining them) can be found inside %s.') % \
3238 (ffiles, ', '.join(plotfiles2), have, hist_format,\
3239 evt_file, shower,
3240 ', '.join([f.replace('%s' % ext, 'tar.gz') for f in plotfiles2]))
3241
3242 else:
3243 message = ('The following files have been generated:\n %s\n' + \
3244 'They contain histograms in the' + \
3245 ' %s, obtained by showering the parton-level' + \
3246 ' file %s.gz with %s.') % ('\n '.join(plotfiles), \
3247 hist_format, evt_file, shower)
3248
3249
3250 run_dir_path = pjoin(rundir, self.run_name)
3251 if os.path.exists(pjoin(run_dir_path,'RunMaterial.tar.gz')):
3252 misc.call(['tar','-xzpf','RunMaterial.tar.gz'],cwd=run_dir_path)
3253 files.cp(pjoin(self.me_dir,'Cards','shower_card.dat'),
3254 pjoin(run_dir_path,'RunMaterial','shower_card_for_%s_%d.dat'\
3255 %(shower, count)))
3256 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'],
3257 cwd=run_dir_path)
3258 shutil.rmtree(pjoin(run_dir_path,'RunMaterial'))
3259
3260 for f in to_gzip:
3261 misc.gzip(f)
3262 if message:
3263 logger.info(message)
3264 if warning:
3265 logger.warning(warning)
3266
3267 self.update_status('Run complete', level='shower', update_results=True)
3268
3269
3270
def set_run_name(self, name, tag=None, level='parton', reload_card=False):
    """define the run name, the run_tag, the banner and the results."""

    # levels whose presence on the last tag forces a switch to a new tag
    # when (re)running at 'level'
    upgrade_tag = {'parton': ['parton','pythia','pgs','delphes','shower'],
                   'pythia': ['pythia','pgs','delphes'],
                   'shower': ['shower'],
                   'pgs': ['pgs'],
                   'delphes':['delphes'],
                   'plot':[]}

    # Case 1: same run as the current one -> at most refresh card/tag
    if name == self.run_name:
        if reload_card:
            run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
            self.run_card = banner_mod.RunCardNLO(run_card)

        # check if we need to change the tag
        if tag:
            self.run_card['run_tag'] = tag
            self.run_tag = tag
            self.results.add_run(self.run_name, self.run_card)
        else:
            # if the last tag already has data at this level, move to a
            # fresh tag so the old data are not overwritten
            for tag in upgrade_tag[level]:
                if getattr(self.results[self.run_name][-1], tag):
                    tag = self.get_available_tag()
                    self.run_card['run_tag'] = tag
                    self.run_tag = tag
                    self.results.add_run(self.run_name, self.run_card)
                    break
        return  # nothing else to do

    # Case 2: switching run -> store the previous one first
    if self.run_name:
        self.store_result()

    self.run_name = name

    # reload the run_card for the new run
    run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
    self.run_card = banner_mod.RunCardNLO(run_card)

    new_tag = False
    # recover an existing banner for this run/tag, if any
    self.banner = banner_mod.recover_banner(self.results, level, self.run_name, tag)
    if tag:
        self.run_card['run_tag'] = tag
        new_tag = True
    elif not self.run_name in self.results and level =='parton':
        pass # No results yet, so current tag is fine
    elif not self.run_name in self.results:
        # running a non-parton level on a run unknown to the results DB
        logger.warning('Trying to run data on unknown run.')
        self.results.add_run(name, self.run_card)
        self.results.update('add run %s' % name, 'all', makehtml=True)
    else:
        # if the last tag already has data at this level, pick a fresh tag
        for tag in upgrade_tag[level]:
            if getattr(self.results[self.run_name][-1], tag):
                tag = self.get_available_tag()
                self.run_card['run_tag'] = tag
                new_tag = True
                break
        if not new_tag:
            # reuse the tag of the last entry of this run
            tag = self.results[self.run_name][-1]['tag']
            self.run_card['run_tag'] = tag

    # register the run (or make it current) in the results DB
    if name in self.results and not new_tag:
        self.results.def_current(self.run_name)
    else:
        self.results.add_run(self.run_name, self.run_card)

    self.run_tag = self.run_card['run_tag']

    # return the tag of the previous run carrying the data needed at this
    # level (nothing for parton level)
    if level == 'parton':
        return
    elif level == 'pythia':
        return self.results[self.run_name][0]['tag']
    else:
        for i in range(-1,-len(self.results[self.run_name])-1,-1):
            tagRun = self.results[self.run_name][i]
            if tagRun.pythia:
                return tagRun['tag']
3360
3361
""" tar the pythia results. This is done when we are quite sure that
the pythia output will not be use anymore """

# nothing to store before a run has been defined
if not self.run_name:
    return

self.results.save()

if not self.to_store:
    return

# make sure the parton-level event file ends up on disk gzipped, and
# only once (remove the plain copy if the gzipped one already exists)
if 'event' in self.to_store:
    if os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')):
        if not os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz')):
            # NOTE(review): error=True here looks odd for a routine
            # status update -- confirm against update_status's signature
            self.update_status('gzipping output file: events.lhe', level='parton', error=True)
            misc.gzip(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'))
        else:
            os.remove(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'))
    # drop the intermediate reweight file, if any
    if os.path.exists(pjoin(self.me_dir,'Events','reweight.lhe')):
        os.remove(pjoin(self.me_dir,'Events', 'reweight.lhe'))

tag = self.run_card['run_tag']

# everything handled: reset the pending-store list
self.to_store = []
3388
3389
3391 """reads the info in the init block and returns them in a dictionary"""
3392 ev_file = open(evt_file)
3393 init = ""
3394 found = False
3395 while True:
3396 line = ev_file.readline()
3397 if "<init>" in line:
3398 found = True
3399 elif found and not line.startswith('#'):
3400 init += line
3401 if "</init>" in line or "<event>" in line:
3402 break
3403 ev_file.close()
3404
3405
3406
3407
3408
3409
3410 init_dict = {}
3411 init_dict['idbmup1'] = int(init.split()[0])
3412 init_dict['idbmup2'] = int(init.split()[1])
3413 init_dict['ebmup1'] = float(init.split()[2])
3414 init_dict['ebmup2'] = float(init.split()[3])
3415 init_dict['pdfgup1'] = int(init.split()[4])
3416 init_dict['pdfgup2'] = int(init.split()[5])
3417 init_dict['pdfsup1'] = int(init.split()[6])
3418 init_dict['pdfsup2'] = int(init.split()[7])
3419 init_dict['idwtup'] = int(init.split()[8])
3420 init_dict['nprup'] = int(init.split()[9])
3421
3422 return init_dict
3423
3424
"""creates the mcatnlo input script using the values set in the header of the event_file.
It also checks if the lhapdf library is used"""
shower = self.banner.get('run_card', 'parton_shower').upper()
pdlabel = self.banner.get('run_card', 'pdlabel')
itry = 0  # NOTE(review): never used below; kept as-is
nevents = self.shower_card['nevents']
init_dict = self.get_init_dict(evt_file)

# shower all generated events when nevents is unset (<0) or too large
if nevents < 0 or \
   nevents > self.banner.get_detail('run_card', 'nevents'):
    nevents = self.banner.get_detail('run_card', 'nevents')

# events per split job
nevents = nevents / self.shower_card['nsplit_jobs']

# Monte-Carlo masses from the banner, one "pdg mass" pair per line
mcmass_dict = {}
for line in [l for l in self.banner['montecarlomasses'].split('\n') if l]:
    pdg = int(line.split()[0])
    mass = float(line.split()[1])
    mcmass_dict[pdg] = mass

# build the key=value input script consumed by the MCatNLO scripts
content = 'EVPREFIX=%s\n' % pjoin(os.path.split(evt_file)[1])
content += 'NEVENTS=%d\n' % nevents
content += 'NEVENTS_TOT=%d\n' % (self.banner.get_detail('run_card', 'nevents') /\
                                 self.shower_card['nsplit_jobs'])
content += 'MCMODE=%s\n' % shower
content += 'PDLABEL=%s\n' % pdlabel
content += 'ALPHAEW=%s\n' % self.banner.get_detail('param_card', 'sminputs', 1).value
# masses and widths of the heavy SM particles, from the param_card
content += 'TMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 6).value
content += 'TWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 6).value
content += 'ZMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 23).value
content += 'ZWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 23).value
content += 'WMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 24).value
content += 'WWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 24).value
try:
    content += 'HGGMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 25).value
    content += 'HGGWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 25).value
except KeyError:
    # pdg 25 not in the param_card: fall back on hard-coded defaults
    content += 'HGGMASS=120.\n'
    content += 'HGGWIDTH=0.00575308848\n'
content += 'beammom1=%s\n' % self.banner.get_detail('run_card', 'ebeam1')
content += 'beammom2=%s\n' % self.banner.get_detail('run_card', 'ebeam2')
content += 'BEAM1=%s\n' % self.banner.get_detail('run_card', 'lpp1')
content += 'BEAM2=%s\n' % self.banner.get_detail('run_card', 'lpp2')
# light-quark Monte-Carlo masses
content += 'DMASS=%s\n' % mcmass_dict[1]
content += 'UMASS=%s\n' % mcmass_dict[2]
content += 'SMASS=%s\n' % mcmass_dict[3]
content += 'CMASS=%s\n' % mcmass_dict[4]
content += 'BMASS=%s\n' % mcmass_dict[5]
try:
    content += 'EMASS=%s\n' % mcmass_dict[11]
    content += 'MUMASS=%s\n' % mcmass_dict[13]
    content += 'TAUMASS=%s\n' % mcmass_dict[15]
except KeyError:
    # lepton masses missing from the banner: read them from the
    # shower-specific MCmasses include file instead
    mcmass_lines = [l for l in \
        open(pjoin(self.me_dir, 'SubProcesses', 'MCmasses_%s.inc' % shower.upper())
             ).read().split('\n') if l]
    new_mcmass_dict = {}
    for l in mcmass_lines:
        key, val = l.split('=')
        # fortran 'd' exponents -> python 'e'
        new_mcmass_dict[key.strip()] = val.replace('d', 'e').strip()
    content += 'EMASS=%s\n' % new_mcmass_dict['mcmass(11)']
    content += 'MUMASS=%s\n' % new_mcmass_dict['mcmass(13)']
    content += 'TAUMASS=%s\n' % new_mcmass_dict['mcmass(15)']

content += 'GMASS=%s\n' % mcmass_dict[21]
content += 'EVENT_NORM=%s\n' % self.banner.get_detail('run_card', 'event_norm').lower()

# decide whether LHAPDF has to be linked for the shower
if int(self.shower_card['pdfcode']) > 1 or \
   (pdlabel=='lhapdf' and int(self.shower_card['pdfcode'])==1):
    # LHAPDF is required: either the events were generated with it, or
    # the user gave an explicit LHAPDF set number in the shower_card
    self.link_lhapdf(pjoin(self.me_dir, 'lib'))
    lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'],
                                  stdout = subprocess.PIPE).stdout.read().strip()
    content += 'LHAPDFPATH=%s\n' % lhapdfpath
    pdfsetsdir = self.get_lhapdf_pdfsetsdir()
    if self.shower_card['pdfcode']==1:
        # pdfcode 1 means: reuse the PDF of the event generation
        lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])]
        content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']])
    else:
        lhaid_list = [abs(int(self.shower_card['pdfcode']))]
        content += 'PDFCODE=%s\n' % self.shower_card['pdfcode']
    self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
elif int(self.shower_card['pdfcode'])==1:
    # events were generated with an internal PDF, but the user asked to
    # shower with the same set: try LHAPDF and fall back on the shower's
    # internal PDF if no valid installation is found
    try:
        lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'],
                                      stdout = subprocess.PIPE).stdout.read().strip()
        self.link_lhapdf(pjoin(self.me_dir, 'lib'))
        content += 'LHAPDFPATH=%s\n' % lhapdfpath
        pdfsetsdir = self.get_lhapdf_pdfsetsdir()
        lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])]
        content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']])
        self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
    except Exception:
        logger.warning('Trying to shower events using the same PDF in the shower as used in the generation'+\
                       ' of the events using LHAPDF. However, no valid LHAPDF installation found with the'+\
                       ' needed PDF set. Will use default internal PDF for the shower instead. To use the'+\
                       ' same set as was used in the event generation install LHAPDF and set the path using'+\
                       ' "set /path_to_lhapdf/bin/lhapdf-config" from the MadGraph5_aMC@NLO python shell')
        content += 'LHAPDFPATH=\n'
        content += 'PDFCODE=0\n'
else:
    # the shower uses its own internal PDF
    content += 'LHAPDFPATH=\n'
    content += 'PDFCODE=0\n'

content += 'ICKKW=%s\n' % self.banner.get_detail('run_card', 'ickkw')
content += 'PTJCUT=%s\n' % self.banner.get_detail('run_card', 'ptj')
# pass the configured shower tool paths, when set
if self.options['pythia8_path']:
    content+='PY8PATH=%s\n' % self.options['pythia8_path']
if self.options['hwpp_path']:
    content+='HWPPPATH=%s\n' % self.options['hwpp_path']
if self.options['thepeg_path']:
    content+='THEPEGPATH=%s\n' % self.options['thepeg_path']
if self.options['hepmc_path']:
    content+='HEPMCPATH=%s\n' % self.options['hepmc_path']

# write the input script read by the MCatNLO shell scripts
output = open(pjoin(self.me_dir, 'MCatNLO', 'banner.dat'), 'w')
output.write(content)
output.close()
return shower
3558
3559
3561 """runs the reweight_xsec_events executables on each sub-event file generated
3562 to compute on the fly scale and/or PDF uncertainities"""
3563 logger.info(' Doing reweight')
3564
3565 nev_unw = pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted')
3566
3567 if only:
3568 if os.path.exists(nev_unw + '.orig'):
3569 files.cp(nev_unw + '.orig', nev_unw)
3570 else:
3571 raise aMCatNLOError('Cannot find event file information')
3572
3573
3574 file = open(nev_unw)
3575 lines = file.read().split('\n')
3576 file.close()
3577
3578 files.cp(nev_unw, nev_unw + '.orig')
3579
3580
3581 evt_files = [line.split()[0] for line in lines[:-1] if line.split()[1] != '0']
3582 evt_wghts = [float(line.split()[3]) for line in lines[:-1] if line.split()[1] != '0']
3583
3584 job_dict = {}
3585 exe = 'reweight_xsec_events.local'
3586 for i, evt_file in enumerate(evt_files):
3587 path, evt = os.path.split(evt_file)
3588 files.ln(pjoin(self.me_dir, 'SubProcesses', exe), \
3589 pjoin(self.me_dir, 'SubProcesses', path))
3590 job_dict[path] = [exe]
3591
3592 self.run_all(job_dict, [[evt, '1']], 'Running reweight')
3593
3594
3595 for evt_file in evt_files:
3596 last_line = subprocess.Popen(['tail', '-n1', '%s.rwgt' % \
3597 pjoin(self.me_dir, 'SubProcesses', evt_file)], \
3598 stdout = subprocess.PIPE).stdout.read().strip()
3599 if last_line != "</LesHouchesEvents>":
3600 raise aMCatNLOError('An error occurred during reweight. Check the' + \
3601 '\'reweight_xsec_events.output\' files inside the ' + \
3602 '\'SubProcesses/P*/G*/ directories for details')
3603
3604
3605 newfile = open(nev_unw, 'w')
3606 for line in lines:
3607 if line:
3608 newfile.write(line.replace(line.split()[0], line.split()[0] + '.rwgt') + '\n')
3609 newfile.close()
3610
3611 return self.pdf_scale_from_reweighting(evt_files,evt_wghts)
3612
"""This function takes the files with the scale and pdf values
written by the reweight_xsec_events.f code
(P*/G*/pdf_scale_dependence.dat) and computes the overall
scale and PDF uncertainty (the latter is computed using the
Hessian method (if lhaid<90000) or Gaussian (if lhaid>90000))
and returns it in percents. The expected format of the file
is: n_scales xsec_scale_central xsec_scale1 ... n_pdf
xsec_pdf0 xsec_pdf1 ...."""

# accumulate, over all event files, the event-weighted cross sections
# for every scale / pdf variation
scales=[]
pdfs=[]
for i,evt_file in enumerate(evt_files):
    path, evt=os.path.split(evt_file)
    with open(pjoin(self.me_dir, 'SubProcesses', path, 'scale_pdf_dependence.dat'),'r') as f:
        data_line=f.readline()
        if "scale variations:" in data_line:
            for j,scale in enumerate(self.run_card['dynamical_scale_choice']):
                data_line = f.readline().split()
                # fortran 'D' exponents -> python 'E'; weight by evt_wghts[i]
                scales_this = [float(val)*evt_wghts[i] for val in f.readline().replace("D", "E").split()]
                try:
                    # add this file's contribution to the running sums
                    scales[j] = [a + b for a, b in zip(scales[j], scales_this)]
                except IndexError:
                    # first file: initialise the sums
                    scales+=[scales_this]
            data_line=f.readline()
        if "pdf variations:" in data_line:
            for j,pdf in enumerate(self.run_card['lhaid']):
                data_line = f.readline().split()
                pdfs_this = [float(val)*evt_wghts[i] for val in f.readline().replace("D", "E").split()]
                try:
                    pdfs[j] = [a + b for a, b in zip(pdfs[j], pdfs_this)]
                except IndexError:
                    pdfs+=[pdfs_this]

# compute the scale uncertainty, in percent, for each dynamical scale
scale_info=[]
for j,scale in enumerate(scales):
    s_cen=scale[0]  # first entry is the central value
    if s_cen != 0.0 and self.run_card['reweight_scale'][j]:
        # max and min of the full envelope
        s_max=(max(scale)/s_cen-1)*100
        s_min=(1-min(scale)/s_cen)*100
        # ren and fac scale dependence added in quadrature
        ren_var=[]
        fac_var=[]
        for i in range(len(self.run_card['rw_rscale'])):
            ren_var.append(scale[i]-s_cen)
        for i in range(len(self.run_card['rw_fscale'])):
            fac_var.append(scale[i*len(self.run_card['rw_rscale'])]-s_cen)
        s_max_q=((s_cen+math.sqrt(math.pow(max(ren_var),2)+math.pow(max(fac_var),2)))/s_cen-1)*100
        s_min_q=(1-(s_cen-math.sqrt(math.pow(min(ren_var),2)+math.pow(min(fac_var),2)))/s_cen)*100
        s_size=len(scale)
    else:
        # central value zero, or scale reweighting disabled for this entry
        s_max=0.0
        s_min=0.0
        s_max_q=0.0
        s_min_q=0.0
        s_size=len(scale)
    scale_info.append({'cen':s_cen, 'min':s_min, 'max':s_max, \
                       'min_q':s_min_q, 'max_q':s_max_q, 'size':s_size, \
                       'label':self.run_card['dynamical_scale_choice'][j], \
                       'unc':self.run_card['reweight_scale'][j]})

# check if the python interface of LHAPDF is usable: look for lhapdf.so
# inside <libdir>/*/site-packages, then <libdir>64/*/site-packages, and
# finally try a plain import
if any(self.run_card['reweight_pdf']):
    use_lhapdf=False
    lhapdf_libdir=subprocess.Popen([self.options['lhapdf'],'--libdir'],\
                                   stdout=subprocess.PIPE).stdout.read().strip()

    try:
        candidates=[dirname for dirname in os.listdir(lhapdf_libdir) \
                    if os.path.isdir(pjoin(lhapdf_libdir,dirname))]
    except OSError:
        candidates=[]
    for candidate in candidates:
        if os.path.isfile(pjoin(lhapdf_libdir,candidate,'site-packages','lhapdf.so')):
            sys.path.insert(0,pjoin(lhapdf_libdir,candidate,'site-packages'))
            try:
                import lhapdf
                use_lhapdf=True
                break
            except ImportError:
                # candidate did not import: remove it from sys.path again
                sys.path.pop(0)
                continue

    if not use_lhapdf:
        try:
            candidates=[dirname for dirname in os.listdir(lhapdf_libdir+'64') \
                        if os.path.isdir(pjoin(lhapdf_libdir+'64',dirname))]
        except OSError:
            candidates=[]
        for candidate in candidates:
            if os.path.isfile(pjoin(lhapdf_libdir+'64',candidate,'site-packages','lhapdf.so')):
                sys.path.insert(0,pjoin(lhapdf_libdir+'64',candidate,'site-packages'))
                try:
                    import lhapdf
                    use_lhapdf=True
                    break
                except ImportError:
                    sys.path.pop(0)
                    continue

    if not use_lhapdf:
        try:
            import lhapdf
            use_lhapdf=True
        except ImportError:
            logger.warning("Failed to access python version of LHAPDF: "\
                           "cannot compute PDF uncertainty from the "\
                           "weights in the events. The weights in the LHE " \
                           "event files will still cover all PDF set members, "\
                           "but there will be no PDF uncertainty printed in the run summary. \n "\
                           "If the python interface to LHAPDF is available on your system, try "\
                           "adding its location to the PYTHONPATH environment variable and the"\
                           "LHAPDF library location to LD_LIBRARY_PATH (linux) or DYLD_LIBRARY_PATH (mac os x).")
            use_lhapdf=False

# silence LHAPDF's own messages
if any(self.run_card['reweight_pdf']) and use_lhapdf: lhapdf.setVerbosity(0)

# compute the PDF uncertainty, in percent, for each requested set
pdf_info=[]
for j,pdfset in enumerate(pdfs):
    p_cen=pdfset[0]  # first entry is the central value
    if p_cen != 0.0 and self.run_card['reweight_pdf'][j]:
        if use_lhapdf:
            pdfsetname=self.run_card['lhapdfsetname'][j]
            try:
                # let LHAPDF combine the members into an uncertainty
                p=lhapdf.getPDFSet(pdfsetname)
                ep=p.uncertainty(pdfset,-1)
                p_cen=ep.central
                p_min=abs(ep.errminus/p_cen)*100
                p_max=abs(ep.errplus/p_cen)*100
                p_type=p.errorType
                p_size=p.size
                p_conf=p.errorConfLevel
            except:
                # NOTE(review): bare except silently absorbs any failure
                # (including e.g. KeyboardInterrupt) -- consider narrowing
                logger.warning("Could not access LHAPDF to compute uncertainties for %s" % pdfsetname)
                p_min=0.0
                p_max=0.0
                p_type='unknown'
                p_conf='unknown'
                p_size=len(pdfset)
        else:
            p_min=0.0
            p_max=0.0
            p_type='unknown'
            p_conf='unknown'
            p_size=len(pdfset)
            pdfsetname=self.run_card['lhaid'][j]
    else:
        # central value zero, or PDF reweighting disabled for this entry
        p_min=0.0
        p_max=0.0
        p_type='none'
        p_conf='unknown'
        p_size=len(pdfset)
        pdfsetname=self.run_card['lhaid'][j]
    pdf_info.append({'cen':p_cen, 'min':p_min, 'max':p_max, \
                     'unc':p_type, 'name':pdfsetname, 'size':p_size, \
                     'label':self.run_card['lhaid'][j], 'conf':p_conf})

scale_pdf_info=[scale_info,pdf_info]
return scale_pdf_info
3775
3776
3788
3789 - def run_all(self, job_dict, arg_list, run_type='monitor', split_jobs = False):
3790 """runs the jobs in job_dict (organized as folder: [job_list]), with arguments args"""
3791 self.ijob = 0
3792 if run_type != 'shower':
3793 self.njobs = sum(len(jobs) for jobs in job_dict.values()) * len(arg_list)
3794 for args in arg_list:
3795 for Pdir, jobs in job_dict.items():
3796 for job in jobs:
3797 self.run_exe(job, args, run_type, cwd=pjoin(self.me_dir, 'SubProcesses', Pdir) )
3798 if self.cluster_mode == 2:
3799 time.sleep(1)
3800 else:
3801 self.njobs = len(arg_list)
3802 for args in arg_list:
3803 [(cwd, exe)] = job_dict.items()
3804 self.run_exe(exe, args, run_type, cwd)
3805
3806 self.wait_for_complete(run_type)
3807
3808
3809
3811 """check the integrity of the event files after splitting, and resubmit
3812 those which are not nicely terminated"""
3813 jobs_to_resubmit = []
3814 for job in jobs:
3815 last_line = ''
3816 try:
3817 last_line = subprocess.Popen(
3818 ['tail', '-n1', pjoin(job['dirname'], 'events.lhe')], \
3819 stdout = subprocess.PIPE).stdout.read().strip()
3820 except IOError:
3821 pass
3822 if last_line != "</LesHouchesEvents>":
3823 jobs_to_resubmit.append(job)
3824 self.njobs = 0
3825 if jobs_to_resubmit:
3826 run_type = 'Resubmitting broken jobs'
3827 logger.info('Some event files are broken, corresponding jobs will be resubmitted.')
3828 for job in jobs_to_resubmit:
3829 logger.debug('Resubmitting ' + job['dirname'] + '\n')
3830 self.run_all_jobs(jobs_to_resubmit,2,fixed_order=False)
3831
3832
3834 """looks into the nevents_unweighed_splitted file to check how many
3835 split jobs are needed for this (pdir, job). arg is F, B or V"""
3836
3837 splittings = []
3838 ajob = open(pjoin(self.me_dir, 'SubProcesses', pdir, job)).read()
3839 pattern = re.compile('for i in (\d+) ; do')
3840 match = re.search(pattern, ajob)
3841 channel = match.groups()[0]
3842
3843
3844 nevents_file = open(pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted_splitted')).read()
3845
3846
3847 pattern = re.compile(r"%s_(\d+)/events.lhe" % \
3848 pjoin(pdir, 'G%s%s' % (arg,channel)))
3849 matches = re.findall(pattern, nevents_file)
3850 for m in matches:
3851 splittings.append(m)
3852 return splittings
3853
3854
3855 - def run_exe(self, exe, args, run_type, cwd=None):
3856 """this basic function launch locally/on cluster exe with args as argument.
3857 """
3858
3859
3860 execpath = None
3861 if cwd and os.path.exists(pjoin(cwd, exe)):
3862 execpath = pjoin(cwd, exe)
3863 elif not cwd and os.path.exists(exe):
3864 execpath = exe
3865 else:
3866 raise aMCatNLOError('Cannot find executable %s in %s' \
3867 % (exe, os.getcwd()))
3868
3869 if self.cluster_mode == 1 and not os.access(execpath, os.X_OK):
3870 subprocess.call(['chmod', '+x', exe], cwd=cwd)
3871
3872 if self.cluster_mode == 0:
3873
3874 misc.call(['./'+exe] + args, cwd=cwd)
3875 self.ijob += 1
3876 self.update_status((max([self.njobs - self.ijob - 1, 0]),
3877 min([1, self.njobs - self.ijob]),
3878 self.ijob, run_type), level='parton')
3879
3880
3881 elif 'reweight' in exe:
3882
3883
3884 input_files, output_files = [], []
3885 pdfinput = self.get_pdf_input_filename()
3886 if os.path.exists(pdfinput):
3887 input_files.append(pdfinput)
3888 input_files.append(pjoin(os.path.dirname(exe), os.path.pardir, 'reweight_xsec_events'))
3889 input_files.append(pjoin(cwd, os.path.pardir, 'leshouche_info.dat'))
3890 input_files.append(args[0])
3891 output_files.append('%s.rwgt' % os.path.basename(args[0]))
3892 output_files.append('reweight_xsec_events.output')
3893 output_files.append('scale_pdf_dependence.dat')
3894
3895 return self.cluster.submit2(exe, args, cwd=cwd,
3896 input_files=input_files, output_files=output_files,
3897 required_output=output_files)
3898
3899 elif 'ajob' in exe:
3900
3901
3902 if type(args[0]) == str:
3903 input_files, output_files, required_output, args = self.getIO_ajob(exe,cwd,args)
3904
3905 self.cluster.submit2(exe, args, cwd=cwd,
3906 input_files=input_files, output_files=output_files,
3907 required_output=required_output)
3908
3909
3910
3911
3912
3913
3914 elif 'shower' in exe:
3915
3916
3917
3918 input_files, output_files = [], []
3919 shower = args[0]
3920
3921 if shower == 'PYTHIA8':
3922 input_files.append(pjoin(cwd, 'Pythia8.exe'))
3923 input_files.append(pjoin(cwd, 'Pythia8.cmd'))
3924 if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
3925 input_files.append(pjoin(cwd, 'config.sh'))
3926 input_files.append(pjoin(self.options['pythia8_path'], 'xmldoc'))
3927 else:
3928 input_files.append(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'))
3929 else:
3930 input_files.append(pjoin(cwd, 'MCATNLO_%s_EXE' % shower))
3931 input_files.append(pjoin(cwd, 'MCATNLO_%s_input' % shower))
3932 if shower == 'HERWIGPP':
3933 input_files.append(pjoin(cwd, 'Herwig++'))
3934 input_files.append(pjoin(cwd, 'HepMCFortran.so'))
3935 if len(args) == 3:
3936 if os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')):
3937 input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz'))
3938 elif os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')):
3939 input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe'))
3940 else:
3941 raise aMCatNLOError, 'Event file not present in %s' % \
3942 pjoin(self.me_dir, 'Events', self.run_name)
3943 else:
3944 input_files.append(pjoin(cwd, 'events_%s.lhe' % args[3]))
3945
3946 if len(args) == 3:
3947 output_files.append('mcatnlo_run.log')
3948 else:
3949 output_files.append('mcatnlo_run_%s.log' % args[3])
3950 if args[1] == 'HEP':
3951 if len(args) == 3:
3952 fname = 'events'
3953 else:
3954 fname = 'events_%s' % args[3]
3955 if shower in ['PYTHIA8', 'HERWIGPP']:
3956 output_files.append(fname + '.hepmc.gz')
3957 else:
3958 output_files.append(fname + '.hep.gz')
3959 elif args[1] == 'TOP' or args[1] == 'HWU':
3960 if len(args) == 3:
3961 fname = 'histfile'
3962 else:
3963 fname = 'histfile_%s' % args[3]
3964 output_files.append(fname + '.tar')
3965 else:
3966 raise aMCatNLOError, 'Not a valid output argument for shower job : %d' % args[1]
3967
3968 self.cluster.submit2(exe, args, cwd=cwd,
3969 input_files=input_files, output_files=output_files)
3970
3971 else:
3972 return self.cluster.submit(exe, args, cwd=cwd)
3973
3975
3976
3977
3978 output_files = []
3979 required_output = []
3980 input_files = [pjoin(self.me_dir, 'SubProcesses', 'randinit'),
3981 pjoin(cwd, 'symfact.dat'),
3982 pjoin(cwd, 'iproc.dat'),
3983 pjoin(cwd, 'initial_states_map.dat'),
3984 pjoin(cwd, 'configs_and_props_info.dat'),
3985 pjoin(cwd, 'leshouche_info.dat'),
3986 pjoin(cwd, 'FKS_params.dat')]
3987
3988
3989 if os.path.exists(pjoin(self.me_dir,'OLP_virtuals','gosam.rc')):
3990 input_files.append(pjoin(self.me_dir, 'Cards', 'param_card.dat'))
3991
3992 if os.path.exists(pjoin(cwd,'nevents.tar')):
3993 input_files.append(pjoin(cwd,'nevents.tar'))
3994
3995 if os.path.exists(pjoin(self.me_dir,'SubProcesses','OLE_order.olc')):
3996 input_files.append(pjoin(cwd, 'OLE_order.olc'))
3997
3998
3999 if os.path.exists(pjoin(cwd,'MadLoop5_resources.tar.gz')) and \
4000 cluster.need_transfer(self.options):
4001 input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz'))
4002 elif os.path.exists(pjoin(cwd,'MadLoop5_resources')) and \
4003 cluster.need_transfer(self.options):
4004 tf=tarfile.open(pjoin(cwd,'MadLoop5_resources.tar.gz'),'w:gz',
4005 dereference=True)
4006 tf.add(pjoin(cwd,'MadLoop5_resources'),arcname='MadLoop5_resources')
4007 tf.close()
4008 input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz'))
4009
4010 if args[1] == 'born' or args[1] == 'all':
4011
4012 input_files.append(pjoin(cwd, 'madevent_mintFO'))
4013 if args[2] == '0':
4014 current = '%s_G%s' % (args[1],args[0])
4015 else:
4016 current = '%s_G%s_%s' % (args[1],args[0],args[2])
4017 if os.path.exists(pjoin(cwd,current)):
4018 input_files.append(pjoin(cwd, current))
4019 output_files.append(current)
4020
4021 required_output.append('%s/results.dat' % current)
4022 required_output.append('%s/res_%s.dat' % (current,args[3]))
4023 required_output.append('%s/log_MINT%s.txt' % (current,args[3]))
4024 required_output.append('%s/mint_grids' % current)
4025 required_output.append('%s/grid.MC_integer' % current)
4026 if args[3] != '0':
4027 required_output.append('%s/scale_pdf_dependence.dat' % current)
4028
4029 elif args[1] == 'F' or args[1] == 'B':
4030
4031 input_files.append(pjoin(cwd, 'madevent_mintMC'))
4032
4033 if args[2] == '0':
4034 current = 'G%s%s' % (args[1],args[0])
4035 else:
4036 current = 'G%s%s_%s' % (args[1],args[0],args[2])
4037 if os.path.exists(pjoin(cwd,current)):
4038 input_files.append(pjoin(cwd, current))
4039 output_files.append(current)
4040 if args[2] > '0':
4041
4042 output_files.append('G%s%s_%s' % (args[1], args[0], args[2]))
4043 required_output.append('G%s%s_%s/log_MINT%s.txt' % (args[1],args[0],args[2],args[3]))
4044
4045 else:
4046 required_output.append('%s/log_MINT%s.txt' % (current,args[3]))
4047 if args[3] in ['0','1']:
4048 required_output.append('%s/results.dat' % current)
4049 if args[3] == '1':
4050 output_files.append('%s/results.dat' % current)
4051
4052 else:
4053 raise aMCatNLOError, 'not valid arguments: %s' %(', '.join(args))
4054
4055
4056 pdfinput = self.get_pdf_input_filename()
4057 if os.path.exists(pdfinput):
4058 input_files.append(pdfinput)
4059 return input_files, output_files, required_output, args
4060
4061
4062 - def compile(self, mode, options):
4063 """compiles aMC@NLO to compute either NLO or NLO matched to shower, as
4064 specified in mode"""
4065
4066 os.mkdir(pjoin(self.me_dir, 'Events', self.run_name))
4067
4068 self.banner.write(pjoin(self.me_dir, 'Events', self.run_name,
4069 '%s_%s_banner.txt' % (self.run_name, self.run_tag)))
4070
4071 self.get_characteristics(pjoin(self.me_dir,
4072 'SubProcesses', 'proc_characteristics'))
4073
4074
4075 amcatnlo_log = pjoin(self.me_dir, 'compile_amcatnlo.log')
4076 madloop_log = pjoin(self.me_dir, 'compile_madloop.log')
4077 reweight_log = pjoin(self.me_dir, 'compile_reweight.log')
4078 test_log = pjoin(self.me_dir, 'test.log')
4079
4080
4081 self.make_opts_var = {}
4082 if self.proc_characteristics['has_loops'] and \
4083 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')):
4084 self.make_opts_var['madloop'] = 'true'
4085
4086 self.update_status('Compiling the code', level=None, update_results=True)
4087
4088 libdir = pjoin(self.me_dir, 'lib')
4089 sourcedir = pjoin(self.me_dir, 'Source')
4090
4091
4092 files.rm([amcatnlo_log, madloop_log, reweight_log, test_log])
4093
4094 if '+' in mode:
4095 mode = mode.split('+')[0]
4096 if mode in ['NLO', 'LO']:
4097 exe = 'madevent_mintFO'
4098 tests = ['test_ME']
4099 self.analyse_card.write_card(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'))
4100 elif mode in ['aMC@NLO', 'aMC@LO','noshower','noshowerLO']:
4101 exe = 'madevent_mintMC'
4102 tests = ['test_ME', 'test_MC']
4103
4104 with open(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'),'w') as fsock:
4105 fsock.write('FO_ANALYSE=analysis_dummy.o dbook.o open_output_files_dummy.o HwU_dummy.o\n')
4106
4107
4108 p_dirs = [d for d in \
4109 open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d]
4110
4111 self.do_treatcards('', amcatnlo=True)
4112
4113
4114 if all([os.path.exists(pjoin(self.me_dir, 'SubProcesses', p_dir, exe)) \
4115 for p_dir in p_dirs]) and options['nocompile']:
4116 return
4117
4118
4119 if os.path.exists(pjoin(libdir, 'PDFsets')):
4120 files.rm(pjoin(libdir, 'PDFsets'))
4121
4122
4123 if self.run_card['pdlabel'] == 'lhapdf' and \
4124 (self.banner.get_detail('run_card', 'lpp1') != 0 or \
4125 self.banner.get_detail('run_card', 'lpp2') != 0):
4126
4127 self.link_lhapdf(libdir, [pjoin('SubProcesses', p) for p in p_dirs])
4128 pdfsetsdir = self.get_lhapdf_pdfsetsdir()
4129 lhaid_list = self.run_card['lhaid']
4130 self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
4131
4132 else:
4133 if self.run_card['lpp1'] == 1 == self.run_card['lpp2']:
4134 logger.info('Using built-in libraries for PDFs')
4135 if self.run_card['lpp1'] == 0 == self.run_card['lpp2']:
4136 logger.info('Lepton-Lepton collision: Ignoring \'pdlabel\' and \'lhaid\' in the run_card.')
4137 self.make_opts_var['lhapdf'] = ""
4138
4139
4140 if self.run_card['iappl'] != 0:
4141 self.make_opts_var['applgrid'] = 'True'
4142
4143 for code in ['applgrid','amcfast']:
4144 try:
4145 p = subprocess.Popen([self.options[code], '--version'], \
4146 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
4147 except OSError:
4148 raise aMCatNLOError(('No valid %s installation found. \n' + \
4149 'Please set the path to %s-config by using \n' + \
4150 'MG5_aMC> set <absolute-path-to-%s>/bin/%s-config \n') % (code,code,code,code))
4151 else:
4152 output, _ = p.communicate()
4153 if code is 'applgrid' and output < '1.4.63':
4154 raise aMCatNLOError('Version of APPLgrid is too old. Use 1.4.69 or later.'\
4155 +' You are using %s',output)
4156 if code is 'amcfast' and output < '1.1.1':
4157 raise aMCatNLOError('Version of aMCfast is too old. Use 1.1.1 or later.'\
4158 +' You are using %s',output)
4159
4160
4161 appllibs=" APPLLIBS=$(shell %s --ldflags) $(shell %s --ldcflags) \n" \
4162 % (self.options['amcfast'],self.options['applgrid'])
4163 text=open(pjoin(self.me_dir,'Source','make_opts'),'r').readlines()
4164 text_out=[]
4165 for line in text:
4166 if line.strip().startswith('APPLLIBS=$'):
4167 line=appllibs
4168 text_out.append(line)
4169 with open(pjoin(self.me_dir,'Source','make_opts'),'w') as fsock:
4170 fsock.writelines(text_out)
4171 else:
4172 self.make_opts_var['applgrid'] = ""
4173
4174 if 'fastjet' in self.options.keys() and self.options['fastjet']:
4175 self.make_opts_var['fastjet_config'] = self.options['fastjet']
4176
4177
4178 self.update_make_opts()
4179
4180
4181 self.update_status('Compiling source...', level=None)
4182 misc.compile(['clean4pdf'], cwd = sourcedir)
4183 misc.compile(cwd = sourcedir)
4184 if os.path.exists(pjoin(libdir, 'libdhelas.a')) \
4185 and os.path.exists(pjoin(libdir, 'libgeneric.a')) \
4186 and os.path.exists(pjoin(libdir, 'libmodel.a')) \
4187 and os.path.exists(pjoin(libdir, 'libpdf.a')):
4188 logger.info(' ...done, continuing with P* directories')
4189 else:
4190 raise aMCatNLOError('Compilation failed')
4191
4192
4193 MCatNLO_libdir = pjoin(self.me_dir, 'MCatNLO', 'lib')
4194 if not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libstdhep.a'))) or \
4195 not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libFmcfio.a'))):
4196 if os.path.exists(pjoin(sourcedir,'StdHEP')):
4197 logger.info('Compiling StdHEP (can take a couple of minutes) ...')
4198 misc.compile(['StdHEP'], cwd = sourcedir)
4199 logger.info(' ...done.')
4200 else:
4201 raise aMCatNLOError('Could not compile StdHEP because its'+\
4202 ' source directory could not be found in the SOURCE folder.\n'+\
4203 " Check the MG5_aMC option 'output_dependencies.'")
4204
4205
4206 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \
4207 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))):
4208 if os.path.exists(pjoin(sourcedir,'CutTools')):
4209 logger.info('Compiling CutTools (can take a couple of minutes) ...')
4210 misc.compile(['CutTools'], cwd = sourcedir)
4211 logger.info(' ...done.')
4212 else:
4213 raise aMCatNLOError('Could not compile CutTools because its'+\
4214 ' source directory could not be found in the SOURCE folder.\n'+\
4215 " Check the MG5_aMC option 'output_dependencies.'")
4216 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \
4217 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))):
4218 raise aMCatNLOError('CutTools compilation failed.')
4219
4220
4221
4222 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin(
4223 libdir, 'libcts.a')))),'compiler_version.log')
4224 if os.path.exists(compiler_log_path):
4225 compiler_version_used = open(compiler_log_path,'r').read()
4226 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\
4227 pjoin(sourcedir,'make_opts')))) in compiler_version_used:
4228 if os.path.exists(pjoin(sourcedir,'CutTools')):
4229 logger.info('CutTools was compiled with a different fortran'+\
4230 ' compiler. Re-compiling it now...')
4231 misc.compile(['cleanCT'], cwd = sourcedir)
4232 misc.compile(['CutTools'], cwd = sourcedir)
4233 logger.info(' ...done.')
4234 else:
4235 raise aMCatNLOError("CutTools installation in %s"\
4236 %os.path.realpath(pjoin(libdir, 'libcts.a'))+\
4237 " seems to have been compiled with a different compiler than"+\
4238 " the one specified in MG5_aMC. Please recompile CutTools.")
4239
4240
4241 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libiregi.a'))) \
4242 and os.path.exists(pjoin(sourcedir,'IREGI')):
4243 logger.info('Compiling IREGI (can take a couple of minutes) ...')
4244 misc.compile(['IREGI'], cwd = sourcedir)
4245 logger.info(' ...done.')
4246
4247 if os.path.exists(pjoin(libdir, 'libiregi.a')):
4248
4249
4250 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin(
4251 libdir, 'libiregi.a')))),'compiler_version.log')
4252 if os.path.exists(compiler_log_path):
4253 compiler_version_used = open(compiler_log_path,'r').read()
4254 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\
4255 pjoin(sourcedir,'make_opts')))) in compiler_version_used:
4256 if os.path.exists(pjoin(sourcedir,'IREGI')):
4257 logger.info('IREGI was compiled with a different fortran'+\
4258 ' compiler. Re-compiling it now...')
4259 misc.compile(['cleanIR'], cwd = sourcedir)
4260 misc.compile(['IREGI'], cwd = sourcedir)
4261 logger.info(' ...done.')
4262 else:
4263 raise aMCatNLOError("IREGI installation in %s"\
4264 %os.path.realpath(pjoin(libdir, 'libiregi.a'))+\
4265 " seems to have been compiled with a different compiler than"+\
4266 " the one specified in MG5_aMC. Please recompile IREGI.")
4267
4268
4269 if self.proc_characteristics['has_loops'] and \
4270 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')):
4271 if mode in ['NLO', 'aMC@NLO', 'noshower']:
4272 tests.append('check_poles')
4273
4274
4275 self.update_status('Compiling directories...', level=None)
4276
4277 for test in tests:
4278 self.write_test_input(test)
4279
4280 try:
4281 import multiprocessing
4282 if not self.nb_core:
4283 try:
4284 self.nb_core = int(self.options['nb_core'])
4285 except TypeError:
4286 self.nb_core = multiprocessing.cpu_count()
4287 except ImportError:
4288 self.nb_core = 1
4289
4290 compile_options = copy.copy(self.options)
4291 compile_options['nb_core'] = self.nb_core
4292 compile_cluster = cluster.MultiCore(**compile_options)
4293 logger.info('Compiling on %d cores' % self.nb_core)
4294
4295 update_status = lambda i, r, f: self.donothing(i,r,f)
4296 for p_dir in p_dirs:
4297 compile_cluster.submit(prog = compile_dir,
4298 argument = [self.me_dir, p_dir, mode, options,
4299 tests, exe, self.options['run_mode']])
4300 try:
4301 compile_cluster.wait(self.me_dir, update_status)
4302 except Exception, error:
4303 logger.warning("Fail to compile the Subprocesses")
4304 if __debug__:
4305 raise
4306 compile_cluster.remove()
4307 self.do_quit('')
4308
4309 logger.info('Checking test output:')
4310 for p_dir in p_dirs:
4311 logger.info(p_dir)
4312 for test in tests:
4313 logger.info(' Result for %s:' % test)
4314
4315 this_dir = pjoin(self.me_dir, 'SubProcesses', p_dir)
4316
4317 self.check_tests(test, this_dir)
4318
4319
4322
4323
4325 """just call the correct parser for the test log.
4326 Skip check_poles for LOonly folders"""
4327 if test in ['test_ME', 'test_MC']:
4328 return self.parse_test_mx_log(pjoin(dir, '%s.log' % test))
4329 elif test == 'check_poles' and not os.path.exists(pjoin(dir,'parton_lum_0.f')):
4330 return self.parse_check_poles_log(pjoin(dir, '%s.log' % test))
4331
4332
4334 """read and parse the test_ME/MC.log file"""
4335 content = open(log).read()
4336 if 'FAILED' in content:
4337 logger.info('Output of the failing test:\n'+content[:-1],'$MG:color:BLACK')
4338 raise aMCatNLOError('Some tests failed, run cannot continue.\n' + \
4339 'Please check that widths of final state particles (e.g. top) have been' + \
4340 ' set to 0 in the param_card.dat.')
4341 else:
4342 lines = [l for l in content.split('\n') if 'PASSED' in l]
4343 logger.info(' Passed.')
4344 logger.debug('\n'+'\n'.join(lines))
4345
4346
4348 """reads and parse the check_poles.log file"""
4349 content = open(log).read()
4350 npass = 0
4351 nfail = 0
4352 for line in content.split('\n'):
4353 if 'PASSED' in line:
4354 npass +=1
4355 tolerance = float(line.split()[1])
4356 if 'FAILED' in line:
4357 nfail +=1
4358 tolerance = float(line.split()[1])
4359
4360 if nfail + npass == 0:
4361 logger.warning('0 points have been tried')
4362 return
4363
4364 if float(nfail)/float(nfail+npass) > 0.1:
4365 raise aMCatNLOError('Poles do not cancel, run cannot continue')
4366 else:
4367 logger.info(' Poles successfully cancel for %d points over %d (tolerance=%2.1e)' \
4368 %(npass, nfail+npass, tolerance))
4369
4370
4391
4392
4393
4394
4396 """ return the model name """
4397 if hasattr(self, 'model_name'):
4398 return self.model_name
4399
4400 model = 'sm'
4401 proc = []
4402 for line in open(os.path.join(self.me_dir,'Cards','proc_card_mg5.dat')):
4403 line = line.split('#')[0]
4404
4405 if line.startswith('import') and 'model' in line:
4406 model = line.split()[2]
4407 proc = []
4408 elif line.startswith('generate'):
4409 proc.append(line.split(None,1)[1])
4410 elif line.startswith('add process'):
4411 proc.append(line.split(None,2)[2])
4412
4413 self.model = model
4414 self.process = proc
4415 return model
4416
4417
4418
4419
4421 """Ask the question when launching generate_events/multi_run"""
4422
4423 if 'parton' not in options:
4424 options['parton'] = False
4425 if 'reweightonly' not in options:
4426 options['reweightonly'] = False
4427
4428
4429 void = 'NOT INSTALLED'
4430 switch_order = ['order', 'fixed_order', 'shower','madspin', 'reweight']
4431 switch_default = {'order': 'NLO', 'fixed_order': 'OFF', 'shower': void,
4432 'madspin': void,'reweight':'OFF'}
4433 if not switch:
4434 switch = switch_default
4435 else:
4436 switch.update(dict((k,value) for k,v in switch_default.items() if k not in switch))
4437 default_switch = ['ON', 'OFF']
4438
4439
4440 allowed_switch_value = {'order': ['LO', 'NLO'],
4441 'fixed_order': default_switch,
4442 'shower': default_switch,
4443 'madspin': default_switch,
4444 'reweight': default_switch}
4445
4446 description = {'order': 'Perturbative order of the calculation:',
4447 'fixed_order': 'Fixed order (no event generation and no MC@[N]LO matching):',
4448 'shower': 'Shower the generated events:',
4449 'madspin': 'Decay particles with the MadSpin module:',
4450 'reweight': 'Add weights to the events based on changing model parameters:'}
4451
4452 force_switch = {('shower', 'ON'): {'fixed_order': 'OFF'},
4453 ('madspin', 'ON'): {'fixed_order':'OFF'},
4454 ('reweight', 'ON'): {'fixed_order':'OFF'},
4455 ('fixed_order', 'ON'): {'shower': 'OFF', 'madspin': 'OFF', 'reweight':'OFF'}
4456 }
4457 special_values = ['LO', 'NLO', 'aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']
4458
4459 assign_switch = lambda key, value: switch.__setitem__(key, value if switch[key] != void else void )
4460
4461 if self.proc_characteristics['ninitial'] == 1:
4462 switch['fixed_order'] = 'ON'
4463 switch['shower'] = 'Not available for decay'
4464 switch['madspin'] = 'Not available for decay'
4465 switch['reweight'] = 'Not available for decay'
4466 allowed_switch_value['fixed_order'] = ['ON']
4467 allowed_switch_value['shower'] = ['OFF']
4468 allowed_switch_value['madspin'] = ['OFF']
4469 allowed_switch_value['reweight'] = ['OFF']
4470 available_mode = ['0','1']
4471 special_values = ['LO', 'NLO']
4472 else:
4473
4474 available_mode = ['0', '1', '2','3']
4475
4476 if mode == 'auto':
4477 mode = None
4478 if not mode and (options['parton'] or options['reweightonly']):
4479 mode = 'noshower'
4480
4481
4482 if '3' in available_mode:
4483 if os.path.exists(pjoin(self.me_dir, 'Cards', 'shower_card.dat')):
4484 switch['shower'] = 'ON'
4485 else:
4486 switch['shower'] = 'OFF'
4487
4488 if (not aMCatNLO or self.options['mg5_path']) and '3' in available_mode:
4489 available_mode.append('4')
4490 if os.path.exists(pjoin(self.me_dir,'Cards','madspin_card.dat')):
4491 switch['madspin'] = 'ON'
4492 else:
4493 switch['madspin'] = 'OFF'
4494 if misc.has_f2py() or self.options['f2py_compiler']:
4495 available_mode.append('5')
4496 if os.path.exists(pjoin(self.me_dir,'Cards','reweight_card.dat')):
4497 switch['reweight'] = 'ON'
4498 else:
4499 switch['reweight'] = 'OFF'
4500 else:
4501 switch['reweight'] = 'Not available (requires NumPy)'
4502
4503 if 'do_reweight' in options and options['do_reweight'] and '3' in available_mode:
4504 if switch['reweight'] == "OFF":
4505 switch['reweight'] = "ON"
4506 elif switch['reweight'] != "ON":
4507 logger.critical("Cannot run REWEIGHT: %s" % switch['reweight'])
4508 if 'do_madspin' in options and options['do_madspin']:
4509 if switch['madspin'] == "OFF":
4510 switch['madspin'] = 'ON'
4511 elif switch['madspin'] != "ON":
4512 logger.critical("Cannot run MadSpin module: %s" % switch['reweight'])
4513
4514 answers = list(available_mode) + ['auto', 'done']
4515 alias = {}
4516 for id, key in enumerate(switch_order):
4517 if switch[key] != void and switch[key] in allowed_switch_value[key] and \
4518 len(allowed_switch_value[key]) >1:
4519 answers += ['%s=%s' % (key, s) for s in allowed_switch_value[key]]
4520
4521 alias.update(dict(('%s=%s' % (key, s.lower()), '%s=%s' % (key, s))
4522 for s in allowed_switch_value[key]))
4523 answers += special_values
4524
4525 def create_question(switch):
4526 switch_format = " %i %-61s %12s=%s\n"
4527 question = "The following switches determine which operations are executed:\n"
4528 for id, key in enumerate(switch_order):
4529 question += switch_format % (id+1, description[key], key, switch[key])
4530 question += ' Either type the switch number (1 to %s) to change its default setting,\n' % (id+1)
4531 question += ' or set any switch explicitly (e.g. type \'order=LO\' at the prompt)\n'
4532 question += ' Type \'0\', \'auto\', \'done\' or just press enter when you are done.\n'
4533 return question
4534
4535
4536 def modify_switch(mode, answer, switch):
4537 if '=' in answer:
4538 key, status = answer.split('=')
4539 switch[key] = status
4540 if (key, status) in force_switch:
4541 for key2, status2 in force_switch[(key, status)].items():
4542 if switch[key2] not in [status2, void]:
4543 logger.info('For coherence \'%s\' is set to \'%s\''
4544 % (key2, status2), '$MG:color:BLACK')
4545 switch[key2] = status2
4546 elif answer in ['0', 'auto', 'done']:
4547 return
4548 elif answer in special_values:
4549 logger.info('Enter mode value: %s. Go to the related mode' % answer, '$MG:color:BLACK')
4550
4551
4552 if answer == 'LO':
4553 switch['order'] = 'LO'
4554 switch['fixed_order'] = 'ON'
4555 assign_switch('shower', 'OFF')
4556 elif answer == 'NLO':
4557 switch['order'] = 'NLO'
4558 switch['fixed_order'] = 'ON'
4559 assign_switch('shower', 'OFF')
4560 elif answer == 'aMC@NLO':
4561 switch['order'] = 'NLO'
4562 switch['fixed_order'] = 'OFF'
4563 assign_switch('shower', 'ON')
4564 elif answer == 'aMC@LO':
4565 switch['order'] = 'LO'
4566 switch['fixed_order'] = 'OFF'
4567 assign_switch('shower', 'ON')
4568 elif answer == 'noshower':
4569 switch['order'] = 'NLO'
4570 switch['fixed_order'] = 'OFF'
4571 assign_switch('shower', 'OFF')
4572 elif answer == 'noshowerLO':
4573 switch['order'] = 'LO'
4574 switch['fixed_order'] = 'OFF'
4575 assign_switch('shower', 'OFF')
4576 if mode:
4577 return
4578 return switch
4579
4580 modify_switch(mode, self.last_mode, switch)
4581 if switch['madspin'] == 'OFF' and os.path.exists(pjoin(self.me_dir,'Cards','madspin_card.dat')):
4582 assign_switch('madspin', 'ON')
4583
4584 if not self.force:
4585 answer = ''
4586 while answer not in ['0', 'done', 'auto', 'onlyshower']:
4587 question = create_question(switch)
4588 if mode:
4589 answer = mode
4590 else:
4591 answer = self.ask(question, '0', answers, alias=alias)
4592 if answer.isdigit() and answer != '0':
4593 key = switch_order[int(answer) - 1]
4594 opt1 = allowed_switch_value[key][0]
4595 opt2 = allowed_switch_value[key][1]
4596 answer = '%s=%s' % (key, opt1 if switch[key] == opt2 else opt2)
4597
4598 if not modify_switch(mode, answer, switch):
4599 break
4600
4601
4602 if not mode or mode == 'auto':
4603 if switch['order'] == 'LO':
4604 if switch['shower'] == 'ON':
4605 mode = 'aMC@LO'
4606 elif switch['fixed_order'] == 'ON':
4607 mode = 'LO'
4608 else:
4609 mode = 'noshowerLO'
4610 elif switch['order'] == 'NLO':
4611 if switch['shower'] == 'ON':
4612 mode = 'aMC@NLO'
4613 elif switch['fixed_order'] == 'ON':
4614 mode = 'NLO'
4615 else:
4616 mode = 'noshower'
4617 logger.info('will run in mode: %s' % mode)
4618
4619 if mode == 'noshower':
4620 logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical.
4621 Please, shower the Les Houches events before using them for physics analyses.""")
4622
4623
4624
4625 cards = ['param_card.dat', 'run_card.dat']
4626 ignore = []
4627 if mode in ['LO', 'NLO']:
4628 options['parton'] = True
4629 ignore = ['shower_card.dat', 'madspin_card.dat']
4630 cards.append('FO_analyse_card.dat')
4631 else:
4632 if switch['madspin'] == 'ON':
4633 cards.append('madspin_card.dat')
4634 if switch['reweight'] == 'ON':
4635 cards.append('reweight_card.dat')
4636 if 'aMC@' in mode:
4637 cards.append('shower_card.dat')
4638 if mode == 'onlyshower':
4639 cards = ['shower_card.dat']
4640 if options['reweightonly']:
4641 cards = ['run_card.dat']
4642
4643 self.keep_cards(cards, ignore)
4644
4645 if mode =='onlyshower':
4646 cards = ['shower_card.dat']
4647
4648
4649
4650 first_cmd = []
4651
4652 if not options['force'] and not self.force:
4653 self.ask_edit_cards(cards, plot=False, first_cmd=first_cmd)
4654
4655
4656 self.banner = banner_mod.Banner()
4657
4658
4659 for card in cards:
4660 self.banner.add(pjoin(self.me_dir, 'Cards', card))
4661
4662 run_settings = '\n'.join(['%s = %s' % (k, v) for (k, v) in switch.items()])
4663 self.banner.add_text('run_settings', run_settings)
4664
4665 if not mode =='onlyshower':
4666 self.run_card = self.banner.charge_card('run_card')
4667 self.run_tag = self.run_card['run_tag']
4668
4669 if not hasattr(self, 'run_name') or not self.run_name:
4670 self.run_name = self.find_available_run_name(self.me_dir)
4671
4672 if self.run_name.startswith('run_'):
4673 if mode in ['LO','aMC@LO','noshowerLO']:
4674 self.run_name += '_LO'
4675 self.set_run_name(self.run_name, self.run_tag, 'parton')
4676 if self.run_card['ickkw'] == 3 and mode in ['LO', 'aMC@LO', 'noshowerLO']:
4677 raise self.InvalidCmd("""FxFx merging (ickkw=3) not allowed at LO""")
4678 elif self.run_card['ickkw'] == 3 and mode in ['aMC@NLO', 'noshower']:
4679 logger.warning("""You are running with FxFx merging enabled. To be able to merge
4680 samples of various multiplicities without double counting, you
4681 have to remove some events after showering 'by hand'. Please
4682 read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")
4683 if self.run_card['parton_shower'].upper() == 'PYTHIA6Q':
4684 raise self.InvalidCmd("""FxFx merging does not work with Q-squared ordered showers.""")
4685 elif self.run_card['parton_shower'].upper() != 'HERWIG6' and self.run_card['parton_shower'].upper() != 'PYTHIA8':
4686 question="FxFx merging not tested for %s shower. Do you want to continue?\n" % self.run_card['parton_shower'] + \
4687 "Type \'n\' to stop or \'y\' to continue"
4688 answers = ['n','y']
4689 answer = self.ask(question, 'n', answers, alias=alias)
4690 if answer == 'n':
4691 error = '''Stop opertation'''
4692 self.ask_run_configuration(mode, options)
4693
4694 elif self.run_card['ickkw'] == -1 and mode in ['aMC@NLO', 'noshower']:
4695
4696 raise self.InvalidCmd("""NNLL+NLO jet veto runs (ickkw=-1) only possible for fNLO or LO.""")
4697 if 'aMC@' in mode or mode == 'onlyshower':
4698 self.shower_card = self.banner.charge_card('shower_card')
4699
4700 elif mode in ['LO', 'NLO']:
4701 analyse_card_path = pjoin(self.me_dir, 'Cards','FO_analyse_card.dat')
4702 self.analyse_card = self.banner.charge_card('FO_analyse_card')
4703
4704 return mode
4705
4706
4707
4708
4709
    """The command line processor of MadGraph"""
    # NOTE(review): the `class ...:` statement that should sit directly above
    # this docstring is missing from this copy of the file (original line
    # 4710 was dropped) -- restore it from upstream. Everything below is
    # class-level data: one optparse usage string + parser per user command.
    # The parser attributes are shared by all instances; the corresponding
    # do_<command> methods parse their argument line with them.

    # --- 'compile' command --------------------------------------------------
    _compile_usage = "compile [MODE] [options]\n" + \
                "-- compiles aMC@NLO \n" + \
                " MODE can be either FO, for fixed-order computations, \n" + \
                " or MC for matching with parton-shower monte-carlos. \n" + \
                " (if omitted, it is set to MC)\n"
    _compile_parser = misc.OptionParser(usage=_compile_usage)
    _compile_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the launch, without editing them")

    # --- 'launch' command ---------------------------------------------------
    # Full run: cross section and, in aMC@LO/aMC@NLO modes, event generation.
    _launch_usage = "launch [MODE] [options]\n" + \
                "-- execute aMC@NLO \n" + \
                " MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
                " If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
                " computation of the total cross section and the filling of parton-level histograms \n" + \
                " specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
                " If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
                " event file is generated which will be showered with the MonteCarlo specified \n" + \
                " in the run_card.dat\n"

    _launch_parser = misc.OptionParser(usage=_launch_usage)
    _launch_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the launch, without editing them")
    _launch_parser.add_option("-c", "--cluster", default=False, action='store_true',
                            help="Submit the jobs on the cluster")
    _launch_parser.add_option("-m", "--multicore", default=False, action='store_true',
                            help="Submit the jobs on multicore mode")
    _launch_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                            help="Skip compilation. Ignored if no executable is found")
    _launch_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
                            help="Skip integration and event generation, just run reweight on the" + \
                                 " latest generated event files (see list in SubProcesses/nevents_unweighted)")
    _launch_parser.add_option("-p", "--parton", default=False, action='store_true',
                            help="Stop the run after the parton level file generation (you need " + \
                                 "to shower the file in order to get physical results)")
    _launch_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                            help="Skip grid set up, just generate events starting from " + \
                                 "the last available results")
    _launch_parser.add_option("-n", "--name", default=False, dest='run_name',
                            help="Provide a name to the run")
    _launch_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
                            help="For use with APPLgrid only: start from existing grids")
    _launch_parser.add_option("-R", "--reweight", default=False, dest='do_reweight', action='store_true',
                            help="Run the reweight module (reweighting by different model parameters)")
    _launch_parser.add_option("-M", "--madspin", default=False, dest='do_madspin', action='store_true',
                            help="Run the madspin package")

    # --- 'generate_events' command -------------------------------------------
    # Same usage text as 'launch' but a reduced option set (no APPLgrid,
    # reweight or madspin switches).
    _generate_events_usage = "generate_events [MODE] [options]\n" + \
                "-- execute aMC@NLO \n" + \
                " MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
                " If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
                " computation of the total cross section and the filling of parton-level histograms \n" + \
                " specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
                " If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
                " event file is generated which will be showered with the MonteCarlo specified \n" + \
                " in the run_card.dat\n"

    _generate_events_parser = misc.OptionParser(usage=_generate_events_usage)
    _generate_events_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the generate_events, without editing them")
    _generate_events_parser.add_option("-c", "--cluster", default=False, action='store_true',
                            help="Submit the jobs on the cluster")
    _generate_events_parser.add_option("-m", "--multicore", default=False, action='store_true',
                            help="Submit the jobs on multicore mode")
    _generate_events_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                            help="Skip compilation. Ignored if no executable is found")
    _generate_events_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
                            help="Skip integration and event generation, just run reweight on the" + \
                                 " latest generated event files (see list in SubProcesses/nevents_unweighted)")
    _generate_events_parser.add_option("-p", "--parton", default=False, action='store_true',
                            help="Stop the run after the parton level file generation (you need " + \
                                 "to shower the file in order to get physical results)")
    _generate_events_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                            help="Skip grid set up, just generate events starting from " + \
                                 "the last available results")
    _generate_events_parser.add_option("-n", "--name", default=False, dest='run_name',
                            help="Provide a name to the run")

    # --- 'calculate_xsect' command --------------------------------------------
    # Fixed-order cross section only (no event generation).
    _calculate_xsect_usage = "calculate_xsect [ORDER] [options]\n" + \
                "-- calculate cross section up to ORDER.\n" + \
                " ORDER can be either LO or NLO (if omitted, it is set to NLO). \n"

    _calculate_xsect_parser = misc.OptionParser(usage=_calculate_xsect_usage)
    _calculate_xsect_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the launch, without editing them")
    _calculate_xsect_parser.add_option("-c", "--cluster", default=False, action='store_true',
                            help="Submit the jobs on the cluster")
    _calculate_xsect_parser.add_option("-m", "--multicore", default=False, action='store_true',
                            help="Submit the jobs on multicore mode")
    _calculate_xsect_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                            help="Skip compilation. Ignored if no executable is found")
    _calculate_xsect_parser.add_option("-n", "--name", default=False, dest='run_name',
                            help="Provide a name to the run")
    _calculate_xsect_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
                            help="For use with APPLgrid only: start from existing grids")
    _calculate_xsect_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                            help="Skip grid set up, just generate events starting from " + \
                                 "the last available results")

    # --- 'shower' command ------------------------------------------------------
    # Shower/hadronize an already-generated parton-level event file; run
    # parameters are read back from the event-file header.
    _shower_usage = 'shower run_name [options]\n' + \
                '-- do shower/hadronization on parton-level file generated for run run_name\n' + \
                ' all the information (e.g. number of events, MonteCarlo, ...\n' + \
                ' are directly read from the header of the event file\n'
    _shower_parser = misc.OptionParser(usage=_shower_usage)
    _shower_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the shower_card present in the directory for the launch, without editing")
4822
if '__main__' == __name__:
    # Script entry point: when this file is executed directly (rather than
    # imported), parse the command line and dispatch to the aMC@NLO command
    # interpreter in batch mode.

    # This generation of MadGraph5_aMC@NLO is Python 2 only: require
    # 2.6 <= python < 3.  (The misspelling 'upgrate' is in the shipped
    # message; left untouched.)
    import sys
    if not sys.version_info[0] == 2 or sys.version_info[1] < 6:
        sys.exit('MadGraph/MadEvent 5 works only with python 2.6 or later (but not python 3.X).\n'+\
                 'Please upgrate your version of python.')

    import os
    import optparse

    # Make the process-directory root importable so that the 'internal'
    # package used below can be found.
    root_path = os.path.dirname(os.path.dirname(os.path.realpath( __file__ )))
    sys.path.insert(0, root_path)

    # NOTE(review): the MyOptParser class definition is truncated in this
    # copy of the file -- only the 'error' method header survives on the next
    # line (original lines 4838-4841 are missing).  Presumably it subclasses
    # optparse.OptionParser and raises MyOptParser.InvalidOption instead of
    # exiting on a bad option; restore the class from upstream.
    - def error(self, msg=''):

    usage = "usage: %prog [options] [FILE] "
    parser = MyOptParser(usage=usage)
    parser.add_option("-l", "--logging", default='INFO',
                      help="logging level (DEBUG|INFO|WARNING|ERROR|CRITICAL) [%default]")
    parser.add_option("","--web", action="store_true", default=False, dest='web', \
                     help='force toce to be in secure mode')
    parser.add_option("","--debug", action="store_true", default=False, dest='debug', \
                     help='force to launch debug mode')
    parser_error = ''
    done = False

    # The trailing command words may look like unknown options to optparse:
    # retry the parse on successively shorter prefixes of argv and glue the
    # stripped tail back onto the positional arguments once a prefix parses.
    # NOTE(review): there is no 'break' after done=True, so all shorter
    # prefixes are still tried and 'args' may be extended more than once --
    # looks intentional-by-accident; confirm against upstream before touching.
    for i in range(len(sys.argv)-1):
        try:
            (options, args) = parser.parse_args(sys.argv[1:len(sys.argv)-i])
            done = True
        except MyOptParser.InvalidOption, error:
            pass
        else:
            args += sys.argv[len(sys.argv)-i:]
    if not done:
        # Every prefix failed: parse once more just to surface the error.
        try:
            (options, args) = parser.parse_args()
        except MyOptParser.InvalidOption, error:
            print error
            sys.exit(2)

    # Empty string (not empty list) signals "interactive mode" below.
    if len(args) == 0:
        args = ''

    import subprocess
    import logging
    import logging.config

    # Adds ANSI colouring support to the logging module (import side effect).
    import internal.coloring_logging
    try:
        # In non-optimized (__debug__) runs, promote the default INFO to DEBUG.
        if __debug__ and options.logging == 'INFO':
            options.logging = 'DEBUG'
        # --logging accepts either a numeric level or a level name; the name
        # is resolved via eval('logging.NAME') -- safe only because the value
        # comes from the local command line, not untrusted input.
        if options.logging.isdigit():
            level = int(options.logging)
        else:
            level = eval('logging.' + options.logging)
        print os.path.join(root_path, 'internal', 'me5_logging.conf')
        logging.config.fileConfig(os.path.join(root_path, 'internal', 'me5_logging.conf'))
        logging.root.setLevel(level)
        logging.getLogger('madgraph').setLevel(level)
    except:
        # NOTE(review): 'raise' makes the following 'pass' unreachable, so any
        # logging-setup failure aborts startup; the dead 'pass' suggests the
        # original intent may have been to swallow the error instead.
        raise
        pass


    try:
        if args:
            # Batch mode: a command was given on the command line.
            # --web selects the non-shell (secure/web) interpreter variant.
            if '--web' in args:
                i = args.index('--web')
                args.pop(i)
                cmd_line = aMCatNLOCmd(force_run=True)
            else:
                cmd_line = aMCatNLOCmdShell(force_run=True)

            # Validate the first word against the interpreter's do_* methods
            # before running, so a typo gives a clear message.
            if not hasattr(cmd_line, 'do_%s' % args[0]):
                if parser_error:
                    print parser_error
                    print 'and %s can not be interpreted as a valid command.' % args[0]
                else:
                    print 'ERROR: %s not a valid command. Please retry' % args[0]
            else:
                cmd_line.use_rawinput = False
                cmd_line.run_cmd(' '.join(args))
                cmd_line.run_cmd('quit')

    except KeyboardInterrupt:
        print 'quit on KeyboardInterrupt'
        pass
4919