Diffstat (limited to 'Tools/performance_tests')
-rw-r--r--   Tools/performance_tests/cori.py                 |  5
-rw-r--r--   Tools/performance_tests/functions_perftest.py   | 10
-rw-r--r--   Tools/performance_tests/run_automated.py        | 37
3 files changed, 39 insertions, 13 deletions
diff --git a/Tools/performance_tests/cori.py b/Tools/performance_tests/cori.py
index 40dda319b..5fc5f5e06 100644
--- a/Tools/performance_tests/cori.py
+++ b/Tools/performance_tests/cori.py
@@ -7,8 +7,7 @@ def executable_name(compiler, architecture):
 
 def get_config_command(compiler, architecture):
     config_command = ''
-    config_command += 'module unload darshan;'
-    config_command += 'module load craype-hugepages4M;'
+    config_command += 'module unload darshan;'
     if architecture == 'knl':
         if compiler == 'intel':
             config_command += 'module unload PrgEnv-gnu;'
@@ -68,7 +67,7 @@ def process_analysis(cwd, compiler, architecture, n_node_list, start_date):
 
 # Calculate simulation time. Take 5 min + 5 min / simulation
 def time_min(nb_simulations):
-    return 5. + len(test_list)*5.
+    return 5. + nb_simulations*5.
 
 def get_batch_string(test_list, job_time_str, Cname, n_node):
     batch_string = ''
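
Note on the time_min fix above: the old body referenced the global test_list instead of its nb_simulations argument. A minimal sketch of how the corrected function is consumed (the input-file names are hypothetical; the H:MM:SS formatting mirrors run_batch_nnode in functions_perftest.py below):

    # Sketch only: assumes the corrected cori.time_min shown above.
    def time_min(nb_simulations):
        # 5 min of margin + 5 min per simulation in the batch job
        return 5. + nb_simulations*5.

    test_list = ['uniform_plasma', 'laser_injection', 'automated_test']  # hypothetical inputs
    job_time_min = time_min(len(test_list))      # 20.0 minutes for 3 simulations
    job_time_str = str(int(job_time_min/60)) + ':' + str(int(job_time_min%60)) + ':00'
    print(job_time_str)                          # -> 0:20:00
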
diff --git a/Tools/performance_tests/functions_perftest.py b/Tools/performance_tests/functions_perftest.py
index bdcacd21b..b8066984d 100644
--- a/Tools/performance_tests/functions_perftest.py
+++ b/Tools/performance_tests/functions_perftest.py
@@ -72,9 +72,13 @@ def run_batch(run_name, res_dir, bin_name, config_command, architecture='knl',\
     return 0
 
 def run_batch_nnode(test_list, res_dir, bin_name, config_command, machine, architecture='knl', Cname='knl', n_node=1, runtime_param_list=[]):
+    if machine == 'cori':
+        from cori import time_min, get_batch_string, get_run_string
+    if machine == 'summit':
+        from summit import time_min, get_batch_string, get_run_string
     # Clean res_dir
     if os.path.exists(res_dir):
-        shutil.rmtree(res_dir, ignore_errors=True)
+        shutil.rmtree(res_dir, ignore_errors=True)
     os.makedirs(res_dir)
     # Copy files to res_dir
     cwd = os.environ['AUTOMATED_PERF_TESTS'] + '/warpx/Tools/performance_tests/'
@@ -84,11 +88,11 @@ def run_batch_nnode(test_list, res_dir, bin_name, config_command, machine, archi
     # Calculate simulation time. Take 5 min + 2 min / simulation
     job_time_min = time_min(len(test_list))
     job_time_str = str(int(job_time_min/60)) + ':' + str(int(job_time_min%60)) + ':00'
-    batch_string = cori.get_batch_string()
+    batch_string = get_batch_string(test_list, job_time_str, Cname, n_node)
     for count, current_test in enumerate(test_list):
         shutil.copy(cwd + current_test.input_file, res_dir)
-        run_string = get_run_string()
+        run_string = get_run_string(current_test, architecture, n_node, count, bin_name, runtime_param_list)
         batch_string += run_string
     batch_string += 'rm -rf plotfiles ; rm -rf lab_frame_data\n'
     batch_file = 'batch_script.sh'
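
Note on the run_batch_nnode change above: the per-machine helpers (time_min, get_batch_string, get_run_string) are now imported lazily from cori.py or summit.py based on the machine argument, instead of being hard-wired to cori. An equivalent dispatch written with importlib, as a sketch only (the load_machine_helpers name and the ValueError are illustrative, not part of the patch):

    # Sketch of the machine-dependent import performed inside run_batch_nnode.
    # Assumes cori.py / summit.py are importable, as in Tools/performance_tests.
    import importlib

    def load_machine_helpers(machine):
        # The patch only handles 'cori' and 'summit'; the error is illustrative.
        if machine not in ('cori', 'summit'):
            raise ValueError('unknown machine: ' + machine)
        mod = importlib.import_module(machine)   # imports cori.py or summit.py
        return mod.time_min, mod.get_batch_string, mod.get_run_string

    time_min, get_batch_string, get_run_string = load_machine_helpers('cori')
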
" + "AMREX_HOME=../../../amrex/ PICSAR_HOME=../../../picsar/ " + "EBASE=perf_tests COMP=%s" %compiler_name[args.compiler]) if os.path.exists( cwd + 'store_git_hashes.txt' ): os.remove( cwd + 'store_git_hashes.txt' ) |