diff --git a/README.rst b/README.rst
index 0fdbfd558f18f8fdcc0004de142a44b77b6a3ef7..054c55818297c59b977bbacccb76e65fe2de9cdd 100644
--- a/README.rst
+++ b/README.rst
@@ -14,8 +14,8 @@ BiSheng Autotuner requires Python 3.10. Before installing the tool, make sure
 that ``python3`` in your search path is the correct version of Python.
 
 To start, run ``bin/install-autotuner.sh`` in the BiSheng Compiler installation
-directory. This will invoke ``pip`` to install Autotuner along with a number
-of dependencies. If the installation completes successfully, running
+directory. This will invoke ``pip`` to install BiSheng Autotuner along with a
+number of dependencies. If the installation completes successfully, running
 ``bin/llvm-autotune -h`` should display a help message instead of an error.
 
 For more detailed instructions on how to perform automatic tuning, visit
diff --git a/automation/MySQL/autotuner-sysbench.sh b/automation/MySQL/autotuner-sysbench.sh
index 191880e5c17dec5a8ed40b060eb7761cbd0e7e22..0c8d5847efd882768371ff5377b5df1e4eb5f2ac 100755
--- a/automation/MySQL/autotuner-sysbench.sh
+++ b/automation/MySQL/autotuner-sysbench.sh
@@ -57,20 +57,20 @@ if [ "$1" = "generate" ]; then
     make -s -j $(nproc)
     make install
 
-# Generate initial configuration for a new autotuner run under $AUTOTUNE_DATADIR
+# Generate initial configuration for a new BiSheng Autotuner run under $AUTOTUNE_DATADIR
 elif [ "$1" = "minimize" ]; then
     if [ -z "$AUTOTUNE_DATADIR" ]; then
         export AUTOTUNE_DATADIR=$2
     fi
-    echo `date "+%d-%H-%M"` "Start generating initial autotuner configuration:\n"
+    echo `date "+%d-%H-%M"` "Start generating initial BiSheng Autotuner configuration:\n"
     $LLVMAutotune minimize --search-space=$SearchSpace --name-filter \
        $MySQLServer/sql-common/net_serv.cc $MySQLServer/sql-common/client.cc \
        $MySQLServer/libmysql/libmysql.cc $MySQLServer/mysys/my_alloc.cc \
        $MySQLServer/vio/viosocket.cc $MySQLServer/sql-common/bind_params.cc \
        $MySQLServer/mysys/my_malloc.cc $MySQLServer/mysys/pack.cc
 
-# Run the first iteration of autotuner
+# Run the first iteration of BiSheng Autotuner
 # Rebuild the entire MySQL since cmake flags change from the previous stage.
elif [ "$1" = "run-one" ]; then diff --git a/automation/SPEC/accumulate.py b/automation/SPEC/accumulate.py index 0b08d8a99e190c6c2c03c1ed1ae9fddd8372e9cc..e05afc0757d232920f46ffcf4bb2974afc7fc8a5 100644 --- a/automation/SPEC/accumulate.py +++ b/automation/SPEC/accumulate.py @@ -10,26 +10,67 @@ import sys from reader import log -FIELDNAMES = ["benchmark", "baseline (s)", "best after tuning (s)", - "# of iterations", "speedup"] +FIELDNAMES = [ + "benchmark", + "baseline (s)", + "best after tuning (s)", + "# of iterations", + "speedup", +] ALL_BENCHMARKS = { - "intspeed.csv": ["600.perlbench_s", "602.gcc_s", "605.mcf_s", - "620.omnetpp_s", "623.xalancbmk_s", "625.x264_s", - "631.deepsjeng_s", "641.leela_s", "648.exchange2_s", - "657.xz_s"], - "intrate.csv": ["500.perlbench_r", "502.gcc_r", "505.mcf_r", - "520.omnetpp_r", "523.xalancbmk_r", "525.x264_r", - "531.deepsjeng_r", "541.leela_r", "548.exchange2_r", - "557.xz_r"], - "fpspeed.csv": ["603.bwaves_s", "607.cactuBSSN_s", "619.lbm_s", - "621.wrf_s", "627.cam4_s", "628.pop2_s", "638.imagick_s", - "644.nab_s", "649.fotonik3d_s", "654.roms_s"], - "fprate.csv": ["503.bwaves_r", "507.cactuBSSN_r", "508.namd_r", - "510.parest_r", "511.povray_r", "519.lbm_r", "521.wrf_r", - "526.blender_r", "527.cam4_r", "538.imagick_r", - "544.nab_r", "549.fotonik3d_r", "554.roms_r"] + "intspeed.csv": [ + "600.perlbench_s", + "602.gcc_s", + "605.mcf_s", + "620.omnetpp_s", + "623.xalancbmk_s", + "625.x264_s", + "631.deepsjeng_s", + "641.leela_s", + "648.exchange2_s", + "657.xz_s", + ], + "intrate.csv": [ + "500.perlbench_r", + "502.gcc_r", + "505.mcf_r", + "520.omnetpp_r", + "523.xalancbmk_r", + "525.x264_r", + "531.deepsjeng_r", + "541.leela_r", + "548.exchange2_r", + "557.xz_r", + ], + "fpspeed.csv": [ + "603.bwaves_s", + "607.cactuBSSN_s", + "619.lbm_s", + "621.wrf_s", + "627.cam4_s", + "628.pop2_s", + "638.imagick_s", + "644.nab_s", + "649.fotonik3d_s", + "654.roms_s", + ], + "fprate.csv": [ + "503.bwaves_r", + "507.cactuBSSN_r", + "508.namd_r", + "510.parest_r", + "511.povray_r", + "519.lbm_r", + "521.wrf_r", + "526.blender_r", + "527.cam4_r", + "538.imagick_r", + "544.nab_r", + "549.fotonik3d_r", + "554.roms_r", + ], } @@ -42,21 +83,24 @@ def init_accumulate(dir: str, filename: str): """ benchmarks = ALL_BENCHMARKS.get(filename) if benchmarks is None: # filename is not valid - print("filename should be one of 'intspeed.csv', 'intrate.csv', \ - 'fpspeed.csv', 'fprate.csv'") + print( + "filename should be one of 'intspeed.csv', 'intrate.csv', \ + 'fpspeed.csv', 'fprate.csv'" + ) sys.exit(1) flags = os.O_WRONLY | os.O_CREAT | os.O_TRUNC modes = stat.S_IWUSR | stat.S_IRUSR - with os.fdopen(os.open(dir + filename, flags, modes), 'w') as csv_file: + with os.fdopen(os.open(dir + filename, flags, modes), "w") as csv_file: writer = csv.DictWriter(csv_file, fieldnames=FIELDNAMES) writer.writeheader() for benchmark_name in benchmarks: writer.writerow({"benchmark": benchmark_name}) -def update(suite: str, benchmark: str, baseline: float, autotuner: float, - iterations: int): +def update( + suite: str, benchmark: str, baseline: float, autotuner: float, iterations: int +): """ update csv file with given data """ @@ -65,7 +109,8 @@ def update(suite: str, benchmark: str, baseline: float, autotuner: float, flags = os.O_WRONLY | os.O_CREAT | os.O_TRUNC modes = stat.S_IWUSR | stat.S_IRUSR with open("{}.csv".format(suite), mode="r") as in_file, os.fdopen( - os.open("temp.csv", flags, modes), 'w') as temp_file: + os.open("temp.csv", flags, modes), "w" + ) as temp_file: reader = 
csv.DictReader(in_file) writer = csv.DictWriter(temp_file, FIELDNAMES) @@ -75,14 +120,13 @@ def update(suite: str, benchmark: str, baseline: float, autotuner: float, row["baseline (s)"] = baseline row["best after tuning (s)"] = autotuner row["# of iterations"] = iterations - row["speedup"] = str( - round((baseline / autotuner - 1) * 100, - ndigits=2)) + '%' + row["speedup"] = ( + str(round((baseline / autotuner - 1) * 100, ndigits=2)) + "%" + ) writer.writerow(row) shutil.move("temp.csv", "{}.csv".format(suite)) log("ACCUMULATED RESULT CSV SAVED TO {}.csv".format(suite)) except TypeError as _: - log("RUNTIME NOT PRODUCED, " + - "WILL NOT UPDATE AUTOMATION OUTPUT CSV FILES") + log("RUNTIME NOT PRODUCED, " + "WILL NOT UPDATE AUTOMATION OUTPUT CSV FILES") os.remove("temp.csv") diff --git a/automation/SPEC/benchmark.py b/automation/SPEC/benchmark.py index 5b46325080b234113513dfb64572f52f7d6356ff..86068b86724128579eb9f367f40928bb297eeb51 100755 --- a/automation/SPEC/benchmark.py +++ b/automation/SPEC/benchmark.py @@ -19,14 +19,18 @@ from reader import log from reader import parse_baseline from reader import parse_autotuner -RUNCPU_TEMPLATE = "runcpu {0} --tune=base --rebuild --config=llvm.cfg " \ - "--noreportable -S LLVM_DIR={1} --size={2} " +RUNCPU_TEMPLATE = ( + "runcpu {0} --tune=base --rebuild --config=llvm.cfg " + "--noreportable -S LLVM_DIR={1} --size={2} " +) RUNCPU_TEMPLATE += "" # Add your extra configuration here -AUTOTUNER_TEMPLATE = "auto-tuner auto_run {0} -tr spec_tuner " \ - "--plugin-dir {1} --results-log-details " \ - "module_detail.log --stage-order loop " \ - "--stop-after {2} -o {3} " +AUTOTUNER_TEMPLATE = ( + "auto-tuner auto_run {0} -tr spec_tuner " + "--plugin-dir {1} --results-log-details " + "module_detail.log --stage-order loop " + "--stop-after {2} -o {3} " +) AUTOTUNER_TEMPLATE += "" # Add your extra configuration here START_CSV_NUM = None @@ -58,7 +62,7 @@ def args_check(args): """ Check whether the args are valid or not. Exit if necessary. """ - # Check runcpu and autotuner commands exists + # Check runcpu and BiSheng Autotuner commands exists for item in ("runcpu", "auto-tuner"): if not which(item): log("{} command not found, exit".format(item)) @@ -77,16 +81,17 @@ def args_check(args): output = expand_path(args.output) # Check directory exist - for path in (llvm_dir, result_dir, - config_spec, plugin_dir): + for path in (llvm_dir, result_dir, config_spec, plugin_dir): if not os.path.exists(path): log("No such file or directory") sys.exit(1) # Check every benchmark has a stop time if len(args.benchmarks) != len(args.stop_after): - log("Invalid number of argument for --stop-after. " / - "Need to match with num of benchmarks") + log( + "Invalid number of argument for --stop-after. 
" + / "Need to match with num of benchmarks" + ) sys.exit(1) # Check benchmarks are vaild @@ -94,10 +99,10 @@ def args_check(args): [get_suite(mark) for mark in args.benchmarks] # Make sure dir path is formatted - if result_dir[-1] != '/': - result_dir += '/' - if output[-1] != '/': - output += '/' + if result_dir[-1] != "/": + result_dir += "/" + if output[-1] != "/": + output += "/" # Clean if necessary if args.clean: @@ -107,20 +112,21 @@ def args_check(args): os.remove(result_dir + filename) log("remove {}".format(filename), extra_line=False) except IsADirectoryError as e: - log("can not remove directory {}, please remove manually" - .format(e.filename)) + log( + "can not remove directory {}, please remove manually".format( + e.filename + ) + ) # Check output directory if not os.path.exists(output): log("No existing output directory, creating a new directory") os.makedirs(output) # create four accumulate files - for filename in ("intspeed.csv", "intrate.csv", - "fpspeed.csv", "fprate.csv"): + for filename in ("intspeed.csv", "intrate.csv", "fpspeed.csv", "fprate.csv"): accumulate.init_accumulate(output, filename) else: - for filename in ("intspeed.csv", "intrate.csv", - "fpspeed.csv", "fprate.csv"): + for filename in ("intspeed.csv", "intrate.csv", "fpspeed.csv", "fprate.csv"): file_path = os.path.realpath(output + filename) if not os.path.exists(file_path): accumulate.init_accumulate(output, filename) @@ -128,35 +134,64 @@ def args_check(args): def get_args(): parser = argparse.ArgumentParser( - description="Automation script for auto-tuner SPEC benchmarks.") - - parser.add_argument("benchmarks", nargs='+', help="The benchmarks to run.") - parser.add_argument("-s", "--stop-after", nargs='+', required=True, - help="Stop autotuner benchmarks after given " \ - "seconds. Need to support multiple values if " \ - "multiple benchmarks. Required.") - parser.add_argument("-r", "--result-dir", default="$SPEC/result/", - help="Directory of SPEC produced result csv files " \ - "(default $SPEC/result/).") - parser.add_argument("-c", "--config-spec", default="./spec_automation.ini", - help="Location of spec_sample.ini file " \ - "(default ./spec_automation.ini).") - parser.add_argument("-p", "--plugin-dir", default="../../autotuner/plugin", - help="Directory of Autotuner plugin " \ - "(default ../../autotuner/plugin/).") - parser.add_argument("-o", "--output", default="./automation-output/", - help="Directory where results are accumulated " \ - "(default: ./automation-output/).") - parser.add_argument("-i", "--size", default="ref", choices=["ref", - "train", - "test"], - help="Size of SPEC input data to run (default: ref).") - parser.add_argument("--fake", action="store_true", - help="List, but do not execute, the commands " \ - "needed to build or run the benchmarks.") - parser.add_argument("--clean", action="store_true", - help="Before running benchmarks, remove all " \ - "files from a given (cpu2017/result) folder.") + description="Automation script for auto-tuner SPEC benchmarks." + ) + + parser.add_argument("benchmarks", nargs="+", help="The benchmarks to run.") + parser.add_argument( + "-s", + "--stop-after", + nargs="+", + required=True, + help="Stop BiSheng Autotuner benchmarks after given " + "seconds. Need to support multiple values if " + "multiple benchmarks. 
Required.", + ) + parser.add_argument( + "-r", + "--result-dir", + default="$SPEC/result/", + help="Directory of SPEC produced result csv files " "(default $SPEC/result/).", + ) + parser.add_argument( + "-c", + "--config-spec", + default="./spec_automation.ini", + help="Location of spec_sample.ini file " "(default ./spec_automation.ini).", + ) + parser.add_argument( + "-p", + "--plugin-dir", + default="../../autotuner/plugin", + help="Directory of BiSheng Autotuner plugin " + "(default ../../autotuner/plugin/).", + ) + parser.add_argument( + "-o", + "--output", + default="./automation-output/", + help="Directory where results are accumulated " + "(default: ./automation-output/).", + ) + parser.add_argument( + "-i", + "--size", + default="ref", + choices=["ref", "train", "test"], + help="Size of SPEC input data to run (default: ref).", + ) + parser.add_argument( + "--fake", + action="store_true", + help="List, but do not execute, the commands " + "needed to build or run the benchmarks.", + ) + parser.add_argument( + "--clean", + action="store_true", + help="Before running benchmarks, remove all " + "files from a given (cpu2017/result) folder.", + ) args = parser.parse_args() return args @@ -174,8 +209,7 @@ def run_baseline(args, i: int) -> float: START_CSV_NUM = flag + 1 if flag else 1 # Run baseline - baseline_cmd = shlex.split( - RUNCPU_TEMPLATE.format(mark, args.llvm_dir, args.size)) + baseline_cmd = shlex.split(RUNCPU_TEMPLATE.format(mark, args.llvm_dir, args.size)) execute_command(baseline_cmd, args.fake) # TODO check return code @@ -184,15 +218,16 @@ def run_baseline(args, i: int) -> float: baseline_runtime = "FAKE DATA" if not args.fake: baseline_runtime = parse_baseline( - get_infix(mark, args.size), mark, START_CSV_NUM, args.result_dir) + get_infix(mark, args.size), mark, START_CSV_NUM, args.result_dir + ) return baseline_runtime def run_autotuner(args, i: int) -> float: """ - Run Autotuner SPEC with given benchmark in args.benchmark[i] - Return the best time autotuner can produce + Run BiSheng Autotuner SPEC with given benchmark in args.benchmark[i] + Return the best time BiSheng Autotuner can produce """ global START_CSV_NUM, END_CSV_NUM @@ -204,22 +239,27 @@ def run_autotuner(args, i: int) -> float: os.environ["AUTOMATION_SPEC_SIZE"] = args.size log('ADD ENV "AUTOMATION_SPEC_SIZE"') - # Run autotuner SPEC - autotuner_cmd = shlex.split(AUTOTUNER_TEMPLATE.format( - args.config_spec, args.plugin_dir, stop_time, mark)) + # Run BiSheng Autotuner SPEC + autotuner_cmd = shlex.split( + AUTOTUNER_TEMPLATE.format(args.config_spec, args.plugin_dir, stop_time, mark) + ) execute_command(autotuner_cmd, args.fake) # Update END_CSV_NUM flag = find_largest_num(args.result_dir) END_CSV_NUM = flag if flag else 1 - # Parse autotuner result + # Parse BiSheng Autotuner result autotuner_runtime = "FAKE DATA" if not args.fake: # Parse from # START_CSV_NUM + 2(include one SPEC result for build) TO END_CSV_NUM - autotuner_runtime = parse_autotuner(get_infix( - mark, args.size), mark, START_CSV_NUM + 2, - END_CSV_NUM, args.result_dir) + autotuner_runtime = parse_autotuner( + get_infix(mark, args.size), + mark, + START_CSV_NUM + 2, + END_CSV_NUM, + args.result_dir, + ) return autotuner_runtime @@ -251,8 +291,7 @@ def main(): mark = args.benchmarks[i] log("BENCHMARK: {}".format(mark), extra_line=False) log("BASELINE RUNTIME: {}".format(baseline_runtime), extra_line=False) - log("AUTOTUNER BEST RUNTIME: {}".format( - autotuner_runtime), extra_line=False) + log("AUTOTUNER BEST RUNTIME: 
{}".format(autotuner_runtime), extra_line=False) iteration = END_CSV_NUM - START_CSV_NUM - 1 log("ITERATIONS: {}".format(iteration)) @@ -262,8 +301,13 @@ def main(): # update csv if not args.fake: - accumulate.update(args.output + get_suite(mark), mark, - baseline_runtime, autotuner_runtime, iteration) + accumulate.update( + args.output + get_suite(mark), + mark, + baseline_runtime, + autotuner_runtime, + iteration, + ) # Do a final log log("SUMMARY:") diff --git a/automation/SPEC/reader.py b/automation/SPEC/reader.py index 8163e07a15f6323ac862e53ff024a30511e83d0d..94bcf1810b0bccc3f0cd2d14dea8e5eb42c02dea 100644 --- a/automation/SPEC/reader.py +++ b/automation/SPEC/reader.py @@ -6,14 +6,35 @@ import os import sys from spec_csv_parser import SPECCSVParser -INTSPEED_NAMES = ["600", "602", "605", "620", - "623", "625", "631", "641", "648", "657"] -INTRATE_NAMES = ["500", "502", "505", "520", - "523", "525", "531", "541", "548", "557"] -FLOATSPEED_NAMES = ["603", "607", "619", "621", - "627", "628", "638", "644", "649", "654"] -FLOATRATE_NAMES = ["503", "507", "508", "510", "511", - "519", "521", "526", "527", "538", "544", "549", "554"] +INTSPEED_NAMES = ["600", "602", "605", "620", "623", "625", "631", "641", "648", "657"] +INTRATE_NAMES = ["500", "502", "505", "520", "523", "525", "531", "541", "548", "557"] +FLOATSPEED_NAMES = [ + "603", + "607", + "619", + "621", + "627", + "628", + "638", + "644", + "649", + "654", +] +FLOATRATE_NAMES = [ + "503", + "507", + "508", + "510", + "511", + "519", + "521", + "526", + "527", + "538", + "544", + "549", + "554", +] def log(*args, extra_line=True): @@ -50,7 +71,7 @@ def get_infix(benchmark: str, size: str): else: return suite + ".refspeed" else: - return suite + '.' + size + return suite + "." + size def get_suite(benchmark: str): @@ -67,8 +88,9 @@ def get_suite(benchmark: str): elif benchmark in FLOATSPEED_NAMES: return "fpspeed" else: - log("Invalid benchmark name. Use a number to denote benchmark, " \ - "for ex: '602'") + log( + "Invalid benchmark name. Use a number to denote benchmark, " "for ex: '602'" + ) sys.exit(1) @@ -86,8 +108,7 @@ def parse_one_csv(filename_template: str, i: int, benchmark: str) -> float: return float("inf") -def parse_baseline(infix: str, benchmark: str, baseline_num: int, - result_dir: str): +def parse_baseline(infix: str, benchmark: str, baseline_num: int, result_dir: str): """ Parse and return the runtime of baseline with given params """ @@ -97,10 +118,9 @@ def parse_baseline(infix: str, benchmark: str, baseline_num: int, return baseline_time -def parse_autotuner(infix: str, benchmark: str, start: int, end: int, - result_dir: str): +def parse_autotuner(infix: str, benchmark: str, start: int, end: int, result_dir: str): """ - Parse and return the minimum runtime of all autotuner results. + Parse and return the minimum runtime of all BiSheng Autotuner results. Return None if no runtime found. 
""" mininum = float("inf") @@ -109,7 +129,7 @@ def parse_autotuner(infix: str, benchmark: str, start: int, end: int, log("THE FILENAMES ARE: " + filename_template.format("")) log("Start parsing files") - for i in range(start, end+1): + for i in range(start, end + 1): mininum = min(mininum, parse_one_csv(filename_template, i, benchmark)) if mininum == float("inf"): diff --git a/automation/SPEC/spec_csv_parser.py b/automation/SPEC/spec_csv_parser.py index 608ce4378426623ac0973da1ce40d5c0c3236313..fe298aafa1be061d7b2acf8d0c523ef508156332 100644 --- a/automation/SPEC/spec_csv_parser.py +++ b/automation/SPEC/spec_csv_parser.py @@ -15,7 +15,7 @@ class SPECCSVParser: """ Print all lines in a csv_file """ - with open(csv_file, 'r') as file: + with open(csv_file, "r") as file: reader = csv.reader(file) for i, row in enumerate(reader): print("[{0}] {1}".format(i, row)) @@ -25,7 +25,7 @@ class SPECCSVParser: Return the SPEC runtime in given csv_file """ print("[SPEC AUTOMATION]: NOW PARSE FILE: {}".format(csv_file)) - with open(csv_file, 'r') as file: + with open(csv_file, "r") as file: reader = csv.reader(file) # loop over the Full Results Table. Ending in line 17 try: @@ -37,13 +37,15 @@ class SPECCSVParser: return float(line[2]) except ValueError as _: print( - "[SPEC AUTOMATION]: Error: cannot parse line {} " \ - "with data {}".format(i, line)) + "[SPEC AUTOMATION]: Error: cannot parse line {} " + "with data {}".format(i, line) + ) return float("inf") except StopIteration as _: - print("[SPEC AUTOMATION]: Error: cannot find result " \ - "of {} in {}".format( - benchmark, csv_file)) + print( + "[SPEC AUTOMATION]: Error: cannot find result " + "of {} in {}".format(benchmark, csv_file) + ) return float("inf") diff --git a/autotuner/__init__.py b/autotuner/__init__.py index e012f5bf697d56df7472601b1a17bf8d347ebcf6..8122067f5b6c74c9ba59f3402b13895751cc68cb 100644 --- a/autotuner/__init__.py +++ b/autotuner/__init__.py @@ -5,5 +5,11 @@ Initialization for all source packages Copyright (C) 2017-2020, Huawei Technologies Co., Ltd. All rights reserved. """ # init src package -__all__ = ["tuners", "models", "iomanager", "iomanagerutils", "xmlmanager", - "yamlmanager"] +__all__ = [ + "tuners", + "models", + "iomanager", + "iomanagerutils", + "xmlmanager", + "yamlmanager", +] diff --git a/autotuner/dbutils.py b/autotuner/dbutils.py index 5b38589f82ed5f30696b7e480501a8b479fec088..1424cd199471ded05eefd8a951b92ce3976f5315 100644 --- a/autotuner/dbutils.py +++ b/autotuner/dbutils.py @@ -25,6 +25,7 @@ class OptimalConfig(BASE_TABLE): Stores the parameters for a (hash, type, pass) triple during the tuning run. """ + __tablename__ = "optimalConfigs" hashcode = Column(String, primary_key=True) code_region_type = Column(String, primary_key=True) @@ -38,6 +39,7 @@ class CurrentCodeRegion(BASE_TABLE): Will be used to generate LLVM input. The primary key constraints must match CodeRegion::Operator== in LLVM. """ + __tablename__ = "currentCodeRegions" name = Column(String, primary_key=True) pass_name = Column(String, primary_key=True) @@ -64,7 +66,7 @@ def create_config_db_session(data_dir): session: Session to the database. """ path = os.path.join(data_dir, "configs.db") - engine = create_engine('sqlite:///' + path) + engine = create_engine("sqlite:///" + path) if not os.path.exists(path): BASE_TABLE.metadata.create_all(engine) session_maker = sessionmaker(bind=engine) @@ -88,17 +90,21 @@ def is_current_code_region(db_session, entry): Checks if `entry` is already in the CurrentCodeRegions table. 
""" try: - found = db_session.query(CurrentCodeRegion).filter( - CurrentCodeRegion.name == entry.name, - CurrentCodeRegion.pass_name == entry.pass_name, - CurrentCodeRegion.func_name == entry.func_name, - CurrentCodeRegion.code_region_type == entry.code_region_type, - CurrentCodeRegion.hashcode == entry.hashcode, - CurrentCodeRegion.debug_file == entry.debug_file, - CurrentCodeRegion.debug_line == entry.debug_line, - CurrentCodeRegion.debug_column == entry.debug_column, - CurrentCodeRegion.invocation == entry.invocation - ).count() + found = ( + db_session.query(CurrentCodeRegion) + .filter( + CurrentCodeRegion.name == entry.name, + CurrentCodeRegion.pass_name == entry.pass_name, + CurrentCodeRegion.func_name == entry.func_name, + CurrentCodeRegion.code_region_type == entry.code_region_type, + CurrentCodeRegion.hashcode == entry.hashcode, + CurrentCodeRegion.debug_file == entry.debug_file, + CurrentCodeRegion.debug_line == entry.debug_line, + CurrentCodeRegion.debug_column == entry.debug_column, + CurrentCodeRegion.invocation == entry.invocation, + ) + .count() + ) return found > 0 except Exception: db_session.rollback() @@ -141,11 +147,15 @@ def is_duplicate_hash(db_session, hashcode, code_region_type, pass_name): in the CurrentCodeRegions table. """ try: - found = db_session.query(CurrentCodeRegion).filter( - CurrentCodeRegion.hashcode == hashcode, - CurrentCodeRegion.code_region_type == code_region_type, - CurrentCodeRegion.pass_name == pass_name - ).count() + found = ( + db_session.query(CurrentCodeRegion) + .filter( + CurrentCodeRegion.hashcode == hashcode, + CurrentCodeRegion.code_region_type == code_region_type, + CurrentCodeRegion.pass_name == pass_name, + ) + .count() + ) return found > 1 except Exception: db_session.rollback() @@ -158,11 +168,15 @@ def optimal_config_exists(db_session, hashcode, code_region_type, pass_name): in the OptimalConfig table. """ try: - result = db_session.query(OptimalConfig).filter( - OptimalConfig.hashcode == hashcode, - OptimalConfig.code_region_type == code_region_type, - OptimalConfig.pass_name == pass_name - ).one_or_none() + result = ( + db_session.query(OptimalConfig) + .filter( + OptimalConfig.hashcode == hashcode, + OptimalConfig.code_region_type == code_region_type, + OptimalConfig.pass_name == pass_name, + ) + .one_or_none() + ) return result is not None except Exception: db_session.rollback() @@ -176,11 +190,15 @@ def get_optimal_config(db_session, hashcode, code_region_type, pass_name): not exist. 
""" try: - result = db_session.query(OptimalConfig).filter( - OptimalConfig.hashcode == hashcode, - OptimalConfig.code_region_type == code_region_type, - OptimalConfig.pass_name == pass_name - ).one_or_none() + result = ( + db_session.query(OptimalConfig) + .filter( + OptimalConfig.hashcode == hashcode, + OptimalConfig.code_region_type == code_region_type, + OptimalConfig.pass_name == pass_name, + ) + .one_or_none() + ) return result.params if result is not None else None except Exception: db_session.rollback() @@ -205,11 +223,15 @@ def get_current_code_regions(db_session, ignore_seen=False): results = [] seen = db_session.query(CurrentCodeRegion).all() for row in seen: - cfg_row = db_session.query(OptimalConfig).filter( - OptimalConfig.hashcode == row.hashcode, - OptimalConfig.code_region_type == row.code_region_type, - OptimalConfig.pass_name == row.pass_name - ).one_or_none() + cfg_row = ( + db_session.query(OptimalConfig) + .filter( + OptimalConfig.hashcode == row.hashcode, + OptimalConfig.code_region_type == row.code_region_type, + OptimalConfig.pass_name == row.pass_name, + ) + .one_or_none() + ) parameters = cfg_row.params if row.seen else None if ignore_seen and cfg_row: @@ -223,12 +245,9 @@ def get_current_code_regions(db_session, ignore_seen=False): hashcode=row.hashcode, invocation=row.invocation, ) - if (row.debug_file != "" and row.debug_line != "" and - row.debug_column != ""): + if row.debug_file != "" and row.debug_line != "" and row.debug_column != "": debug_loc = DebugLoc( - row.debug_file, - int(row.debug_line), - int(row.debug_column) + row.debug_file, int(row.debug_line), int(row.debug_column) ) code_region.debug_loc = debug_loc results.append(CodeRegionConfiguration(code_region, parameters)) @@ -248,11 +267,15 @@ def update_optimal_configs(db_session, remarks): """ try: for remark in remarks: - row = db_session.query(OptimalConfig).filter( - OptimalConfig.hashcode == str(remark.CodeRegionHash), - OptimalConfig.code_region_type == remark.CodeRegionType, - OptimalConfig.pass_name == remark.Pass - ).one_or_none() + row = ( + db_session.query(OptimalConfig) + .filter( + OptimalConfig.hashcode == str(remark.CodeRegionHash), + OptimalConfig.code_region_type == remark.CodeRegionType, + OptimalConfig.pass_name == remark.Pass, + ) + .one_or_none() + ) if row: # This code region was previously stored in the table. 
db_session.delete(row) diff --git a/autotuner/iomanager.py b/autotuner/iomanager.py index 98a05327923d09a6f8d460e198555c8d3c972e8e..716a492d716c7870d57592064e33362e4fb0f756 100644 --- a/autotuner/iomanager.py +++ b/autotuner/iomanager.py @@ -7,10 +7,13 @@ import argparse import abc argument_parser = argparse.ArgumentParser(add_help=False) -argument_parser.add_argument('--parse-format', nargs='?', choices=[ - 'xml', 'yaml'], default='yaml', - help='choose the format of LLVM auto-tuning-input/opp,' - '(default: yaml)') +argument_parser.add_argument( + "--parse-format", + nargs="?", + choices=["xml", "yaml"], + default="yaml", + help="choose the format of LLVM auto-tuning-input/opp," "(default: yaml)", +) class EmptySearchSpaceError(Exception): @@ -20,9 +23,15 @@ class EmptySearchSpaceError(Exception): class IOManager(object): @abc.abstractmethod - def build_llvm_input(self, configuration_data, task_map, output_file, - fixed_llvm_input=None, config_db=None, - use_hash_matching=False): + def build_llvm_input( + self, + configuration_data, + task_map, + output_file, + fixed_llvm_input=None, + config_db=None, + use_hash_matching=False, + ): pass @abc.abstractmethod @@ -30,8 +39,13 @@ class IOManager(object): pass @abc.abstractmethod - def parse_search_space(self, search_space, use_dynamic_values=False, - use_baseline_config = False, filepath = None): + def parse_search_space( + self, + search_space, + use_dynamic_values=False, + use_baseline_config=False, + filepath=None, + ): pass @abc.abstractmethod @@ -43,20 +57,38 @@ class IOManager(object): pass @abc.abstractmethod - def generate_search_space_file(self, files, output_file, config_file, - name_filter=None, func_name_filter=None, - file_name_filter=None, type_filter=None, - pass_filter=None, config_db=None, - use_hash_matching=False, - use_prev_configs=False, inject_seed=False): + def generate_search_space_file( + self, + files, + output_file, + config_file, + name_filter=None, + func_name_filter=None, + file_name_filter=None, + type_filter=None, + pass_filter=None, + config_db=None, + use_hash_matching=False, + use_prev_configs=False, + inject_seed=False, + ): pass @abc.abstractmethod - def generate_search_space(self, files, config_file, file_name_filter, - func_name_filter, name_filter, type_filter, - pass_filter, config_db=None, - use_hash_matching=False, use_prev_configs=False, - inject_seed=False): + def generate_search_space( + self, + files, + config_file, + file_name_filter, + func_name_filter, + name_filter, + type_filter, + pass_filter, + config_db=None, + use_hash_matching=False, + use_prev_configs=False, + inject_seed=False, + ): pass @abc.abstractmethod diff --git a/autotuner/iomanagerutils.py b/autotuner/iomanagerutils.py index 0fb6efddc77a63ead92afc8caae8d9978ed803da..e6c5099efb56faeeab69f66d63c95a6032e7e07f 100644 --- a/autotuner/iomanagerutils.py +++ b/autotuner/iomanagerutils.py @@ -9,7 +9,7 @@ from autotuner import yamlmanager def create_io_manager(suffix_str): suffix_str = suffix_str.strip() - if (suffix_str == 'xml'): + if suffix_str == "xml": filemanager = xmlmanager.XMLManager() else: filemanager = yamlmanager.YAMLManager() diff --git a/autotuner/main.py b/autotuner/main.py index deeb41aa7eeb63e22eda191ce40d2d074cbc8eae..fd753546f04c908b963eed4e3bc9a02b7c476ed4 100644 --- a/autotuner/main.py +++ b/autotuner/main.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- """ -Main functions for Autotuner's command-line interface +Main functions for BiSheng Autotuner's command-line interface Copyright (C) 
2017-2020, Huawei Technologies Co., Ltd. All rights reserved. """ import argparse @@ -25,48 +25,63 @@ from autotuner.iomanagerutils import create_io_manager def _add_common_tuner_arguments(parser): - parser.add_argument('config_file', nargs='?', - help='The tuning config file.') - parser.add_argument('--plugin-dir', metavar='DIR', - help='specify the dir to load ' - 'customized tuner scripts') - parser.add_argument('-tr', '--tuner', - type=str, - help="Select which tuner to use") - parser.add_argument('-lr', '--list-tuners', - action="store_true", help="List all available tuners") - parser.add_argument('--add-llvm-inputs', nargs='+', - help="add existing llvm configuration input files as " - "constants in addition to the llvm configurations" - " generated in each iteration of the tuning run") + parser.add_argument("config_file", nargs="?", help="The tuning config file.") + parser.add_argument( + "--plugin-dir", + metavar="DIR", + help="specify the dir to load " "customized tuner scripts", + ) + parser.add_argument("-tr", "--tuner", type=str, help="Select which tuner to use") + parser.add_argument( + "-lr", "--list-tuners", action="store_true", help="List all available tuners" + ) + parser.add_argument( + "--add-llvm-inputs", + nargs="+", + help="add existing llvm configuration input files as " + "constants in addition to the llvm configurations" + " generated in each iteration of the tuning run", + ) def _add_common_parse_arguments(parser): - parser.add_argument('-nf', '--name-filter', nargs='+', metavar='Name', - default=[], - help='to filter code regions by names when generating ' - 'search space') - parser.add_argument('--func-name-filter', nargs='+', metavar='Name', - default=[], - help='to filter code regions by function names when ' - 'generating search space') - parser.add_argument('--file-name-filter', nargs='+', metavar='Name', - default=[], - help='to filter code regions by file names when ' - 'generating search space') - parser.add_argument('--hot-func-file', nargs='+', metavar='Name', - default=[], - help=argparse.SUPPRESS) - parser.add_argument('--hot-func-number', metavar='N', type=int, default=10, - help=argparse.SUPPRESS) - parser.add_argument('-scf', '--search-config-file', - help='The Search space config file') + parser.add_argument( + "-nf", + "--name-filter", + nargs="+", + metavar="Name", + default=[], + help="to filter code regions by names when generating " "search space", + ) + parser.add_argument( + "--func-name-filter", + nargs="+", + metavar="Name", + default=[], + help="to filter code regions by function names when " "generating search space", + ) + parser.add_argument( + "--file-name-filter", + nargs="+", + metavar="Name", + default=[], + help="to filter code regions by file names when " "generating search space", + ) + parser.add_argument( + "--hot-func-file", nargs="+", metavar="Name", default=[], help=argparse.SUPPRESS + ) + parser.add_argument( + "--hot-func-number", metavar="N", type=int, default=10, help=argparse.SUPPRESS + ) + parser.add_argument( + "-scf", "--search-config-file", help="The Search space config file" + ) def get_args(): # create the top-level parser - parser = argparse.ArgumentParser(prog='auto-tuner') - subparsers = parser.add_subparsers(help='commands help', dest='command') + parser = argparse.ArgumentParser(prog="auto-tuner") + subparsers = parser.add_subparsers(help="commands help", dest="command") subparsers.required = True # create the the parser for the "run" command @@ -74,70 +89,81 @@ def get_args(): 
argparsers.append(tunerbase.argument_parser) argparsers.append(argument_parser) - run_parser = subparsers.add_parser('run', - parents=argparsers, - help='Run the tuner') + run_parser = subparsers.add_parser("run", parents=argparsers, help="Run the tuner") _add_common_tuner_arguments(run_parser) - run_parser.add_argument('-ss', '--search_space', - help='The search space file.') - run_parser.add_argument('--enable-final-compile', action='store_true', - default=False, - help='perform final compilation with optimal ' - 'config at the end of tuning') + run_parser.add_argument("-ss", "--search_space", help="The search space file.") + run_parser.add_argument( + "--enable-final-compile", + action="store_true", + default=False, + help="perform final compilation with optimal " "config at the end of tuning", + ) # create the parser for the "merge" command merge_parser = subparsers.add_parser( - 'merge', parents=[argument_parser], - help='Merge LLVM configuration input files') + "merge", parents=[argument_parser], help="Merge LLVM configuration input files" + ) merge_parser.add_argument( - 'input_file', nargs='+', - help='LLVM configuration input files generated by LLVM') - merge_parser.add_argument( - '-o', '--output', metavar='FILE', help='output file') + "input_file", nargs="+", help="LLVM configuration input files generated by LLVM" + ) + merge_parser.add_argument("-o", "--output", metavar="FILE", help="output file") # create the parser for the "divide" command - divide_parser = subparsers.add_parser('divide', parents=[argument_parser], - help='Divide LLVM configuration ' - 'input file into multiple files' - ' based on file_name') + divide_parser = subparsers.add_parser( + "divide", + parents=[argument_parser], + help="Divide LLVM configuration " + "input file into multiple files" + " based on file_name", + ) divide_parser.add_argument( - 'input_file', help='LLVM configuration input file generated by LLVM') + "input_file", help="LLVM configuration input file generated by LLVM" + ) divide_parser.add_argument( - '-o', '--output_dir', metavar='DIR', help='output dir', default='./') + "-o", "--output_dir", metavar="DIR", help="output dir", default="./" + ) # create the parser for the "parse" command - gsc_parser = subparsers.add_parser('parse', parents=[ - argument_parser], help='Parse the tuning' - ' opportunity files and generate ' - 'search space') + gsc_parser = subparsers.add_parser( + "parse", + parents=[argument_parser], + help="Parse the tuning" " opportunity files and generate " "search space", + ) _add_common_parse_arguments(gsc_parser) - gsc_parser.add_argument('opp_file', nargs='+', - help="Opportunity files generated by LLVM") - gsc_parser.add_argument('-o', '--output', metavar='FILE', - help='output file') - gsc_parser.add_argument('-tf', '--type-filter', nargs='+', default=[], - help='to filter code regions by types when' - ' generating search space', - choices=['machine_basic_block', 'loop', 'function', - 'module']) + gsc_parser.add_argument( + "opp_file", nargs="+", help="Opportunity files generated by LLVM" + ) + gsc_parser.add_argument("-o", "--output", metavar="FILE", help="output file") + gsc_parser.add_argument( + "-tf", + "--type-filter", + nargs="+", + default=[], + help="to filter code regions by types when" " generating search space", + choices=["machine_basic_block", "loop", "function", "module"], + ) # create the the parser for the "auto_run" command - auto_run_parser = subparsers.add_parser('auto_run', - parents=argparsers, - help='(recommended) auto-generate ' - 'the 
search space and run the' - ' auto-phase-based tuning ' - '(the default order of stages' - ' is module -> function ' - '-> loop)') - - auto_run_parser.add_argument('--stage-order', nargs='+', metavar='stage', - default=["module", "function", "loop"], - help='specify stage order of auto_run. ' - 'each stage is a code region type', - choices=['machine_basic_block', 'function', - 'loop', 'module']) + auto_run_parser = subparsers.add_parser( + "auto_run", + parents=argparsers, + help="(recommended) auto-generate " + "the search space and run the" + " auto-phase-based tuning " + "(the default order of stages" + " is module -> function " + "-> loop)", + ) + + auto_run_parser.add_argument( + "--stage-order", + nargs="+", + metavar="stage", + default=["module", "function", "loop"], + help="specify stage order of auto_run. " "each stage is a code region type", + choices=["machine_basic_block", "function", "loop", "module"], + ) _add_common_parse_arguments(auto_run_parser) _add_common_tuner_arguments(auto_run_parser) @@ -149,8 +175,9 @@ def get_args(): def _parse_common_options(args): if args.list_tuners: - print("Available tuners: " + - str(tunerbase.get_available_tuners(args.plugin_dir))) + print( + "Available tuners: " + str(tunerbase.get_available_tuners(args.plugin_dir)) + ) sys.exit(0) if args.list_techniques: @@ -180,12 +207,14 @@ def _parse_common_options(args): module = imp.load_source(args.tuner, module_file) else: module = import_module("autotuner.tuners." + args.tuner) - tuner = getattr(module, 'Tuner') + tuner = getattr(module, "Tuner") except ImportError as error: print(error) print("Please select a valid tuner name.") - print("Available tuners: " + - str(tunerbase.get_available_tuners(args.plugin_dir))) + print( + "Available tuners: " + + str(tunerbase.get_available_tuners(args.plugin_dir)) + ) sys.exit(-1) else: tuner = SimpleTuner @@ -207,7 +236,8 @@ def _parse_common_options(args): config = ConfigParser() config.optionxform = str config["DEFAULT"]["ConfigFilePath"] = os.path.abspath( - os.path.dirname(args.config_file)) + os.path.dirname(args.config_file) + ) config.read(args.config_file) # set up system environment if config.has_section("Environment Setting"): @@ -216,14 +246,12 @@ def _parse_common_options(args): def _setup_env(environment_section): - for (key, value) in list(environment_section.items()): - path_list = [os.path.expanduser(path.strip()) - for path in value.split(",")] + for key, value in list(environment_section.items()): + path_list = [os.path.expanduser(path.strip()) for path in value.split(",")] # if the environment variable exists, prepend to the existing one if key in os.environ: # prepend to the existing one - os.environ[key] = os.pathsep.join(path_list) \ - + os.pathsep + os.environ[key] + os.environ[key] = os.pathsep.join(path_list) + os.pathsep + os.environ[key] else: # create the environment variable os.environ[key] = os.pathsep.join(path_list) @@ -285,8 +313,8 @@ def parse_main(args): dir_name = os.path.dirname(__file__) args.search_config_file = os.path.join( dir_name, - 'search_space_config/default_search_space' + - iomanager.get_file_extension()) + "search_space_config/default_search_space" + iomanager.get_file_extension(), + ) if args.opp_file: input_files = args.opp_file if args.output: @@ -297,19 +325,24 @@ def parse_main(args): # if the hot function file is specified, # we need to parse it to get hot functions and append them into # func_name_filter list - if (args.hot_func_file): + if args.hot_func_file: hot_function_list = 
utils.parse_hot_function( - args.hot_func_file, args.hot_func_number) + args.hot_func_file, args.hot_func_number + ) args.func_name_filter = list( - set(hot_function_list).union(set(args.func_name_filter))) + set(hot_function_list).union(set(args.func_name_filter)) + ) print("Generating search space from " + str(input_files)) - iomanager.generate_search_space_file(input_files, output_file, - args.search_config_file, - args.name_filter, - args.func_name_filter, - args.file_name_filter, - args.type_filter) + iomanager.generate_search_space_file( + input_files, + output_file, + args.search_config_file, + args.name_filter, + args.func_name_filter, + args.file_name_filter, + args.type_filter, + ) print("The search space has been generated: " + output_file) sys.exit(0) @@ -328,13 +361,18 @@ def run_main(args): opentuner.init_logging() - tuner.main(args, compile_dir=compile_dir, program_name=args.config_file, - llvm_config_file=llvm_config_file, - fixed_llvm_config_files=args.add_llvm_inputs, - enable_final_compile=args.enable_final_compile, - search_space=args.search_space, - run_dir=compile_section["RunDir"], - run_cmd=compile_section["RunCommand"], compile_cmd=compile_cmd) + tuner.main( + args, + compile_dir=compile_dir, + program_name=args.config_file, + llvm_config_file=llvm_config_file, + fixed_llvm_config_files=args.add_llvm_inputs, + enable_final_compile=args.enable_final_compile, + search_space=args.search_space, + run_dir=compile_section["RunDir"], + run_cmd=compile_section["RunCommand"], + compile_cmd=compile_cmd, + ) def auto_run_main(args): @@ -349,8 +387,8 @@ def auto_run_main(args): dir_name = os.path.dirname(__file__) args.search_config_file = os.path.join( dir_name, - 'search_space_config/default_search_space' + - iomanager.get_file_extension()) + "search_space_config/default_search_space" + iomanager.get_file_extension(), + ) # init common options tuner, config = _parse_common_options(args) @@ -391,53 +429,61 @@ def auto_run_main(args): else: fixed_llvm_config_files = [llvm_config_file] - print("=== Starting stage {:d}: {:s} level tuning ===" - .format(index + 1, phase)) - search_space_tree = _generate_search_space(args, compile_dir, - iomanager, opp_compile_cmd, - opp_dir, phase) + print("=== Starting stage {:d}: {:s} level tuning ===".format(index + 1, phase)) + search_space_tree = _generate_search_space( + args, compile_dir, iomanager, opp_compile_cmd, opp_dir, phase + ) if search_space_tree is None: break try: - tuner.main(args, compile_dir=compile_dir, - llvm_config_file=llvm_config_file, - enable_final_compile=True, - fixed_llvm_config_files=fixed_llvm_config_files, - search_space=search_space_tree, - run_dir=run_dir, - run_cmd=run_cmd, compile_cmd=compile_cmd, - stage=phase) + tuner.main( + args, + compile_dir=compile_dir, + llvm_config_file=llvm_config_file, + enable_final_compile=True, + fixed_llvm_config_files=fixed_llvm_config_files, + search_space=search_space_tree, + run_dir=run_dir, + run_cmd=run_cmd, + compile_cmd=compile_cmd, + stage=phase, + ) is_first_stage = False except EmptySearchSpaceError: - print('Empty search space, stop the current stage') + print("Empty search space, stop the current stage") -def _generate_search_space(args, compile_dir, iomanager, opp_compile_cmd, - opp_dir, phase): +def _generate_search_space( + args, compile_dir, iomanager, opp_compile_cmd, opp_dir, phase +): # before generating search space # clean the opp dir in case it is not empty _clean_opp(opp_dir) result = opentuner.MeasurementInterface(args).call_program( - 
cmd=opp_compile_cmd, cwd=compile_dir) - if result['returncode'] != 0: + cmd=opp_compile_cmd, cwd=compile_dir + ) + if result["returncode"] != 0: print("Failed to generate tuning opportunities, the error was:") - print(result['stderr']) + print(result["stderr"]) return None # generate search space based opp files opp_files = glob.glob(os.path.join(opp_dir, "*")) # if the hot function file is specified, we need to parse it to # get hot functions and append them into func_name_filter list - if (args.hot_func_file): + if args.hot_func_file: hot_function_list = utils.parse_hot_function( - args.hot_func_file, args.hot_func_number) + args.hot_func_file, args.hot_func_number + ) args.func_name_filter = list( - set(hot_function_list).union(set(args.func_name_filter))) + set(hot_function_list).union(set(args.func_name_filter)) + ) search_space_tree = iomanager.generate_search_space( opp_files, args.search_config_file, type_filter=[phase], func_name_filter=args.func_name_filter, - file_name_filter=args.file_name_filter) + file_name_filter=args.file_name_filter, + ) return search_space_tree @@ -457,14 +503,14 @@ def main(): try: run_main(args) except EmptySearchSpaceError: - print('Empty search space, stop tuning') + print("Empty search space, stop tuning") # if the sub-command auto_run is called else: auto_run_main(args) except ConfigParserError as error: - print('Failed to parse your configuration file: ' + args.config_file) + print("Failed to parse your configuration file: " + args.config_file) print(error) exit(1) diff --git a/autotuner/models.py b/autotuner/models.py index d8a921cf8b7a513ccae93982dd3761e910f700d3..5480eed9f8397bed7a45735970b9f9c64470ad74 100644 --- a/autotuner/models.py +++ b/autotuner/models.py @@ -1,6 +1,6 @@ # coding=utf-8 """ -Autotuner Models +BiSheng Autotuner Models Copyright (C) 2017-2020, Huawei Technologies Co., Ltd. All rights reserved. """ @@ -12,8 +12,9 @@ class LegacyCodeRegion(object): compatibilities. 
""" - def __init__(self, name, file_name, func_name, start_line, end_line, - code_region_type): + def __init__( + self, name, file_name, func_name, start_line, end_line, code_region_type + ): self.name = name self.file_name = file_name self.func_name = func_name @@ -22,14 +23,15 @@ class LegacyCodeRegion(object): self.code_region_type = code_region_type def __eq__(self, other): - return self.code_region_type == other.code_region_type \ - and self.name == other.name \ - and self.file_name == other.file_name \ + return ( + self.code_region_type == other.code_region_type + and self.name == other.name + and self.file_name == other.file_name and self.func_name == other.func_name + ) def __hash__(self): - return hash( - (self.code_region_type, self.name, self.file_name, self.func_name)) + return hash((self.code_region_type, self.name, self.file_name, self.func_name)) class CodeRegion(object): @@ -37,8 +39,16 @@ class CodeRegion(object): A representation of Code Region """ - def __init__(self, pass_name, name, func_name, code_region_type, - hashcode="", invocation=0, debug_loc=None): + def __init__( + self, + pass_name, + name, + func_name, + code_region_type, + hashcode="", + invocation=0, + debug_loc=None, + ): self.pass_name = pass_name self.name = name self.func_name = func_name @@ -48,38 +58,63 @@ class CodeRegion(object): self.invocation = invocation def __eq__(self, other): - return self.code_region_type == other.code_region_type \ - and self.name == other.name \ - and self.debug_loc == other.debug_loc \ - and self.func_name == other.func_name \ - and self.pass_name == other.pass_name \ - and self.hashcode == other.hashcode \ + return ( + self.code_region_type == other.code_region_type + and self.name == other.name + and self.debug_loc == other.debug_loc + and self.func_name == other.func_name + and self.pass_name == other.pass_name + and self.hashcode == other.hashcode and self.invocation == other.invocation + ) def set_debug_loc(self, debug_loc): if isinstance(debug_loc, dict): - if 'File' in debug_loc.keys() and \ - 'Line' in debug_loc.keys() and \ - 'Column' in debug_loc.keys(): + if ( + "File" in debug_loc.keys() + and "Line" in debug_loc.keys() + and "Column" in debug_loc.keys() + ): self.debug_loc = DebugLoc( - debug_loc['File'], debug_loc['Line'], - debug_loc['Column']) + debug_loc["File"], debug_loc["Line"], debug_loc["Column"] + ) elif isinstance(debug_loc, DebugLoc): self.debug_loc = debug_loc def __hash__(self): return hash( - (self.code_region_type, self.name, self.debug_loc, self.func_name, - self.pass_name)) + ( + self.code_region_type, + self.name, + self.debug_loc, + self.func_name, + self.pass_name, + ) + ) def __str__(self): - return "Name: " + self.name + "\n" + \ - "Function Name: " + self.func_name + "\n" + \ - "Type: " + self.code_region_type + "\n" + \ - "DebugLoc: " + str(self.debug_loc) + "\n" + \ - "Pass: " + self.pass_name + "\n" + \ - "Hash: " + str(self.hashcode) + "\n" + \ - "Invocation: " + str(self.invocation) + return ( + "Name: " + + self.name + + "\n" + + "Function Name: " + + self.func_name + + "\n" + + "Type: " + + self.code_region_type + + "\n" + + "DebugLoc: " + + str(self.debug_loc) + + "\n" + + "Pass: " + + self.pass_name + + "\n" + + "Hash: " + + str(self.hashcode) + + "\n" + + "Invocation: " + + str(self.invocation) + ) class CodeRegionConfiguration(object): @@ -106,8 +141,7 @@ class Task(object): self.code_region = code_region def __repr__(self): - return str(self.tuning_id) + str(self.param_list) + str( - self.code_region) + return 
str(self.tuning_id) + str(self.param_list) + str(self.code_region) class DebugLoc(object): @@ -121,19 +155,35 @@ class DebugLoc(object): self.column = column def __str__(self): - return "File: " + self.file_name + ", " \ - + "Line: " + str(self.line) + ", " \ - + "Column: " + str(self.column) + return ( + "File: " + + self.file_name + + ", " + + "Line: " + + str(self.line) + + ", " + + "Column: " + + str(self.column) + ) def __repr__(self): - return "File: " + self.file_name + ", " \ - + "Line: " + str(self.line) + ", " \ - + "Column: " + str(self.column) + return ( + "File: " + + self.file_name + + ", " + + "Line: " + + str(self.line) + + ", " + + "Column: " + + str(self.column) + ) def __hash__(self): return hash((self.file_name, self.line, self.column)) def __eq__(self, other): - return self.file_name == other.file_name \ - and self.line == other.line \ + return ( + self.file_name == other.file_name + and self.line == other.line and self.column == other.column + ) diff --git a/autotuner/optrecord.py b/autotuner/optrecord.py index 41a2a65a3ddb974cba5680fbab05f75760e7d21b..af62ea745c3fe82d5c4ecb782a830fbae3a64ee0 100644 --- a/autotuner/optrecord.py +++ b/autotuner/optrecord.py @@ -9,6 +9,7 @@ SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception from __future__ import print_function import yaml + # Try to use the C parser. try: from yaml import CLoader as Loader @@ -17,6 +18,7 @@ except ImportError: from multiprocessing import Lock import subprocess + try: # The previously builtin function `intern()` was moved # to the `sys` module in Python 3. @@ -34,6 +36,7 @@ except AttributeError: def iteritems(d): return iter(d.items()) + else: # Python 2 def itervalues(d): @@ -47,21 +50,22 @@ class Remark(yaml.YAMLObject): # Work-around for http://pyyaml.org/ticket/154. yaml_loader = Loader - default_demangler = 'c++filt -n' + default_demangler = "c++filt -n" demangler_proc = None @classmethod def set_demangler(cls, demangler): cls.demangler_proc = subprocess.Popen( - demangler.split(), stdin=subprocess.PIPE, stdout=subprocess.PIPE) + demangler.split(), stdin=subprocess.PIPE, stdout=subprocess.PIPE + ) cls.demangler_lock = Lock() @classmethod def demangle(cls, name): with cls.demangler_lock: - cls.demangler_proc.stdin.write((name + '\n').encode('utf-8')) + cls.demangler_proc.stdin.write((name + "\n").encode("utf-8")) cls.demangler_proc.stdin.flush() - return cls.demangler_proc.stdout.readline().rstrip().decode('utf-8') + return cls.demangler_proc.stdout.readline().rstrip().decode("utf-8") # Intern all strings since we have lot of duplication across filenames, # remark text. @@ -81,7 +85,7 @@ class Remark(yaml.YAMLObject): def _reduce_memory_dict(old_dict): new_dict = dict() - for (k, v) in iteritems(old_dict): + for k, v in iteritems(old_dict): if type(k) is str: k = intern(k) @@ -93,8 +97,7 @@ class Remark(yaml.YAMLObject): new_dict[k] = v return tuple(new_dict.items()) - self.Args = tuple([_reduce_memory_dict(arg_dict) - for arg_dict in self.Args]) + self.Args = tuple([_reduce_memory_dict(arg_dict) for arg_dict in self.Args]) # The inverse operation of the dictonary-related memory optimization in # _reduce_memory_dict. E.g. 
@@ -102,7 +105,7 @@ class Remark(yaml.YAMLObject): def recover_yaml_structure(self): def tuple_to_dict(t): d = dict() - for (k, v) in t: + for k, v in t: if type(v) is tuple: v = tuple_to_dict(v) d[k] = v @@ -111,23 +114,23 @@ class Remark(yaml.YAMLObject): self.Args = [tuple_to_dict(arg_tuple) for arg_tuple in self.Args] def canonicalize(self): - if not hasattr(self, 'Hotness'): + if not hasattr(self, "Hotness"): self.Hotness = 0 - if not hasattr(self, 'Args'): + if not hasattr(self, "Args"): self.Args = [] self._reduce_memory() @property def File(self): - return self.DebugLoc['File'] + return self.DebugLoc["File"] @property def Line(self): - return int(self.DebugLoc['Line']) + return int(self.DebugLoc["Line"]) @property def Column(self): - return self.DebugLoc['Column'] + return self.DebugLoc["Column"] @property def DebugLocString(self): @@ -142,15 +145,15 @@ class Remark(yaml.YAMLObject): # list containing the value (e.g. for 'Callee' the function) and # optionally a DebugLoc. def get_arg_dict(self): - if hasattr(self, 'ArgDict'): + if hasattr(self, "ArgDict"): return self.ArgDict self.ArgDict = {} for arg in self.Args: if len(arg) == 2: - if arg[0][0] == 'DebugLoc': + if arg[0][0] == "DebugLoc": dbgidx = 0 else: - assert(arg[1][0] == 'DebugLoc') + assert arg[1][0] == "DebugLoc" dbgidx = 1 key = arg[1 - dbgidx][0] @@ -158,7 +161,7 @@ class Remark(yaml.YAMLObject): else: arg = arg[0] key = arg[0] - entry = (arg[1], ) + entry = (arg[1],) self.ArgDict[key] = entry return self.ArgDict diff --git a/autotuner/remarkparser.py b/autotuner/remarkparser.py index 968a86bbde785d55f01c82d61fd25cd927bbb4b3..5e2c043b5f1d2cd36c40257ef80d1d02a39b8974 100644 --- a/autotuner/remarkparser.py +++ b/autotuner/remarkparser.py @@ -9,7 +9,7 @@ import yaml class AutoTuning(Remark): - yaml_tag = '!AutoTuning' + yaml_tag = "!AutoTuning" @property def color(self): @@ -18,7 +18,7 @@ class AutoTuning(Remark): @property def key(self): key_tuple = super().key + (self.CodeRegionType, self.Args, self.Pass) - # DebugLoc is optional in autotuner + # DebugLoc is optional in BiSheng Autotuner if hasattr(self, "DebugLoc"): key_tuple += (self.DebugLoc,) return key_tuple diff --git a/autotuner/resumable/interface.py b/autotuner/resumable/interface.py index 33e6ce0fe3cd15973d3e1d51a80b1599c0c8f0c9..a447da9f7724005f3712b6d249757226726bec6d 100644 --- a/autotuner/resumable/interface.py +++ b/autotuner/resumable/interface.py @@ -1,6 +1,6 @@ # coding=utf-8 """ -Resumable Autotuner Interface. +Resumable BiSheng Autotuner Interface. Copyright (C) 2017-2020, Huawei Technologies Co., Ltd. All rights reserved. """ import glob @@ -8,7 +8,7 @@ import logging import os import random import types -import dill as pickle # Use dill because it supports lambda functions. +import dill as pickle # Use dill because it supports lambda functions. from autotuner.dbutils import create_config_db_session from autotuner.iomanager import EmptySearchSpaceError @@ -38,9 +38,9 @@ class AutoTunerState: self.iomanager = create_io_manager(args.parse_format) self.args = args self.opp_dir = os.path.join(data_dir, "opp") - self.config_file = file_exists_error_or_path(data_dir, "config" + - self.iomanager. 
- get_file_extension()) + self.config_file = file_exists_error_or_path( + data_dir, "config" + self.iomanager.get_file_extension() + ) self.objective = objective self.root_technique = None self.pending_result_callbacks = None @@ -48,11 +48,12 @@ class AutoTunerState: self.inject_seed = False self.best_result = None - if args.use_optimal_configs != "none" \ - and not args.use_hash_matching: + if args.use_optimal_configs != "none" and not args.use_hash_matching: args.use_optimal_configs = "none" - log.warning("'use-hash-matching' must be enabled to use previous " - "optimal configuration! Disabling reuse/retune.") + log.warning( + "'use-hash-matching' must be enabled to use previous " + "optimal configuration! Disabling reuse/retune." + ) if args.use_optimal_configs == "reuse": self.use_prev_configs = True @@ -64,36 +65,48 @@ class AutoTunerState: self.config_db_dir = os.environ["CONFIG_DB_DIR"] else: self.config_db_dir = data_dir - log.warning("Environment variable CONFIG_DB_DIR is not set; " - "a default directory is used for saving the " - "config database: %s", self.config_db_dir) + log.warning( + "Environment variable CONFIG_DB_DIR is not set; " + "a default directory is used for saving the " + "config database: %s", + self.config_db_dir, + ) # Always create config_db since program-params need it. self.config_db = create_config_db_session(self.config_db_dir) # Init search space. search_space = self._init_search_space() try: - self.task_map = self.iomanager.parse_search_space(search_space, - self.args.use_dynamic_values, - self.args.use_baseline_config, - os.path.join(data_dir, "initial_config.json")) + self.task_map = self.iomanager.parse_search_space( + search_space, + self.args.use_dynamic_values, + self.args.use_baseline_config, + os.path.join(data_dir, "initial_config.json"), + ) except EmptySearchSpaceError: # We found the optimal configurations in database for every # code region. - self.iomanager.generate_baseline_llvm_input(self.config_file, - self.config_db) - log.info("Optimal conditions are found for this code and wrote " - "optimal configuration to %s; re-compile with -fautotune " - "to apply it", self.config_file) - log.info("Please use --use-optimal-configs=none to tune the code " - "from scratch or use --use-optimal-configs=retune to " - "retune the code and use the optimal configurations as " - "starting point (seed value) for the AutoTuner.") + self.iomanager.generate_baseline_llvm_input( + self.config_file, self.config_db + ) + log.info( + "Optimal conditions are found for this code and wrote " + "optimal configuration to %s; re-compile with -fautotune " + "to apply it", + self.config_file, + ) + log.info( + "Please use --use-optimal-configs=none to tune the code " + "from scratch or use --use-optimal-configs=retune to " + "retune the code and use the optimal configurations as " + "starting point (seed value) for the AutoTuner." + ) raise if self.args.use_baseline_config: - self.args.seed_configuration.append(os.path.join(data_dir, - "initial_config.json")) + self.args.seed_configuration.append( + os.path.join(data_dir, "initial_config.json") + ) log.info("Baseline configuration used as input seed.") if self.inject_seed: @@ -101,8 +114,11 @@ class AutoTunerState: # found otherwise assign random values and store the seed baseline # in 'initial_config.json' and forward its path to OpenTuner. # This file will act as initial seed configurations for OpenTuner. 
- seed_config = self.iomanager.seed_baseline(self.task_map, - self.config_db, os.path.join(data_dir, "initial_config.json")) + seed_config = self.iomanager.seed_baseline( + self.task_map, + self.config_db, + os.path.join(data_dir, "initial_config.json"), + ) self.args.seed_configuration.append(seed_config) log.info("Seed configuration injected to the AutoTuner.") @@ -113,10 +129,10 @@ class AutoTunerState: def __getstate__(self): state = self.__dict__.copy() - if state['config_db']: - state['config_db'].commit() - state['config_db'].close() - state['config_db'] = None + if state["config_db"]: + state["config_db"].commit() + state["config_db"].close() + state["config_db"] = None return state def __setstate__(self, state): @@ -126,14 +142,14 @@ class AutoTunerState: # Do not re-seed if the state is being loaded. self.args.seed_configuration = [] - def _init_search_space(self): if not self.args.search_space: dir_name = os.path.dirname(__file__) search_space_config = os.path.join( dir_name, - "../search_space_config/default_search_space" + - self.iomanager.get_file_extension()) + "../search_space_config/default_search_space" + + self.iomanager.get_file_extension(), + ) else: search_space_config = self.args.search_space check_file_permissions(search_space_config) @@ -143,10 +159,13 @@ class AutoTunerState: raise Exception( "No tuning opportunities files under {}; compile with " "-fautotune-generate to generate tuning opportunities".format( - self.opp_dir)) + self.opp_dir + ) + ) search_space = self.iomanager.generate_search_space( - opp_files, search_space_config, + opp_files, + search_space_config, file_name_filter=self.args.file_name_filter, func_name_filter=self.args.func_name_filter, name_filter=self.args.name_filter, @@ -155,7 +174,8 @@ class AutoTunerState: config_db=self.config_db, use_hash_matching=self.args.use_hash_matching, use_prev_configs=self.use_prev_configs, - inject_seed=self.inject_seed) + inject_seed=self.inject_seed, + ) # Clean up the tuning opportunity files since they are no longer needed # after the search space is generated. @@ -187,8 +207,7 @@ class StateSerializer: def serialize(self, auto_tuner): auto_tuner_state = auto_tuner.auto_tuner_state # Add extra necessary information into AutoTunerState. 
- auto_tuner_state.root_technique = \ - auto_tuner.api.search_driver.root_technique + auto_tuner_state.root_technique = auto_tuner.api.search_driver.root_technique pending_results = [ result for result in auto_tuner.api.search_driver.pending_result_callbacks @@ -199,7 +218,7 @@ class StateSerializer: file_path = os.path.join(self.data_dir, self.state_file) file_fd = create_secure_fd(file_path) - with os.fdopen(file_fd, 'wb') as file: + with os.fdopen(file_fd, "wb") as file: pickle.dump(auto_tuner_state, file) def deserialize(self): @@ -223,19 +242,17 @@ class AutoTunerInterface: def initialize(self, args, data_dir, objective): self.auto_tuner_state = AutoTunerState(args, data_dir, objective) self.process_deterministic(args, data_dir) - interface = self._create_default_measurement_interface( - self.auto_tuner_state) + interface = self._create_default_measurement_interface(self.auto_tuner_state) self.api = ResumableRunManager(interface, self.auto_tuner_state.args) self.auto_tuner_state.tuning_run_id = self.api.tuning_run.id - log.info("Initialized a new tuning run (ID: %s)", - self.api.tuning_run.id) + log.info("Initialized a new tuning run (ID: %s)", self.api.tuning_run.id) def resume(self, auto_tuning_state): self.auto_tuner_state = auto_tuning_state - interface = self._create_default_measurement_interface( - auto_tuning_state) - self.api = ResumableRunManager(interface, auto_tuning_state.args, - auto_tuning_state) + interface = self._create_default_measurement_interface(auto_tuning_state) + self.api = ResumableRunManager( + interface, auto_tuning_state.args, auto_tuning_state + ) log.info("Resumed a tuning run (ID: %s)", self.api.tuning_run.id) def next_config(self, trials=1, retry_limit=5): @@ -260,9 +277,9 @@ class AutoTunerInterface: log.warning( "Possible configurations may have been exhausted; " "try again or run 'llvm-autotune finalize' to generate " - "optimal configuration") - log.warning("Only %s configurations were generated", - trial_id) + "optimal configuration" + ) + log.warning("Only %s configurations were generated", trial_id) # Save the current desired_results into the database. self.api.commit(force=True) return @@ -270,23 +287,25 @@ class AutoTunerInterface: if trials == 1: file_name = self.auto_tuner_state.config_file else: - index = self.auto_tuner_state.config_file.find('.yaml') + index = self.auto_tuner_state.config_file.find(".yaml") file_name = "{}-{}{}".format( - self.auto_tuner_state.config_file[:index], - trial_id, - self.auto_tuner_state.config_file[index:]) + self.auto_tuner_state.config_file[:index], + trial_id, + self.auto_tuner_state.config_file[index:], + ) self.auto_tuner_state.iomanager.build_llvm_input( - cfg, self.auto_tuner_state.task_map, file_name, + cfg, + self.auto_tuner_state.task_map, + file_name, config_db=self.auto_tuner_state.config_db, - use_hash_matching=self.auto_tuner_state.args.use_hash_matching) + use_hash_matching=self.auto_tuner_state.args.use_hash_matching, + ) - log.info("Generated a new configuration (ID: %s)", - desired_result.id) + log.info("Generated a new configuration (ID: %s)", desired_result.id) # Save state info into the auto_tuning_state. - self.auto_tuner_state.current_desired_result_ids.append( - desired_result.id) + self.auto_tuner_state.current_desired_result_ids.append(desired_result.id) # Save the best_result (found so far) for initialization of SearchDriver # object. 
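# A minimal, hypothetical driver loop sketching how AutoTunerInterface.next_config
# and the feedback/finalize steps in the patch above are exercised end to end
# through the llvm-autotune CLI. Only the subcommands (minimize, feedback,
# finalize), the AUTOTUNE_DATADIR variable, and the -fautotune-generate /
# -fautotune compile flags come from this patch; build_and_measure() and the
# iteration count are placeholders, not part of the tool.
import os
import subprocess


def build_and_measure():
    # Placeholder: re-build the project with -fautotune so the freshly written
    # config.yaml is applied, run the workload, and return the metric being
    # minimized (e.g. run time in seconds).
    raise NotImplementedError


def tune(iterations=10):
    env = dict(os.environ, AUTOTUNE_DATADIR="autotune_datadir")
    # Parse the tuning opportunities emitted by -fautotune-generate and write
    # the initial config.yaml under AUTOTUNE_DATADIR.
    subprocess.run(["llvm-autotune", "minimize"], env=env, check=True)
    for _ in range(iterations):
        run_time = build_and_measure()
        # Report the measurement; this resumes the saved tuning run and
        # produces the next candidate configuration.
        subprocess.run(["llvm-autotune", "feedback", str(run_time)],
                       env=env, check=True)
    # Write the optimal configuration and close the tuning run.
    subprocess.run(["llvm-autotune", "finalize"], env=env, check=True)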
@@ -301,13 +320,13 @@ class AutoTunerInterface: """ desired_result_ids = self.auto_tuner_state.current_desired_result_ids if len(feedback_values) != len(desired_result_ids): - log.error("Number of feedback values received: %s", - len(feedback_values)) - log.error("Number of feedback values expected: %s", - len(desired_result_ids)) - raise Exception("Number of feedback values do not match the number" - " of configurations generated in the " - "previous iteration") + log.error("Number of feedback values received: %s", len(feedback_values)) + log.error("Number of feedback values expected: %s", len(desired_result_ids)) + raise Exception( + "Number of feedback values do not match the number" + " of configurations generated in the " + "previous iteration" + ) for trial_id, feedback in enumerate(feedback_values): if self.auto_tuner_state.objective == "maximize": @@ -319,13 +338,14 @@ class AutoTunerInterface: result = Result(time=feedback) current_desired_result = self.api.session.query( - resultsdb.models.DesiredResult).get( - desired_result_ids[trial_id]) - self.api.report_result(current_desired_result, - result) - log.info("Received performance feedback %f for " - "configuration (ID: %s)", feedback, - desired_result_ids[trial_id]) + resultsdb.models.DesiredResult + ).get(desired_result_ids[trial_id]) + self.api.report_result(current_desired_result, result) + log.info( + "Received performance feedback %f for " "configuration (ID: %s)", + feedback, + desired_result_ids[trial_id], + ) self.api.commit(force=True) # Clean up config files from the previous iteration. @@ -357,18 +377,24 @@ class AutoTunerInterface: hash_option = self.auto_tuner_state.args.use_hash_matching if config_update and self.auto_tuner_state.config_db: self.auto_tuner_state.iomanager.update_config_db( - best_cfg, self.auto_tuner_state.task_map, + best_cfg, + self.auto_tuner_state.task_map, config_db=self.auto_tuner_state.config_db, - use_hash_matching=hash_option) + use_hash_matching=hash_option, + ) self.auto_tuner_state.iomanager.build_llvm_input( - best_cfg, self.auto_tuner_state.task_map, + best_cfg, + self.auto_tuner_state.task_map, self.auto_tuner_state.config_file, config_db=self.auto_tuner_state.config_db, - use_hash_matching=hash_option) + use_hash_matching=hash_option, + ) - log.info("Wrote optimal configuration to %s; re-compile with " - "-fautotune to apply it", - self.auto_tuner_state.config_file) + log.info( + "Wrote optimal configuration to %s; re-compile with " + "-fautotune to apply it", + self.auto_tuner_state.config_file, + ) return True else: @@ -384,8 +410,9 @@ class AutoTunerInterface: if self.auto_tuner_state.config_db: self.auto_tuner_state.config_db.commit() self.auto_tuner_state.config_db.close() - log.info("Finalized a tuning run (ID: %s)", - self.auto_tuner_state.tuning_run_id) + log.info( + "Finalized a tuning run (ID: %s)", self.auto_tuner_state.tuning_run_id + ) else: raise Exception("Cannot finalize without an optimal configuration") @@ -397,17 +424,18 @@ class AutoTunerInterface: objective = MaximizeRate() else: raise Exception( - "Unsupported objective: {}".format( - auto_tuning_state.objective)) + "Unsupported objective: {}".format(auto_tuning_state.objective) + ) interface = DefaultMeasurementInterface( args=auto_tuning_state.args, objective=objective, input_manager=FixedInputManager(), manipulator=auto_tuning_state.manipulator, - project_name='unnamed_project', - program_name='unnamed_program', - program_version='0.1') + project_name="unnamed_project", + program_name="unnamed_program", + 
program_version="0.1", + ) return interface @staticmethod @@ -432,7 +460,7 @@ class AutoTunerInterface: random_state = random.getstate() file_path = os.path.join(data_dir, "random_state.seed") file_fd = create_secure_fd(file_path) - with os.fdopen(file_fd, 'wb') as file: + with os.fdopen(file_fd, "wb") as file: pickle.dump(random_state, file) def _process_all_results(self): @@ -459,9 +487,10 @@ def file_exists_error_or_path(data_dir, filename): file_path = os.path.join(data_dir, filename) if os.path.exists(file_path): raise IOError( - "Existing autotuner data found {}; " + "Existing BiSheng Autotuner data found {}; " "set AUTOTUNER_DATADIR environment variable " - "to a new directory".format(file_path)) + "to a new directory".format(file_path) + ) return file_path diff --git a/autotuner/resumable/main.py b/autotuner/resumable/main.py index b23b2866c181cc50c393fa935a2d91b4a5d8e3a0..fb420c29d728b93ee0acc2bd473815e75fca3290 100644 --- a/autotuner/resumable/main.py +++ b/autotuner/resumable/main.py @@ -1,6 +1,6 @@ # coding=utf-8 """ -Autotuner's command-line interface. +BiSheng Autotuner's command-line interface. Copyright (C) 2017-2020, Huawei Technologies Co., Ltd. All rights reserved. """ from __future__ import print_function @@ -8,6 +8,7 @@ from __future__ import print_function import argparse import logging import os + try: from importlib.metadata import metadata, PackageNotFoundError except ImportError: @@ -22,7 +23,7 @@ from autotuner.iomanager import argument_parser as io_argument_parser log = logging.getLogger(__name__) -MAX_PARALLELISM = 4096 # Define maximum number of trials in parallel. +MAX_PARALLELISM = 4096 # Define maximum number of trials in parallel. def initialize(data_dir, args, objective, trials): @@ -70,9 +71,9 @@ def parse_metadata(project_name): """ try: distribution_metadata_dict = metadata(project_name) - name = distribution_metadata_dict['Summary'] - version = distribution_metadata_dict['Version'] - git_hash, date = distribution_metadata_dict['Keywords'].split(',') + name = distribution_metadata_dict["Summary"] + version = distribution_metadata_dict["Version"] + git_hash, date = distribution_metadata_dict["Keywords"].split(",") except (ValueError, AttributeError, PackageNotFoundError): name = "BiSheng Autotuner" version = "(dev)" @@ -81,18 +82,19 @@ def parse_metadata(project_name): version_message = "{name} {version}".format(name=name, version=version) if len(git_hash) > 0: - version_message += " (commit-{hash} {date})"\ - .format(hash=git_hash, date=date) + version_message += " (commit-{hash} {date})".format(hash=git_hash, date=date) return version_message def create_parser(): # Create the top-level parser - top_parser = argparse.ArgumentParser(prog="llvm-autotune", - formatter_class=argparse.RawTextHelpFormatter) + top_parser = argparse.ArgumentParser( + prog="llvm-autotune", formatter_class=argparse.RawTextHelpFormatter + ) - top_parser.add_argument('-v', '--version', action='version', - version=parse_metadata('autotuner')) + top_parser.add_argument( + "-v", "--version", action="version", version=parse_metadata("autotuner") + ) sub_parsers = top_parser.add_subparsers(dest="command") sub_parsers.required = True @@ -107,19 +109,25 @@ def create_parser(): argument.help = argparse.SUPPRESS _suppress_help_messages(parent_parsers) - min_parser = sub_parsers.add_parser("minimize", parents=parent_parsers, - formatter_class=argparse.RawTextHelpFormatter, - help="Initialize tuning and generate the " - "initial compiler configuration file, " - "aiming to minimize the metric 
" - "(e.g. run time)") - - max_parser = sub_parsers.add_parser("maximize", parents=parent_parsers, - formatter_class=argparse.RawTextHelpFormatter, - help="Initialize tuning and generate the " - "initial compiler configuration file, " - "aiming to maximize the metric " - "(e.g. throughput)") + min_parser = sub_parsers.add_parser( + "minimize", + parents=parent_parsers, + formatter_class=argparse.RawTextHelpFormatter, + help="Initialize tuning and generate the " + "initial compiler configuration file, " + "aiming to minimize the metric " + "(e.g. run time)", + ) + + max_parser = sub_parsers.add_parser( + "maximize", + parents=parent_parsers, + formatter_class=argparse.RawTextHelpFormatter, + help="Initialize tuning and generate the " + "initial compiler configuration file, " + "aiming to maximize the metric " + "(e.g. throughput)", + ) for parser in (min_parser, max_parser): _add_arg_trials(parser) _add_arg_search_space(parser) @@ -130,38 +138,50 @@ def create_parser(): _add_arg_baseline_config(parser) # Create the the parser for the "feedback" command - feedback_parser = sub_parsers.add_parser("feedback", - formatter_class=argparse.RawTextHelpFormatter, - help="Feed back performance tuning result(s) " - "and generate new test configurations") + feedback_parser = sub_parsers.add_parser( + "feedback", + formatter_class=argparse.RawTextHelpFormatter, + help="Feed back performance tuning result(s) " + "and generate new test configurations", + ) _add_arg_trials(feedback_parser) - sub_parsers.add_parser("dump", - formatter_class=argparse.RawTextHelpFormatter, - help="Dump the current best configuration without " - "terminating the tuning run") - feedback_parser.add_argument("values", type=float, nargs='*', - help="Performance tuning result(s)") - feedback_parser.add_argument("-i", "--feedback-file", - help="Load feedback values from " - "a CSV file; any values " - "specified on command line are " - "overridden by those specified in the " - "file") + sub_parsers.add_parser( + "dump", + formatter_class=argparse.RawTextHelpFormatter, + help="Dump the current best configuration without " + "terminating the tuning run", + ) + feedback_parser.add_argument( + "values", type=float, nargs="*", help="Performance tuning result(s)" + ) + feedback_parser.add_argument( + "-i", + "--feedback-file", + help="Load feedback values from " + "a CSV file; any values " + "specified on command line are " + "overridden by those specified in the " + "file", + ) # Create the parser for the "finalize" command. - finalize_parser = sub_parsers.add_parser("finalize", - formatter_class=argparse.RawTextHelpFormatter, - help="Finalize tuning and generate the optimal " - "compiler configuration") - finalize_parser.add_argument("--store-optimal-configs", - dest="config_update", action="store_true", - help="specifiy if the optimal configuration " - "will be stored in configs.db upon " - "completion. It will overwrite previous " - "rows on conflict. " - "Default: No update.") + finalize_parser = sub_parsers.add_parser( + "finalize", + formatter_class=argparse.RawTextHelpFormatter, + help="Finalize tuning and generate the optimal " "compiler configuration", + ) + finalize_parser.add_argument( + "--store-optimal-configs", + dest="config_update", + action="store_true", + help="specifiy if the optimal configuration " + "will be stored in configs.db upon " + "completion. It will overwrite previous " + "rows on conflict. 
" + "Default: No update.", + ) return top_parser @@ -182,7 +202,9 @@ def main(): data_dir = "autotune_datadir" log.warning( "Environment variable AUTOTUNE_DATADIR is not set; " - "a default directory is used for saving the data: %s", data_dir) + "a default directory is used for saving the data: %s", + data_dir, + ) try: if args.command == "minimize" or args.command == "maximize": initialize(data_dir, args, args.command, args.trials) @@ -209,114 +231,165 @@ def _add_arg_trials(parser): ivalue = int(value) if ivalue <= 0: raise argparse.ArgumentTypeError( - "{} is an invalid positive int value".format(ivalue)) + "{} is an invalid positive int value".format(ivalue) + ) if ivalue > MAX_PARALLELISM: raise argparse.ArgumentTypeError( - "Maximum number of trials is {}".format(MAX_PARALLELISM)) + "Maximum number of trials is {}".format(MAX_PARALLELISM) + ) return ivalue - parser.add_argument("--trials", type=positive_int, default=1, - help="Specify the number of trials to be tested " - "in the next iteration") + parser.add_argument( + "--trials", + type=positive_int, + default=1, + help="Specify the number of trials to be tested " "in the next iteration", + ) return parser def _add_arg_deterministic(parser): def str2bool(value): - if value.lower() in ['true', '1', 'yes', 't', 'y']: + if value.lower() in ["true", "1", "yes", "t", "y"]: return True - elif value.lower() in ['false', '0', 'no', 'f', 'n']: + elif value.lower() in ["false", "0", "no", "f", "n"]: return False else: - raise argparse.ArgumentTypeError( - "Invalid value: {}".format(value)) - - parser.add_argument("--deterministic", type=str2bool, default=False, - help="Enable deterministic tuning mode to generate " - "reproducible results/output. For testing " - "purposes only; off by default. [True/False]") - - parser.add_argument("--seed", default=0x31337, - help="Specifying the seed value for Random Number " - "Generator. For testing purposes only") - - parser.add_argument("--seed-file", type=str, - help="Specify the path of seed file for Random Number " - "Generator. This option requires " - "'--deterministic=True'.") + raise argparse.ArgumentTypeError("Invalid value: {}".format(value)) + + parser.add_argument( + "--deterministic", + type=str2bool, + default=False, + help="Enable deterministic tuning mode to generate " + "reproducible results/output. For testing " + "purposes only; off by default. [True/False]", + ) + + parser.add_argument( + "--seed", + default=0x31337, + help="Specifying the seed value for Random Number " + "Generator. For testing purposes only", + ) + + parser.add_argument( + "--seed-file", + type=str, + help="Specify the path of seed file for Random Number " + "Generator. This option requires " + "'--deterministic=True'.", + ) def _add_arg_search_space(parser): - parser.add_argument("--search-space", - help="Specify the path of search space file") + parser.add_argument("--search-space", help="Specify the path of search space file") return parser def _add_config_db_arguments(parser): - parser.add_argument("--use-hash-matching", - dest="use_hash_matching", action="store_true", - help="Assign same configuration to the opportunities " - "which have same hash value.") - - parser.add_argument("--use-optimal-configs", - dest="use_optimal_configs", - choices=["none", "reuse", "retune"], - default="none", - help="Use previously found/stored configurations for " - "code regions for current tuning.\n" - "Options: {none[Default], reuse, retune}. 
" - "reuse/retune can only be used when " - "'use-hash-matching' is enabled.\n" - "none: Do not reuse the old configurations.\n" - "reuse: Reuse the old optimal configurations " - "found and tune code regions which don't have " - "optimal configurations stored in database.\n" - "retune: Retune all the code regions and use " - "the optimal configurations (found in database) " - "as starting point for AutoTuner.\n") + parser.add_argument( + "--use-hash-matching", + dest="use_hash_matching", + action="store_true", + help="Assign same configuration to the opportunities " + "which have same hash value.", + ) + + parser.add_argument( + "--use-optimal-configs", + dest="use_optimal_configs", + choices=["none", "reuse", "retune"], + default="none", + help="Use previously found/stored configurations for " + "code regions for current tuning.\n" + "Options: {none[Default], reuse, retune}. " + "reuse/retune can only be used when " + "'use-hash-matching' is enabled.\n" + "none: Do not reuse the old configurations.\n" + "reuse: Reuse the old optimal configurations " + "found and tune code regions which don't have " + "optimal configurations stored in database.\n" + "retune: Retune all the code regions and use " + "the optimal configurations (found in database) " + "as starting point for BiSheng Autotuner.\n", + ) return parser def _add_code_region_filtering_arguments(parser): - parser.add_argument('--name-filter', nargs='+', metavar='Name', - default=[], - help='Generate search space to include only code ' - 'regions named in space-delimited list.') - parser.add_argument('--func-name-filter', nargs='+', metavar='Name', - default=[], - help='Generate search space to include only code ' - 'regions having function name in space-delimited ' - 'list.') - parser.add_argument('--file-name-filter', nargs='+', metavar='Name', - default=[], - help='Generate search space to include only code ' - 'regions having file name in space-delimited ' - 'list.') - parser.add_argument('--pass-filter', nargs='+', metavar='Name', - default=[], - help='Generate search space to include only code ' - 'regions of a specific pass.') - parser.add_argument('--type-filter', nargs='+', metavar='Name', - default=[], - help='Generate search space to include only code ' - 'regions having type in space-delimited list.\n' - 'Options: [loop, callsite, machine_basic_block, ' - 'other, llvm-param, program-param', - choices=['loop', 'callsite', 'machine_basic_block', - 'other', 'llvm-param', 'program-param']) + parser.add_argument( + "--name-filter", + nargs="+", + metavar="Name", + default=[], + help="Generate search space to include only code " + "regions named in space-delimited list.", + ) + parser.add_argument( + "--func-name-filter", + nargs="+", + metavar="Name", + default=[], + help="Generate search space to include only code " + "regions having function name in space-delimited " + "list.", + ) + parser.add_argument( + "--file-name-filter", + nargs="+", + metavar="Name", + default=[], + help="Generate search space to include only code " + "regions having file name in space-delimited " + "list.", + ) + parser.add_argument( + "--pass-filter", + nargs="+", + metavar="Name", + default=[], + help="Generate search space to include only code " + "regions of a specific pass.", + ) + parser.add_argument( + "--type-filter", + nargs="+", + metavar="Name", + default=[], + help="Generate search space to include only code " + "regions having type in space-delimited list.\n" + "Options: [loop, callsite, machine_basic_block, " + "other, llvm-param, 
program-param", + choices=[ + "loop", + "callsite", + "machine_basic_block", + "other", + "llvm-param", + "program-param", + ], + ) return parser + def _add_use_dynamic_values(parser): - parser.add_argument('--use-dynamic-values', action='store_true', - help='Turn on dynamic values suggested by the compiler' - 'Default: turned off') + parser.add_argument( + "--use-dynamic-values", + action="store_true", + help="Turn on dynamic values suggested by the compiler" "Default: turned off", + ) def _add_arg_baseline_config(parser): - parser.add_argument("-b", '--use-baseline-config', action='store_true', - help='Start the search from the baseline configuration' - ' instead of a random point in the search space' - ' (default).') + parser.add_argument( + "-b", + "--use-baseline-config", + action="store_true", + help="Start the search from the baseline configuration" + " instead of a random point in the search space" + " (default).", + ) def _suppress_help_messages(parsers): diff --git a/autotuner/resumable/run_manager.py b/autotuner/resumable/run_manager.py index 774e4db1f7e20413be3b225dc3af4c59d290b641..bc732a3aa697e86193acc4cedc2ea03c3243f29b 100644 --- a/autotuner/resumable/run_manager.py +++ b/autotuner/resumable/run_manager.py @@ -15,55 +15,52 @@ class ResumableRunManager(TuningRunManager): This class manages a tuning run that can save/resumed to/from disk. """ - def __init__(self, measurement_interface, args, tuning_state=None, - **kwargs): + def __init__(self, measurement_interface, args, tuning_state=None, **kwargs): if not tuning_state: # Initialize a new run manager. - super(ResumableRunManager, self).__init__(measurement_interface, - args, - **kwargs) + super(ResumableRunManager, self).__init__( + measurement_interface, args, **kwargs + ) else: # Initialize a new run manager with the existing tuning state. 
- super(TuningRunManager, self).__init__(measurement_interface, args, - **kwargs) - self.engine, self.Session = resultsdb.connect( - tuning_state.args.database) + super(TuningRunManager, self).__init__( + measurement_interface, args, **kwargs + ) + self.engine, self.Session = resultsdb.connect(tuning_state.args.database) # Resume the tuning run from the database - self.tuning_run = self.session.query( - resultsdb.models.TuningRun).get(tuning_state.tuning_run_id) + self.tuning_run = self.session.query(resultsdb.models.TuningRun).get( + tuning_state.tuning_run_id + ) if self.tuning_run.state != "RUNNING": raise Exception( "Cannot resume a complete or aborted tuning run; " "run 'llvm-autotune minimize/maximize' to start " - "a new tuning run") + "a new tuning run" + ) driver_kwargs = { - 'args': self.args, - 'best_result': self.attach_db_session(tuning_state.best_result), - 'input_manager': self.input_manager, - 'manipulator': self.manipulator, - 'measurement_interface': self.measurement_interface, - 'objective': self.objective, - 'session': self.session, - 'tuning_run_main': self, - 'tuning_run': self.tuning_run, - 'extra_seeds': - self.measurement_interface.seed_configurations(), - 'extra_criteria': - self.measurement_interface.extra_convergence_criteria, - 'root_technique': self.attach_db_session( - tuning_state.root_technique) + "args": self.args, + "best_result": self.attach_db_session(tuning_state.best_result), + "input_manager": self.input_manager, + "manipulator": self.manipulator, + "measurement_interface": self.measurement_interface, + "objective": self.objective, + "session": self.session, + "tuning_run_main": self, + "tuning_run": self.tuning_run, + "extra_seeds": self.measurement_interface.seed_configurations(), + "extra_criteria": self.measurement_interface.extra_convergence_criteria, + "root_technique": self.attach_db_session(tuning_state.root_technique), } self.search_driver = self.search_driver_cls(**driver_kwargs) - self.search_driver.pending_result_callbacks = \ - self.attach_db_session(tuning_state.pending_result_callbacks) + self.search_driver.pending_result_callbacks = self.attach_db_session( + tuning_state.pending_result_callbacks + ) - self.measurement_driver = self.measurement_driver_cls( - **driver_kwargs) + self.measurement_driver = self.measurement_driver_cls(**driver_kwargs) self.measurement_interface.set_driver(self.measurement_driver) self.input_manager.set_driver(self.measurement_driver) - self.tuning_run.machine_class = self.measurement_driver. \ - get_machine_class() + self.tuning_run.machine_class = self.measurement_driver.get_machine_class() self.tuning_run.input_class = self.input_manager.get_input_class() # Suppress logs from opentuner modules. 
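# A minimal sketch (helper and file names here are assumptions, not part of
# this patch) of the save/resume round trip that StateSerializer and
# ResumableRunManager implement: the tuning state is pickled with dill between
# llvm-autotune invocations and handed back to ResumableRunManager on the next
# call. The real code writes through create_secure_fd; plain open() is used
# only to keep the sketch short.
import os
import dill as pickle  # dill, as above, because it supports lambda functions


def save_state(state, data_dir, state_file="tuning_state.pkl"):
    # Persist the in-memory tuning state so a later CLI invocation can resume it.
    with open(os.path.join(data_dir, state_file), "wb") as file:
        pickle.dump(state, file)


def load_state(data_dir, state_file="tuning_state.pkl"):
    # Restore the pickled state; the caller rebuilds the run manager from it,
    # as ResumableRunManager does with tuning_state in __init__ above.
    with open(os.path.join(data_dir, state_file), "rb") as file:
        return pickle.load(file)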
@@ -84,8 +81,7 @@ class ResumableRunManager(TuningRunManager): elif isinstance(obj, dict): new_dict = dict() for key, value in obj.items(): - new_dict[self.attach_db_session( - key)] = self.attach_db_session(value) + new_dict[self.attach_db_session(key)] = self.attach_db_session(value) obj = new_dict elif isinstance(obj, list): obj = [self.attach_db_session(ele) for ele in obj] diff --git a/autotuner/test/dummy_tuner.py b/autotuner/test/dummy_tuner.py index 1f5884dd321ecaef99d47d7860f441fa32929cd9..ae936bc6d8666ec64a324187dd84d95e1e8295c7 100644 --- a/autotuner/test/dummy_tuner.py +++ b/autotuner/test/dummy_tuner.py @@ -8,7 +8,8 @@ from autotuner.tuners.tunerbase import CustomTunerBase class Tuner(CustomTunerBase): """ - This class is a placeholder for unittest with autotuner's --tuner flag + This class is a placeholder for unittest with BiSheng Autotuner's --tuner + flag """ def run(self, desired_result, desired_input, limit): diff --git a/autotuner/test/test_auto_run.py b/autotuner/test/test_auto_run.py index cad7a57f5930829f09884380e5d6db66d9be78a6..b14c461ad566c628885f9b9ad14b7956ce454968 100755 --- a/autotuner/test/test_auto_run.py +++ b/autotuner/test/test_auto_run.py @@ -16,7 +16,7 @@ from autotuner.tuners.simple_tuner import SimpleTuner class TestAutoRun(unittest.TestCase): """ - Test the auto_run subcommand in autotuner + Test the auto_run subcommand in BiSheng Autotuner """ def setUp(self): @@ -25,29 +25,35 @@ class TestAutoRun(unittest.TestCase): self.args.list_techniques = False self.args.command = "run" curr_dir = os.path.dirname(__file__) - self.args.config_file = os.path.join( - curr_dir, "Inputs", "test_sample.ini") + self.args.config_file = os.path.join(curr_dir, "Inputs", "test_sample.ini") self.args.tuner = None self.args.output = None self.args.stage_order = ["loop"] self.args.search_config_file = os.path.join( - curr_dir, "Inputs", "test_search_space_config.yaml") + curr_dir, "Inputs", "test_search_space_config.yaml" + ) @mock.patch.object(MeasurementInterface, "call_program") @mock.patch.object(SimpleTuner, "main") @mock.patch("autotuner.main.create_io_manager") @mock.patch("autotuner.main._clean_opp") - def test_phase_based_run_main_iomanager_called(self, - mock_clean_opp, - mock_create_io_manager, - mock_simpletuner_main, - mock_call_program): + def test_phase_based_run_main_iomanager_called( + self, + mock_clean_opp, + mock_create_io_manager, + mock_simpletuner_main, + mock_call_program, + ): mock_iomanager = mock.MagicMock() mock_create_io_manager.return_value = mock_iomanager - fake_result = {"returncode": 0, - "stdout": "succuss", "stderr": "", - 'timeout': False, 'time': 1.89} + fake_result = { + "returncode": 0, + "stdout": "succuss", + "stderr": "", + "timeout": False, + "time": 1.89, + } mock_call_program.return_value = fake_result auto_run_main(self.args) @@ -59,11 +65,13 @@ class TestAutoRun(unittest.TestCase): @mock.patch.object(SimpleTuner, "main") @mock.patch("autotuner.main.create_io_manager") @mock.patch("autotuner.main._clean_opp") - def test_phase_based_run_main_subprocess_called(self, - mock_clean_opp, - mock_create_io_manager, - mock_simpletuner_main, - mock_call_program): + def test_phase_based_run_main_subprocess_called( + self, + mock_clean_opp, + mock_create_io_manager, + mock_simpletuner_main, + mock_call_program, + ): """ Check if the subprocess that calls compiler command is called in auto_run @@ -71,9 +79,13 @@ class TestAutoRun(unittest.TestCase): mock_iomanager = mock.MagicMock() mock_create_io_manager.return_value = mock_iomanager - 
fake_result = {"returncode": 0, - "stdout": "succuss", "stderr": "", - 'timeout': False, 'time': 1.89} + fake_result = { + "returncode": 0, + "stdout": "succuss", + "stderr": "", + "timeout": False, + "time": 1.89, + } mock_call_program.return_value = fake_result auto_run_main(self.args) @@ -84,20 +96,26 @@ class TestAutoRun(unittest.TestCase): @mock.patch.object(SimpleTuner, "main") @mock.patch("autotuner.main.create_io_manager") @mock.patch("autotuner.main._clean_opp") - def test_phase_based_run_main_tuner_called(self, - mock_clean_opp, - mock_create_io_manager, - mock_simpletuner_main, - mock_call_program): + def test_phase_based_run_main_tuner_called( + self, + mock_clean_opp, + mock_create_io_manager, + mock_simpletuner_main, + mock_call_program, + ): """ Check if simple tuner's main function is called """ mock_iomanager = mock.MagicMock() mock_create_io_manager.return_value = mock_iomanager - fake_result = {"returncode": 0, - "stdout": "succuss", "stderr": "", - 'timeout': False, 'time': 1.89} + fake_result = { + "returncode": 0, + "stdout": "succuss", + "stderr": "", + "timeout": False, + "time": 1.89, + } mock_call_program.return_value = fake_result auto_run_main(self.args) @@ -109,23 +127,29 @@ class TestAutoRun(unittest.TestCase): @mock.patch.object(SimpleTuner, "main") @mock.patch("autotuner.main.create_io_manager") @mock.patch("autotuner.main._clean_opp") - def test_phase_based_run_main_multi_stage(self, mock_clean_opp, - mock_create_io_manager, - mock_simpletuner_main, - mock_call_program): + def test_phase_based_run_main_multi_stage( + self, + mock_clean_opp, + mock_create_io_manager, + mock_simpletuner_main, + mock_call_program, + ): """ Check correct behaviour when tuning with multi-stage """ mock_iomanager = mock.MagicMock() mock_create_io_manager.return_value = mock_iomanager - fake_result = {"returncode": 0, - "stdout": "succuss", "stderr": "", - 'timeout': False, 'time': 1.89} + fake_result = { + "returncode": 0, + "stdout": "succuss", + "stderr": "", + "timeout": False, + "time": 1.89, + } mock_call_program.return_value = fake_result - self.args.stage_order = [ - "other", "function", "loop", "machine_basic_block"] + self.args.stage_order = ["other", "function", "loop", "machine_basic_block"] auto_run_main(self.args) @@ -138,16 +162,23 @@ class TestAutoRun(unittest.TestCase): @mock.patch.object(SimpleTuner, "main") @mock.patch("autotuner.main.create_io_manager") @mock.patch("autotuner.main._clean_opp") - def test_phase_based_run_main_subprocess_failed(self, mock_clean_opp, - mock_create_io_manager, - mock_simpletuner_main, - mock_call_program): + def test_phase_based_run_main_subprocess_failed( + self, + mock_clean_opp, + mock_create_io_manager, + mock_simpletuner_main, + mock_call_program, + ): mock_iomanager = mock.MagicMock() mock_create_io_manager.return_value = mock_iomanager - fake_result = {"returncode": 1, - "stdout": "", "stderr": "some error", - 'timeout': False, 'time': -1} + fake_result = { + "returncode": 1, + "stdout": "", + "stderr": "some error", + "timeout": False, + "time": -1, + } mock_call_program.return_value = fake_result auto_run_main(self.args) @@ -161,18 +192,23 @@ class TestAutoRun(unittest.TestCase): @mock.patch("autotuner.main._clean_opp") @mock.patch("sys.stdout", new_callable=StringIO) def test_phase_based_run_main_empty_searchspace_error( - self, mock_stdout, + self, + mock_stdout, mock_clean_opp, mock_create_io_manager, mock_simpletuner_main, - mock_call_program + mock_call_program, ): mock_iomanager = mock.MagicMock() 
mock_create_io_manager.return_value = mock_iomanager - fake_result = {"returncode": 0, - "stdout": "succuss", "stderr": "", - 'timeout': False, 'time': 1.89} + fake_result = { + "returncode": 0, + "stdout": "succuss", + "stderr": "", + "timeout": False, + "time": 1.89, + } mock_call_program.return_value = fake_result mock_simpletuner_main.side_effect = EmptySearchSpaceError() @@ -181,9 +217,9 @@ class TestAutoRun(unittest.TestCase): # Check correct error msg is printed self.assertTrue( - "Empty search space, stop the current stage" in - mock_stdout.getvalue(), "Correct EmptySearchSpaceError " \ - "msg not printed") + "Empty search space, stop the current stage" in mock_stdout.getvalue(), + "Correct EmptySearchSpaceError " "msg not printed", + ) mock_simpletuner_main.assert_called_once() @mock.patch.object(MeasurementInterface, "call_program") @@ -192,18 +228,23 @@ class TestAutoRun(unittest.TestCase): @mock.patch("autotuner.main._clean_opp") @mock.patch("sys.stdout", new_callable=StringIO) def test_phase_based_run_main_multi_stage_with_error( - self, mock_stdout, + self, + mock_stdout, mock_clean_opp, mock_create_io_manager, mock_simpletuner_main, - mock_call_program + mock_call_program, ): mock_iomanager = mock.MagicMock() mock_create_io_manager.return_value = mock_iomanager - fake_result = {"returncode": 0, - "stdout": "succuss", "stderr": "", - 'timeout': False, 'time': 1.89} + fake_result = { + "returncode": 0, + "stdout": "succuss", + "stderr": "", + "timeout": False, + "time": 1.89, + } mock_call_program.return_value = fake_result self.args.stage_order = ["function", "loop"] @@ -214,9 +255,9 @@ class TestAutoRun(unittest.TestCase): # Check correct error msg is printed self.assertTrue( - "Empty search space, stop the current stage" in - mock_stdout.getvalue(), "Correct EmptySearchSpaceError " - "msg not printed") + "Empty search space, stop the current stage" in mock_stdout.getvalue(), + "Correct EmptySearchSpaceError " "msg not printed", + ) self.assertEqual(mock_simpletuner_main.call_count, 2) diff --git a/autotuner/test/test_code_region_filter.py b/autotuner/test/test_code_region_filter.py index a68dd0ebed382581e1bf64e4b8041298c6451879..99269f8d30cdcc6450ce843b888ddf8578dae383 100644 --- a/autotuner/test/test_code_region_filter.py +++ b/autotuner/test/test_code_region_filter.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- """ -Test for code region filtering in AutoTuner. +Test for code region filtering in BiSheng Autotuner. Copyright (C) 2017-2022, Huawei Technologies Co., Ltd. All rights reserved. """ import os @@ -9,6 +9,7 @@ import shutil import unittest import tempfile import yaml + try: from yaml import CLoader as Loader except ImportError: @@ -27,13 +28,11 @@ class TestCodeRegionFilter(unittest.TestCase): def setUp(self): # Setting environment before running each test. 
self.data_dir = tempfile.TemporaryDirectory() - os.environ['AUTOTUNE_DATADIR'] = self.data_dir.name - self.input_dir = os.path.join(os.path.dirname(__file__), - "Inputs/filter_opp/") - shutil.copytree(self.input_dir, self.data_dir.name + '/opp/') + os.environ["AUTOTUNE_DATADIR"] = self.data_dir.name + self.input_dir = os.path.join(os.path.dirname(__file__), "Inputs/filter_opp/") + shutil.copytree(self.input_dir, self.data_dir.name + "/opp/") self.parser = create_parser() - def verify_yaml_content(self, yaml_path, expected_code_regions): """ Counting number of generated code regions in yaml file and comparing it @@ -46,67 +45,66 @@ class TestCodeRegionFilter(unittest.TestCase): self.assertEqual(code_regions, expected_code_regions) - def test_type_filter(self): # Filtering code regions by type. - args = self.parser.parse_args(['minimize', - '--type-filter', 'loop']) + args = self.parser.parse_args(["minimize", "--type-filter", "loop"]) initialize(self.data_dir.name, args, args.command, args.trials) self.verify_yaml_content(self.data_dir.name + "/config.yaml", 11) self.data_dir.cleanup() - def test_func_name_filter(self): # Filtering code regions by containing function name. - args = self.parser.parse_args(['minimize', - '--func-name-filter', 'pat_insert']) + args = self.parser.parse_args(["minimize", "--func-name-filter", "pat_insert"]) initialize(self.data_dir.name, args, args.command, args.trials) self.verify_yaml_content(self.data_dir.name + "/config.yaml", 15) self.data_dir.cleanup() - def test_code_region_name_filter(self): # Filtering code regions by their names. - args = self.parser.parse_args(['minimize', - '--name-filter', 'for.body', 'land.rhs']) + args = self.parser.parse_args( + ["minimize", "--name-filter", "for.body", "land.rhs"] + ) initialize(self.data_dir.name, args, args.command, args.trials) self.verify_yaml_content(self.data_dir.name + "/config.yaml", 3) self.data_dir.cleanup() - def test_file_name_filter(self): # Filtering code regions by file name. - args = self.parser.parse_args(['minimize', - '--file-name-filter', 'patricia_test.c']) + args = self.parser.parse_args( + ["minimize", "--file-name-filter", "patricia_test.c"] + ) initialize(self.data_dir.name, args, args.command, args.trials) self.verify_yaml_content(self.data_dir.name + "/config.yaml", 2) self.data_dir.cleanup() - def test_type_and_file_name_filter(self): # Filtering code regions by type and file name. - args = self.parser.parse_args(['minimize', - '--file-name-filter', 'patricia.c', - '--type-filter', 'callsite']) + args = self.parser.parse_args( + [ + "minimize", + "--file-name-filter", + "patricia.c", + "--type-filter", + "callsite", + ] + ) initialize(self.data_dir.name, args, args.command, args.trials) self.verify_yaml_content(self.data_dir.name + "/config.yaml", 16) self.data_dir.cleanup() - def test_pass_filter(self): # Filtering code regions by optimization pass. 
- args = self.parser.parse_args(['minimize', '--pass-filter', - 'loop-unroll']) + args = self.parser.parse_args(["minimize", "--pass-filter", "loop-unroll"]) initialize(self.data_dir.name, args, args.command, args.trials) self.verify_yaml_content(self.data_dir.name + "/config.yaml", 11) diff --git a/autotuner/test/test_deterministic.py b/autotuner/test/test_deterministic.py index 0724c6f5ac68fc6301f25c92cab758925ad79da2..8cea8c2ccd87ebd50f79dec762779fba03a69510 100755 --- a/autotuner/test/test_deterministic.py +++ b/autotuner/test/test_deterministic.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- """ -Test to run AutoTuner in deterministic manner +Test to run BiSheng Autotuner in deterministic manner Copyright (C) 2017-2021, Huawei Technologies Co., Ltd. All rights reserved. """ import os @@ -14,7 +14,7 @@ import tempfile class TestAutoTunerDeterministic(unittest.TestCase): """ - Test the deterministic and non-deterministic approach in AutoTuner + Test the deterministic and non-deterministic approach in BiSheng Autotuner """ def setUp(self): @@ -22,7 +22,6 @@ class TestAutoTunerDeterministic(unittest.TestCase): self.base_dir = "" self.autotuner_dir = "" - def compare_yaml_file(self, file_1, file_2): """ Compare two yaml files by their contents using pyyaml @@ -34,11 +33,9 @@ class TestAutoTunerDeterministic(unittest.TestCase): shutil.rmtree(self.base_dir) raise - - def run_autotuner(self, data_dir, output_dir, input_dir, - deterministic=None): + def run_autotuner(self, data_dir, output_dir, input_dir, deterministic=None): """ - Running AutoTuner + Running BiSheng Autotuner """ # mock feedback time mock_time = [19.92, 20.85, 19.65, 19.71, 19.50] @@ -128,7 +125,6 @@ class TestAutoTunerDeterministic(unittest.TestCase): run_dir_2.cleanup() self.base_dir.cleanup() - def test_random(self): self.output_stream = "" curr_dir = os.path.dirname(os.path.abspath(__file__)) @@ -167,14 +163,14 @@ class TestAutoTunerDeterministic(unittest.TestCase): run_dir_2.cleanup() self.base_dir.cleanup() - def test_seed_file(self): """ - Test for saving and using random state. AutoTuner is initialized with - random search space and AutoTuner will save the current random state - as a file in the data directory. This random state file is reused to - initialize the AutoTuner again for same set of opportunities. Hence, - both initialization will generate identical 'config.yaml' file. + Test for saving and using random state. BiSheng Autotuner is initialized + with random search space and BiSheng Autotuner will save the current + random state as a file in the data directory. This random state file is + reused to initialize the BiSheng Autotuner again for same set of + opportunities. Hence, both initialization will generate identical + 'config.yaml' file. 
""" self.output_stream = "" curr_dir = os.path.dirname(os.path.abspath(__file__)) diff --git a/autotuner/test/test_divide.py b/autotuner/test/test_divide.py index 98739c1b31a630675ad437ab5e1a2261517bed0e..a4475fa5730f65111b48b734793618c298d29002 100644 --- a/autotuner/test/test_divide.py +++ b/autotuner/test/test_divide.py @@ -10,6 +10,7 @@ import unittest import unittest.mock as mock import yaml from autotuner.main import divide_main + try: from yaml import CLoader as Loader except ImportError: @@ -18,7 +19,7 @@ except ImportError: class TestAutotunerDivide(unittest.TestCase): """ - Test the 'divide' subcommand in autotuner + Test the 'divide' subcommand in BiSheng Autotuner """ def setUp(self): @@ -31,22 +32,23 @@ class TestAutotunerDivide(unittest.TestCase): Compare two yaml files by their contents using pyyaml """ with open(expected_yaml_path) as expected_stream, open( - actual_yaml_path) as actual_stream: + actual_yaml_path + ) as actual_stream: actual_generator = yaml.load_all(actual_stream, Loader=Loader) - for expected_remark in yaml.load_all(expected_stream, - Loader=Loader): + for expected_remark in yaml.load_all(expected_stream, Loader=Loader): actual_remark = next(actual_generator) - self.assertEqual(expected_remark, actual_remark, - "Parsed result yaml different " - "from expected") + self.assertEqual( + expected_remark, + actual_remark, + "Parsed result yaml different " "from expected", + ) # expected has finished, check whether actual finished too # if the actual_generator still has content, then fail self.assertRaises(StopIteration, next, actual_generator) def test_divide_main(self): curr_dir = os.path.dirname(__file__) - self.args.input_file = os.path.join(curr_dir, - "Inputs/divide/llvm_input.yaml") + self.args.input_file = os.path.join(curr_dir, "Inputs/divide/llvm_input.yaml") self.args.output_dir = "test_divide_output" with self.assertRaises(SystemExit) as context: @@ -56,17 +58,21 @@ class TestAutotunerDivide(unittest.TestCase): # check directory created self.assertTrue(os.path.isdir("test_divide_output")) # check files divided - self.assertTrue(os.path.isfile(os.path.join( - "test_divide_output", "core_list_join.c.yaml"))) - self.assertTrue(os.path.isfile(os.path.join( - "test_divide_output", "core_main.c.yaml"))) + self.assertTrue( + os.path.isfile(os.path.join("test_divide_output", "core_list_join.c.yaml")) + ) + self.assertTrue( + os.path.isfile(os.path.join("test_divide_output", "core_main.c.yaml")) + ) # check divide output content - self.compare_yaml_content(os.path.join( - curr_dir, "Outputs/divide/llvm_input_core_list_join.c.yaml"), - os.path.join("test_divide_output", "core_list_join.c.yaml")) - self.compare_yaml_content(os.path.join( - curr_dir, "Outputs/divide/llvm_input_core_main.c.yaml"), - os.path.join("test_divide_output", "core_main.c.yaml")) + self.compare_yaml_content( + os.path.join(curr_dir, "Outputs/divide/llvm_input_core_list_join.c.yaml"), + os.path.join("test_divide_output", "core_list_join.c.yaml"), + ) + self.compare_yaml_content( + os.path.join(curr_dir, "Outputs/divide/llvm_input_core_main.c.yaml"), + os.path.join("test_divide_output", "core_main.c.yaml"), + ) # remove test output shutil.rmtree("test_divide_output") diff --git a/autotuner/test/test_merge.py b/autotuner/test/test_merge.py index 664c08d738ce285ce047d927f5cb69be3ab9000c..97404f38e521587186bbb914ea391ae5854fe349 100644 --- a/autotuner/test/test_merge.py +++ b/autotuner/test/test_merge.py @@ -9,6 +9,7 @@ import unittest import unittest.mock as mock import yaml from 
autotuner.main import merge_main + try: from yaml import CLoader as Loader except ImportError: @@ -17,7 +18,7 @@ except ImportError: class TestAutotunerMerge(unittest.TestCase): """ - Test the 'merge' subcommand in autotuner + Test the 'merge' subcommand in BiSheng Autotuner """ def setUp(self): @@ -31,14 +32,16 @@ class TestAutotunerMerge(unittest.TestCase): """ try: with open(expected_yaml_path) as expected_stream, open( - actual_yaml_path) as actual_stream: + actual_yaml_path + ) as actual_stream: actual_generator = yaml.load_all(actual_stream, Loader=Loader) - for expected_remark in yaml.load_all(expected_stream, - Loader=Loader): + for expected_remark in yaml.load_all(expected_stream, Loader=Loader): actual_remark = next(actual_generator) - self.assertEqual(expected_remark, actual_remark, - "Parsed result yaml different " - "from expected") + self.assertEqual( + expected_remark, + actual_remark, + "Parsed result yaml different " "from expected", + ) # expected has finished, check whether actual finished too # if the actual_generator still has content, then fail self.assertRaises(StopIteration, next, actual_generator) @@ -47,10 +50,10 @@ class TestAutotunerMerge(unittest.TestCase): def test_merge_main(self): curr_dir = os.path.dirname(__file__) - self.args.input_file = [os.path.join(curr_dir, - "Inputs/merge/llvm_input_core_list_join.c.yaml"), - os.path.join(curr_dir, - "Inputs/merge/llvm_input_core_main.c.yaml")] + self.args.input_file = [ + os.path.join(curr_dir, "Inputs/merge/llvm_input_core_list_join.c.yaml"), + os.path.join(curr_dir, "Inputs/merge/llvm_input_core_main.c.yaml"), + ] self.args.output = "actual_merge_result.yaml" with self.assertRaises(SystemExit) as context: @@ -60,9 +63,10 @@ class TestAutotunerMerge(unittest.TestCase): # check file created self.assertTrue(os.path.isfile("actual_merge_result.yaml")) # check divide output content - self.compare_yaml_content(os.path.join( - curr_dir, "Outputs/merge/llvm_input.yaml"), - "actual_merge_result.yaml") + self.compare_yaml_content( + os.path.join(curr_dir, "Outputs/merge/llvm_input.yaml"), + "actual_merge_result.yaml", + ) if __name__ == "__main__": diff --git a/autotuner/test/test_parse.py b/autotuner/test/test_parse.py index b5670f4e9289c941717e5c172c1ace3169cf9968..3d3411db813e7ef20c977b14b8047fdac6c23a12 100755 --- a/autotuner/test/test_parse.py +++ b/autotuner/test/test_parse.py @@ -9,6 +9,7 @@ import unittest import unittest.mock as mock import yaml from autotuner.main import parse_main + try: from yaml import CLoader as Loader except ImportError: @@ -17,17 +18,17 @@ except ImportError: class TestAutotunerParse(unittest.TestCase): """ - Test the 'parse' subcommand in autotuner + Test the 'parse' subcommand in BiSheng Autotuner """ def setUp(self): self.args = mock.MagicMock() self.args.command = "parse" - self.args.opp_file = [os.path.join(os.path.dirname(__file__), - "Inputs/opp/core_list_join.c.yaml"), - os.path.join(os.path.dirname(__file__), - "Inputs/opp/core_main.c.yaml")] + self.args.opp_file = [ + os.path.join(os.path.dirname(__file__), "Inputs/opp/core_list_join.c.yaml"), + os.path.join(os.path.dirname(__file__), "Inputs/opp/core_main.c.yaml"), + ] self.args.output = "actual_search_space.yaml" self.args.name_filter = [] self.args.func_name_filter = [] @@ -35,8 +36,8 @@ class TestAutotunerParse(unittest.TestCase): self.args.hot_func_file = [] self.args.hot_func_number = 10 self.args.search_config_file = os.path.join( - os.path.dirname(__file__), - "Inputs/parse/test_search_space_config.yaml") + 
os.path.dirname(__file__), "Inputs/parse/test_search_space_config.yaml" + ) def compare_yaml_content(self, expected_yaml_path, actual_yaml_path): """ @@ -44,15 +45,16 @@ class TestAutotunerParse(unittest.TestCase): """ try: with open(expected_yaml_path) as expected_stream, open( - actual_yaml_path) as actual_stream: - actual_generator = yaml.load_all(actual_stream, - Loader=Loader) - for expected_dict in yaml.load_all(expected_stream, - Loader=Loader): + actual_yaml_path + ) as actual_stream: + actual_generator = yaml.load_all(actual_stream, Loader=Loader) + for expected_dict in yaml.load_all(expected_stream, Loader=Loader): actual_dict = next(actual_generator) self.assertDictEqual( - expected_dict, actual_dict, "Parsed result yaml " - "different from expected") + expected_dict, + actual_dict, + "Parsed result yaml " "different from expected", + ) # expected has finished, check whether actual finished too # if the actual_generator still has content, then fail self.assertRaises(StopIteration, next, actual_generator) @@ -66,8 +68,9 @@ class TestAutotunerParse(unittest.TestCase): parse_main(self.args) self.assertEqual(context.exception.code, 0) - expected_ss_path = os.path.join(os.path.dirname( - __file__), "Outputs/parse/search_space.yaml") + expected_ss_path = os.path.join( + os.path.dirname(__file__), "Outputs/parse/search_space.yaml" + ) self.compare_yaml_content(expected_ss_path, "actual_search_space.yaml") def test_parse_main_loop_only(self): @@ -77,8 +80,9 @@ class TestAutotunerParse(unittest.TestCase): parse_main(self.args) self.assertEqual(context.exception.code, 0) - expected_ss_path = os.path.join(os.path.dirname( - __file__), "Outputs/parse/search_space_loop_only.yaml") + expected_ss_path = os.path.join( + os.path.dirname(__file__), "Outputs/parse/search_space_loop_only.yaml" + ) self.compare_yaml_content(expected_ss_path, "actual_search_space.yaml") def test_parse_main_function_only(self): @@ -88,8 +92,9 @@ class TestAutotunerParse(unittest.TestCase): parse_main(self.args) self.assertEqual(context.exception.code, 0) - expected_ss_path = os.path.join(os.path.dirname( - __file__), "Outputs/parse/search_space_function_only.yaml") + expected_ss_path = os.path.join( + os.path.dirname(__file__), "Outputs/parse/search_space_function_only.yaml" + ) self.compare_yaml_content(expected_ss_path, "actual_search_space.yaml") def test_parse_main_module_only(self): @@ -99,8 +104,9 @@ class TestAutotunerParse(unittest.TestCase): parse_main(self.args) self.assertEqual(context.exception.code, 0) - expected_ss_path = os.path.join(os.path.dirname( - __file__), "Outputs/parse/search_space_module_only.yaml") + expected_ss_path = os.path.join( + os.path.dirname(__file__), "Outputs/parse/search_space_module_only.yaml" + ) self.compare_yaml_content(expected_ss_path, "actual_search_space.yaml") def test_parse_main_mbb_only(self): @@ -110,8 +116,9 @@ class TestAutotunerParse(unittest.TestCase): parse_main(self.args) self.assertEqual(context.exception.code, 0) - expected_ss_path = os.path.join(os.path.dirname( - __file__), "Outputs/parse/search_space_mbb_only.yaml") + expected_ss_path = os.path.join( + os.path.dirname(__file__), "Outputs/parse/search_space_mbb_only.yaml" + ) self.compare_yaml_content(expected_ss_path, "actual_search_space.yaml") def test_parse_main_name_filter(self): @@ -122,8 +129,9 @@ class TestAutotunerParse(unittest.TestCase): parse_main(self.args) self.assertEqual(context.exception.code, 0) - expected_ss_path = os.path.join(os.path.dirname( - __file__), 
"Outputs/parse/search_space_name_filter.yaml") + expected_ss_path = os.path.join( + os.path.dirname(__file__), "Outputs/parse/search_space_name_filter.yaml" + ) self.compare_yaml_content(expected_ss_path, "actual_search_space.yaml") def test_parse_main_func_filter(self): @@ -134,8 +142,9 @@ class TestAutotunerParse(unittest.TestCase): parse_main(self.args) self.assertEqual(context.exception.code, 0) - expected_ss_path = os.path.join(os.path.dirname( - __file__), "Outputs/parse/search_space_func_filter.yaml") + expected_ss_path = os.path.join( + os.path.dirname(__file__), "Outputs/parse/search_space_func_filter.yaml" + ) self.compare_yaml_content(expected_ss_path, "actual_search_space.yaml") def test_parse_main_file_filter(self): @@ -146,8 +155,9 @@ class TestAutotunerParse(unittest.TestCase): parse_main(self.args) self.assertEqual(context.exception.code, 0) - expected_ss_path = os.path.join(os.path.dirname( - __file__), "Outputs/parse/search_space_file_filter.yaml") + expected_ss_path = os.path.join( + os.path.dirname(__file__), "Outputs/parse/search_space_file_filter.yaml" + ) self.compare_yaml_content(expected_ss_path, "actual_search_space.yaml") def test_parse_main_loop_only_with_file_filter(self): @@ -158,9 +168,10 @@ class TestAutotunerParse(unittest.TestCase): parse_main(self.args) self.assertEqual(context.exception.code, 0) - expected_ss_path = os.path.join(os.path.dirname(__file__), - "Outputs/parse/search_space_" - "loop_only_with_file_filter.yaml") + expected_ss_path = os.path.join( + os.path.dirname(__file__), + "Outputs/parse/search_space_" "loop_only_with_file_filter.yaml", + ) self.compare_yaml_content(expected_ss_path, "actual_search_space.yaml") diff --git a/autotuner/test/test_parse_common_options.py b/autotuner/test/test_parse_common_options.py index 818cc877081730deeaa092e8b563126d32c8d010..285dedbd8a4b03d0975a1fc462624c54d3a3810e 100755 --- a/autotuner/test/test_parse_common_options.py +++ b/autotuner/test/test_parse_common_options.py @@ -22,8 +22,7 @@ class TestParseCommonOptions(unittest.TestCase): self.args.list_techniques = False self.args.command = "run" curr_dir = os.path.dirname(__file__) - self.args.config_file = os.path.join( - curr_dir, "Inputs", "test_sample.ini") + self.args.config_file = os.path.join(curr_dir, "Inputs", "test_sample.ini") self.args.search_space = True self.args.tuner = False self.args.output = False @@ -31,7 +30,7 @@ class TestParseCommonOptions(unittest.TestCase): @mock.patch("autotuner.tuners.tunerbase.get_available_tuners") def test_list_tuners(self, mock_get_available_tuners): """ - Test the --list-tuner flag for autotuner + Test the --list-tuner flag for BiSheng Autotuner """ self.args.list_tuners = True self.args.plugin_dir = "some path" @@ -72,20 +71,23 @@ class TestParseCommonOptions(unittest.TestCase): """ _, config = _parse_common_options(self.args) expected_compile_dir = os.path.abspath(os.path.dirname(self.args.config_file)) - self.assertEqual(config["Compiling Setting"]["CompileDir"], - expected_compile_dir) + self.assertEqual( + config["Compiling Setting"]["CompileDir"], expected_compile_dir + ) def test_tuner_and_plugin(self): """ - Test --tuner and --plugin flag for autotuner + Test --tuner and --plugin flag for BiSheng Autotuner """ self.args.tuner = "dummy_tuner" self.args.plugin_dir = os.path.dirname(__file__) tuner, _ = _parse_common_options(self.args) - self.assertTrue(issubclass(tuner, CustomTunerBase), - "Sample test tuner usage failed. 
Plugin or " \ - "tuners are not well implemented") + self.assertTrue( + issubclass(tuner, CustomTunerBase), + "Sample test tuner usage failed. Plugin or " + "tuners are not well implemented", + ) if __name__ == "__main__": diff --git a/autotuner/test/test_program_param.py b/autotuner/test/test_program_param.py index 8e92dbf1e5814676674a37e8b52a8cb857c77884..dcb06a885c25b0d3b3f3fee45f7ee89314a5f56f 100755 --- a/autotuner/test/test_program_param.py +++ b/autotuner/test/test_program_param.py @@ -1,9 +1,9 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -# Test the program-param code region in AutoTuner +# Test the program-param code region in BiSheng Autotuner # Copyright (C) 2017-2022, Huawei Technologies Co., Ltd. All rights reserved. """ -Test the program-param code region in AutoTuner +Test the program-param code region in BiSheng Autotuner """ import os @@ -12,6 +12,7 @@ import subprocess import unittest import tempfile import yaml + try: from yaml import CLoader as Loader except ImportError: @@ -22,8 +23,9 @@ from autotuner.remarkparser import AutoTuning class TestAutoTunerProgramParam(unittest.TestCase): """ - Test the program-param code region in AutoTuner. + Test the program-param code region in BiSheng Autotuner. """ + def setUp(self): self.autotuner_dir = "" self.data_dir = "" @@ -33,17 +35,16 @@ class TestAutoTunerProgramParam(unittest.TestCase): Compare two yaml files by their contents using pyyaml """ with open(expected_yaml_path) as expected_stream, open( - actual_yaml_path) as actual_stream: - for expected_remark in yaml.load_all(expected_stream, - Loader=Loader): + actual_yaml_path + ) as actual_stream: + for expected_remark in yaml.load_all(expected_stream, Loader=Loader): actual_remark = yaml.load_all(actual_stream, Loader=Loader) self.assertTrue( expected_remark in actual_remark, - "Generated Yaml is different from expected." 
+ "Generated Yaml is different from expected.", ) actual_stream.seek(0) - def check_formated_yaml(self): """ checking: @@ -59,29 +60,32 @@ class TestAutoTunerProgramParam(unittest.TestCase): check_program_param = True check_llvm_param = True - with open(os.path.join(self.data_dir, "formated_config.yaml"), 'r') as file: + with open(os.path.join(self.data_dir, "formated_config.yaml"), "r") as file: config_dic = yaml.load_all(file, Loader=yaml.FullLoader) for code_region in config_dic: - if code_region['CodeRegionType'] == "program-param": + if code_region["CodeRegionType"] == "program-param": if program_param_args_list is None: - program_param_args_list = code_region['Args'] + program_param_args_list = code_region["Args"] else: - program_param_args_list_temp = code_region['Args'] - check_program_param &= (program_param_args_list == program_param_args_list_temp) - if code_region['CodeRegionType'] == "llvm-param": + program_param_args_list_temp = code_region["Args"] + check_program_param &= ( + program_param_args_list == program_param_args_list_temp + ) + if code_region["CodeRegionType"] == "llvm-param": if llvm_param_args_list is None: - llvm_param_args_list = code_region['Args'] + llvm_param_args_list = code_region["Args"] else: - llvm_param_args_list_temp = code_region['Args'] - check_llvm_param &= (llvm_param_args_list == llvm_param_args_list_temp) + llvm_param_args_list_temp = code_region["Args"] + check_llvm_param &= ( + llvm_param_args_list == llvm_param_args_list_temp + ) return check_program_param, check_llvm_param - def test_default_compiler_options(self): """ - Test AutoTuner uses the default options to initialize the search space - for program-param and llvm-param code regions when '--use-baseline-config' - is provided. + Test BiSheng Autotuner uses the default options to initialize the + search space for program-param and llvm-param code regions when + '--use-baseline-config' is provided. """ curr_dir = os.path.dirname(os.path.abspath(__file__)) self.autotuner_dir = os.path.join(curr_dir, "..", "..", "bin") @@ -115,32 +119,41 @@ class TestAutoTunerProgramParam(unittest.TestCase): data_dir.cleanup() - def test_program_param(self): """ - Test the program-param code region in AutoTuner. - Firstly, run autotuner with llvm-autotune minimize - Secondly, checking llvm-param and program-param code region within config.yaml + Test the program-param code region in BiSheng Autotuner. 
+ Firstly, run BiSheng Autotuner with llvm-autotune minimize + Secondly, checking llvm-param and program-param code region within + config.yaml """ curr_dir = os.path.dirname(os.path.abspath(__file__)) self.autotuner_dir = os.path.join(curr_dir, "..", "..", "bin") input_dir = os.path.join(curr_dir, "Inputs", "program_param_opp") - search_space_file = os.path.join(curr_dir, "..", "search_space_config", "extended_search_space.yaml") + search_space_file = os.path.join( + curr_dir, "..", "search_space_config", "extended_search_space.yaml" + ) llvm_autotune_bin = os.path.join(self.autotuner_dir, "llvm-autotune") data_dir = tempfile.TemporaryDirectory() self.data_dir = data_dir.name - os.environ['AUTOTUNE_DATADIR'] = data_dir.name - opp_dir = os.path.join(self.data_dir, 'opp') + os.environ["AUTOTUNE_DATADIR"] = data_dir.name + opp_dir = os.path.join(self.data_dir, "opp") shutil.copytree(input_dir, opp_dir) - cmd = [llvm_autotune_bin, "minimize", "--deterministic=True", "--search-space", search_space_file] + cmd = [ + llvm_autotune_bin, + "minimize", + "--deterministic=True", + "--search-space", + search_space_file, + ] if os.name == "nt": cmd.insert(0, "py") subprocess.run(cmd, check=True, stderr=subprocess.DEVNULL) - with open(os.path.join(self.data_dir, "config.yaml"), 'r') as infile, \ - open(os.path.join(self.data_dir, "formated_config.yaml"), 'w') as outfile: + with open(os.path.join(self.data_dir, "config.yaml"), "r") as infile, open( + os.path.join(self.data_dir, "formated_config.yaml"), "w" + ) as outfile: data = infile.read() data = data.replace("!AutoTuning", "") outfile.write(data) @@ -152,5 +165,6 @@ class TestAutoTunerProgramParam(unittest.TestCase): data_dir.cleanup() + if __name__ == "__main__": unittest.main(buffer=True) diff --git a/autotuner/test/test_region_pruning.py b/autotuner/test/test_region_pruning.py index 6ca47c9223af14f8c1f020395b5455b81956c608..f9baa97fd024d8e69e040eaa53643a4fb4eaf183 100644 --- a/autotuner/test/test_region_pruning.py +++ b/autotuner/test/test_region_pruning.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- """ -Test to run AutoTuner in deterministic manner +Test to run BiSheng Autotuner in deterministic manner Copyright (C) 2017-2022, Huawei Technologies Co., Ltd. All rights reserved. """ import os @@ -56,11 +56,10 @@ class TestAutoTunerPruning(unittest.TestCase): if os.path.exists(os.path.join(self.backup_dir.name, "configs.db")): shutil.copyfile( os.path.join(self.backup_dir.name, "configs.db"), - os.path.join(self.data_dir.name, "configs.db") + os.path.join(self.data_dir.name, "configs.db"), ) return super().tearDown() - def db_validator(self): num_current_code_regions = 0 num_optimal_configs = 0 @@ -75,12 +74,11 @@ class TestAutoTunerPruning(unittest.TestCase): num_optimal_configs += 1 if line.startswith('INSERT INTO "currentCodeRegions"'): num_current_code_regions += 1 - if line[-3] == '1': + if line[-3] == "1": num_seen += 1 return num_current_code_regions, num_seen, num_optimal_configs - def test_loop_meta_0(self): # Testing without new features. if os.path.exists(os.path.join(self.data_dir.name, "configs.db")): @@ -113,7 +111,6 @@ class TestAutoTunerPruning(unittest.TestCase): self.assertTrue(os.path.exists(os.path.join(self.data_dir.name, "configs.db"))) self.assertEqual(current_code_regions, 0) - def test_loop_meta_1(self): # Using hashing and storing optimal configurations. 
# parameters used: use-hash-matching, store-optimal-configs @@ -210,11 +207,11 @@ class TestAutoTunerPruning(unittest.TestCase): self.assertIn("empty search space", out) - def test_loop_meta_4(self): # Testing the IR hashing and reusing of optimal configurations. - # AutoTuner will tune the program if there are other code regions - # in addition to the code regions in optimal configurations table. + # BiSheng Autotuner will tune the program if there are other code + # regions in addition to the code regions in optimal configurations + # table. # parameters used: use-hash-matching, use-optimal-configs reuse shutil.copyfile( os.path.join(self.input_dir, "loop_meta.yaml"), @@ -250,7 +247,6 @@ class TestAutoTunerPruning(unittest.TestCase): self.assertEqual(seen, 23) self.assertEqual(optimal_configs, 8) - def test_loop_nometa_0(self): # Testing without new features. if os.path.exists(os.path.join(self.data_dir.name, "configs.db")): @@ -283,7 +279,6 @@ class TestAutoTunerPruning(unittest.TestCase): self.assertTrue(os.path.exists(os.path.join(self.data_dir.name, "configs.db"))) self.assertEqual(current_code_regions, 0) - def test_loop_nometa_1(self): # Using hashing and storing optimal configurations. # parameters used: use-hash-matching, store-optimal-configs @@ -380,11 +375,11 @@ class TestAutoTunerPruning(unittest.TestCase): self.assertIn("empty search space", out) - def test_loop_nometa_4(self): # Testing the IR hashing and reusing of optimal configurations. - # AutoTuner will tune the program if there are other code regions - # in addition to the code regions in optimal configurations table. + # BiSheng Autotuner will tune the program if there are other code + # regions in addition to the code regions in optimal configurations + # table. # parameters used: use-hash-matching, use-optimal-configs reuse shutil.copyfile( os.path.join(self.input_dir, "loop_nometa.yaml"), @@ -420,7 +415,6 @@ class TestAutoTunerPruning(unittest.TestCase): self.assertEqual(seen, 7) self.assertEqual(optimal_configs, 8) - def test_code_region_attributes(self): """ Testing database 'config.db' to ensure that all of the attributes of @@ -462,5 +456,6 @@ class TestAutoTunerPruning(unittest.TestCase): # Comparing 'CodeRegion' fields with 'config.db' columns self.assertTrue(all(elem in result.keys() for elem in config_fields)) + if __name__ == "__main__": unittest.main(buffer=True) diff --git a/autotuner/test/test_resumable_argparser.py b/autotuner/test/test_resumable_argparser.py index cc960c8ed379a21e486752ce89cd24bb25b88443..7d06e111adc876795ea5e8df9d174554fe1200f8 100755 --- a/autotuner/test/test_resumable_argparser.py +++ b/autotuner/test/test_resumable_argparser.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- """ -Tests for Autotuner's argument parser for resumable interface. +Tests for BiSheng Autotuner's argument parser for resumable interface. Copyright (C) 2021-2021, Huawei Technologies Co., Ltd. All rights reserved. 
""" import argparse @@ -16,54 +16,56 @@ class ParserTest(unittest.TestCase): self.parser = create_parser() def test_custom_search_space(self): - parsed = self.parser.parse_args(['minimize']) + parsed = self.parser.parse_args(["minimize"]) self.assertIsNone(parsed.search_space, None) - parsed = self.parser.parse_args(['minimize', '--search-space', - '/path/to/search_space.yaml']) - self.assertEqual(parsed.search_space, '/path/to/search_space.yaml') + parsed = self.parser.parse_args( + ["minimize", "--search-space", "/path/to/search_space.yaml"] + ) + self.assertEqual(parsed.search_space, "/path/to/search_space.yaml") def test_max_parallelism(self): # Test the default value. - parsed = self.parser.parse_args(['minimize']) + parsed = self.parser.parse_args(["minimize"]) self.assertEqual(parsed.trials, 1) # Test with MAX_PARALLELISM allowed. max_parallelism_str = str(MAX_PARALLELISM) - parsed = self.parser.parse_args(['minimize', '--trials', - max_parallelism_str]) + parsed = self.parser.parse_args(["minimize", "--trials", max_parallelism_str]) self.assertEqual(parsed.trials, MAX_PARALLELISM) # Tests with invalid values. parse_func = self.parser.parse_args with self.assertRaises(SystemExit): - self.assertRaises(argparse.ArgumentTypeError, parse_func, - ['minimize', '--trials', '-1']) + self.assertRaises( + argparse.ArgumentTypeError, parse_func, ["minimize", "--trials", "-1"] + ) with self.assertRaises(SystemExit): - self.assertRaises(argparse.ArgumentTypeError, parse_func, - ['minimize', '--trials', '0']) + self.assertRaises( + argparse.ArgumentTypeError, parse_func, ["minimize", "--trials", "0"] + ) # Test with MAX_PARALLELISM + 1. with self.assertRaises(SystemExit): exceed = str(MAX_PARALLELISM + 1) - self.assertRaises(argparse.ArgumentTypeError, parse_func, - ['minimize', '--trials', exceed]) + self.assertRaises( + argparse.ArgumentTypeError, parse_func, ["minimize", "--trials", exceed] + ) def test_use_baseline_config(self): """ Verify the usage of '--use-baseline-config'. """ - parsed = self.parser.parse_args(['minimize']) + parsed = self.parser.parse_args(["minimize"]) self.assertFalse(parsed.use_baseline_config) - parsed = self.parser.parse_args(['minimize', '--use-baseline-config']) + parsed = self.parser.parse_args(["minimize", "--use-baseline-config"]) self.assertTrue(parsed.use_baseline_config) def test_use_dynamic_values(self): - parsed = self.parser.parse_args(['minimize']) + parsed = self.parser.parse_args(["minimize"]) self.assertFalse(parsed.use_dynamic_values) - parsed = self.parser.parse_args(['minimize', '--use-dynamic-values']) + parsed = self.parser.parse_args(["minimize", "--use-dynamic-values"]) self.assertTrue(parsed.use_dynamic_values) - diff --git a/autotuner/test/test_resumable_flow.py b/autotuner/test/test_resumable_flow.py index 799febbb41d71871de18b7dc62b01e92c4716b2d..a38feb9559af2bd696f7b29d0967c9c744b811af 100755 --- a/autotuner/test/test_resumable_flow.py +++ b/autotuner/test/test_resumable_flow.py @@ -16,8 +16,9 @@ import glob class TestAutoTunerFeedback(unittest.TestCase): """ - Test the impact of feedback value on AutoTuner flow. + Test the impact of feedback value on BiSheng Autotuner flow. 
""" + def setUp(self): curr_dir = os.path.dirname(os.path.abspath(__file__)) self.autotuner_dir = os.path.join(curr_dir, "..", "..", "bin") @@ -29,7 +30,7 @@ class TestAutoTunerFeedback(unittest.TestCase): def run_autotuner(self, output_dir, feedback_time, technique): """ - Running AutoTuner + Running BiSheng Autotuner """ temp_data_dir = tempfile.TemporaryDirectory(dir=self.base_dir.name) data_dir = temp_data_dir.name @@ -51,7 +52,7 @@ class TestAutoTunerFeedback(unittest.TestCase): subprocess.run(cmd, check=True, stderr=subprocess.DEVNULL) shutil.copyfile( os.path.join(data_dir, "config.yaml"), - os.path.join(output_dir, "initialize.yaml") + os.path.join(output_dir, "initialize.yaml"), ) for i in range(10): @@ -66,7 +67,7 @@ class TestAutoTunerFeedback(unittest.TestCase): subprocess.run(cmd, check=True, stderr=subprocess.DEVNULL) shutil.copyfile( os.path.join(data_dir, "config.yaml"), - os.path.join(output_dir, f"feedback-{i}.yaml") + os.path.join(output_dir, f"feedback-{i}.yaml"), ) cmd = [os.path.join(self.autotuner_dir, "llvm-autotune"), "finalize"] @@ -75,7 +76,7 @@ class TestAutoTunerFeedback(unittest.TestCase): subprocess.run(cmd, check=True, stderr=subprocess.DEVNULL) shutil.copyfile( os.path.join(data_dir, "config.yaml"), - os.path.join(output_dir, "finalize.yaml") + os.path.join(output_dir, "finalize.yaml"), ) temp_data_dir.cleanup() @@ -83,7 +84,7 @@ class TestAutoTunerFeedback(unittest.TestCase): """ UniformGreedyMutation05 uses the feedback value to determine the best config in each iteration (found so far) and use it to generate next - config. Two AutoTuner flows are executed with 1) increasing and + config. Two BiSheng Autotuner flows are executed with 1) increasing and 2) decreasing feedback values. Best config will remain the same for increasing feedback value and will change in every iteration for decreasing feedback value. As a result, both flows start with identical @@ -101,7 +102,7 @@ class TestAutoTunerFeedback(unittest.TestCase): self.assertTrue( filecmp.cmp( os.path.join(self.run_dir_1.name, "initialize.yaml"), - os.path.join(self.run_dir_2.name, "initialize.yaml") + os.path.join(self.run_dir_2.name, "initialize.yaml"), ) ) @@ -115,14 +116,14 @@ class TestAutoTunerFeedback(unittest.TestCase): for file in file_names: result &= filecmp.cmp( os.path.join(self.run_dir_1.name, file), - os.path.join(self.run_dir_2.name, file) + os.path.join(self.run_dir_2.name, file), ) self.assertFalse(result) self.assertFalse( filecmp.cmp( os.path.join(self.run_dir_1.name, "finalize.yaml"), - os.path.join(self.run_dir_2.name, "finalize.yaml") + os.path.join(self.run_dir_2.name, "finalize.yaml"), ) ) self.run_dir_1.cleanup() @@ -133,9 +134,9 @@ class TestAutoTunerFeedback(unittest.TestCase): """ DifferentialEvolution search technique generates 30 configs during initialization and does not use feedback values till all the initially - created configs are tested. This test runs AutoTuner with 1) increasing - and 2) decreasing feedback values; however generated config for both - AutoTuning flows will be identical for each iteration. + created configs are tested. This test runs BiSheng Autotuner with 1) + increasing and 2) decreasing feedback values; however generated config + for both AutoTuning flows will be identical for each iteration. """ # Deterministic tuning run with increasing feedback time. 
time = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] @@ -148,7 +149,7 @@ class TestAutoTunerFeedback(unittest.TestCase): self.assertTrue( filecmp.cmp( os.path.join(self.run_dir_1.name, "initialize.yaml"), - os.path.join(self.run_dir_2.name, "initialize.yaml") + os.path.join(self.run_dir_2.name, "initialize.yaml"), ) ) result = True @@ -161,7 +162,7 @@ class TestAutoTunerFeedback(unittest.TestCase): for file in file_names: result &= filecmp.cmp( os.path.join(self.run_dir_1.name, file), - os.path.join(self.run_dir_2.name, file) + os.path.join(self.run_dir_2.name, file), ) self.assertTrue(result) @@ -169,7 +170,7 @@ class TestAutoTunerFeedback(unittest.TestCase): self.assertFalse( filecmp.cmp( os.path.join(self.run_dir_1.name, "finalize.yaml"), - os.path.join(self.run_dir_2.name, "finalize.yaml") + os.path.join(self.run_dir_2.name, "finalize.yaml"), ) ) @@ -177,12 +178,11 @@ class TestAutoTunerFeedback(unittest.TestCase): self.run_dir_2.cleanup() self.base_dir.cleanup() - def test_pending_results(self): """ - Test AutoTuner to handle requests for duplicate generated configuration. - A deterministic configuration is used to ensure the generation of - duplicate configuration by given search technique. + Test BiSheng Autotuner to handle requests for duplicate generated + configuration. A deterministic configuration is used to ensure the + generation of duplicate configuration by given search technique. """ temp_data_dir = tempfile.TemporaryDirectory(dir=self.base_dir.name) data_dir = temp_data_dir.name @@ -200,7 +200,7 @@ class TestAutoTunerFeedback(unittest.TestCase): "--seed", "3", "--trials", - "15" + "15", ] if os.name == "nt": cmd.insert(0, "py") diff --git a/autotuner/test/test_resumable_interface.py b/autotuner/test/test_resumable_interface.py index dd5fb63ef209a118a56d737133a176e3b8c246d6..4d507e4321871c910f8c062bf858131cedde1e06 100755 --- a/autotuner/test/test_resumable_interface.py +++ b/autotuner/test/test_resumable_interface.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- """ -Tests for Autotuner's resumable interface. +Tests for BiSheng Autotuner's resumable interface. Copyright (C) 2017-2020, Huawei Technologies Co., Ltd. All rights reserved. """ import os @@ -16,7 +16,7 @@ from autotuner.yamlmanager import YAMLManager class TestAutoRun(unittest.TestCase): """ - Test Autotuner's resumable interface. + Test BiSheng Autotuner's resumable interface. 
""" def setUp(self): @@ -33,16 +33,21 @@ class TestAutoRun(unittest.TestCase): @mock.patch("autotuner.resumable.interface.ResumableRunManager") @mock.patch.object(AutoTunerState, "_init_search_space") @mock.patch.object(YAMLManager, "parse_search_space") - def test_interface_initialize(self, mock_parse_search_space, - mock_init_search_space, - mock_rusumable_run_manager, - mock_process_deterministic, - mock_create_config_db_session): + def test_interface_initialize( + self, + mock_parse_search_space, + mock_init_search_space, + mock_rusumable_run_manager, + mock_process_deterministic, + mock_create_config_db_session, + ): # Mock ResumableRunManager() mock_api = mock.MagicMock() mock_api.tuning_run.id = 1 mock_rusumable_run_manager.return_value = mock_api - mock_create_config_db_session.return_value = 'sqlite:///' + os.path.join(self.data_dir, "configs.db") + mock_create_config_db_session.return_value = "sqlite:///" + os.path.join( + self.data_dir, "configs.db" + ) # Call AutoTunerInterface.initialize self.auto_tuner.initialize(self.args, self.data_dir, self.objective) @@ -53,31 +58,40 @@ class TestAutoRun(unittest.TestCase): # Check if a new AutoTunerState populated properly. mock_init_search_space.assert_called_once() self.assertEqual(self.auto_tuner.auto_tuner_state.tuning_run_id, 1) - self.assertEqual(self.auto_tuner.auto_tuner_state.opp_dir, - os.path.join("dummy_dir", "opp")) - self.assertEqual(self.auto_tuner.auto_tuner_state.config_file, - os.path.join("dummy_dir", "config.yaml")) self.assertEqual( - self.auto_tuner.auto_tuner_state.current_desired_result_ids, []) + self.auto_tuner.auto_tuner_state.opp_dir, os.path.join("dummy_dir", "opp") + ) + self.assertEqual( + self.auto_tuner.auto_tuner_state.config_file, + os.path.join("dummy_dir", "config.yaml"), + ) + self.assertEqual( + self.auto_tuner.auto_tuner_state.current_desired_result_ids, [] + ) @mock.patch("autotuner.resumable.interface.create_config_db_session") @mock.patch("autotuner.resumable.interface.ResumableRunManager") @mock.patch.object(AutoTunerState, "_init_search_space") @mock.patch.object(YAMLManager, "parse_search_space") - def test_interface_resume(self, mock_parse_search_space, - mock_init_search_space, - mock_rusumable_run_manager, - mock_create_config_db_session): - mock_create_config_db_session.return_value = 'sqlite:///' + os.path.join(self.data_dir, "configs.db") - auto_tuner_state = AutoTunerState(self.args, self.data_dir, - self.objective) + def test_interface_resume( + self, + mock_parse_search_space, + mock_init_search_space, + mock_rusumable_run_manager, + mock_create_config_db_session, + ): + mock_create_config_db_session.return_value = "sqlite:///" + os.path.join( + self.data_dir, "configs.db" + ) + auto_tuner_state = AutoTunerState(self.args, self.data_dir, self.objective) auto_tuner_state.tuning_run_id = 3 self.auto_tuner.resume(auto_tuner_state) # Check if ResumableRunManager constructor is called with correct # arguments. - mock_rusumable_run_manager.assert_called_once_with(mock.ANY, self.args, - auto_tuner_state) + mock_rusumable_run_manager.assert_called_once_with( + mock.ANY, self.args, auto_tuner_state + ) # Check if the AutoTunerState resumed. 
self.assertIs(auto_tuner_state, self.auto_tuner.auto_tuner_state) @@ -86,15 +100,19 @@ class TestAutoRun(unittest.TestCase): @mock.patch.object(YAMLManager, "parse_search_space") @mock.patch.object(YAMLManager, "build_llvm_input") @mock.patch.object(YAMLManager, "create_dummy_llvm_input") - def test_interface_next_config(self, mock_build_llvm_input, - mock_parse_search_space, - mock_init_search_space, - mock_create_dummy_llvm_input, - mock_create_config_db_session): + def test_interface_next_config( + self, + mock_build_llvm_input, + mock_parse_search_space, + mock_init_search_space, + mock_create_dummy_llvm_input, + mock_create_config_db_session, + ): # Setup steps for the AutoTunerInterface that is resumed already. - mock_create_config_db_session.return_value = 'sqlite:///' + os.path.join(self.data_dir, "configs.db") - auto_tuner_state = AutoTunerState(self.args, self.data_dir, - self.objective) + mock_create_config_db_session.return_value = "sqlite:///" + os.path.join( + self.data_dir, "configs.db" + ) + auto_tuner_state = AutoTunerState(self.args, self.data_dir, self.objective) self.auto_tuner.auto_tuner_state = auto_tuner_state # Mock api to avoid database connections. mock_api = mock.MagicMock() @@ -106,37 +124,45 @@ class TestAutoRun(unittest.TestCase): # Before the first call to next_config(). self.assertEqual( - self.auto_tuner.auto_tuner_state.current_desired_result_ids, []) + self.auto_tuner.auto_tuner_state.current_desired_result_ids, [] + ) self.auto_tuner.next_config() # After the first call to next_config(). # Check if current_desired_result_ids is updated after the call. self.assertEqual( - self.auto_tuner.auto_tuner_state.current_desired_result_ids, [5]) + self.auto_tuner.auto_tuner_state.current_desired_result_ids, [5] + ) # Before the second call to next_config(). mock_desired_result.id = 6 self.assertEqual( - self.auto_tuner.auto_tuner_state.current_desired_result_ids, [5]) + self.auto_tuner.auto_tuner_state.current_desired_result_ids, [5] + ) self.auto_tuner.next_config() # After the second call to next_config(). # Check if current_desired_result_ids is updated after the call. self.assertEqual( - self.auto_tuner.auto_tuner_state.current_desired_result_ids, [6]) + self.auto_tuner.auto_tuner_state.current_desired_result_ids, [6] + ) @mock.patch("autotuner.resumable.interface.create_config_db_session") @mock.patch.object(AutoTunerState, "_init_search_space") @mock.patch.object(YAMLManager, "parse_search_space") @mock.patch.object(YAMLManager, "build_llvm_input") @mock.patch.object(YAMLManager, "create_dummy_llvm_input") - def test_interface_next_config_multi_trials(self, mock_build_llvm_input, - mock_parse_search_space, - mock_init_search_space, - mock_create_dummy_llvm_input, - mock_create_config_db_session): + def test_interface_next_config_multi_trials( + self, + mock_build_llvm_input, + mock_parse_search_space, + mock_init_search_space, + mock_create_dummy_llvm_input, + mock_create_config_db_session, + ): # Setup steps for the AutoTunerInterface that is resumed already. - mock_create_config_db_session.return_value = 'sqlite:///' + os.path.join(self.data_dir, "configs.db") - auto_tuner_state = AutoTunerState(self.args, self.data_dir, - self.objective) + mock_create_config_db_session.return_value = "sqlite:///" + os.path.join( + self.data_dir, "configs.db" + ) + auto_tuner_state = AutoTunerState(self.args, self.data_dir, self.objective) self.auto_tuner.auto_tuner_state = auto_tuner_state # Mock api to avoid database connections. 
mock_api = mock.MagicMock() @@ -148,40 +174,45 @@ class TestAutoRun(unittest.TestCase): # Before the first call to next_config(). self.assertEqual( - self.auto_tuner.auto_tuner_state.current_desired_result_ids, []) + self.auto_tuner.auto_tuner_state.current_desired_result_ids, [] + ) # Call next_config with 3 trials. self.auto_tuner.next_config(3) # After the first call to next_config(). # Check if current_desired_result_ids is updated after the call. self.assertEqual( - self.auto_tuner.auto_tuner_state.current_desired_result_ids, - [5, 5, 5]) + self.auto_tuner.auto_tuner_state.current_desired_result_ids, [5, 5, 5] + ) # Before the second call to next_config(). mock_desired_result.id = 6 self.assertEqual( - self.auto_tuner.auto_tuner_state.current_desired_result_ids, - [5, 5, 5]) + self.auto_tuner.auto_tuner_state.current_desired_result_ids, [5, 5, 5] + ) # Call next_config with 4 trials. self.auto_tuner.next_config(4) # After the second call to next_config(). # Check if current_desired_result_ids is updated after the call. self.assertEqual( - self.auto_tuner.auto_tuner_state.current_desired_result_ids, - [6, 6, 6, 6]) + self.auto_tuner.auto_tuner_state.current_desired_result_ids, [6, 6, 6, 6] + ) @mock.patch("autotuner.resumable.interface.create_config_db_session") @mock.patch("autotuner.resumable.interface.Result") @mock.patch.object(AutoTunerState, "_init_search_space") @mock.patch.object(YAMLManager, "parse_search_space") - def test_interface_feedback(self, mock_parse_search_space, - mock_init_search_space, - mock_result, - mock_create_config_db_session): + def test_interface_feedback( + self, + mock_parse_search_space, + mock_init_search_space, + mock_result, + mock_create_config_db_session, + ): # Setup steps for the AutoTunerInterface that is resumed already. - mock_create_config_db_session.return_value = 'sqlite:///' + os.path.join(self.data_dir, "configs.db") - auto_tuner_state = AutoTunerState(self.args, self.data_dir, - self.objective) + mock_create_config_db_session.return_value = "sqlite:///" + os.path.join( + self.data_dir, "configs.db" + ) + auto_tuner_state = AutoTunerState(self.args, self.data_dir, self.objective) auto_tuner_state.current_desired_result_ids = [1] self.auto_tuner.auto_tuner_state = auto_tuner_state # Mock api to avoid database connections. @@ -208,14 +239,18 @@ class TestAutoRun(unittest.TestCase): @mock.patch("autotuner.resumable.interface.Result") @mock.patch.object(AutoTunerState, "_init_search_space") @mock.patch.object(YAMLManager, "parse_search_space") - def test_interface_feedback_multi_trials(self, mock_parse_search_space, - mock_init_search_space, - mock_result, - mock_create_config_db_session): + def test_interface_feedback_multi_trials( + self, + mock_parse_search_space, + mock_init_search_space, + mock_result, + mock_create_config_db_session, + ): # Setup steps for the AutoTunerInterface that is resumed already. - mock_create_config_db_session.return_value = 'sqlite:///' + os.path.join(self.data_dir, "configs.db") - auto_tuner_state = AutoTunerState(self.args, self.data_dir, - self.objective) + mock_create_config_db_session.return_value = "sqlite:///" + os.path.join( + self.data_dir, "configs.db" + ) + auto_tuner_state = AutoTunerState(self.args, self.data_dir, self.objective) self.auto_tuner.auto_tuner_state = auto_tuner_state auto_tuner_state.current_desired_result_ids = [1, 2] # Mock API to avoid database connections. 
@@ -244,14 +279,18 @@ class TestAutoRun(unittest.TestCase): @mock.patch("autotuner.resumable.interface.Result") @mock.patch.object(AutoTunerState, "_init_search_space") @mock.patch.object(YAMLManager, "parse_search_space") - def test_interface_feedback_exceptions(self, mock_parse_search_space, - mock_init_search_space, - mock_result, - mock_create_config_db_session): + def test_interface_feedback_exceptions( + self, + mock_parse_search_space, + mock_init_search_space, + mock_result, + mock_create_config_db_session, + ): # Setup steps for the AutoTunerInterface that is resumed already. - mock_create_config_db_session.return_value = 'sqlite:///' + os.path.join(self.data_dir, "configs.db") - auto_tuner_state = AutoTunerState(self.args, self.data_dir, - self.objective) + mock_create_config_db_session.return_value = "sqlite:///" + os.path.join( + self.data_dir, "configs.db" + ) + auto_tuner_state = AutoTunerState(self.args, self.data_dir, self.objective) self.auto_tuner.auto_tuner_state = auto_tuner_state # Mock api to avoid database connections. @@ -280,22 +319,25 @@ class TestAutoRun(unittest.TestCase): @mock.patch.object(AutoTunerState, "_init_search_space") @mock.patch.object(YAMLManager, "parse_search_space") @mock.patch.object(YAMLManager, "build_llvm_input") - def test_interface_dump(self, mock_build_llvm_input, - mock_parse_search_space, - mock_init_search_space, - mock_process_all_results, - mock_create_config_db_session): + def test_interface_dump( + self, + mock_build_llvm_input, + mock_parse_search_space, + mock_init_search_space, + mock_process_all_results, + mock_create_config_db_session, + ): # Setup steps for the AutoTunerInterface that is resumed already. - mock_create_config_db_session.return_value = 'sqlite:///' + os.path.join(self.data_dir, "configs.db") - auto_tuner_state = AutoTunerState(self.args, self.data_dir, - self.objective) + mock_create_config_db_session.return_value = "sqlite:///" + os.path.join( + self.data_dir, "configs.db" + ) + auto_tuner_state = AutoTunerState(self.args, self.data_dir, self.objective) self.auto_tuner.auto_tuner_state = auto_tuner_state # Mock API to avoid database connections. self.auto_tuner.api = mock.MagicMock() # Check if the best config is dumped successfully. - self.auto_tuner.api.get_best_configuration.return_value = \ - mock.MagicMock() + self.auto_tuner.api.get_best_configuration.return_value = mock.MagicMock() self.assertTrue(self.auto_tuner.dump()) # Check if dump() returns false if no best config found. self.auto_tuner.api.get_best_configuration.return_value = None @@ -304,8 +346,7 @@ class TestAutoRun(unittest.TestCase): @mock.patch("autotuner.resumable.interface.os.path.exists") def test_file_exists_error_or_path(self, mock_exists): mock_exists.return_value = True - self.assertRaises(IOError, file_exists_error_or_path, "data_dir", - "data_file") + self.assertRaises(IOError, file_exists_error_or_path, "data_dir", "data_file") mock_exists.return_value = False self.assertEqual( os.path.join("data_dir", "data_file"), diff --git a/autotuner/test/test_resumable_yaml_manager.py b/autotuner/test/test_resumable_yaml_manager.py index bef687d26a2f8ade7618751a6e76d1240601a955..ca47f488091c555b69edec38ecd0bace61ddcb31 100644 --- a/autotuner/test/test_resumable_yaml_manager.py +++ b/autotuner/test/test_resumable_yaml_manager.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- """ -Tests for Autotuner's yaml manager for resumable interface. +Tests for BiSheng Autotuner's yaml manager for resumable interface. 
Copyright (C) 2022-2022, Huawei Technologies Co., Ltd. All rights reserved. """ import os @@ -14,6 +14,7 @@ import yaml from autotuner.yamlmanager import YAMLManager from autotuner import yamlmanager + try: from yaml import CLoader as Loader except ImportError: @@ -37,7 +38,6 @@ class YAMLTest(unittest.TestCase): # Clean temp directory after every tests. self.temp_dir.cleanup() - def cmp_files(self, expected_path, actual_path): line1 = line2 = True with open(expected_path, "r") as f1, open(actual_path, "r") as f2: @@ -48,7 +48,6 @@ class YAMLTest(unittest.TestCase): return False return True - def test_parse_dynamic_options(self): """ Test to handle dynamic values retrieved from the new generated @@ -63,16 +62,16 @@ class YAMLTest(unittest.TestCase): ) yaml_manager = YAMLManager() search_space = yaml_manager.generate_search_space( - [search_space_file], self.args.search_config_file) - with open(self.args.output, 'w') as f: - yaml.dump(search_space, f, sort_keys = False) + [search_space_file], self.args.search_config_file + ) + with open(self.args.output, "w") as f: + yaml.dump(search_space, f, sort_keys=False) expected = os.path.join( os.path.dirname(__file__), "Outputs", "yaml", "dynamic_options.yaml" ) self.compare_yaml_content(expected, self.args.output) - def test_parse_multiple_param(self): """ Test to handle multiple dynamic values retrieved from @@ -85,9 +84,9 @@ class YAMLTest(unittest.TestCase): ) ] - YAMLManager().generate_search_space_file(self.args.opp_file, - self.args.output, - self.args.search_config_file) + YAMLManager().generate_search_space_file( + self.args.opp_file, self.args.output, self.args.search_config_file + ) expected_ss_path = os.path.join( os.path.dirname(__file__), @@ -98,29 +97,28 @@ class YAMLTest(unittest.TestCase): self.compare_yaml_content(expected_ss_path, self.args.output) - def compare_yaml_content(self, expected_yaml_path, actual_yaml_path): """ Compare two yaml files by their contents using pyyaml. """ try: with open(expected_yaml_path) as expected_stream, open( - actual_yaml_path) as actual_stream: - actual_generator = yaml.load_all(actual_stream, - Loader=Loader) - for expected_dict in yaml.load_all(expected_stream, - Loader=Loader): + actual_yaml_path + ) as actual_stream: + actual_generator = yaml.load_all(actual_stream, Loader=Loader) + for expected_dict in yaml.load_all(expected_stream, Loader=Loader): actual_dict = next(actual_generator) self.assertEqual( - expected_dict, actual_dict, "Parsed result yaml " - "different from expected") + expected_dict, + actual_dict, + "Parsed result yaml " "different from expected", + ) # expected has finished, check whether actual finished too # if the actual_generator still has content, then fail self.assertRaises(StopIteration, next, actual_generator) finally: os.unlink(actual_yaml_path) - def test_parse_baseline_configs(self): """ Test to handle baseline values retrieved from the generated search_space @@ -131,7 +129,8 @@ class YAMLTest(unittest.TestCase): ) yaml_manager = YAMLManager() search_space = yaml_manager.generate_search_space( - [search_space_file], self.args.search_config_file) + [search_space_file], self.args.search_config_file + ) yaml_manager.parse_search_space(search_space, False, True, self.args.output) expected = os.path.join( @@ -139,7 +138,6 @@ class YAMLTest(unittest.TestCase): ) self.assertTrue(self.cmp_files(expected, self.args.output)) - def test_yaml_dump(self): """ Verify that each code region is dumped on a single line. 
@@ -157,7 +155,7 @@ class YAMLTest(unittest.TestCase): os.path.join(opp_dir, "core_list_join.c.yaml"), ) - # Run AutoTuner to process opportunity files and generate config.yaml. + # Run BiSheng Autotuner to process opportunity files and generate config.yaml. cmd = [os.path.join(autotuner_dir, "llvm-autotune"), "minimize"] if os.name == "nt": cmd.insert(0, "py") diff --git a/autotuner/test/test_run.py b/autotuner/test/test_run.py index f694313c6e20e968f29118fbd86a9f72ce1bf2b4..b5170ac1f49b40b747acf5bb0d1374bd11453496 100755 --- a/autotuner/test/test_run.py +++ b/autotuner/test/test_run.py @@ -13,6 +13,7 @@ from autotuner.main import run_main from autotuner.tuners.simple_tuner import SimpleTuner from autotuner.yamlmanager import YAMLManager from opentuner import Result + try: from yaml import CLoader as Loader except ImportError: @@ -21,7 +22,7 @@ except ImportError: class TestAutotunerRun(unittest.TestCase): """ - Test the 'run' subcommand in autotuner + Test the 'run' subcommand in BiSheng Autotuner """ def setUp(self): @@ -30,41 +31,41 @@ class TestAutotunerRun(unittest.TestCase): self.args.list_techniques = False self.args.command = "run" curr_dir = os.path.dirname(__file__) - self.args.config_file = os.path.join( - curr_dir, "Inputs", "test_sample.ini") + self.args.config_file = os.path.join(curr_dir, "Inputs", "test_sample.ini") self.args.search_space = os.path.join( - curr_dir, "Inputs", "run", "search_space_loop_only.yaml") + curr_dir, "Inputs", "run", "search_space_loop_only.yaml" + ) self.args.tuner = None self.args.output = None self.test_configuration_data = { - '1PeelCount': 0, - '1UnrollCount': 8, - '1MachineScheduling': 1, - '1VectorizationInterleave': 4, - '1ForceTargetMaxVectorInterleaveFactor': 4, - '1OptPass': 2, - '2PeelCount': 1, - '2UnrollCount': 8, - '2VectorizationInterleave': 4, - '2MachineScheduling': 4, - '2ForceTargetMaxVectorInterleaveFactor': 2, - '2OptPass': 1, - '3PeelCount': 0, - '3UnrollCount': 4, - '3VectorizationInterleave': 2, - '3MachineScheduling': 2, - '3DummyIntParam': 2, - '4PeelCount': 0, - '4UnrollCount': 1, - '4VectorizationInterleave': 4, - '4DummyFloatParam': 4.563, - '5PeelCount': 0, - '5UnrollCount': 1, - '5VectorizationInterleave': 2, - '6PeelCount': 0, - '6UnrollCount': 2, - '6VectorizationInterleave': 4, + "1PeelCount": 0, + "1UnrollCount": 8, + "1MachineScheduling": 1, + "1VectorizationInterleave": 4, + "1ForceTargetMaxVectorInterleaveFactor": 4, + "1OptPass": 2, + "2PeelCount": 1, + "2UnrollCount": 8, + "2VectorizationInterleave": 4, + "2MachineScheduling": 4, + "2ForceTargetMaxVectorInterleaveFactor": 2, + "2OptPass": 1, + "3PeelCount": 0, + "3UnrollCount": 4, + "3VectorizationInterleave": 2, + "3MachineScheduling": 2, + "3DummyIntParam": 2, + "4PeelCount": 0, + "4UnrollCount": 1, + "4VectorizationInterleave": 4, + "4DummyFloatParam": 4.563, + "5PeelCount": 0, + "5UnrollCount": 1, + "5VectorizationInterleave": 2, + "6PeelCount": 0, + "6UnrollCount": 2, + "6VectorizationInterleave": 4, } def compare_yaml_content(self, expected_yaml_path, actual_yaml_path): @@ -73,14 +74,16 @@ class TestAutotunerRun(unittest.TestCase): """ try: with open(expected_yaml_path) as expected_stream, open( - actual_yaml_path) as actual_stream: + actual_yaml_path + ) as actual_stream: actual_generator = yaml.load_all(actual_stream, Loader=Loader) - for expected_remark in yaml.load_all(expected_stream, - Loader=Loader): + for expected_remark in yaml.load_all(expected_stream, Loader=Loader): actual_remark = next(actual_generator) - self.assertEqual(expected_remark, 
actual_remark, - "Parsed result yaml different " - "from expected") + self.assertEqual( + expected_remark, + actual_remark, + "Parsed result yaml different " "from expected", + ) # expected has finished, check whether actual finished too # if the actual_generator still has content, then fail self.assertRaises(StopIteration, next, actual_generator) @@ -91,74 +94,86 @@ class TestAutotunerRun(unittest.TestCase): """ Check LLVM input can be successfully and correctly generated """ - search_space = os.path.join(os.path.dirname(__file__), "Inputs", - "run", "search_space.yaml") + search_space = os.path.join( + os.path.dirname(__file__), "Inputs", "run", "search_space.yaml" + ) yaml_manager = YAMLManager() task_map = yaml_manager.parse_search_space(search_space) - yaml_manager.build_llvm_input(self.test_configuration_data, task_map, - "actual_llvm_input.yaml", None) + yaml_manager.build_llvm_input( + self.test_configuration_data, task_map, "actual_llvm_input.yaml", None + ) - expected_llvm_input_path = os.path.join(os.path.dirname( - __file__), "Outputs/run/llvm_input.yaml") - self.compare_yaml_content(expected_llvm_input_path, - "actual_llvm_input.yaml") + expected_llvm_input_path = os.path.join( + os.path.dirname(__file__), "Outputs/run/llvm_input.yaml" + ) + self.compare_yaml_content(expected_llvm_input_path, "actual_llvm_input.yaml") def test_build_llvm_input_loop_only(self): - search_space = os.path.join(os.path.dirname(__file__), "Inputs", - "run", "search_space_loop_only.yaml") + search_space = os.path.join( + os.path.dirname(__file__), "Inputs", "run", "search_space_loop_only.yaml" + ) yaml_manager = YAMLManager() task_map = yaml_manager.parse_search_space(search_space) - yaml_manager.build_llvm_input(self.test_configuration_data, task_map, - "actual_llvm_input.yaml", None) + yaml_manager.build_llvm_input( + self.test_configuration_data, task_map, "actual_llvm_input.yaml", None + ) - expected_llvm_input_path = os.path.join(os.path.dirname( - __file__), "Outputs/run/llvm_input_loop_only.yaml") - self.compare_yaml_content(expected_llvm_input_path, - "actual_llvm_input.yaml") + expected_llvm_input_path = os.path.join( + os.path.dirname(__file__), "Outputs/run/llvm_input_loop_only.yaml" + ) + self.compare_yaml_content(expected_llvm_input_path, "actual_llvm_input.yaml") def test_build_llvm_input_mbb_only(self): - search_space = os.path.join(os.path.dirname(__file__), "Inputs", - "run", "search_space_mbb_only.yaml") + search_space = os.path.join( + os.path.dirname(__file__), "Inputs", "run", "search_space_mbb_only.yaml" + ) yaml_manager = YAMLManager() task_map = yaml_manager.parse_search_space(search_space) - yaml_manager.build_llvm_input(self.test_configuration_data, task_map, - "actual_llvm_input.yaml", None) + yaml_manager.build_llvm_input( + self.test_configuration_data, task_map, "actual_llvm_input.yaml", None + ) - expected_llvm_input_path = os.path.join(os.path.dirname( - __file__), "Outputs/run/llvm_input_mbb_only.yaml") - self.compare_yaml_content(expected_llvm_input_path, - "actual_llvm_input.yaml") + expected_llvm_input_path = os.path.join( + os.path.dirname(__file__), "Outputs/run/llvm_input_mbb_only.yaml" + ) + self.compare_yaml_content(expected_llvm_input_path, "actual_llvm_input.yaml") def test_build_llvm_input_function_only(self): - search_space = os.path.join(os.path.dirname(__file__), "Inputs", - "run", "search_space_function_only.yaml") + search_space = os.path.join( + os.path.dirname(__file__), + "Inputs", + "run", + "search_space_function_only.yaml", + ) yaml_manager = 
YAMLManager() task_map = yaml_manager.parse_search_space(search_space) - yaml_manager.build_llvm_input(self.test_configuration_data, task_map, - "actual_llvm_input.yaml", None) + yaml_manager.build_llvm_input( + self.test_configuration_data, task_map, "actual_llvm_input.yaml", None + ) - expected_llvm_input_path = os.path.join(os.path.dirname( - __file__), "Outputs/run/llvm_input_function_only.yaml") - self.compare_yaml_content(expected_llvm_input_path, - "actual_llvm_input.yaml") + expected_llvm_input_path = os.path.join( + os.path.dirname(__file__), "Outputs/run/llvm_input_function_only.yaml" + ) + self.compare_yaml_content(expected_llvm_input_path, "actual_llvm_input.yaml") def test_build_llvm_input_module_only(self): - search_space = os.path.join(os.path.dirname(__file__), "Inputs", - "run", "search_space_module_only.yaml") + search_space = os.path.join( + os.path.dirname(__file__), "Inputs", "run", "search_space_module_only.yaml" + ) yaml_manager = YAMLManager() task_map = yaml_manager.parse_search_space(search_space) - yaml_manager.build_llvm_input(self.test_configuration_data, task_map, - "actual_llvm_input.yaml", None) + yaml_manager.build_llvm_input( + self.test_configuration_data, task_map, "actual_llvm_input.yaml", None + ) - expected_llvm_input_path = os.path.join(os.path.dirname( - __file__), "Outputs/run/llvm_input_module_only.yaml") - self.compare_yaml_content(expected_llvm_input_path, - "actual_llvm_input.yaml") + expected_llvm_input_path = os.path.join( + os.path.dirname(__file__), "Outputs/run/llvm_input_module_only.yaml" + ) + self.compare_yaml_content(expected_llvm_input_path, "actual_llvm_input.yaml") @mock.patch("autotuner.main._parse_common_options") @mock.patch("autotuner.main.issubclass") - def test_run_main_called_tuner(self, mock_issubclass, - mock_parse_common_options): + def test_run_main_called_tuner(self, mock_issubclass, mock_parse_common_options): """ Check if tuner.main is called """ @@ -168,7 +183,8 @@ class TestAutotunerRun(unittest.TestCase): config = ConfigParser() config.optionxform = str config["DEFAULT"]["ConfigFilePath"] = os.path.abspath( - os.path.dirname(self.args.config_file)) + os.path.dirname(self.args.config_file) + ) config.read(self.args.config_file) mock_parse_common_options.return_value = (tuner, config) @@ -178,38 +194,50 @@ class TestAutotunerRun(unittest.TestCase): @mock.patch.object(SimpleTuner, "call_program") @mock.patch.object(SimpleTuner, "_print_errors") - def test_simpletuner_call_program(self, mock_print_errors, - mock_call_program): + def test_simpletuner_call_program(self, mock_print_errors, mock_call_program): """ Check if call_program function is called """ mock_call_program.return_value = {"returncode": 0, "time": 10} - tuner = SimpleTuner(self.args, None, None, - self.args.search_space, None, "some run_dir", - "some run_cmd") + tuner = SimpleTuner( + self.args, + None, + None, + self.args.search_space, + None, + "some run_dir", + "some run_cmd", + ) result = tuner.run(None, None, None) mock_call_program.assert_called_once_with( - "some run_cmd", cwd="some run_dir", limit=120) + "some run_cmd", cwd="some run_dir", limit=120 + ) self.assertIsInstance(result, Result) mock_print_errors.assert_not_called() @mock.patch.object(SimpleTuner, "call_program") @mock.patch.object(SimpleTuner, "_print_errors") - def test_simpletuner_nonzero_returncode(self, mock_print_errors, - mock_call_program): + def test_simpletuner_nonzero_returncode(self, mock_print_errors, mock_call_program): """ Check correct error handling when call_program 
returns non-zero """ mock_call_program.return_value = {"returncode": 1, "time": 10} - tuner = SimpleTuner(self.args, None, None, - self.args.search_space, None, "some run_dir", - "some run_cmd") + tuner = SimpleTuner( + self.args, + None, + None, + self.args.search_space, + None, + "some run_dir", + "some run_cmd", + ) result = tuner.run(None, None, None) mock_call_program.assert_called_once_with( - "some run_cmd", cwd="some run_dir", limit=120) + "some run_cmd", cwd="some run_dir", limit=120 + ) self.assertIsInstance(result, Result) mock_print_errors.assert_called() diff --git a/autotuner/test/test_utils.py b/autotuner/test/test_utils.py index 1602e0d921f847cbdd5b56144be887674567ac95..d97dc8760c04f83f0cf1dd7aa64cba366cd3ebf1 100644 --- a/autotuner/test/test_utils.py +++ b/autotuner/test/test_utils.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- """ -Tests for Autotuner's utility functions. +Tests for BiSheng Autotuner's utility functions. Copyright (C) 2017-2020, Huawei Technologies Co., Ltd. All rights reserved. """ @@ -72,7 +72,6 @@ class TestUtils(unittest.TestCase): with self.assertRaises(IOError): parse_feedback_file(self.temp_file) - @mock.patch("logging.Logger.info") def test_file_permissions(self, mock_logger): """ @@ -102,5 +101,5 @@ class TestUtils(unittest.TestCase): check_file_permissions(self.temp_file) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/autotuner/tuners/simple_tuner.py b/autotuner/tuners/simple_tuner.py index 48e695d6f5f5c8bb963461a70392b6b0a0eb2b34..e543d5d87232fd53761e696166f3e2ca7cc5b960 100644 --- a/autotuner/tuners/simple_tuner.py +++ b/autotuner/tuners/simple_tuner.py @@ -16,18 +16,17 @@ class SimpleTuner(CustomTunerBase): # and returns the calculated performance under this configuration def run(self, desired_result, desired_input, limit): """ - Compile and run a given configuration then - return performance - """ - time = float('inf') + Compile and run a given configuration then + return performance + """ + time = float("inf") # create a command for running a executable - run_result = self.call_program(self.run_cmd, - cwd=self.run_dir, limit=120) + run_result = self.call_program(self.run_cmd, cwd=self.run_dir, limit=120) # check if the source program is compiled and run successful - if run_result['returncode'] == 0: - time = run_result['time'] + if run_result["returncode"] == 0: + time = run_result["time"] else: self._print_errors(self.run_cmd, run_result) diff --git a/autotuner/tuners/tunerbase.py b/autotuner/tuners/tunerbase.py index 71f44049fa2601c996d0cade82a36bccf6e7a4bf..d7b5b0be05c0ac5a50afa7ef70c0bcdb3626bd5c 100644 --- a/autotuner/tuners/tunerbase.py +++ b/autotuner/tuners/tunerbase.py @@ -14,15 +14,21 @@ from opentuner import Result from autotuner.iomanagerutils import create_io_manager argument_parser = argparse.ArgumentParser(add_help=False) -argument_parser.add_argument('--time-after-convergence', '-tac', type=float, - metavar='TIME', - help='stop tuning if no new best results after ' - 'given seconds') -argument_parser.add_argument('-o', '--output', metavar='DIR', - help='write optimal yaml config into the given ' - 'directory') - -STAGES = ['module', 'function', 'loop', 'machine_basic_block'] +argument_parser.add_argument( + "--time-after-convergence", + "-tac", + type=float, + metavar="TIME", + help="stop tuning if no new best results after " "given seconds", +) +argument_parser.add_argument( + "-o", + "--output", + metavar="DIR", + help="write optimal yaml config into the given " 
"directory", +) + +STAGES = ["module", "function", "loop", "machine_basic_block"] class TunerBase(MeasurementInterface): @@ -30,12 +36,21 @@ class TunerBase(MeasurementInterface): Abstract base class for tuning-enabled LLVM related auto-tuning """ - def __init__(self, args, compile_dir, llvm_config_file, search_space, - compile_cmd, fixed_llvm_config_files=None, - enable_final_compile=False, stage=None, config_db=None, - *pargs, **kwargs): - super(TunerBase, self).__init__(args, *pargs, - **kwargs) + def __init__( + self, + args, + compile_dir, + llvm_config_file, + search_space, + compile_cmd, + fixed_llvm_config_files=None, + enable_final_compile=False, + stage=None, + config_db=None, + *pargs, + **kwargs + ): + super(TunerBase, self).__init__(args, *pargs, **kwargs) self.iomanager = create_io_manager(args.parse_format) self.compile_dir = compile_dir self.llvm_input_file = llvm_config_file @@ -46,7 +61,8 @@ class TunerBase(MeasurementInterface): if fixed_llvm_config_files: self.fixed_llvm_config_tree = self.iomanager.parse_llvm_inputs( - fixed_llvm_config_files) + fixed_llvm_config_files + ) else: self.fixed_llvm_config_tree = None @@ -78,8 +94,9 @@ class TunerBase(MeasurementInterface): def compile(self, config_data=None, compile_id=None): # run the compile command - compile_result = self.call_program(self.compile_cmd, - cwd=self.compile_dir, limit=1500) + compile_result = self.call_program( + self.compile_cmd, cwd=self.compile_dir, limit=1500 + ) return compile_result def compile_and_run(self, desired_result, desired_input, limit): @@ -89,24 +106,29 @@ class TunerBase(MeasurementInterface): cfg = desired_result.configuration.data self.iomanager.build_llvm_input( - cfg, self.task_map, self.llvm_input_file, + cfg, + self.task_map, + self.llvm_input_file, self.fixed_llvm_config_tree, - self.config_db, self.use_hash_matching) + self.config_db, + self.use_hash_matching, + ) # compiler the program compile_result = self.compile() # if compiling failed - if compile_result['timeout']: + if compile_result["timeout"]: print("compiling timeoout") - return Result(state='TIMEOUT', time=float('inf')) - elif compile_result['returncode'] != 0: + return Result(state="TIMEOUT", time=float("inf")) + elif compile_result["returncode"] != 0: print("compiling error, test failed") print(compile_result["stderr"]) else: return self.run(desired_result, desired_input, limit) - return Result(state='ERROR', time=float('inf'), cycle=float('inf'), - rate=-float('inf')) + return Result( + state="ERROR", time=float("inf"), cycle=float("inf"), rate=-float("inf") + ) def extra_convergence_criteria(self, result_list): """ @@ -116,8 +138,9 @@ class TunerBase(MeasurementInterface): """ if self.args.time_after_convergence: # check if any new best results found - is_any_new_best = any([ele.was_new_best for ele - in result_list]) if result_list else False + is_any_new_best = ( + any([ele.was_new_best for ele in result_list]) if result_list else False + ) # if there is a new best results found, # reset last_best_result_time to now @@ -127,8 +150,7 @@ class TunerBase(MeasurementInterface): else: elapsed = (datetime.now() - self.last_best_result_time).seconds if elapsed > self.args.time_after_convergence: - print("time elapsed since last best result found: " + - str(elapsed)) + print("time elapsed since last best result found: " + str(elapsed)) return True return False @@ -139,13 +161,16 @@ class TunerBase(MeasurementInterface): print("Tuning run is ending...") if self.enable_final_compile: print("Performing final compilation with 
opt config...") - self.iomanager.build_llvm_input(configuration.data, self.task_map, - self.llvm_input_file, - self.fixed_llvm_config_tree, - self.config_db, - self.use_hash_matching) + self.iomanager.build_llvm_input( + configuration.data, + self.task_map, + self.llvm_input_file, + self.fixed_llvm_config_tree, + self.config_db, + self.use_hash_matching, + ) compile_result = self.compile() - if compile_result['returncode'] != 0: + if compile_result["returncode"] != 0: print("Compiling error") print(compile_result["stderr"]) else: @@ -156,29 +181,41 @@ class TunerBase(MeasurementInterface): if self.args.config_update_type and self.config_db: self.iomanager.update_config_db( - configuration.data, self.task_map, + configuration.data, + self.task_map, config_db=self.config_db, - use_hash_matching=self.use_hash_matching) + use_hash_matching=self.use_hash_matching, + ) print("configs.db has been updated with optimal configurations.") self.iomanager.build_llvm_input( - configuration.data, self.task_map, output_path + - self.iomanager.get_file_extension(), self.fixed_llvm_config_tree, - self.config_db, self.use_hash_matching) - print("Optimal configuration for llvm/clang has been saved to " + - output_path + self.iomanager.get_file_extension()) - self.manipulator().save_to_file(configuration.data, - output_path + ".json") - print("Optimal json configuration for opentuner has been saved to " + - output_path + ".json") - print("You can use the json file with --seed-configuration " - "for next tuning run") + configuration.data, + self.task_map, + output_path + self.iomanager.get_file_extension(), + self.fixed_llvm_config_tree, + self.config_db, + self.use_hash_matching, + ) + print( + "Optimal configuration for llvm/clang has been saved to " + + output_path + + self.iomanager.get_file_extension() + ) + self.manipulator().save_to_file(configuration.data, output_path + ".json") + print( + "Optimal json configuration for opentuner has been saved to " + + output_path + + ".json" + ) + print( + "You can use the json file with --seed-configuration " "for next tuning run" + ) def _print_errors(self, cmd, run_result): - print('running command failed, the error was: ') - print(run_result['stderr']) - print('the cmd was: ') - print(cmd + '\n') + print("running command failed, the error was: ") + print(run_result["stderr"]) + print("the cmd was: ") + print(cmd + "\n") class CustomTunerBase(TunerBase): @@ -186,11 +223,27 @@ class CustomTunerBase(TunerBase): Abstract base class for non-coddess based tuner """ - def __init__(self, args, compile_dir, llvm_config_file, search_space, - compile_cmd, run_dir, run_cmd, *pargs, **kwargs): + def __init__( + self, + args, + compile_dir, + llvm_config_file, + search_space, + compile_cmd, + run_dir, + run_cmd, + *pargs, + **kwargs + ): super(CustomTunerBase, self).__init__( - args, compile_dir, llvm_config_file, search_space, - compile_cmd, *pargs, **kwargs) + args, + compile_dir, + llvm_config_file, + search_space, + compile_cmd, + *pargs, + **kwargs + ) self.run_dir = run_dir self.run_cmd = run_cmd @@ -213,9 +266,9 @@ def get_available_tuners(tuner_dir): def _look_up_tuners_by_dir(tuner_dir): tuners = () for exist_file in sorted(os.listdir(os.path.dirname(tuner_dir))): - match = re.match(r'^(.*)[.]py(c?)$', exist_file) + match = re.match(r"^(.*)[.]py(c?)$", exist_file) if match: module = match.group(1) - if module[-6:].lower() == '_tuner': + if module[-6:].lower() == "_tuner": tuners += (module,) return tuners diff --git a/autotuner/utils.py b/autotuner/utils.py index 
af8a6f879769c1874db9362538adfd36f0a25bd9..0ef2d3bc83589af5750c0519d2635c54714e5250 100644 --- a/autotuner/utils.py +++ b/autotuner/utils.py @@ -17,15 +17,18 @@ def parse_hot_function(file_path, num): """ hot_functions = [] if len(file_path) > 0: - with open(file_path[0], "r")as file_object: + with open(file_path[0], "r") as file_object: lines = file_object.readlines() line_number = 0 index = 0 while line_number < len(lines): - if lines[line_number].split()[0] != '#': + if lines[line_number].split()[0] != "#": line_number = line_number + 2 - while index < num and line_number < len( - lines) and len(lines[line_number].split()) >= 3: + while ( + index < num + and line_number < len(lines) + and len(lines[line_number].split()) >= 3 + ): hot_functions.append(lines[line_number].split()[-1]) index = index + 1 line_number = line_number + 1 @@ -62,8 +65,10 @@ def check_file_permissions(file_path): stat_info = os.stat(file_path) if bool(stat_info.st_mode & (stat.S_IWGRP | stat.S_IWOTH)): - raise IOError("file is writable by other users " - "(potential security risk): {}".format(file_path)) + raise IOError( + "file is writable by other users " + "(potential security risk): {}".format(file_path) + ) def parse_feedback_file(filename): @@ -73,7 +78,7 @@ def parse_feedback_file(filename): check_file_permissions(filename) feedback_values = [] try: - with open(filename, newline='') as file: + with open(filename, newline="") as file: # csv.reader is responsible for reporting format issues by # raising exceptions. reader = csv.reader(file) @@ -82,6 +87,7 @@ def parse_feedback_file(filename): if ele: feedback_values.append(float(ele)) except Exception as error: - raise IOError("Invalid format in feedback file " - ":{}: {}".format(filename, str(error))) + raise IOError( + "Invalid format in feedback file " ":{}: {}".format(filename, str(error)) + ) return feedback_values diff --git a/autotuner/xmlmanager.py b/autotuner/xmlmanager.py index d33f11f54d1ab2c589dd16e206b3018c19deb7b5..19fbdc08dd1d7a4a708dc972e3ea562115da0cc3 100644 --- a/autotuner/xmlmanager.py +++ b/autotuner/xmlmanager.py @@ -38,8 +38,7 @@ def _parse_param(tuning_id, xml_param): param_type = xml_param.get("type") if param_type == "bool": - return EnumParameter(tuning_id + xml_param.find("name").text, ["1", - "0"]) + return EnumParameter(tuning_id + xml_param.find("name").text, ["1", "0"]) elif param_type == "enum": options = [value.text for value in xml_param.findall("value")] @@ -49,30 +48,31 @@ def _parse_param(tuning_id, xml_param): # Keep param_type == "range" for backwards compatability min_value = int(xml_param.find("min").text) max_value = int(xml_param.find("max").text) - return IntegerParameter(tuning_id + xml_param.find("name").text, - min_value, max_value) + return IntegerParameter( + tuning_id + xml_param.find("name").text, min_value, max_value + ) elif param_type == "int": min_value = int(xml_param.find("Min").text) max_value = int(xml_param.find("Max").text) - return IntegerParameter(tuning_id + xml_param.find("name").text, - min_value, max_value) + return IntegerParameter( + tuning_id + xml_param.find("name").text, min_value, max_value + ) elif param_type == "float": min_value = float(xml_param.find("Min").text) max_value = float(xml_param.find("Max").text) - return FloatParameter(tuning_id + xml_param.find("name").text, - min_value, max_value) + return FloatParameter( + tuning_id + xml_param.find("name").text, min_value, max_value + ) elif param_type == "permutation": options = [value.text for value in xml_param.findall("value")] - 
return PermutationParameter(tuning_id + xml_param.find("name").text, - options) + return PermutationParameter(tuning_id + xml_param.find("name").text, options) elif param_type == "selection": options = [value.text for value in xml_param.findall("value")] - return SelectionParameter(tuning_id + xml_param.find("name").text, - options) + return SelectionParameter(tuning_id + xml_param.find("name").text, options) else: raise Exception("No type specified for params in xml") @@ -81,7 +81,7 @@ def _parse_param(tuning_id, xml_param): def _merge_llvm_input_trees(tree_a, tree_b): root_a = _convert_defusedxml_to_etree(tree_a.getroot()) root_b = _convert_defusedxml_to_etree(tree_b.getroot()) - for ele in root_b.findall('input'): + for ele in root_b.findall("input"): root_a.append(ele) tree_a._setroot(root_a) return tree_a @@ -97,9 +97,10 @@ def _divide_llvm_input_tree(tree): if file_name is not None: if file_name.text in tree_map: tree_map[file_name.text].getroot().append( - _convert_defusedxml_to_etree(input_ele)) + _convert_defusedxml_to_etree(input_ele) + ) else: - inputs = xml_writer.Element('inputs') + inputs = xml_writer.Element("inputs") inputs.append(_convert_defusedxml_to_etree(input_ele)) new_tree = xml_writer.ElementTree(inputs) tree_map[file_name.text] = new_tree @@ -107,9 +108,17 @@ def _divide_llvm_input_tree(tree): return tree_map -def _generate_search_space(file_path, new_xml_root, start_tuning_id, - config_file, name_filter, func_name_filter, - file_name_filter, type_filter, pass_filter): +def _generate_search_space( + file_path, + new_xml_root, + start_tuning_id, + config_file, + name_filter, + func_name_filter, + file_name_filter, + type_filter, + pass_filter, +): tuning_id = start_tuning_id # search space of xml configuration file @@ -121,13 +130,13 @@ def _generate_search_space(file_path, new_xml_root, start_tuning_id, if global_params is not None: global_param_config[code_region.attrib["type"]] = global_params - with open(file_path, 'r+') as input_file: + with open(file_path, "r+") as input_file: content = input_file.read() # parse the input xml parser = xml_reader.XMLParser() - parser.feed(b'') + parser.feed(b"") parser.feed(content) - parser.feed(b'') + parser.feed(b"") opp_root = parser.close() code_regions_list = opp_root.findall("code_regions") @@ -142,26 +151,26 @@ def _generate_search_space(file_path, new_xml_root, start_tuning_id, func_name = code_region.find("func_name").text name = code_region.find("name").text code_region_type = code_region.attrib["type"] - filtered = _apply_code_region_filter(file_name, - file_name_filter) and \ - _apply_code_region_filter(func_name, - func_name_filter) and \ - _apply_code_region_filter(code_region_type, - type_filter) and \ - _apply_code_region_filter(name, - name_filter) + filtered = ( + _apply_code_region_filter(file_name, file_name_filter) + and _apply_code_region_filter(func_name, func_name_filter) + and _apply_code_region_filter(code_region_type, type_filter) + and _apply_code_region_filter(name, name_filter) + ) if filtered: tuning_id += 1 task = xml_writer.SubElement(new_xml_root, "task") task.append(code_region) - xml_writer.SubElement(task, "tuning_id").text = \ - str(tuning_id) + xml_writer.SubElement(task, "tuning_id").text = str(tuning_id) params = xml_writer.SubElement(task, "params") # add params from global config file - params.extend(_convert_defusedxml_to_etree( - global_param_config[code_region_type])) + params.extend( + _convert_defusedxml_to_etree( + global_param_config[code_region_type] + ) + ) # return the last 
tuning id return tuning_id @@ -178,8 +187,13 @@ class XMLManager(IOManager): def get_file_extension(self): return ".xml" - def parse_search_space(self, search_space, use_dynamic_values=False, - use_baseline_config = False, filepath = None): + def parse_search_space( + self, + search_space, + use_dynamic_values=False, + use_baseline_config=False, + filepath=None, + ): """ Parse the xml search space config file to init the tuner @@ -225,24 +239,30 @@ class XMLManager(IOManager): if param_list: # retrieve code_region information from xml code_region_xml = task_xml.find("code_region") - code_region = \ - LegacyCodeRegion(code_region_xml.find("name").text, - code_region_xml.find("file_name").text, - code_region_xml.find("func_name").text, - int(code_region_xml.find( - "start_line").text), - int(code_region_xml.find( - "end_line").text), - code_region_xml.get("type")) + code_region = LegacyCodeRegion( + code_region_xml.find("name").text, + code_region_xml.find("file_name").text, + code_region_xml.find("func_name").text, + int(code_region_xml.find("start_line").text), + int(code_region_xml.find("end_line").text), + code_region_xml.get("type"), + ) if code_region not in code_region_set: code_region_set.add(code_region) - task_map[int(tuning_id)] = Task(int(tuning_id), - param_list, code_region) + task_map[int(tuning_id)] = Task( + int(tuning_id), param_list, code_region + ) return task_map - def build_llvm_input(self, configuration_data, task_map, output_file, - fixed_llvm_input_tree=None, config_db=None, - use_hash_matching=False): + def build_llvm_input( + self, + configuration_data, + task_map, + output_file, + fixed_llvm_input_tree=None, + config_db=None, + use_hash_matching=False, + ): """ Build input xml file for tuning-enabled LLVM based on task_map and configuration_data, and output as output_file @@ -262,13 +282,13 @@ class XMLManager(IOManager): """ # generating the root - inputs = xml_writer.Element('inputs') + inputs = xml_writer.Element("inputs") # loop through the task map to generate xml input for tuning-enabled # LLVM for tuning_id, task in task_map.items(): - input_ele = xml_writer.SubElement(inputs, 'input') - params_xml = xml_writer.SubElement(input_ele, 'params') + input_ele = xml_writer.SubElement(inputs, "input") + params_xml = xml_writer.SubElement(input_ele, "params") for param in task.param_list: # Since param.name is in the form of ID+Param @@ -289,23 +309,23 @@ class XMLManager(IOManager): pass_list = choice if pass_list: - param_xml = xml_writer.SubElement(params_xml, 'param') - param_xml.set('type', 'list') - name = xml_writer.SubElement(param_xml, 'name') + param_xml = xml_writer.SubElement(params_xml, "param") + param_xml.set("type", "list") + name = xml_writer.SubElement(param_xml, "name") name.text = raw_param_name for ele in pass_list: - value = xml_writer.SubElement(param_xml, 'value') + value = xml_writer.SubElement(param_xml, "value") value.text = ele # FIXME elif raw_param_name == "MachineScheduling": - param_xml = xml_writer.SubElement(params_xml, 'param') - name = xml_writer.SubElement(param_xml, 'name') - value = xml_writer.SubElement(param_xml, 'value') - sed_param_xml = xml_writer.SubElement(params_xml, 'param') - sed_name = xml_writer.SubElement(sed_param_xml, 'name') - sed_value = xml_writer.SubElement(sed_param_xml, 'value') + param_xml = xml_writer.SubElement(params_xml, "param") + name = xml_writer.SubElement(param_xml, "name") + value = xml_writer.SubElement(param_xml, "value") + sed_param_xml = xml_writer.SubElement(params_xml, "param") + sed_name 
= xml_writer.SubElement(sed_param_xml, "name") + sed_value = xml_writer.SubElement(sed_param_xml, "value") if configuration_data[param.name] == "TopDown": name.text = "ForceTopDown" value.text = "1" @@ -323,33 +343,29 @@ class XMLManager(IOManager): sed_value.text = "0" else: - param_xml = xml_writer.SubElement(params_xml, 'param') - name = xml_writer.SubElement(param_xml, 'name') + param_xml = xml_writer.SubElement(params_xml, "param") + name = xml_writer.SubElement(param_xml, "name") name.text = raw_param_name - value = xml_writer.SubElement(param_xml, 'value') + value = xml_writer.SubElement(param_xml, "value") value.text = str(configuration_data[param.name]) # construct the code region in the xml tree - code_region = xml_writer.SubElement(input_ele, 'code_region') - code_region.set('type', task.code_region.code_region_type) + code_region = xml_writer.SubElement(input_ele, "code_region") + code_region.set("type", task.code_region.code_region_type) - code_region_file_name = xml_writer.SubElement(code_region, 'name') + code_region_file_name = xml_writer.SubElement(code_region, "name") code_region_file_name.text = task.code_region.name - code_region_file_name = xml_writer.SubElement(code_region, - 'file_name') + code_region_file_name = xml_writer.SubElement(code_region, "file_name") code_region_file_name.text = task.code_region.file_name - code_region_func_name = xml_writer.SubElement(code_region, - 'func_name') + code_region_func_name = xml_writer.SubElement(code_region, "func_name") code_region_func_name.text = task.code_region.func_name - code_region_start_line = xml_writer.SubElement(code_region, - 'start_line') + code_region_start_line = xml_writer.SubElement(code_region, "start_line") code_region_start_line.text = str(task.code_region.start_line) - code_region_end_line = xml_writer.SubElement(code_region, - 'end_line') + code_region_end_line = xml_writer.SubElement(code_region, "end_line") code_region_end_line.text = str(task.code_region.end_line) tree = xml_writer.ElementTree(inputs) @@ -384,12 +400,21 @@ class XMLManager(IOManager): xml_tree = xml_reader.parse(input_file) return _divide_llvm_input_tree(xml_tree) - def generate_search_space_file(self, files, output_file, config_file, - name_filter=None, func_name_filter=None, - file_name_filter=None, type_filter=None, - pass_filter=None, config_db=None, - use_hash_matching=False, - use_prev_configs=False, inject_seed=False): + def generate_search_space_file( + self, + files, + output_file, + config_file, + name_filter=None, + func_name_filter=None, + file_name_filter=None, + type_filter=None, + pass_filter=None, + config_db=None, + use_hash_matching=False, + use_prev_configs=False, + inject_seed=False, + ): """ Generate search space file for auto-tuner driver based on opportunities files which are generated by llvm, and output as output_file @@ -402,19 +427,31 @@ class XMLManager(IOManager): where the global search space settings are defined. 
""" - new_xml_tree = self.generate_search_space(files, config_file, - file_name_filter, - func_name_filter, - name_filter, - type_filter, - pass_filter) + new_xml_tree = self.generate_search_space( + files, + config_file, + file_name_filter, + func_name_filter, + name_filter, + type_filter, + pass_filter, + ) new_xml_tree.write(output_file) - def generate_search_space(self, files, config_file, file_name_filter=None, - func_name_filter=None, name_filter=None, - type_filter=None, pass_filter=None, - config_db=None, use_hash_matching=False, - use_prev_configs=False, inject_seed=False): + def generate_search_space( + self, + files, + config_file, + file_name_filter=None, + func_name_filter=None, + name_filter=None, + type_filter=None, + pass_filter=None, + config_db=None, + use_hash_matching=False, + use_prev_configs=False, + inject_seed=False, + ): """ Parse opportunities files generated by llvm and return a search space as ElementTree. @@ -431,17 +468,21 @@ class XMLManager(IOManager): """ # new xml file for output - new_xml_root = xml_writer.Element('tuning_request') + new_xml_root = xml_writer.Element("tuning_request") # if the given path is a directory tuning_id = 0 for filename in files: - end_tuning_id = _generate_search_space(filename, new_xml_root, - tuning_id, config_file, - name_filter, - func_name_filter, - file_name_filter, - type_filter, - pass_filter) + end_tuning_id = _generate_search_space( + filename, + new_xml_root, + tuning_id, + config_file, + name_filter, + func_name_filter, + file_name_filter, + type_filter, + pass_filter, + ) tuning_id = end_tuning_id # output xml tree into the output_file new_xml_tree = xml_writer.ElementTree(new_xml_root) @@ -453,7 +494,7 @@ class XMLManager(IOManager): Output an etree.ElementTree into the file """ fd = create_secure_fd(output_file) - with os.fdopen(fd, 'wb') as output_file_handler: + with os.fdopen(fd, "wb") as output_file_handler: tree.write(output_file_handler) @staticmethod diff --git a/autotuner/yamlmanager.py b/autotuner/yamlmanager.py index 91b53b1aaa270f6c4845acd866a9a83569fe81c3..4809e1d1933e6fe9816faca784612b677d3716f1 100644 --- a/autotuner/yamlmanager.py +++ b/autotuner/yamlmanager.py @@ -56,7 +56,7 @@ def _parse_param(tuning_id, yaml_param, ele): return EnumParameter(str(tuning_id) + ele, ["1", "0"]) elif param_type == "enum": - options = yaml_param['Value'] + options = yaml_param["Value"] return EnumParameter(str(tuning_id) + ele, options) elif param_type == "range": @@ -76,11 +76,11 @@ def _parse_param(tuning_id, yaml_param, ele): return FloatParameter(str(tuning_id) + ele, min_value, max_value) elif param_type == "permutation": - options = yaml_param['Value'] + options = yaml_param["Value"] return PermutationParameter(str(tuning_id) + ele, options) elif param_type == "selection": - options = yaml_param['Value'] + options = yaml_param["Value"] return SelectionParameter(str(tuning_id) + ele, options) else: @@ -93,15 +93,19 @@ def _update_program_param_code_regions(config_db, code_region): This function is added to handle program-param explicitly. 
""" add_current_code_region(config_db, code_region, seen=False) - if (is_duplicate_hash(config_db, - code_region['Hashcode'], code_region["CodeRegionType"], - code_region["Pass"])): + if is_duplicate_hash( + config_db, + code_region["Hashcode"], + code_region["CodeRegionType"], + code_region["Pass"], + ): return False return True -def _update_current_code_regions(config_db, code_region, use_hash_matching, - use_prev_configs, inject_seed): +def _update_current_code_regions( + config_db, code_region, use_hash_matching, use_prev_configs, inject_seed +): """ Stores `code_region` into the CurrentCodeRegions table and determines if it should be added to the search space. @@ -123,30 +127,49 @@ def _update_current_code_regions(config_db, code_region, use_hash_matching, Returns True iff the given code_region should be added to the search space. """ - if (use_prev_configs and optimal_config_exists(config_db, - code_region['Hashcode'], code_region["CodeRegionType"], - code_region["Pass"])): + if use_prev_configs and optimal_config_exists( + config_db, + code_region["Hashcode"], + code_region["CodeRegionType"], + code_region["Pass"], + ): add_current_code_region(config_db, code_region, seen=True) - if inject_seed and not is_duplicate_hash(config_db, - code_region['Hashcode'], code_region["CodeRegionType"], - code_region["Pass"]): + if inject_seed and not is_duplicate_hash( + config_db, + code_region["Hashcode"], + code_region["CodeRegionType"], + code_region["Pass"], + ): return True return False else: add_current_code_region(config_db, code_region, seen=False) - if (use_hash_matching and is_duplicate_hash(config_db, - code_region['Hashcode'], code_region["CodeRegionType"], - code_region["Pass"])): + if use_hash_matching and is_duplicate_hash( + config_db, + code_region["Hashcode"], + code_region["CodeRegionType"], + code_region["Pass"], + ): return False return True -def _generate_search_space(file_path, yaml_list, start_tuning_id, - config_file, name_filter, func_name_filter, - file_name_filter, type_filter, pass_filter, - config_db, use_hash_matching, use_prev_configs, - inject_seed): +def _generate_search_space( + file_path, + yaml_list, + start_tuning_id, + config_file, + name_filter, + func_name_filter, + file_name_filter, + type_filter, + pass_filter, + config_db, + use_hash_matching, + use_prev_configs, + inject_seed, +): tuning_id = start_tuning_id coderegion_found = 0 @@ -158,11 +181,12 @@ def _generate_search_space(file_path, yaml_list, start_tuning_id, config_dic = yaml.load_all(config_file_handler, Loader=yaml.FullLoader) for code_region in config_dic: - global_params = code_region['CodeRegion']["Args"] + global_params = code_region["CodeRegion"]["Args"] if global_params is not None: type_pass_tuple = ( - code_region['CodeRegion']["CodeRegionType"], - code_region['CodeRegion']['Pass']) + code_region["CodeRegion"]["CodeRegionType"], + code_region["CodeRegion"]["Pass"], + ) global_param_config[type_pass_tuple] = global_params # A list of all opportunites found by the compiler remarks_list = get_remarks(file_path) @@ -171,49 +195,57 @@ def _generate_search_space(file_path, yaml_list, start_tuning_id, type_pass_tuple = (remark.CodeRegionType, remark.Pass) if type_pass_tuple in global_param_config: coderegion_found += 1 - code_region['Function'] = (remark.Function if - hasattr(remark, 'Function') else "") - code_region['Name'] = (remark.Name if - hasattr(remark, 'Name') else "") - code_region['CodeRegionType'] = remark.CodeRegionType - code_region['Pass'] = remark.Pass - 
code_region['Hashcode'] = str(remark.CodeRegionHash) - code_region['BaselineConfig'] = (remark.BaselineConfig if - hasattr(remark, 'BaselineConfig') - else {}) + code_region["Function"] = ( + remark.Function if hasattr(remark, "Function") else "" + ) + code_region["Name"] = remark.Name if hasattr(remark, "Name") else "" + code_region["CodeRegionType"] = remark.CodeRegionType + code_region["Pass"] = remark.Pass + code_region["Hashcode"] = str(remark.CodeRegionHash) + code_region["BaselineConfig"] = ( + remark.BaselineConfig if hasattr(remark, "BaselineConfig") else {} + ) # DynamicConfigs is a dic: str -> list[int]. # Where the str represents a tuning parameter associated with the # code region. List[int] represent the possible dynamic tuning # values associated with the tuning parameter. - code_region['DynamicConfigs'] = (remark.DynamicConfigs - if hasattr(remark, 'DynamicConfigs') - else None) - code_region['Invocation'] = str(remark.Invocation - if hasattr(remark, 'Invocation') - else "0") - filtered = _apply_code_region_filter(code_region['Function'], - func_name_filter) and \ - _apply_code_region_filter(code_region['CodeRegionType'], - type_filter) and \ - _apply_code_region_filter(code_region['Name'], - name_filter) and \ - _apply_code_region_filter(code_region['Pass'], pass_filter) + code_region["DynamicConfigs"] = ( + remark.DynamicConfigs if hasattr(remark, "DynamicConfigs") else None + ) + code_region["Invocation"] = str( + remark.Invocation if hasattr(remark, "Invocation") else "0" + ) + filtered = ( + _apply_code_region_filter(code_region["Function"], func_name_filter) + and _apply_code_region_filter( + code_region["CodeRegionType"], type_filter + ) + and _apply_code_region_filter(code_region["Name"], name_filter) + and _apply_code_region_filter(code_region["Pass"], pass_filter) + ) if hasattr(remark, "DebugLoc"): - code_region['DebugLoc'] = {"File": remark.File, - "Line": remark.Line, - "Column": remark.Column} + code_region["DebugLoc"] = { + "File": remark.File, + "Line": remark.Line, + "Column": remark.Column, + } filtered = filtered and _apply_code_region_filter( - remark.File, file_name_filter) + remark.File, file_name_filter + ) should_add = True if filtered: - if code_region.get('CodeRegionType') == "program-param": - should_add = \ - _update_program_param_code_regions(config_db, code_region) + if code_region.get("CodeRegionType") == "program-param": + should_add = _update_program_param_code_regions( + config_db, code_region + ) elif use_hash_matching: should_add = _update_current_code_regions( - config_db, code_region, use_hash_matching, - use_prev_configs, inject_seed + config_db, + code_region, + use_hash_matching, + use_prev_configs, + inject_seed, ) if not should_add: continue @@ -221,8 +253,9 @@ def _generate_search_space(file_path, yaml_list, start_tuning_id, # Add this code region to the search space tuning_id += 1 param = global_param_config[type_pass_tuple] - yaml_list.append({'TuningId': tuning_id, - 'CodeRegion': code_region, 'Params': param}) + yaml_list.append( + {"TuningId": tuning_id, "CodeRegion": code_region, "Params": param} + ) # return the last tuning id and code regions found in this file. 
return tuning_id, coderegion_found @@ -298,8 +331,9 @@ def _prepare_remarks(configuration_data, task_map, use_hash_matching): return remark_lookup if use_hash_matching else remark_list -def _construct_remarks(configuration_data, task_map, config_db, - use_hash_matching, fixed_llvm_input=None): +def _construct_remarks( + configuration_data, task_map, config_db, use_hash_matching, fixed_llvm_input=None +): """ Build a list of remarks ready to be serialized for tuning-enabled LLVM based on the task_map and configuration_data. @@ -331,15 +365,15 @@ def _construct_remarks(configuration_data, task_map, config_db, # If use_hash_matching == True, will be # a dictionary of equivalance classes (hash, type, pass). # Otherwise, will be a list of remarks. - remark_lookup = _prepare_remarks( - configuration_data, task_map, use_hash_matching - ) + remark_lookup = _prepare_remarks(configuration_data, task_map, use_hash_matching) # There is at most one program-param within the remark_lookup and we want to # filter out the arguments that the only program-param code region has # If use_hash_matching is set to ture, the remark_loopup is a dict type # which would be updated in the same way as llvm-param. if not use_hash_matching: - program_param_remark_list = list(filter(lambda r: r.CodeRegionType == "program-param", remark_lookup)) + program_param_remark_list = list( + filter(lambda r: r.CodeRegionType == "program-param", remark_lookup) + ) if len(program_param_remark_list) == 1: # There is at most one program-param code region. program_param_remark = program_param_remark_list[0] @@ -354,7 +388,7 @@ def _construct_remarks(configuration_data, task_map, config_db, # add the existing auto-tuning remarks # as the constant configuration into the current llvm input # file being generated. - if (fixed_llvm_input and len(fixed_llvm_input) >= 1): + if fixed_llvm_input and len(fixed_llvm_input) >= 1: remark_list += fixed_llvm_input # For each code region in CurrentCodeRegions, create a remark. @@ -374,15 +408,20 @@ def _construct_remarks(configuration_data, task_map, config_db, # When use_hash_matching is false, remark_lookup is a list object. # If program_param_remark is found, we want to use the same arguments # for all remarks. - if program_param_remark and remark.CodeRegionType == "program-param": + if ( + program_param_remark + and remark.CodeRegionType == "program-param" + ): remark.Args = program_param_remark.Args remark_list.append(remark) continue # Find the arguments for the corresponding # (hash, type, pass) triple. 
- key = (int(code_region.hashcode), - code_region.code_region_type, - code_region.pass_name) + key = ( + int(code_region.hashcode), + code_region.code_region_type, + code_region.pass_name, + ) remark.Args = remark_lookup[key].Args remark_list.append(remark) @@ -412,9 +451,9 @@ def code_region_to_remark(code_region): remark.Invocation = int(code_region.invocation) if code_region.debug_loc: remark.DebugLoc = {} - remark.DebugLoc['File'] = code_region.debug_loc.file_name - remark.DebugLoc['Line'] = code_region.debug_loc.line - remark.DebugLoc['Column'] = code_region.debug_loc.column + remark.DebugLoc["File"] = code_region.debug_loc.file_name + remark.DebugLoc["Line"] = code_region.debug_loc.line + remark.DebugLoc["Column"] = code_region.debug_loc.column remark.Pass = code_region.pass_name return remark @@ -424,9 +463,13 @@ class YAMLManager(IOManager): def get_file_extension(self): return ".yaml" - - def parse_search_space(self, search_space, use_dynamic_values = False, - use_baseline_config = False, filepath = None): + def parse_search_space( + self, + search_space, + use_dynamic_values=False, + use_baseline_config=False, + filepath=None, + ): """ Parse the yaml search space config file to init the tuner @@ -465,8 +508,8 @@ class YAMLManager(IOManager): # Loop through the task elements in the yaml_list to create a task map. for yaml_elem in yaml_list: - tuning_id = yaml_elem['TuningId'] - param_list_yaml = yaml_elem['Params'] + tuning_id = yaml_elem["TuningId"] + param_list_yaml = yaml_elem["Params"] # Retrieve available (multiple) baseline compiler decisions (stored # as dictionary). @@ -476,12 +519,13 @@ class YAMLManager(IOManager): # Retrieve all possible dynamic args available. dynamic_dic = {} - if 'DynamicConfigs' in yaml_elem['CodeRegion']: - dynamic_dic = yaml_elem['CodeRegion']['DynamicConfigs'] + if "DynamicConfigs" in yaml_elem["CodeRegion"]: + dynamic_dic = yaml_elem["CodeRegion"]["DynamicConfigs"] tune_compilation_flags = False - if ((yaml_elem['CodeRegion']['CodeRegionType'] == 'program-param') or - (yaml_elem['CodeRegion']['CodeRegionType'] == 'llvm-param')): + if (yaml_elem["CodeRegion"]["CodeRegionType"] == "program-param") or ( + yaml_elem["CodeRegion"]["CodeRegionType"] == "llvm-param" + ): tune_compilation_flags = True param_list = [] @@ -489,8 +533,8 @@ class YAMLManager(IOManager): for ele in param_list_yaml.keys(): options = {} if use_dynamic_values and ele in dynamic_dic: - options['Type'] = param_list_yaml[ele]['Type'] - options['Value'] = dynamic_dic[ele] + options["Type"] = param_list_yaml[ele]["Type"] + options["Value"] = dynamic_dic[ele] else: options = param_list_yaml[ele] param = _parse_param(tuning_id, options, ele) @@ -505,44 +549,57 @@ class YAMLManager(IOManager): # stored in 'extended_search_space.yaml' file for LLVMParam and # ProgramParam code regions. 
if tune_compilation_flags: - baseline_config = (options['Default'] - if 'Default' in options - else param.seed_value()) - seed_configuration[param_name] = (param.seed_value() - if (baseline_config is None) - else baseline_config) + baseline_config = ( + options["Default"] + if "Default" in options + else param.seed_value() + ) + seed_configuration[param_name] = ( + param.seed_value() if (baseline_config is None) else baseline_config + ) if param: param_list.append(param) if param_list: # retrieve code_region information from yaml code_region_yaml = yaml_elem["CodeRegion"] - code_region = CodeRegion(code_region_yaml["Pass"], - code_region_yaml["Name"], - code_region_yaml["Function"], - code_region_yaml["CodeRegionType"], - code_region_yaml["Hashcode"], - code_region_yaml["Invocation"]) - if 'DebugLoc' in code_region_yaml: - code_region.set_debug_loc(code_region_yaml['DebugLoc']) + code_region = CodeRegion( + code_region_yaml["Pass"], + code_region_yaml["Name"], + code_region_yaml["Function"], + code_region_yaml["CodeRegionType"], + code_region_yaml["Hashcode"], + code_region_yaml["Invocation"], + ) + if "DebugLoc" in code_region_yaml: + code_region.set_debug_loc(code_region_yaml["DebugLoc"]) if code_region not in code_region_set: code_region_set.add(code_region) - task_map[int(tuning_id)] = Task(int(tuning_id), - param_list, code_region) + task_map[int(tuning_id)] = Task( + int(tuning_id), param_list, code_region + ) if use_baseline_config: - with open(filepath, 'w') as file: + with open(filepath, "w") as file: json.dump(seed_configuration, file) return task_map - - def generate_search_space_file(self, files, output_file, config_file, - name_filter=None, func_name_filter=None, - file_name_filter=None, type_filter=None, - pass_filter=None, config_db=None, - use_hash_matching=False, - use_prev_configs=False, inject_seed=False): + def generate_search_space_file( + self, + files, + output_file, + config_file, + name_filter=None, + func_name_filter=None, + file_name_filter=None, + type_filter=None, + pass_filter=None, + config_db=None, + use_hash_matching=False, + use_prev_configs=False, + inject_seed=False, + ): """ Generate search space file for auto-tuner driver based on opportunities files which are generated by llvm, and output as output_file @@ -555,20 +612,35 @@ class YAMLManager(IOManager): where the global search space settings are defined. """ - yaml_list = self.generate_search_space(files, config_file, - file_name_filter, - func_name_filter, name_filter, - type_filter, pass_filter, - config_db, use_hash_matching, - use_prev_configs, inject_seed) + yaml_list = self.generate_search_space( + files, + config_file, + file_name_filter, + func_name_filter, + name_filter, + type_filter, + pass_filter, + config_db, + use_hash_matching, + use_prev_configs, + inject_seed, + ) self.output_to_file(output_file, yaml_list) - - def generate_search_space(self, files, config_file, file_name_filter=None, - func_name_filter=None, name_filter=None, - type_filter=None, pass_filter=None, - config_db=None, use_hash_matching=False, - use_prev_configs=False, inject_seed=False): + def generate_search_space( + self, + files, + config_file, + file_name_filter=None, + func_name_filter=None, + name_filter=None, + type_filter=None, + pass_filter=None, + config_db=None, + use_hash_matching=False, + use_prev_configs=False, + inject_seed=False, + ): """ Parse opportunities files generated by llvm and return a search space as list. 
@@ -599,15 +671,21 @@ class YAMLManager(IOManager): # ID to keep track of code regions added to create search space. tuning_id = 0 for filename in files: - end_tuning_id, coderegion_found = _generate_search_space(filename, - yaml_list, tuning_id, - config_file, name_filter, - func_name_filter, - file_name_filter, - type_filter, pass_filter, - config_db, use_hash_matching, - use_prev_configs, - inject_seed) + end_tuning_id, coderegion_found = _generate_search_space( + filename, + yaml_list, + tuning_id, + config_file, + name_filter, + func_name_filter, + file_name_filter, + type_filter, + pass_filter, + config_db, + use_hash_matching, + use_prev_configs, + inject_seed, + ) tuning_id = end_tuning_id total_coderegion_found += coderegion_found @@ -619,32 +697,40 @@ class YAMLManager(IOManager): return yaml_list - @staticmethod - def update_config_db(configuration_data, task_map, - fixed_llvm_input=None, config_db=None, - use_hash_matching=True): - remark_list = _construct_remarks(configuration_data, task_map, - config_db, use_hash_matching, fixed_llvm_input) + def update_config_db( + configuration_data, + task_map, + fixed_llvm_input=None, + config_db=None, + use_hash_matching=True, + ): + remark_list = _construct_remarks( + configuration_data, task_map, config_db, use_hash_matching, fixed_llvm_input + ) update_optimal_configs(config_db, remark_list) - - def build_llvm_input(self, configuration_data, task_map, output_file, - fixed_llvm_input=None, config_db=None, - use_hash_matching=False): + def build_llvm_input( + self, + configuration_data, + task_map, + output_file, + fixed_llvm_input=None, + config_db=None, + use_hash_matching=False, + ): """ Build an input yaml file for tuning-enabled LLVM based on task_map and configuration_data, and output as output_file """ remark_list = _construct_remarks( - configuration_data, task_map, - config_db, use_hash_matching, fixed_llvm_input) + configuration_data, task_map, config_db, use_hash_matching, fixed_llvm_input + ) self.output_to_file(output_file, remark_list) def generate_baseline_llvm_input(self, output_file, config_db=None): remark_list = [] - for code_region_config in get_current_code_regions(config_db, - ignore_seen=True): + for code_region_config in get_current_code_regions(config_db, ignore_seen=True): if code_region_config.parameters is None: # Skip the code region if no previous setting was found. continue @@ -658,7 +744,6 @@ class YAMLManager(IOManager): else: self.output_to_file(output_file, remark_list) - @staticmethod def seed_baseline(task_map, config_db, filepath): """ @@ -673,8 +758,9 @@ class YAMLManager(IOManager): task_hash = tuning_task.code_region.hashcode task_pass = tuning_task.code_region.pass_name task_type = tuning_task.code_region.code_region_type - stored_params = get_optimal_config(config_db, task_hash, - task_type, task_pass) + stored_params = get_optimal_config( + config_db, task_hash, task_type, task_pass + ) if stored_params: param_map = {k: v for x in stored_params for k, v in x.items()} else: @@ -692,8 +778,8 @@ class YAMLManager(IOManager): elif raw_name == "OptPass": # Creating parameter according to SelectionParameter. 
parameter = deepcopy(param_map) - parameter['order'] = parameter.pop(raw_name) - parameter['size'] = len(parameter['order']) + parameter["order"] = parameter.pop(raw_name) + parameter["size"] = len(parameter["order"]) seed_configuration[param.name] = parameter elif raw_name in param_map: seed_configuration[param.name] = param_map[raw_name] @@ -702,11 +788,10 @@ class YAMLManager(IOManager): # there may be unseen paramters that are not stored. seed_configuration[param.name] = param.seed_value() - with open(filepath, 'w') as file: + with open(filepath, "w") as file: json.dump(seed_configuration, file) return filepath - def parse_llvm_inputs(self, input_files): """ Parse a list of llvm yaml input files @@ -723,15 +808,11 @@ class YAMLManager(IOManager): def output_to_file(self, output_file, remark_list): fd = create_secure_fd(output_file) - with os.fdopen(fd, 'w') as output_file_handler: + with os.fdopen(fd, "w") as output_file_handler: yaml.dump_all( - remark_list, - output_file_handler, - width=1200, - default_flow_style=True + remark_list, output_file_handler, width=1200, default_flow_style=True ) - def divide_llvm_input(self, input_file): remark_map = {} remarks = get_remarks(input_file) @@ -742,13 +823,12 @@ class YAMLManager(IOManager): else: remark_map[remark.DebugLoc["File"]] = [remark] else: - if ("no_name" in remark_map.keys()): + if "no_name" in remark_map.keys(): remark_map["no_name"].append(remark) else: remark_map["no_name"] = [remark] return remark_map - def create_dummy_llvm_input(self, output_file): dummy_remark = AutoTuning() dummy_remark.Name = "dummy" diff --git a/dev_install.sh b/dev_install.sh index ead47576b0919db506f8da9858a48edcc4e495aa..33d4ca17657326463b24e81d081bf583d57e5aba 100755 --- a/dev_install.sh +++ b/dev_install.sh @@ -1,6 +1,6 @@ #!/bin/bash -e -# Script to install autotuner +# Script to install BiSheng Autotuner # Copyright (C) 2017-2020, Huawei Technologies Co., Ltd. All rights reserved. SCRIPT_DIR=$(dirname $(realpath -s $0)) @@ -11,13 +11,12 @@ usage() { cat <=3.5.0', - 'defusedxml', - 'dill', - 'pyyaml>=5.4.1', - 'requests>=2.18.4', + name="autotuner", + include_package_data=True, + version="2.2.0", + description="BiSheng Autotuner", + long_description=open("README.rst").read(), + keywords=create_keyword_metadata(), + url="https://www.hikunpeng.com/document/detail/en/kunpengdevps/compiler/fg-autotuner/kunpengbisheng_20_0002.html", + author="Huawei Technologies Co. Ltd.", + packages=find_packages(), + install_requires=[ + "huawei-opentuner", + "configparser>=3.5.0", + "defusedxml", + "dill", + "pyyaml>=5.4.1", + "requests>=2.18.4", 'importlib-metadata; python_version < "3.8.0"', ], - cmdclass = { - 'build_py' : BuildPyCommand, + cmdclass={ + "build_py": BuildPyCommand, }, ) diff --git a/uninstall.sh b/uninstall.sh index 550602f55edf958ecb187bc1775faf388de3f4d7..bda16dbc42b7f17b8c6eb2a999eac32ca9122d31 100755 --- a/uninstall.sh +++ b/uninstall.sh @@ -1,6 +1,6 @@ #!/bin/bash -e -# Script to uninstall huawei_opentuner and autotuner +# Uninstall BiSheng OpenTuner and BiSheng Autotuner. # Copyright (C) 2017-2020, Huawei Technologies Co., Ltd. All rights reserved. python3 -m pip uninstall autotuner -y
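
As a quick way to confirm the effect of ``dev_install.sh`` or ``uninstall.sh``, the short Python sketch below checks whether the ``autotuner`` distribution declared in ``setup.py`` is currently installed. It is an illustrative sketch only, not part of this change set; it relies on the standard-library ``importlib.metadata``, the same mechanism that the ``importlib-metadata`` conditional entry in ``install_requires`` back-ports for older interpreters.

    # Minimal sketch (assumed usage, not taken from this change set):
    # query the installed "autotuner" distribution that setup.py declares.
    from importlib.metadata import PackageNotFoundError, version

    try:
        # setup.py in this repository declares version="2.2.0".
        print("autotuner", version("autotuner"), "is installed")
    except PackageNotFoundError:
        print("autotuner is not installed")

If the package is present, ``python3 -m pip uninstall autotuner -y`` (as run by ``uninstall.sh``) removes it, and re-running the check should then report that it is not installed.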