- import unittest
- import sys
- import os
- import time
- import json
- import urllib
- import urllib2
- import subprocess
- import signal
- import random
- import operator
- sys.path.append("interface/HUTN")
- sys.path.append("scripts")
- from hutn_compiler.compiler import main as do_compile
- from check_objects import to_recompile
- USERNAME = "test_task"
- PARALLEL_PUSH = True
- BOOTSTRAP_FOLDER_NAME = "bootstrap"
- CURRENT_FOLDER_NAME = "performance"
- PORTS = set()
- OPTIMIZATION_LEVEL_LEGACY_INTERPRETER = "legacy-interpreter"
- OPTIMIZATION_LEVEL_INTERPRETER = "interpreter"
- OPTIMIZATION_LEVEL_BASELINE_JIT = "baseline-jit"
- OPTIMIZATION_LEVEL_BASELINE_JIT_NO_THUNKS = "baseline-jit,no-thunks"
- OPTIMIZATION_LEVEL_FAST_JIT = "fast-jit"
- OPTIMIZATION_LEVEL_ADAPTIVE_JIT_FAVOR_LARGE_FUNCTIONS = "adaptive-jit-favor-large-functions"
- OPTIMIZATION_LEVEL_ADAPTIVE_JIT_FAVOR_SMALL_FUNCTIONS = "adaptive-jit-favor-small-functions"
- OPTIMIZATION_LEVEL_ADAPTIVE_JIT_FAVOR_LOOPS = "adaptive-jit-favor-loops"
- ALL_OPTIMIZATION_LEVELS = [
- OPTIMIZATION_LEVEL_LEGACY_INTERPRETER,
- OPTIMIZATION_LEVEL_INTERPRETER,
- OPTIMIZATION_LEVEL_BASELINE_JIT,
- OPTIMIZATION_LEVEL_BASELINE_JIT_NO_THUNKS,
- OPTIMIZATION_LEVEL_FAST_JIT,
- OPTIMIZATION_LEVEL_ADAPTIVE_JIT_FAVOR_LARGE_FUNCTIONS,
- OPTIMIZATION_LEVEL_ADAPTIVE_JIT_FAVOR_SMALL_FUNCTIONS,
- OPTIMIZATION_LEVEL_ADAPTIVE_JIT_FAVOR_LOOPS
- ]
- class ModelverseTerminated(Exception):
- """An exception that tells the task that the Modelverse has terminated."""
- pass
- def get_code_folder_name():
- """Gets the name of the code folder."""
- return '%s/code' % CURRENT_FOLDER_NAME
- def get_free_port():
- """Gets a unique new port."""
- while True:
- port = random.randint(10000, 20000)
- # Check if this port is in the set of ports.
- if port not in PORTS:
- # We have found a unique port. Add it to the set and return.
- PORTS.add(port)
- return port
- def execute(scriptname, parameters=None, wait=False):
- """Runs a script."""
- if os.name not in ["nt", "posix"]:
- # Stop now, as we would have no idea how to kill this process's subtree.
- raise Exception("Unsupported OS: " + str(os.name))
- command = [sys.executable, "scripts/%s.py" % scriptname] + (
- [] if parameters is None else parameters)
- if wait:
- return subprocess.call(command, shell=False)
- else:
- return subprocess.Popen(command, shell=False)
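- # Example of how this module uses execute() below: a background Modelverse server and a
- # blocking helper call (the port number here is only illustrative; run_file() picks a
- # random one via get_free_port()):
- #
- #     proc = execute("run_local_modelverse", ["8001"], wait=False)
- #     exit_code = execute("link_and_load", ["http://127.0.0.1:8001", USERNAME], wait=True)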
- def kill(process):
- """Kills the given process."""
- if os.name == "nt":
- subprocess.call(["taskkill", "/F", "/T", "/PID", "%i" % process.pid])
- elif os.name == "posix":
- subprocess.call(["pkill", "-P", "%i" % process.pid])
- def set_input_data(address, data):
- """Sets the Modelverse program's input data."""
- if data is not None:
- urllib2.urlopen(
- urllib2.Request(
- address,
- urllib.urlencode(
- {"op": "set_input", "data": json.dumps(data), "taskname": USERNAME})),
- timeout=10).read()
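- # Example (illustrative address and payload):
- #
- #     set_input_data("http://127.0.0.1:8001", [1, 2])
- #
- # posts a form-encoded request with op=set_input, taskname=USERNAME and
- # data=json.dumps([1, 2]) to the Modelverse, then reads (and discards) the reply.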
- def compile_file(address, mod_filename, filename, mode, proc):
- """Compiles the given file."""
- # Keep invoking the compiler: an exit code of 2 means it wants to be re-run.
- try:
- timeout_val = 240
- taskname = str(random.random())
- while True:
- proc2 = execute(
- "compile", [address, mod_filename, taskname, filename, mode], wait=False)
- # Refresh the Modelverse server's status before checking whether it died.
- proc.poll()
- if proc.returncode is not None:
- # Modelverse has already terminated, which isn't a good sign!
- raise Exception("Modelverse died!")
- while proc2.returncode is None:
- time.sleep(0.01)
- proc2.poll()
- timeout_val -= 0.01
- if timeout_val < 0:
- kill(proc2)
- print("Compilation timeout expired!")
- return False
- if proc2.returncode != 2:
- break
- # Make sure the compiler stopped correctly.
- if proc2.returncode != 0:
- return False
- return True
- finally:
- try:
- kill(proc2)
- except UnboundLocalError:
- pass
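- # Example (hypothetical file name): push one compiled unit to a Modelverse running at
- # `address`, where `proc` is the server process returned by execute():
- #
- #     compile_file(address, "bootstrap/primitives.alc", "bootstrap/primitives.alc", "CO", proc)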
- def run_file(files, parameters, mode, handle_output, optimization_level=None):
- """Compiles the given sequence of files, feeds them the given input in the given mode,
- and handles their output."""
- time.sleep(0.01)
- port = get_free_port()
- address = "http://127.0.0.1:%i" % port
- try:
- # Run Modelverse server
- modelverse_args = [str(port)]
- if optimization_level is not None:
- modelverse_args.append('--kernel=%s' % optimization_level)
- proc = execute("run_local_modelverse", modelverse_args, wait=False)
- threads = []
- mod_files = []
- for filename in files:
- if os.path.isfile("%s/%s" % (get_code_folder_name(), filename)):
- mod_filename = "%s/%s" % (get_code_folder_name(), filename)
- elif os.path.isfile("%s/%s" % (BOOTSTRAP_FOLDER_NAME, filename)):
- mod_filename = "%s/%s" % (BOOTSTRAP_FOLDER_NAME, filename)
- else:
- raise Exception("File not found: %s" % filename)
- mod_files.append(mod_filename)
- to_compile = to_recompile(address, mod_files)
- for mod_filename in to_compile:
- if PARALLEL_PUSH:
- import threading
- threads.append(
- threading.Thread(
- target=compile_file,
- args=[address, mod_filename, mod_filename, mode, proc]))
- threads[-1].start()
- else:
- compile_file(address, mod_filename, mod_filename, mode, proc)
- if PARALLEL_PUSH:
- for t in threads:
- t.join()
- if mode[-1] == "O":
- # Fire up the linker
- val = execute("link_and_load", [address, USERNAME] + mod_files, wait=True)
- if val != 0:
- raise Exception("Linking error")
- # Send the request ...
- set_input_data(address, parameters)
- # ... and wait for replies
- while True:
- val = urllib2.urlopen(
- urllib2.Request(
- address,
- urllib.urlencode({"op": "get_output", "taskname": USERNAME})),
- timeout=240).read()
- val = json.loads(val)
- proc.poll()
- if proc.returncode is not None:
- # Modelverse has terminated. This may or may not be what we want.
- raise ModelverseTerminated()
- if not handle_output(val):
- return
- # All passed!
- return
- finally:
- try:
- kill(proc)
- except UnboundLocalError:
- pass
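- # The wrappers below all build on the same callback contract: run_file() keeps polling
- # "get_output" and hands each decoded reply to handle_output; returning True asks for
- # more output, returning False makes run_file() return. A minimal sketch (file name and
- # parameters are illustrative):
- #
- #     outputs = []
- #     def collect_two(output):
- #         outputs.append(output)
- #         return len(outputs) < 2   # stop after the second reply
- #
- #     run_file(["fibonacci.alc"], [10], "CO", collect_two, OPTIMIZATION_LEVEL_BASELINE_JIT)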
- def run_file_to_completion(files, parameters, mode):
- """Compiles the given sequence of files, feeds them the given input in the given mode,
- and then collects and returns output."""
- results = []
- def handle_output(output):
- """Appends the given output to the list of results."""
- results.append(output)
- return True
- try:
- run_file(files, parameters, mode, handle_output)
- except ModelverseTerminated:
- pass
- return results
- def run_file_fixed_output_count(files, parameters, mode, output_count, optimization_level=None):
- """Compiles the given sequence of files, feeds them the given input in the given mode,
- and then collects and returns a fixed number of outputs."""
- results = []
- def handle_output(output):
- """Appends the given output to the list of results."""
- if len(results) < output_count:
- results.append(output)
- return True
- else:
- return False
- run_file(files, parameters, mode, handle_output, optimization_level)
- return results
- def run_file_single_output(files, parameters, mode, optimization_level=None):
- """Compiles the given sequence of files, feeds them the given input in the given mode,
- and then collects and returns a single output."""
- return run_file_fixed_output_count(files, parameters, mode, 1, optimization_level)[0]
- def run_perf_test(files, parameters, optimization_level, n_iterations=1):
- """Compiles the given sequence of files, feeds them the given input in the given mode,
- and then collects their output. This process is repeated n_iterations times. The
- return value is the average of all outputs."""
- result = 0.0
- for _ in xrange(n_iterations):
- result += float(
- run_file_single_output(
- files, parameters + [0], 'CO',
- optimization_level)) / float(n_iterations)
- return result
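- # Example (hypothetical test file): average three timed runs under the baseline JIT.
- # Note that run_perf_test() appends a trailing 0 to the parameter list itself:
- #
- #     seconds = run_perf_test(["matrix.alc"], [100], OPTIMIZATION_LEVEL_BASELINE_JIT,
- #                             n_iterations=3)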
- def format_output(output):
- """Formats the output of `run_file_to_completion` as a string."""
- return '\n'.join(output)
- def define_perf_test(target_class, test_function, optimization_level):
- """Defines a performance test in the given class. The performance test calls the given function
- at the given optimization level."""
- setattr(
- target_class,
- 'test_%s' % optimization_level.replace('-', '_').lower(),
- lambda self: test_function(self, optimization_level))
- def define_perf_tests(target_class, test_function):
- """Defines performance tests in the given class. Each test calls the given function."""
- for optimization_level in ALL_OPTIMIZATION_LEVELS:
- define_perf_test(target_class, test_function, optimization_level)
- DEFAULT_PERF_FILE_NAME = 'perf_data.txt'
- def write_perf_to_file(test_name, optimization_level, result, file_name=DEFAULT_PERF_FILE_NAME):
- """Writes performance data to a file."""
- with open(file_name, "a") as perf_file:
- perf_file.write('%s:%s:%f\n' % (test_name, optimization_level, result))
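- # Sketch of how these helpers fit together in a performance test module (the class name,
- # file name and parameters are illustrative):
- #
- #     class TestFibonacci(unittest.TestCase):
- #         pass
- #
- #     def run_fibonacci(self, optimization_level):
- #         result = run_perf_test(["fibonacci.alc"], [25], optimization_level)
- #         write_perf_to_file("fibonacci", optimization_level, result)
- #
- #     define_perf_tests(TestFibonacci, run_fibonacci)
- #
- # This attaches one test_<optimization_level> method to TestFibonacci for every entry in
- # ALL_OPTIMIZATION_LEVELS.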
- def parse_perf_data(file_name):
- """Parses the performance data in the given file."""
- results = {}
- with open(file_name, 'r') as perf_file:
- for line in perf_file:
- test_name, optimization_level, result = line.strip().split(':')
- if optimization_level not in results:
- results[optimization_level] = []
- results[optimization_level].append((test_name, float(result)))
- return sorted(results.items(), key=operator.itemgetter(1))
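- # Example: given a perf_data.txt produced by write_perf_to_file(), e.g. (values made up)
- #
- #     fibonacci:baseline-jit:1.250000
- #     fibonacci:interpreter:4.730000
- #
- # parse_perf_data() returns a sorted list of (optimization_level, [(test_name, result), ...])
- # pairs, one entry per optimization level found in the file.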