# utils.py -- shared helpers for the Modelverse performance/correctness test suite.
  1. import unittest
  2. import sys
  3. import os
  4. import sys
  5. import time
  6. import json
  7. import urllib
  8. import urllib2
  9. import subprocess
  10. import signal
  11. import random
  12. import operator
  13. from collections import defaultdict
  14. sys.path.append("interface/HUTN")
  15. sys.path.append("scripts")
  16. from hutn_compiler.compiler import main as do_compile
  17. from check_objects import to_recompile
  18. USERNAME = "test_task"
  19. PARALLEL_PUSH = True
  20. BOOTSTRAP_FOLDER_NAME = "bootstrap"
  21. CURRENT_FOLDER_NAME = "performance"
  22. PORTS = set()
  23. OPTIMIZATION_LEVEL_LEGACY_INTERPRETER = "legacy-interpreter"
  24. OPTIMIZATION_LEVEL_INTERPRETER = "interpreter"
  25. OPTIMIZATION_LEVEL_BASELINE_JIT = "baseline-jit"
  26. OPTIMIZATION_LEVEL_BASELINE_JIT_NO_THUNKS = "baseline-jit,no-thunks"
  27. OPTIMIZATION_LEVEL_FAST_JIT = "fast-jit"
  28. OPTIMIZATION_LEVEL_FAST_JIT_NO_NOPS = "fast-jit,no-insert-nops"
  29. OPTIMIZATION_LEVEL_ADAPTIVE_JIT_FAVOR_LARGE_FUNCTIONS = "adaptive-jit-favor-large-functions"
  30. OPTIMIZATION_LEVEL_ADAPTIVE_JIT_FAVOR_SMALL_FUNCTIONS = "adaptive-jit-favor-small-functions"
  31. OPTIMIZATION_LEVEL_ADAPTIVE_JIT_FAVOR_LOOPS = "adaptive-jit-favor-loops"
  32. ALL_OPTIMIZATION_LEVELS = [
  33. OPTIMIZATION_LEVEL_LEGACY_INTERPRETER,
  34. OPTIMIZATION_LEVEL_INTERPRETER,
  35. OPTIMIZATION_LEVEL_BASELINE_JIT,
  36. OPTIMIZATION_LEVEL_BASELINE_JIT_NO_THUNKS,
  37. OPTIMIZATION_LEVEL_FAST_JIT,
  38. OPTIMIZATION_LEVEL_FAST_JIT_NO_NOPS,
  39. OPTIMIZATION_LEVEL_ADAPTIVE_JIT_FAVOR_LARGE_FUNCTIONS,
  40. OPTIMIZATION_LEVEL_ADAPTIVE_JIT_FAVOR_SMALL_FUNCTIONS,
  41. OPTIMIZATION_LEVEL_ADAPTIVE_JIT_FAVOR_LOOPS
  42. ]
  43. class ModelverseTerminated(Exception):
  44. """An exception that tells the task that the Modelverse has terminated."""
  45. pass
  46. def get_code_folder_name():
  47. """Gets the name of the code folder."""
  48. return '%s/code' % CURRENT_FOLDER_NAME
  49. def get_free_port():
  50. """Gets a unique new port."""
  51. while 1:
  52. port = random.randint(10000, 20000)
  53. # Check if this port is in the set of ports.
  54. if port not in PORTS:
  55. # We have found a unique port. Add it to the set and return.
  56. PORTS.add(port)
  57. return port
  58. def execute(scriptname, parameters=None, wait=False):
  59. """Runs a script."""
  60. if os.name not in ["nt", "posix"]:
  61. # Stop now, as we would have no clue on how to kill its subtree
  62. raise Exception("Unknown OS version: " + str(os.name))
  63. command = [sys.executable, "scripts/%s.py" % scriptname] + (
  64. [] if parameters is None else parameters)
  65. if wait:
  66. return subprocess.call(command, shell=False)
  67. else:
  68. return subprocess.Popen(command, shell=False)
  69. def kill(process):
  70. """Kills the given process."""
  71. if os.name == "nt":
  72. subprocess.call(["taskkill", "/F", "/T", "/PID", "%i" % process.pid])
  73. elif os.name == "posix":
  74. subprocess.call(["pkill", "-P", "%i" % process.pid])
  75. def set_input_data(address, data):
  76. """Sets the Modelverse program's input data."""
  77. if data is not None:
  78. urllib2.urlopen(
  79. urllib2.Request(
  80. address,
  81. urllib.urlencode(
  82. {"op": "set_input", "data": json.dumps(data), "taskname": USERNAME})),
  83. timeout=10).read()
  84. else:
  85. return []
  86. def compile_file(address, mod_filename, filename, mode, proc):
  87. """Compiles the given file."""
  88. # Load in the file required
  89. try:
  90. timeout_val = 240
  91. taskname = str(random.random())
  92. while 1:
  93. proc2 = execute(
  94. "compile", [address, mod_filename, taskname, filename, mode], wait=False)
  95. if proc.returncode is not None:
  96. # Modelverse has already terminated, which isn't a good sign!
  97. raise Exception("Modelverse died!")
  98. while proc2.returncode is None:
  99. time.sleep(0.01)
  100. proc2.poll()
  101. timeout_val -= 0.01
  102. if timeout_val < 0:
  103. kill(proc2)
  104. print("Compilation timeout expired!")
  105. return False
  106. if proc2.returncode != 2:
  107. break
  108. # Make sure everything stopped correctly
  109. assert proc2.returncode == 0
  110. if proc2.returncode != 0:
  111. return False
  112. except:
  113. raise
  114. finally:
  115. try:
  116. kill(proc2)
  117. except UnboundLocalError:
  118. pass
  119. def compile_files(address, process, files, mode):
  120. """Compiles the given files in the given mode."""
  121. threads = []
  122. mod_files = []
  123. for filename in files:
  124. if os.path.isfile(filename):
  125. mod_filename = filename
  126. elif os.path.isfile("%s/%s" % (get_code_folder_name(), filename)):
  127. mod_filename = "%s/%s" % (get_code_folder_name(), filename)
  128. elif os.path.isfile("%s/%s" % (BOOTSTRAP_FOLDER_NAME, filename)):
  129. mod_filename = "%s/%s" % (BOOTSTRAP_FOLDER_NAME, filename)
  130. else:
  131. raise Exception("File not found: %s" % filename)
  132. mod_files.append(mod_filename)
  133. to_compile = to_recompile(address, mod_files)
  134. for mod_filename in to_compile:
  135. if mod_filename.endswith(".mvc"):
  136. model_mode = "MO"
  137. mod_files.remove(mod_filename)
  138. else:
  139. model_mode = mode
  140. if PARALLEL_PUSH:
  141. import threading
  142. threads.append(
  143. threading.Thread(
  144. target=compile_file,
  145. args=[address, mod_filename, mod_filename, model_mode, process]))
  146. threads[-1].start()
  147. else:
  148. compile_file(address, mod_filename, mod_filename, model_mode, process)
  149. if PARALLEL_PUSH:
  150. for t in threads:
  151. t.join()
  152. if mode[-1] == "O":
  153. # Fire up the linker
  154. val = execute("link_and_load", [address, USERNAME] + mod_files, wait=True)
  155. if val != 0:
  156. raise Exception("Linking error")
  157. def run_file(files, parameters, mode, handle_output, optimization_level=None):
  158. """Compiles the given sequence of files, feeds them the given input in the given mode,
  159. and handles their output."""
  160. # Resolve file
  161. import os.path
  162. time.sleep(0.01)
  163. port = get_free_port()
  164. address = "http://127.0.0.1:%i" % port
  165. try:
  166. # Run Modelverse server
  167. modelverse_args = [str(port)]
  168. if optimization_level is not None:
  169. modelverse_args.append('--kernel=%s' % optimization_level)
  170. proc = execute("run_local_modelverse", modelverse_args, wait=False)
  171. # Compile, push and link the source code files.
  172. compile_files(address, proc, files, mode)
  173. # Send the request ...
  174. set_input_data(address, parameters)
  175. # ... and wait for replies
  176. while 1:
  177. val = urllib2.urlopen(
  178. urllib2.Request(
  179. address,
  180. urllib.urlencode({"op": "get_output", "taskname": USERNAME})),
  181. timeout=240).read()
  182. val = json.loads(val)
  183. if proc.returncode is not None:
  184. # Modelverse has terminated. This may or may not be what we want.
  185. raise ModelverseTerminated()
  186. if not handle_output(val):
  187. return
  188. # All passed!
  189. return
  190. except:
  191. raise
  192. finally:
  193. try:
  194. kill(proc)
  195. except UnboundLocalError:
  196. pass
  197. def run_file_to_completion(files, parameters, mode):
  198. """Compiles the given sequence of files, feeds them the given input in the given mode,
  199. and then collects and returns output."""
  200. results = []
  201. def handle_output(output):
  202. """Appends the given output to the list of results."""
  203. results.append(output)
  204. return True
  205. try:
  206. run_file(files, parameters, mode, handle_output)
  207. except ModelverseTerminated:
  208. return results
  209. def run_file_fixed_output_count(files, parameters, mode, output_count, optimization_level=None):
  210. """Compiles the given sequence of files, feeds them the given input in the given mode,
  211. and then collects and returns a fixed number of outputs."""
  212. results = []
  213. def handle_output(output):
  214. """Appends the given output to the list of results."""
  215. results.append(output)
  216. if len(results) < output_count:
  217. return True
  218. else:
  219. return False
  220. run_file(files, parameters, mode, handle_output, optimization_level)
  221. return results
  222. def run_file_single_output(files, parameters, mode, optimization_level=None):
  223. """Compiles the given sequence of files, feeds them the given input in the given mode,
  224. and then collects and returns a single output."""
  225. return run_file_fixed_output_count(files, parameters, mode, 1, optimization_level)[0]
  226. def mean(values):
  227. """Computes the arithmetic mean of the given values."""
  228. return float(sum(values)) / max(len(values), 1)
  229. def run_perf_test(files, parameters, optimization_level, n_iterations=1):
  230. """Compiles the given sequence of files, feeds them the given input in the given mode,
  231. and then collects their output. This process is repeated n_iterations times. The
  232. return value is the average of all outputs, along with the mean total run-time."""
  233. test_runtimes = []
  234. total_runtimes = []
  235. for _ in xrange(n_iterations):
  236. start_time = time.time()
  237. test_time = run_file_single_output(
  238. files, parameters, 'CO',
  239. optimization_level)
  240. end_time = time.time()
  241. total_time = end_time - start_time
  242. test_runtimes.append(test_time)
  243. total_runtimes.append(total_time)
  244. return mean(test_runtimes), mean(total_runtimes)
  245. def get_expectation_checks(expected_values):
  246. """Converts the given sequence of expected values to a sequence of functions which tell
  247. if an input is allowed. Every function is accompanied by an expected value."""
  248. def get_single_expectation_checks(expectation):
  249. """Gets an expectation checker for a single expected value."""
  250. if isinstance(expectation, set):
  251. # We expect to receive a number of outputs equal to the size of the set, but their
  252. # order does not matter.
  253. for _ in xrange(len(expectation)):
  254. yield lambda val: val in expectation
  255. elif expectation is None:
  256. # Skip output value
  257. yield lambda _: True
  258. else:
  259. yield lambda val: val == expectation
  260. for expectation in expected_values:
  261. for checker in get_single_expectation_checks(expectation):
  262. yield checker, expectation
  263. def run_correctness_test(files, parameters, expected, optimization_level):
  264. """Compiles the given sequence of files, feeds them the given input in the given mode,
  265. and then compares the output with the expected output. The return value is the total
  266. run-time of the test."""
  267. checks = iter(list(get_expectation_checks(expected)))
  268. next_check = [next(checks)]
  269. def handle_output(output):
  270. """Checks the given output against the expected output."""
  271. check, expectation = next_check[0]
  272. print("Got %s, expect %s" % (output, expectation))
  273. assert check(output)
  274. try:
  275. next_check[0] = next(checks)
  276. return True
  277. except StopIteration:
  278. return False
  279. start_time = time.time()
  280. try:
  281. run_file(files, parameters, 'CO', handle_output, optimization_level)
  282. except ModelverseTerminated:
  283. return
  284. end_time = time.time()
  285. return end_time - start_time
  286. def format_output(output):
  287. """Formats the output of `run_file_to_completion` as a string."""
  288. return '\n'.join(output)
  289. def define_perf_test(target_class, test_function, optimization_level):
  290. """Defines a performance test in the given class. The performance test calls the given function
  291. at the given optimization level."""
  292. setattr(
  293. target_class,
  294. 'test_%s' % optimization_level.replace('-', '_').lower(),
  295. lambda self: test_function(self, optimization_level))
  296. def define_perf_tests(target_class, test_function):
  297. """Defines performance tests in the given class. Each test calls the given function."""
  298. for optimization_level in ALL_OPTIMIZATION_LEVELS:
  299. define_perf_test(target_class, test_function, optimization_level)
  300. def get_model_constructor(code):
  301. # First change multiple spaces to a tab
  302. code_fragments = code.split("\n")
  303. code_fragments = [i for i in code_fragments if i.strip() != ""]
  304. code_fragments = [i.replace(" ", "\t") for i in code_fragments]
  305. initial_tabs = min([len(i) - len(i.lstrip("\t")) for i in code_fragments])
  306. code_fragments = [i[initial_tabs:] for i in code_fragments]
  307. code = "\n".join(code_fragments)
  308. with open("__model.mvc", "w") as f:
  309. f.write(code)
  310. f.flush()
  311. constructors = do_compile("__model.mvc", "interface/HUTN/grammars/modelling.g", "M") + ["exit"]
  312. return constructors
  313. DEFAULT_PERF_FILE_NAME = 'perf_data.txt'
  314. TOTAL_TIME_QUANTITY = 'total-runtime'
  315. TEST_TIME_QUANTITY = 'test-runtime'
  316. def write_perf_entry_to_stream(
  317. test_name, optimization_level, quantity,
  318. result, output_stream):
  319. """Writes a performance measurement entry to the given stream."""
  320. output_stream.write('%s:%s:%s:%f\n' % (test_name, optimization_level, quantity, result))
  321. def write_perf_to_file(
  322. test_name, optimization_level, runtimes, file_name=DEFAULT_PERF_FILE_NAME):
  323. """Writes performance data to a file."""
  324. test_runtime, total_runtime = runtimes
  325. with open(file_name, "a") as perf_file:
  326. write_perf_entry_to_stream(
  327. test_name, optimization_level, TEST_TIME_QUANTITY, test_runtime, perf_file)
  328. write_perf_entry_to_stream(
  329. test_name, optimization_level, TOTAL_TIME_QUANTITY, total_runtime, perf_file)
  330. def write_total_runtime_to_file(
  331. test_name, optimization_level, total_runtime, file_name=DEFAULT_PERF_FILE_NAME):
  332. """Writes a total runtime entry to a file."""
  333. with open(file_name, "a") as perf_file:
  334. write_perf_entry_to_stream(
  335. test_name, optimization_level, TOTAL_TIME_QUANTITY, total_runtime, perf_file)
  336. def parse_perf_data(file_name):
  337. """Parses the performance data in the given file."""
  338. results = defaultdict(lambda: defaultdict(list))
  339. with open(file_name, 'r') as perf_file:
  340. for line in perf_file.readlines():
  341. test_name, optimization_level, quantity, result = line.strip().split(':')
  342. results[quantity][optimization_level].append((test_name, result))
  343. return sorted(results.items(), key=operator.itemgetter(1))