utils.py

import unittest
import sys
import os
import tempfile
import time
import json
import urllib
import urllib2
import subprocess
import signal
import random
import operator
from collections import defaultdict

sys.path.append("interface/HUTN")
sys.path.append("scripts")
from hutn_compiler.compiler import main as do_compile
from check_objects import to_recompile

USERNAME = "test_task"
PARALLEL_PUSH = True
BOOTSTRAP_FOLDER_NAME = "bootstrap"
CURRENT_FOLDER_NAME = "performance"
PORTS = set()

OPTIMIZATION_LEVEL_LEGACY_INTERPRETER = "legacy-interpreter"
OPTIMIZATION_LEVEL_INTERPRETER = "interpreter"
OPTIMIZATION_LEVEL_BYTECODE_INTERPRETER = "bytecode-interpreter"
OPTIMIZATION_LEVEL_BASELINE_JIT = "baseline-jit"
OPTIMIZATION_LEVEL_BASELINE_JIT_NO_THUNKS = "baseline-jit,no-thunks"
OPTIMIZATION_LEVEL_FAST_JIT = "fast-jit"
OPTIMIZATION_LEVEL_FAST_JIT_NO_NOPS = "fast-jit,no-insert-nops"
OPTIMIZATION_LEVEL_ADAPTIVE_JIT_FAVOR_LARGE_FUNCTIONS = "adaptive-jit-favor-large-functions"
OPTIMIZATION_LEVEL_ADAPTIVE_JIT_FAVOR_SMALL_FUNCTIONS = "adaptive-jit-favor-small-functions"
OPTIMIZATION_LEVEL_ADAPTIVE_JIT_FAVOR_LOOPS = "adaptive-jit-favor-loops"
OPTIMIZATION_LEVEL_ADAPTIVE_JIT_FAVOR_SMALL_LOOPS = "adaptive-jit-favor-small-loops"

ALL_OPTIMIZATION_LEVELS = [
    OPTIMIZATION_LEVEL_LEGACY_INTERPRETER,
    OPTIMIZATION_LEVEL_INTERPRETER,
    OPTIMIZATION_LEVEL_BYTECODE_INTERPRETER,
    OPTIMIZATION_LEVEL_BASELINE_JIT,
    OPTIMIZATION_LEVEL_BASELINE_JIT_NO_THUNKS,
    OPTIMIZATION_LEVEL_FAST_JIT,
    OPTIMIZATION_LEVEL_FAST_JIT_NO_NOPS,
    OPTIMIZATION_LEVEL_ADAPTIVE_JIT_FAVOR_LARGE_FUNCTIONS,
    OPTIMIZATION_LEVEL_ADAPTIVE_JIT_FAVOR_SMALL_FUNCTIONS,
    OPTIMIZATION_LEVEL_ADAPTIVE_JIT_FAVOR_LOOPS,
    OPTIMIZATION_LEVEL_ADAPTIVE_JIT_FAVOR_SMALL_LOOPS
]

class ModelverseTerminated(Exception):
    """An exception that tells the task that the Modelverse has terminated."""
    pass

def get_code_folder_name():
    """Gets the name of the code folder."""
    return '%s/code' % CURRENT_FOLDER_NAME

def get_free_port():
    """Gets a unique new port."""
    while 1:
        port = random.randint(10000, 20000)
        # Check if this port is in the set of ports.
        if port not in PORTS:
            # We have found a unique port. Add it to the set and return.
            PORTS.add(port)
            return port

def execute(scriptname, parameters=None, wait=False):
    """Runs a script."""
    if os.name not in ["nt", "posix"]:
        # Stop now, as we would have no clue on how to kill its subtree
        raise Exception("Unknown OS version: " + str(os.name))
    command = [sys.executable, "scripts/%s.py" % scriptname] + (
        [] if parameters is None else parameters)
    if wait:
        return subprocess.call(command, shell=False)
    else:
        return subprocess.Popen(command, shell=False)

def kill(process):
    """Kills the given process."""
    if os.name == "nt":
        subprocess.call(["taskkill", "/F", "/T", "/PID", "%i" % process.pid])
    elif os.name == "posix":
        subprocess.call(["pkill", "-P", "%i" % process.pid])

def set_input_data(address, data):
    """Sets the Modelverse program's input data."""
    if data is not None:
        urllib2.urlopen(
            urllib2.Request(
                address,
                urllib.urlencode(
                    {"op": "set_input", "data": json.dumps(data), "taskname": USERNAME})),
            timeout=10).read()
    else:
        return []
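
# Note: communication with the Modelverse server happens over plain HTTP, with
# form-encoded requests carrying an "op" field ("set_input" here, "get_output"
# in run_file below), the task name, and JSON-encoded data.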

def compile_file(address, mod_filename, filename, mode, proc):
    """Compiles the given file."""
    # Load in the file required
    try:
        timeout_val = 600
        taskname = str(random.random())
        while 1:
            proc2 = execute(
                "compile", [address, mod_filename, taskname, filename, mode], wait=False)
            if proc.returncode is not None:
                # Modelverse has already terminated, which isn't a good sign!
                raise Exception("Modelverse died!")
            while proc2.returncode is None:
                time.sleep(0.01)
                proc2.poll()
                timeout_val -= 0.01
                if timeout_val < 0:
                    kill(proc2)
                    print("Compilation timeout expired!")
                    return False
            if proc2.returncode != 2:
                break
        # Make sure everything stopped correctly
        assert proc2.returncode == 0
        if proc2.returncode != 0:
            return False
    except:
        raise
    finally:
        try:
            kill(proc2)
        except UnboundLocalError:
            pass
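
# Note on the retry loop above: a return code of 2 from the compile script is
# treated as "retry the compilation"; any other return code stops the loop, and
# only 0 counts as a successful push.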

def compile_files(address, process, files, mode):
    """Compiles the given files in the given mode."""
    threads = []
    mod_files = []
    for filename in files:
        if os.path.isfile(filename):
            mod_filename = filename
        elif os.path.isfile("%s/%s" % (get_code_folder_name(), filename)):
            mod_filename = "%s/%s" % (get_code_folder_name(), filename)
        elif os.path.isfile("%s/%s" % (BOOTSTRAP_FOLDER_NAME, filename)):
            mod_filename = "%s/%s" % (BOOTSTRAP_FOLDER_NAME, filename)
        else:
            raise Exception("File not found: %s" % filename)
        mod_files.append(mod_filename)

    to_compile = to_recompile(address, mod_files)

    for mod_filename in to_compile:
        if mod_filename.endswith(".mvc"):
            model_mode = "MO"
            mod_files.remove(mod_filename)
        else:
            model_mode = mode
        if PARALLEL_PUSH:
            import threading
            threads.append(
                threading.Thread(
                    target=compile_file,
                    args=[address, mod_filename, mod_filename, model_mode, process]))
            threads[-1].start()
        else:
            compile_file(address, mod_filename, mod_filename, model_mode, process)

    if PARALLEL_PUSH:
        for t in threads:
            t.join()

    if mode[-1] == "O":
        # Fire up the linker
        val = execute("link_and_load", [address, USERNAME] + mod_files, wait=True)
        if val != 0:
            raise Exception("Linking error")
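
# Note: modes whose last character is "O" (such as the "CO" mode used by the
# helpers below) also run the link_and_load step once all files are pushed.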

def run_file(files, parameters, mode, handle_output, optimization_level=None, jit_timing_log=None):
    """Compiles the given sequence of files, feeds them the given input in the given mode,
    and handles their output."""
    # Resolve file
    import os.path

    time.sleep(0.01)
    port = get_free_port()
    address = "http://127.0.0.1:%i" % port
    try:
        # Run Modelverse server
        modelverse_args = [str(port)]
        if optimization_level is not None:
            modelverse_args.append('--kernel=%s' % optimization_level)
        if jit_timing_log is not None:
            modelverse_args.append('--jit-timing-log=%s' % jit_timing_log)
        proc = execute("run_local_modelverse", modelverse_args, wait=False)

        # Compile, push and link the source code files.
        compile_files(address, proc, files, mode)

        # Send the request ...
        set_input_data(address, parameters)

        # ... and wait for replies
        while 1:
            val = urllib2.urlopen(
                urllib2.Request(
                    address,
                    urllib.urlencode({"op": "get_output", "taskname": USERNAME})),
                timeout=240).read()
            val = json.loads(val)

            if proc.returncode is not None:
                # Modelverse has terminated. This may or may not be what we want.
                raise ModelverseTerminated()

            if not handle_output(val):
                return

        # All passed!
        return
    except:
        raise
    finally:
        try:
            kill(proc)
        except UnboundLocalError:
            pass

def run_file_to_completion(files, parameters, mode):
    """Compiles the given sequence of files, feeds them the given input in the given mode,
    and then collects and returns output."""
    results = []

    def handle_output(output):
        """Appends the given output to the list of results."""
        results.append(output)
        return True

    try:
        run_file(files, parameters, mode, handle_output)
    except ModelverseTerminated:
        return results

def run_file_fixed_output_count(
        files, parameters, mode, output_count, optimization_level=None, jit_timing_log=None):
    """Compiles the given sequence of files, feeds them the given input in the given mode,
    and then collects and returns a fixed number of outputs."""
    results = []

    def handle_output(output):
        """Appends the given output to the list of results."""
        results.append(output)
        if len(results) < output_count:
            return True
        else:
            return False

    run_file(files, parameters, mode, handle_output, optimization_level, jit_timing_log)
    return results

def run_file_single_output(files, parameters, mode, optimization_level=None, jit_timing_log=None):
    """Compiles the given sequence of files, feeds them the given input in the given mode,
    and then collects and returns a single output."""
    return run_file_fixed_output_count(
        files, parameters, mode, 1, optimization_level, jit_timing_log)[0]

def mean(values):
    """Computes the arithmetic mean of the given values."""
    return float(sum(values)) / max(len(values), 1)

def parse_jit_timing_log(log_file):
    """Parses the JIT timing log entries from the given file."""
    results = []
    for line in log_file.readlines():
        first, _, data = line.strip().rpartition(':')
        _, _, name = first.strip().rpartition(' ')
        results.append((name, float(data)))
    return results
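
# The parser above assumes each log line ends in "<name>: <seconds>", e.g. a
# (hypothetical) line such as "JIT compiled function foo: 0.004500"; the text
# before the final colon is split on its last space to recover the name.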

def run_perf_test(files, parameters, optimization_level, n_iterations=1):
    """Compiles the given sequence of files, feeds them the given input in the given mode,
    and then collects their output. This process is repeated n_iterations times. The
    return value is a dictionary mapping the test-runtime, total-runtime and compile-time
    quantities to their mean values across iterations."""
    test_runtimes = []
    total_runtimes = []
    compile_times = []
    for _ in xrange(n_iterations):
        try:
            timing_log = tempfile.mktemp()
            start_time = time.time()
            test_time = run_file_single_output(
                files, parameters, 'CO',
                optimization_level, timing_log)
            end_time = time.time()
            total_time = end_time - start_time
            test_runtimes.append(test_time)
            total_runtimes.append(total_time)
            with open(timing_log, 'r') as log_file:
                parsed_times = parse_jit_timing_log(log_file)
                compile_times.append(sum([data for _, data in parsed_times]))
        finally:
            os.remove(timing_log)

    return {
        TEST_TIME_QUANTITY: mean(test_runtimes),
        TOTAL_TIME_QUANTITY: mean(total_runtimes),
        COMPILE_TIME_QUANTITY: mean(compile_times)
    }

def get_expectation_checks(expected_values):
    """Converts the given sequence of expected values to a sequence of functions which tell
    if an input is allowed. Every function is accompanied by an expected value."""
    def get_single_expectation_checks(expectation):
        """Gets an expectation checker for a single expected value."""
        if isinstance(expectation, set):
            # We expect to receive a number of outputs equal to the size of the set, but their
            # order does not matter.
            for _ in xrange(len(expectation)):
                yield lambda val: val in expectation
        elif expectation is None:
            # Skip output value
            yield lambda _: True
        else:
            yield lambda val: val == expectation

    for expectation in expected_values:
        for checker in get_single_expectation_checks(expectation):
            yield checker, expectation
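
# Illustrative sketch (hypothetical values): expected_values = [1, {2, 3}, None]
# yields four (checker, expectation) pairs: an exact-match check for 1, two
# order-insensitive membership checks for {2, 3}, and one check for None that
# accepts any output.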

def run_correctness_test(files, parameters, expected, optimization_level):
    """Compiles the given sequence of files, feeds them the given input in the given mode,
    and then compares the output with the expected output. The return value is a dictionary
    of measured quantities."""
    checks = iter(list(get_expectation_checks(expected)))
    next_check = [next(checks)]

    def handle_output(output):
        """Checks the given output against the expected output."""
        check, expectation = next_check[0]
        print("Got %s, expect %s" % (output, expectation))
        assert check(output)

        try:
            next_check[0] = next(checks)
            return True
        except StopIteration:
            return False

    timing_log = tempfile.mktemp()
    start_time = time.time()
    try:
        run_file(files, parameters, 'CO', handle_output, optimization_level, timing_log)
        with open(timing_log, 'r') as log_file:
            parsed_times = parse_jit_timing_log(log_file)
            compile_time = sum([data for _, data in parsed_times])
    except ModelverseTerminated:
        return
    finally:
        os.remove(timing_log)
    end_time = time.time()

    return {
        TOTAL_TIME_QUANTITY: end_time - start_time,
        COMPILE_TIME_QUANTITY: compile_time
    }

def format_output(output):
    """Formats the output of `run_file_to_completion` as a string."""
    return '\n'.join(output)

def define_perf_test(target_class, test_function, optimization_level):
    """Defines a performance test in the given class. The performance test calls the given function
    at the given optimization level."""
    setattr(
        target_class,
        'test_%s' % optimization_level.replace('-', '_').lower(),
        lambda self: test_function(self, optimization_level))

def define_perf_tests(target_class, test_function, optimization_levels=None):
    """Defines performance tests in the given class. Each test calls the given function."""
    if optimization_levels is None:
        optimization_levels = ALL_OPTIMIZATION_LEVELS
    for opt_level in optimization_levels:
        define_perf_test(target_class, test_function, opt_level)
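
# Minimal usage sketch (class, function and file names below are hypothetical):
#
#     class MyPerfTests(unittest.TestCase):
#         pass
#
#     def run_my_test(self, optimization_level):
#         write_perf_to_file(
#             'my_test', optimization_level,
#             run_perf_test(['my_test.alc'], [], optimization_level))
#
#     define_perf_tests(MyPerfTests, run_my_test)
#
# This attaches one test_<level> method per optimization level, with dashes
# replaced by underscores in the method name.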

def get_model_constructor(code):
    """Compiles the given model source code and returns its list of constructors."""
    # First change multiple spaces to a tab
    code_fragments = code.split("\n")
    code_fragments = [i for i in code_fragments if i.strip() != ""]
    code_fragments = [i.replace("    ", "\t") for i in code_fragments]  # four spaces -> one tab
    initial_tabs = min([len(i) - len(i.lstrip("\t")) for i in code_fragments])
    code_fragments = [i[initial_tabs:] for i in code_fragments]
    code = "\n".join(code_fragments)

    with open("__model.mvc", "w") as f:
        f.write(code)
        f.flush()

    constructors = do_compile("__model.mvc", "interface/HUTN/grammars/modelling.g", "M") + ["exit"]
    return constructors

# Default performance data file, plus the names of the measured quantities; the
# quantity names are used as keys in the result dictionaries above and as fields
# in the performance data file.
DEFAULT_PERF_FILE_NAME = 'perf_data.txt'
TOTAL_TIME_QUANTITY = 'total-runtime'
TEST_TIME_QUANTITY = 'test-runtime'
COMPILE_TIME_QUANTITY = 'compile-time'

def write_perf_entry_to_stream(
        test_name, optimization_level, quantity,
        result, output_stream):
    """Writes a performance measurement entry to the given stream."""
    output_stream.write('%s:%s:%s:%f\n' % (test_name, optimization_level, quantity, result))
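
# Each entry is one line of the form "<test_name>:<optimization_level>:<quantity>:<result>",
# for example (values purely illustrative) "my_test:baseline-jit:total-runtime:12.345678".
# parse_perf_data below reads this format back in.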

def write_perf_to_file(
        test_name, optimization_level, runtimes, file_name=DEFAULT_PERF_FILE_NAME):
    """Writes performance data to a file."""
    with open(file_name, "a") as perf_file:
        for quantity, data_point in runtimes.items():
            write_perf_entry_to_stream(
                test_name, optimization_level, quantity, data_point, perf_file)

def write_total_runtime_to_file(
        test_name, optimization_level, total_runtime, file_name=DEFAULT_PERF_FILE_NAME):
    """Writes a total runtime entry to a file."""
    with open(file_name, "a") as perf_file:
        write_perf_entry_to_stream(
            test_name, optimization_level, TOTAL_TIME_QUANTITY, total_runtime, perf_file)

def parse_perf_data(file_name):
    """Parses the performance data in the given file."""
    results = defaultdict(lambda: defaultdict(list))
    with open(file_name, 'r') as perf_file:
        for line in perf_file.readlines():
            test_name, optimization_level, quantity, result = line.strip().split(':')
            results[quantity][optimization_level].append((test_name, float(result)))
    return {
        quantity: sorted(result_dict.items(), key=operator.itemgetter(0))
        for quantity, result_dict in results.items()
    }
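
# Sketch of the structure returned by parse_perf_data (values are hypothetical):
#
#     {
#         'total-runtime': [
#             ('baseline-jit', [('my_test', 1.2), ('other_test', 3.4)]),
#             ('interpreter', [('my_test', 5.6)]),
#         ],
#         ...
#     }
#
# i.e. each quantity maps to a list of (optimization_level, measurements) pairs,
# sorted by optimization level name.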