# compiler.py
  1. import cPickle as pickle
  2. import os
  3. import sys
  4. import time
  5. from grammar_compiler_visitor import GrammarCompilerVisitor
  6. from hutnparser import Parser, Tree
  7. from meta_grammar import Grammar
  8. from cached_exception import CachedException
  9. import hashlib
  10. global parsers
  11. parsers = {}
  12. sys.setrecursionlimit(2000)
  13. def read(filename):
  14. with open(filename, 'r') as f:
  15. return f.read()
  16. def md5digest(data):
  17. hasher = hashlib.md5()
  18. hasher.update(data)
  19. return hasher.hexdigest()
  20. fetch_caches = {}
  21. def fetch_cached(data, mode=None):
  22. global fetch_caches
  23. try:
  24. md5 = md5digest(data)
  25. if md5 in fetch_caches:
  26. return fetch_caches[md5]
  27. cache_folder = os.path.abspath("%s/../caches/" % (os.path.dirname(os.path.abspath(__file__))))
  28. if mode is None:
  29. picklefile = cache_folder + "/%s.pickle" % md5
  30. else:
  31. picklefile = cache_folder + "/%s_%s.pickle" % (mode, md5)
  32. with open(picklefile, "rb") as f:
  33. d = pickle.load(f)
  34. fetch_caches[md5] = d
  35. return d
  36. except:
  37. return None
  38. def make_cached(original_data, data, mode=None):
  39. md5 = md5digest(original_data)
  40. cache_folder = os.path.abspath("%s/../caches/" % (os.path.dirname(os.path.abspath(__file__))))
  41. if mode is None:
  42. picklefile = cache_folder + "/%s.pickle" % md5
  43. else:
  44. picklefile = cache_folder + "/%s_%s.pickle" % (mode, md5)
  45. with open(picklefile, "wb") as f:
  46. pickle.dump(data, f, pickle.HIGHEST_PROTOCOL)
  47. def do_parse(inputfile, grammarfile):
  48. if grammarfile not in parsers:
  49. grammar = fetch_cached(read(grammarfile))
  50. if grammar is None:
  51. result = parser = Parser(Grammar(), hide_implicit = True).parse(read(grammarfile))
  52. if result['status'] != Parser.Constants.Success:
  53. print 'not a valid grammar!'
  54. print result
  55. tree = result['tree']
  56. visitor = GrammarCompilerVisitor()
  57. structure = visitor.visit(tree)
  58. grammar = Grammar()
  59. grammar.rules = structure['rules']
  60. grammar.tokens = structure['tokens']
  61. make_cached(read(grammarfile), grammar)
  62. parsers[grammarfile] = grammar
  63. else:
  64. grammar = parsers[grammarfile]
  65. result = fetch_cached(read(inputfile))
  66. if result is None:
  67. result = Parser(grammar, line_position = True).parse(read(inputfile))
  68. if result['status'] != Parser.Constants.Success:
  69. lines = open(inputfile, 'r').readlines()
  70. begin_line = max(result["line"] - 3, 0)
  71. end_line = min(result["line"] + 3, len(lines))
  72. lines = lines[begin_line:end_line]
  73. lines = ["%s: %s" % (begin_line + i + 1, line) for i, line in enumerate(lines)]
  74. lines = "".join(lines)
  75. msg = "%s:%s:%s: %s\nContext:\n%s" % (inputfile, result["line"], result["column"], result["text"], lines)
  76. raise Exception(msg)
  77. make_cached(read(inputfile), result)
  78. return result
  79. def find_file(filename, include_paths):
  80. import os.path
  81. include_paths = ["."] + \
  82. [os.path.abspath(os.path.dirname(working_file))] + \
  83. [os.path.abspath("%s/../includes/" % (os.path.dirname(os.path.abspath(__file__))))] + \
  84. include_paths + \
  85. []
  86. attempts = []
  87. for include in include_paths:
  88. testfile = include + os.sep + filename
  89. if os.path.isfile(os.path.abspath(testfile)):
  90. return os.path.abspath(testfile)
  91. else:
  92. attempts.append(os.path.abspath(testfile))
  93. else:
  94. raise Exception("Could not resolve file %s. Tried: %s" % (filename, attempts))
  95. def do_compile(inputfile, grammarfile, visitors=[], include_paths = [], mode=""):
  96. import os.path
  97. global working_file
  98. working_file = os.path.abspath(inputfile)
  99. result = do_parse(inputfile, grammarfile)
  100. error = result["status"] != Parser.Constants.Success
  101. if error:
  102. lines = open(working_file, 'r').readlines()
  103. begin_line = max(result["line"] - 3, 0)
  104. end_line = max(result["line"] + 3, len(lines))
  105. lines = lines[begin_line:end_line]
  106. lines = ["%s: %s" % (begin_line + i + 1, line) for i, line in enumerate(lines)]
  107. lines = "".join(lines)
  108. msg = "%s:%s:%s: %s\nContext:\n%s" % (inputfile, result["line"], result["column"], result["text"], lines)
  109. raise Exception(msg)
  110. else:
  111. for child in result["tree"].tail:
  112. child.inputfile = inputfile
  113. included = set()
  114. while True:
  115. for i, v in enumerate(result["tree"].tail):
  116. if v.head == "include":
  117. # Expand this node
  118. for j in v.tail:
  119. if j.head == "STRVALUE":
  120. f = str(j.tail[0])[1:-1]
  121. if f in included:
  122. subtree = []
  123. else:
  124. name = str(j.tail[0])[1:-1]
  125. subtree = do_parse(find_file(name, include_paths), grammarfile)["tree"].tail
  126. if subtree is None:
  127. raise Exception("Parsing error for included file %s" % find_file(name, include_paths))
  128. for t in subtree:
  129. t.inputfile = name
  130. included.add(f)
  131. # Found the string value, so break from the inner for ("searching for element")
  132. break
  133. # Merge all nodes in
  134. before = result["tree"].tail[:i]
  135. after = result["tree"].tail[i+1:]
  136. result["tree"].tail = before + subtree + after
  137. # Found an include node, but to prevent corruption of the tree, we need to start over again, so break from the outer for loop
  138. break
  139. else:
  140. # The outer for finally finished, so there were no includes remaining, thus terminate the infinite while loop
  141. break
  142. pruned = result["tree"].prune()
  143. import json
  144. tree_data = json.dumps(pruned)
  145. new_result = fetch_cached(tree_data, mode)
  146. if new_result is None:
  147. result["tree"].fix_tracability(inputfile)
  148. for visitor in visitors:
  149. visitor.visit(result["tree"])
  150. if visitors:
  151. result = visitors[-1].dump()
  152. make_cached(tree_data, result, mode)
  153. else:
  154. result = new_result
  155. if visitors:
  156. return result
  157. def main(input_file, grammar_file, mode, args=[]):
  158. from prettyprint_visitor import PrettyPrintVisitor
  159. from prettyprint_visitor import PrintVisitor
  160. from semantics_visitor import SemanticsVisitor
  161. from bootstrap_visitor import BootstrapVisitor
  162. from constructors_visitor import ConstructorsVisitor
  163. from model_visitor import ModelVisitor
  164. from model_bootstrap_visitor import ModelBootstrapVisitor
  165. modes = {
  166. "N" : [],
  167. "P" : [PrintVisitor],
  168. "PP" : [PrettyPrintVisitor],
  169. "BS" : [SemanticsVisitor, BootstrapVisitor],
  170. "CS" : [SemanticsVisitor, ConstructorsVisitor],
  171. "M" : [ModelVisitor],
  172. "MB" : [ModelBootstrapVisitor],
  173. }
  174. try:
  175. visitors = [v(args) for v in modes[mode]]
  176. result = do_compile(input_file, grammar_file, visitors, mode=mode)
  177. except CachedException:
  178. return True
  179. return result
  180. if __name__ == "__main__":
  181. if len(sys.argv) <= 2:
  182. print("Invocation: ")
  183. print(" %s input_file grammar_file mode [mode_params]*" % sys.argv[0])
  184. sys.exit(1)
  185. else:
  186. value = main(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4:])
  187. if value is not None:
  188. print(value)