compiler.py

import cPickle as pickle
import hashlib
import os
import sys

from grammar_compiler_visitor import GrammarCompilerVisitor
from hutnparser import Parser, Tree
from meta_grammar import Grammar
from cached_exception import CachedException

# In-memory cache of compiled Grammar objects, keyed by grammar file path.
global parsers
parsers = {}

# Parsing large inputs can recurse deeply; raise the default limit.
sys.setrecursionlimit(2000)

def read(filename):
    with open(filename, 'r') as f:
        return f.read()

def md5digest(filename):
    hasher = hashlib.md5()
    with open(filename, 'rb') as afile:
        hasher.update(afile.read())
    return hasher.hexdigest()

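# On-disk cache: pickles live in <package>/../caches/, keyed by the MD5 of the
# cached file's *contents* ("<md5>.pickle", or "<mode>_<md5>.pickle" when a
# mode is given), so editing a source file automatically invalidates its cache.
# fetch_cached deliberately swallows every error (missing cache file, stale or
# unreadable pickle, ...) and reports it as a cache miss.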
def fetch_cached(filename, mode=None):
    try:
        md5 = md5digest(filename)
        cache_folder = os.path.abspath("%s/../caches/" % (os.path.dirname(os.path.abspath(__file__))))
        if mode is None:
            picklefile = cache_folder + "/%s.pickle" % md5
        else:
            picklefile = cache_folder + "/%s_%s.pickle" % (mode, md5)
        with open(picklefile, "rb") as f:
            return pickle.load(f)
    except:
        return None

def make_cached(filename, data, mode=None):
    md5 = md5digest(filename)
    cache_folder = os.path.abspath("%s/../caches/" % (os.path.dirname(os.path.abspath(__file__))))
    if mode is None:
        picklefile = cache_folder + "/%s.pickle" % md5
    else:
        picklefile = cache_folder + "/%s_%s.pickle" % (mode, md5)
    with open(picklefile, "wb") as f:
        pickle.dump(data, f, pickle.HIGHEST_PROTOCOL)

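# do_parse caches at two levels: compiled Grammar objects are kept per grammar
# file in the module-level `parsers` dict for the lifetime of the process, and
# both the compiled grammar and the parse result of the input file are pickled
# to disk via make_cached, so unchanged files are not re-parsed on later runs.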
def do_parse(inputfile, grammarfile):
    if grammarfile not in parsers:
        grammar = fetch_cached(grammarfile)
        if grammar is None:
            result = Parser(Grammar(), hide_implicit = True).parse(read(grammarfile))
            if result['status'] != Parser.Constants.Success:
                print('not a valid grammar!')
                print(result)
            tree = result['tree']
            visitor = GrammarCompilerVisitor()
            structure = visitor.visit(tree)
            grammar = Grammar()
            grammar.rules = structure['rules']
            grammar.tokens = structure['tokens']
            make_cached(grammarfile, grammar)
        parsers[grammarfile] = grammar
    else:
        grammar = parsers[grammarfile]

    result = fetch_cached(inputfile)
    if result is None:
        result = Parser(grammar, line_position = True).parse(read(inputfile))
        if result['status'] != Parser.Constants.Success:
            lines = open(inputfile, 'r').readlines()
            begin_line = max(result["line"] - 3, 0)
            end_line = min(result["line"] + 3, len(lines))
            lines = lines[begin_line:end_line]
            lines = ["%s: %s" % (begin_line + i + 1, line) for i, line in enumerate(lines)]
            lines = "".join(lines)
            msg = "%s:%s:%s: %s\nContext:\n%s" % (inputfile, result["line"], result["column"], result["text"], lines)
            raise Exception(msg)
        make_cached(inputfile, result)
    return result

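# find_file resolves an include name against, in order: the current working
# directory, the directory of the file being compiled (the module-level
# `working_file`, set by do_compile), the package's ../includes/ folder, and
# any caller-supplied include_paths. It raises if no candidate exists.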
def find_file(filename, include_paths):
    import os.path
    include_paths = ["."] + \
                    [os.path.abspath(os.path.dirname(working_file))] + \
                    [os.path.abspath("%s/../includes/" % (os.path.dirname(os.path.abspath(__file__))))] + \
                    include_paths
    attempts = []
    for include in include_paths:
        testfile = include + os.sep + filename
        if os.path.isfile(os.path.abspath(testfile)):
            return os.path.abspath(testfile)
        else:
            attempts.append(os.path.abspath(testfile))
    else:
        raise Exception("Could not resolve file %s. Tried: %s" % (filename, attempts))

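# do_compile drives the whole pipeline: parse the input, splice the parse
# trees of `include "..."` statements directly into the top-level tree (each
# file is included at most once, tracked in `included`; the scan restarts
# after every splice so the indices stay valid), then run the visitors in
# order and cache the last visitor's dump() under the requested mode.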
def do_compile(inputfile, grammarfile, visitors=[], include_paths = [], mode=""):
    import os.path
    global working_file
    working_file = os.path.abspath(inputfile)

    result = fetch_cached(inputfile, mode)
    if result is None:
        result = do_parse(inputfile, grammarfile)
        error = result["status"] != Parser.Constants.Success
        if error:
            lines = open(working_file, 'r').readlines()
            begin_line = max(result["line"] - 3, 0)
            end_line = min(result["line"] + 3, len(lines))
            lines = lines[begin_line:end_line]
            lines = ["%s: %s" % (begin_line + i + 1, line) for i, line in enumerate(lines)]
            lines = "".join(lines)
            msg = "%s:%s:%s: %s\nContext:\n%s" % (inputfile, result["line"], result["column"], result["text"], lines)
            raise Exception(msg)
        else:
            for child in result["tree"].tail:
                child.inputfile = inputfile
            included = set()
            while True:
                for i, v in enumerate(result["tree"].tail):
                    if v.head == "include":
                        # Expand this node
                        for j in v.tail:
                            if j.head == "STRVALUE":
                                f = str(j.tail[0])[1:-1]
                                if f in included:
                                    subtree = []
                                else:
                                    name = str(j.tail[0])[1:-1]
                                    subtree = do_parse(find_file(name, include_paths), grammarfile)["tree"].tail
                                    if subtree is None:
                                        raise Exception("Parsing error for included file %s" % find_file(name, include_paths))
                                    for t in subtree:
                                        t.inputfile = name
                                    included.add(f)
                                # Found the string value, so break from the inner for ("searching for element")
                                break
                        # Merge all nodes in
                        before = result["tree"].tail[:i]
                        after = result["tree"].tail[i+1:]
                        result["tree"].tail = before + subtree + after
                        # Found an include node, but to prevent corruption of the tree, we need to start over again, so break from the outer for loop
                        break
                else:
                    # The outer for finally finished, so there were no includes remaining, thus terminate the infinite while loop
                    break
            result["tree"].fix_tracability(inputfile)
            for visitor in visitors:
                visitor.visit(result["tree"])
        if visitors:
            result = visitors[-1].dump()
            make_cached(inputfile, result, mode)
    if visitors:
        return result

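# main maps a mode string to a visitor pipeline (e.g. "CS" runs
# SemanticsVisitor followed by ConstructorsVisitor); an unknown mode raises a
# KeyError. If the compilation raises CachedException, main returns True
# instead of a result.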
def main(input_file, grammar_file, mode, args=[]):
    from prettyprint_visitor import PrettyPrintVisitor
    from prettyprint_visitor import PrintVisitor
    from semantics_visitor import SemanticsVisitor
    from bootstrap_visitor import BootstrapVisitor
    from constructors_visitor import ConstructorsVisitor
    from model_visitor import ModelVisitor
    from model_bootstrap_visitor import ModelBootstrapVisitor

    modes = {
        "N"  : [],
        "P"  : [PrintVisitor],
        "PP" : [PrettyPrintVisitor],
        "BS" : [SemanticsVisitor, BootstrapVisitor],
        "CS" : [SemanticsVisitor, ConstructorsVisitor],
        "M"  : [ModelVisitor],
        "MB" : [ModelBootstrapVisitor],
    }

    try:
        visitors = [v(args) for v in modes[mode]]
        result = do_compile(input_file, grammar_file, visitors, mode=mode)
    except CachedException:
        return True
    return result

if __name__ == "__main__":
    if len(sys.argv) < 4:
        print("Invocation: ")
        print("    %s input_file grammar_file mode [mode_params]*" % sys.argv[0])
        sys.exit(1)
    else:
        value = main(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4:])
        if value is not None:
            print(value)
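
# Example invocation (illustrative only; the grammar and model file names
# below are hypothetical and depend on the surrounding project):
#
#   python compiler.py model.mvc grammar.g PP
#
# or, programmatically:
#
#   from compiler import main
#   output = main("model.mvc", "grammar.g", "PP")
#   if output is not True:  # main returns True when a CachedException was raised
#       print(output)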