check_objects.py

import urllib
import urllib2
import json
import random
import hashlib
import time
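

# to_recompile() asks a remote task server which of the given files still
# match their previously recorded MD5 digests. It registers a throwaway
# task, queues one "is_defined" query per file, compares each reply with
# the file's local digest, and returns the files that need rebuilding.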
def to_recompile(address, files):
    # Each run gets a (probabilistically) unique task name on the server.
    taskname = str(random.random())
    files = sorted(files)
    rebuild = []

    def flush_data(data):
        # POST one batch of commands to the server, JSON-encoded.
        urllib2.urlopen(urllib2.Request(address, urllib.urlencode({
            "op": "set_input",
            "data": json.dumps(data),
            "taskname": taskname,
        }))).read()
    # Register the task with the task manager, retrying until the server
    # is ready to accept it.
    while 1:
        try:
            # Create new task
            urllib2.urlopen(urllib2.Request(address, urllib.urlencode({
                "op": "set_input",
                "value": '"%s"' % taskname,
                "taskname": "task_manager",
            }))).read()
            break
        except Exception:
            time.sleep(0.01)
    # Queue one "is_defined" query per file and send the whole batch.
    data = []
    for f in files:
        data.extend([3, "is_defined", f])
    flush_data(data)
    # Hash each file locally; binary mode keeps the digest byte-exact.
    md5_values = {}
    for f in files:
        md5 = hashlib.md5()
        with open(f, 'rb') as fh:
            md5.update(fh.read())
        md5_values[f] = md5.hexdigest()
    # Read back one reply per file, in the same sorted order the queries
    # were sent.
    for f in files:
        v = urllib2.urlopen(urllib2.Request(address, urllib.urlencode({
            "op": "get_output",
            "taskname": taskname,
        }))).read()
        mv_md5 = json.loads(v)
        if md5_values[f] == mv_md5:
            # Identical, so don't rebuild
            print("[CACHE] %s" % f)
        else:
            # Different, so rebuild
            rebuild.append(f)
    # Tell the server this task is finished.
    urllib2.urlopen(urllib2.Request(address, urllib.urlencode({
        "op": "set_input",
        "value": '-1',
        "taskname": taskname,
    }))).read()
    return rebuild
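

# A minimal usage sketch, not part of the original script: the server URL
# and file names below are hypothetical, and it assumes a compatible task
# server is already running at that address.
if __name__ == "__main__":
    stale = to_recompile("http://localhost:8000/task", ["a.c", "b.c"])
    for f in stale:
        print("[REBUILD] %s" % f)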