- import urllib
- import urllib2
- import json
- import random
- import hashlib
- import time
def to_recompile(address, files):
    """Ask the md5-cache service at *address* which of *files* changed.

    Registers a fresh task with the remote "task_manager", queues one
    ``is_defined`` query per file, hashes every file locally, and then
    compares each local MD5 digest with the digest the service returns.
    Files whose digests differ are returned for rebuilding.

    :param address: URL of the task-manager HTTP endpoint.
    :param files: iterable of file paths; processed in sorted order so
        the queued queries and the answers read back line up.
    :return: list of file paths that must be rebuilt.
    """
    taskname = str(random.random())
    files = sorted(files)
    rebuild = []

    def post(params):
        # Single round-trip to the service with a form-encoded body.
        # Hoisted here because the original repeated this boilerplate
        # four times.
        req = urllib2.Request(address, urllib.urlencode(params))
        return urllib2.urlopen(req).read()

    def flush_data(data):
        post({"op": "set_input", "data": json.dumps(data), "taskname": taskname})

    # Create the new task, retrying until the manager accepts it (the
    # service may not be reachable yet when we start).
    while 1:
        try:
            post({"op": "set_input", "value": '"%s"' % taskname,
                  "taskname": "task_manager"})
            break
        except urllib2.URLError:
            # Narrowed from a bare "except:": only connection-level
            # failures are worth retrying; anything else should surface.
            time.sleep(0.01)

    # Queue one "is_defined" query per file, in sorted order, so the
    # answers read back below arrive in the same order.
    data = []
    for f in files:
        data.extend([3, "is_defined", f])
    flush_data(data)

    # Hash every file locally.  Binary mode so the digest reflects the
    # raw bytes regardless of platform newline translation, and a
    # context manager so the handle is not leaked.
    md5_values = {}
    for f in files:
        md5 = hashlib.md5()
        with open(f, 'rb') as fh:
            md5.update(fh.read())
        md5_values[f] = md5.hexdigest()

    # Read one answer per file; ordering matches the queued queries.
    for f in files:
        mv_md5 = json.loads(post({"op": "get_output", "taskname": taskname}))
        if md5_values[f] == mv_md5:
            # Identical digest, so no rebuild needed (dead "pass"
            # before this print removed).
            print("[CACHE] %s" % f)
        else:
            # Different (or unknown to the cache): rebuild.
            rebuild.append(f)

    # Signal the service that this task is finished.
    post({"op": "set_input", "value": '-1', "taskname": taskname})
    return rebuild