Commit

- Fixes #12
- Fixes #7
- Starting on #9
Jonnycake committed Jan 10, 2019
1 parent f8bcff6 commit f2d2164
Showing 1 changed file with 14 additions and 5 deletions.
19 changes: 14 additions & 5 deletions Explorer.py
@@ -31,7 +31,6 @@ class Explorer:
     data = {}
     differ = None
     stats = {}
-    structure = {}
     renames = {}
 
     def __init__(self, path="."):
@@ -115,22 +114,24 @@ def collectData(self, from_cache=False):
             print("\tLoading from cache (%s)..." % (cache_path))
             try:
                 with open(cache_path, "r") as f:
-                    data = json.load(f)
+                    json_data = json.load(f)
+                    data, self.renames = self.loadCache(json_data)
             except:
                 # @todo Do something with the error
                 print(sys.exc_info()[0])
         else:
             print("\tLoading live data...")
             data = self.loadLiveData()
 
-            # @todo Make this not needed....
-            data['files'] = self.data['files']
+        # @todo Make this not needed....
+        data['files'] = self.data['files']
 
         self.data = data
 
         if self.config.getboolean('General', 'enable_cache'):
             print("\tWriting cache file '%s'..." % (cache_path))
             with open(cache_path, "w") as f:
-                f.write(json.dumps(self.data))
+                f.write(json.dumps(self.getCache()))
 
     def getDiffStats(self, commit, last_commit):
         files = {}
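For context, the hunk above reworks collectData so the cache read goes through loadCache and the cache write goes through getCache instead of dumping self.data directly. Below is a minimal, self-contained sketch of that read-or-rebuild flow using only the standard json and time modules; the CACHE_PATH value, the TTL constant, and the fall-back-to-live-data behaviour are assumptions for illustration, not taken from the commit.

import json
import time

CACHE_PATH = "explorer.cache"   # hypothetical path for this sketch
CACHE_TTL_DAYS = 1              # assumed value of the 'Caching' / 'cache_ttl' option

def load_or_collect(collect_live):
    """Return (data, renames), preferring a cache file younger than the TTL."""
    try:
        with open(CACHE_PATH, "r") as f:
            cache = json.load(f)
        if cache["timestamp"] < time.time() - CACHE_TTL_DAYS * 86400:
            raise Exception("Expired cache")
        return cache["data"], cache["renames"]
    except Exception as exc:  # missing, unreadable, or expired cache
        print("\tCache unusable (%s), loading live data..." % exc)
        data = collect_live()
        with open(CACHE_PATH, "w") as f:
            json.dump({"data": data, "renames": {}, "timestamp": int(time.time())}, f)
        return data, {}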
@@ -442,3 +443,11 @@ def aggregateBasicInfo(self):
         basic_stats['last'] = {commit_hashes[-1]: self.data['commits'][commit_hashes[-1]]}
         return basic_stats
 
+    def getCache(self):
+        return {"data": self.data, "renames": {k: list(self.renames[k]) for k in self.renames}, "timestamp": int(time.time())}
+
+    def loadCache(self, cache, force=False):
+        # 60 (seconds) * 60 (minutes) * 24 (hours) = 86,400 seconds = 1 day
+        if (not force) and cache['timestamp'] < time.time() - (int(self.config.get('Caching', 'cache_ttl')) * 86400):
+            raise Exception("Expired cache")
+        return (cache['data'], {k: llist.sllist(cache['renames'][k]) for k in cache['renames']})
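The two new methods define the cache file format: getCache bundles self.data, the rename map (each llist.sllist flattened to a plain, JSON-serializable list), and a Unix timestamp, while loadCache rejects a cache older than cache_ttl days (cache_ttl * 86400 seconds) unless force=True and rebuilds the linked lists. A small round-trip sketch follows, assuming the third-party llist package that the diff relies on; the sample rename data and the TTL value are made up for illustration.

import json
import time

import llist  # third-party package providing sllist (singly linked list)

renames = {"old_name.py": llist.sllist(["renamed_once.py", "renamed_twice.py"])}

# getCache(): sllist values are not JSON-serializable, so they become plain lists.
payload = {
    "data": {"commits": {}},
    "renames": {k: list(v) for k, v in renames.items()},
    "timestamp": int(time.time()),
}
blob = json.dumps(payload)

# loadCache(): refuse caches older than cache_ttl days, then rebuild the sllists.
cache_ttl_days = 1  # stands in for int(config.get('Caching', 'cache_ttl'))
cache = json.loads(blob)
if cache["timestamp"] < time.time() - cache_ttl_days * 86400:
    raise Exception("Expired cache")
restored_renames = {k: llist.sllist(v) for k, v in cache["renames"].items()}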
