Skip to content

Commit

Permalink
Co-authored-by: aswilson87 <aswilson87@users.noreply.github.com>
Browse files Browse the repository at this point in the history
  • Loading branch information
djdalfaro committed Apr 9, 2021
2 parents 35e89a4 + 05be862 commit 595c0b3
Show file tree
Hide file tree
Showing 10 changed files with 365 additions and 35 deletions.
5 changes: 3 additions & 2 deletions documentation/garbage-collection-doc.js
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ class Cache {
* - remove any hash reference that is a member of the deleted hash Set
* - for any hash reference that has not been deleted
* - add that hash to a Set of accessible hashes
* - recrusively trace that hash and continue removing any deleted hash references and updating the Set of accesible hashes
 * - recursively trace that hash and continue removing any deleted hash references and updating the Set of accessible hashes
* 4. remove any hashes that are not a member of the accessible hash Set
*/

Expand Down Expand Up @@ -85,7 +85,8 @@ const cacheAfterGC = {
'favoriteMovie(id:2)': 'Movie~2',
"addMovie(input: {title: 'The Fugitive', releaseYear: 1993, genre: ACTION })":
'Movie~5',
'deleteMovie(id:4)': 'Movie~4',
// 'deleteMovie(id:4)': 'Movie~4', // mistake?
'deleteMovie(id:3)': 'Movie~3',
},

'Movie~1': {
Expand Down
84 changes: 80 additions & 4 deletions src/CacheClassBrowser.js
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,9 @@ export default class Cache {
return undefined;
}
}
return { data: responseObject };
return {
data: responseObject
};
}

async write(queryStr, respObj, deleteFlag) {
Expand All @@ -67,8 +69,77 @@ export default class Cache {

gc() {
// garbageCollection; garbage collection: removes any inaccessible hashes from the cache
const badHashes = getBadHashes();
const goodHashes = rootQueryCleaner(badHashes);
const goodHashes2 = getGoodHashes(badHashes, goodHashes);
removeInaccessibleHashes(badHashes, goodHashes2);
}

// remove hashes that are flagged for deletion and store records of them in a set badHashes for removal inside root queries
getBadHashes() {
const badHashes = new Set();
for (let key in this.storage) {
if (key === 'ROOT_QUERY' || key === 'ROOT_MUTATION') continue;
if (this.storage[key] === 'DELETED') {
badHashes.add(key);
delete this.storage[key];
}
}
return badHashes;
}

// go through root queries, remove all instances of bad hashes, add remaining hashes into goodHashes set
rootQueryCleaner(badHashes) {
const goodHashes = new Set();
const rootQuery = this.storage['ROOT_QUERY'];
for (let key in rootQuery) {
if (Array.isArray(rootQuery[key])) {
rootQuery[key] = rootQuery[key].filter(x => !badHashes.has(x));
if (rootQuery[key].length === 0) delete rootQuery[key];
for (let el of rootQuery[key]) goodHashes.add(el);
} else (badHashes.has(rootQuery[key])) ? delete rootQuery[key] : goodHashes.add(rootQuery[key]);
}
return goodHashes;
}

// Go through the cache, check good hashes for any nested hashes and add them to goodHashes set
getGoodHashes(badHashes, goodHashes) {
for (let key in this.storage) {
if (key === 'ROOT_QUERY' || key === 'ROOT_MUTATION') continue;
for (let i in this.storage[key]) {
if (Array.isArray(this.storage[key][i])) {
for (let el of this.storage[key][i]) {
if (el.includes('~') && !badHashes.has(el)) {
goodHashes.add(el);
}
}
} else if (typeof this.storage[key][i] === 'string') {
if (this.storage[key][i].includes('~') && !badHashes.has(this.storage[key][i])) {
goodHashes.add(this.storage[key][i]);
}
}
}
}
return goodHashes;
}

// Remove inaccessible hashes by checking if they are in goodhashes set or not
removeInaccessibleHashes(badHashes, goodHashes) {
for (let key in this.storage) {
if (key === 'ROOT_QUERY' || key === 'ROOT_MUTATION') continue;
if (!goodHashes.has(key)) delete this.storage[key];
for (let i in this.storage[key]) {
if (Array.isArray(this.storage[key][i])) {
this.storage[key][i] = this.storage[key][i].filter(x => !badHashes.has(x));
} else if (typeof this.storage[key][i] === 'string') {
if (this.storage[key][i].includes('~') && badHashes.has(this.storage[key][i])) {
delete this.storage[key][i];
}
}
}
}
}

// cache read/write helper methods
async cacheRead(hash) {
return this.storage[hash];
Expand All @@ -83,7 +154,10 @@ export default class Cache {
}

async cacheClear() {
this.storage = { ROOT_QUERY: {}, ROOT_MUTATION: {} };
this.storage = {
ROOT_QUERY: {},
ROOT_MUTATION: {}
};
}

// functionality to stop polling
Expand All @@ -100,7 +174,9 @@ export default class Cache {
readWholeQuery(queryStr) {
const hash = queryStr.replace(/\s/g, '');
const root = this.cacheRead('ROOT_QUERY');
if (root[hash]) return { data: root[hash] };
if (root[hash]) return {
data: root[hash]
};
return undefined;
}

Expand Down Expand Up @@ -171,4 +247,4 @@ export default class Cache {
return dataObj;
}
}
}
}
104 changes: 99 additions & 5 deletions src/CacheClassServer.js
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,14 @@ export class Cache {
this.context = window.Deno ? 'server' : 'client';
}

// Seed Redis with the current contents of the in-memory cache: every
// key (including ROOT_QUERY / ROOT_MUTATION) is JSON-serialized and
// written under its own Redis key.
// NOTE(review): redis.set is not awaited — writes are fire-and-forget;
// confirm the client queues commands reliably before server start.
// NOTE(review): `redis` is resolved from an outer scope not visible in
// this block.
insertIntoRedis() {
for (let key in this.storage) {
redis.set(key, JSON.stringify(this.storage[key]));
}
}

pullOutCache() {}

// Main functionality methods
async read(queryStr) {
if (typeof queryStr !== 'string')
Expand Down Expand Up @@ -81,6 +89,86 @@ export class Cache {

gc() {
// garbageCollection; garbage collection: removes any inaccessible hashes from the cache
const badHashes = getBadHashes();
const goodHashes = rootQueryCleaner(badHashes);
const goodHashes2 = getGoodHashes(badHashes, goodHashes);
removeInaccessibleHashes(badHashes, goodHashes2);
}

// remove hashes that are flagged for deletion and store records of them in a set badHashes for removal inside root queries
getBadHashes() {
const badHashes = new Set();
for (let key in this.storage) {
if (key === 'ROOT_QUERY' || key === 'ROOT_MUTATION') continue;
if (this.storage[key] === 'DELETED') {
badHashes.add(key);
delete this.storage[key];
}
}
return badHashes;
}

// go through root queries, remove all instances of bad hashes, add remaining hashes into goodHashes set
rootQueryCleaner(badHashes) {
const goodHashes = new Set();
const rootQuery = this.storage['ROOT_QUERY'];
for (let key in rootQuery) {
if (Array.isArray(rootQuery[key])) {
rootQuery[key] = rootQuery[key].filter((x) => !badHashes.has(x));
if (rootQuery[key].length === 0) delete rootQuery[key];
for (let el of rootQuery[key]) goodHashes.add(el);
} else
badHashes.has(rootQuery[key])
? delete rootQuery[key]
: goodHashes.add(rootQuery[key]);
}
return goodHashes;
}

// Go through the cache, check good hashes for any nested hashes and add them to goodHashes set
getGoodHashes(badHashes, goodHashes) {
for (let key in this.storage) {
if (key === 'ROOT_QUERY' || key === 'ROOT_MUTATION') continue;
for (let i in this.storage[key]) {
if (Array.isArray(this.storage[key][i])) {
for (let el of this.storage[key][i]) {
if (el.includes('~') && !badHashes.has(el)) {
goodHashes.add(el);
}
}
} else if (typeof this.storage[key][i] === 'string') {
if (
this.storage[key][i].includes('~') &&
!badHashes.has(this.storage[key][i])
) {
goodHashes.add(this.storage[key][i]);
}
}
}
}
return goodHashes;
}

// Remove inaccessible hashes by checking if they are in goodhashes set or not
removeInaccessibleHashes(badHashes, goodHashes) {
for (let key in this.storage) {
if (key === 'ROOT_QUERY' || key === 'ROOT_MUTATION') continue;
if (!goodHashes.has(key)) delete this.storage[key];
for (let i in this.storage[key]) {
if (Array.isArray(this.storage[key][i])) {
this.storage[key][i] = this.storage[key][i].filter(
(x) => !badHashes.has(x)
);
} else if (typeof this.storage[key][i] === 'string') {
if (
this.storage[key][i].includes('~') &&
badHashes.has(this.storage[key][i])
) {
delete this.storage[key][i];
}
}
}
}
}

// cache read/write helper methods
Expand Down Expand Up @@ -112,7 +200,7 @@ export class Cache {
this.storage[hash] = value;
} else {
value = JSON.stringify(value);
await redis.setex(hash, 30, value);
await redis.setex(hash, 6000, value);
let hashedQuery = await redis.get(hash);
}
}
Expand Down Expand Up @@ -164,6 +252,8 @@ export class Cache {
return allHashesFromQuery.reduce(async (acc, hash) => {
// for each hash from the input query, build the response object
const readVal = await this.cacheRead(hash);
// return undefined if hash has been garbage collected
if (readVal === undefined) return undefined;
if (readVal === 'DELETED') return acc;
const dataObj = {};
for (const field in fields) {
Expand All @@ -185,10 +275,14 @@ export class Cache {
if (dataObj[field] === undefined) return undefined;
}
}
// acc is an array of response object for each hash
const resolvedProm = await Promise.resolve(acc);
resolvedProm.push(dataObj);
return resolvedProm;
// acc is an array within a Response object for each hash
try {
const resolvedProm = await Promise.resolve(acc);
resolvedProm.push(dataObj);
return resolvedProm;
} catch (error) {
return undefined;
}
}, []);
}
// Case where allHashesFromQuery has only one hash and is not an array but a single string
Expand Down
2 changes: 1 addition & 1 deletion src/destructure.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
* 2. We won't worry about arguments on fields for now
* 3. We won't worry about aliases for now
* 4. We won't worry about handling directives for now
* 5. We wont' worry about fragments for now
* 5. We won't worry about fragments for now
* 6. This function will assume that everything passed in can be a query or a mutation (not both).
* 7. We won't handle variables for now, but we may very well find we need to
* 8. We will handle only the meta field "__typename" for now
Expand Down
8 changes: 7 additions & 1 deletion src/obsidian.ts
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,13 @@ export async function ObsidianRouter<T>({
const router = new Router();

const schema = makeExecutableSchema({ typeDefs, resolvers });
const cache = new LFUCache(50);

// If using LFU Browser Caching, the following cache line needs to be uncommented.
// const cache = new LFUCache(50);

// If using Redis caching, the following lines need to be uncommented.
const cache = new Cache();
cache.insertIntoRedis();

// clear redis cache when restarting the server
cache.cacheClear();
Expand Down
18 changes: 9 additions & 9 deletions test_files/rhum_test_files/destructure_test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,13 +20,10 @@ Rhum.testPlan('destructure.ts', () => {
test.createQueriesObjTestData,
'queries'
);
console.log('RESULTS', results)
Rhum.asserts.assertEquals(test.createQueriesObjResultsData, results);
});
Rhum.testCase('findQueryFields test', () => {
console.log(test.findQueryFieldsTestData);
const results = findQueryFields(test.findQueryFieldsTestData);
console.log(results);
Rhum.asserts.assertEquals(test.findQueryFieldsResultData, results);
});
Rhum.testCase('findClosingBrace test', () => {
Expand Down Expand Up @@ -59,14 +56,17 @@ Rhum.testPlan('destructure.ts', () => {
Rhum.asserts.assertEquals(test.newAliasTestResult, result);
});
});

Rhum.testSuite('destructure fragment tests', () => {
Rhum.testCase('destructure fragment tests - results in two seperate queries', () => {
const result = destructureQueries(test.fragmentTestData);
Rhum.asserts.assertEquals(test.fragmentResultData, result);
});
Rhum.testCase(
'destructure fragment tests - results in two seperate queries',
() => {
const result = destructureQueries(test.fragmentTestData);
Rhum.asserts.assertEquals(test.fragmentResultData, result);
}
);
Rhum.testCase('destructure fragment tests - results in one query', () => {
const result = destructureQueries(test.fragmentTestData2);
const result = destructureQueries(test.fragmentTestData2);
Rhum.asserts.assertEquals(test.fragmentResultData2, result);
});
Rhum.testCase('destructure fragment tests - nested fragments', () => {
Expand Down
Loading

0 comments on commit 595c0b3

Please sign in to comment.