diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index c7cdaf0..0000000 --- a/.travis.yml +++ /dev/null @@ -1,10 +0,0 @@ -# sudo: required -# services: -# - docker -# before_install: -# - docker build -t open-source-labs/obsidian . -# script: -# - docker run open-source-labs/obsidian test --allow-net --allow-read --allow-env --unstable deno.test.ts -# env: -# global: -# secure: sfaHXoKGXnAkwS/QK2pdTPC1NVqd9+pVWImEcz8W9IXFRsOcHpt9lVsmB0dFvDpVm+9KFpcBwnpfOtiyoj6Q9NGIY71jG58kYHdbcWBlR3onS7/JBvgEu94DC7HZR+rQ4/GW+ROh4avBt6RjDSuLk4qQ73Yc3+SDKAl+M0PTADlVZpkicCID59qcdynbAjXu5W8lW2Hp0hqO72Prx/8hgmchI0I7zSYcPBFSy3WaEPJa52yKesVwsHcFtzOBMrDAdE+R028AzdBAXUoiqh6cTVeLSTL1jnIWbCBtfAROlTR82cZyo4c7PJxYyqT3mhRSZvBN/3hdW7+xMOzq6gmpmcl1UO2Q5i4xXEGnatfuzMVa/8SqJZoG2IFIWZ4mvelwufHVuLgF+6JvK2BKSpjFfSUGo0p9G0bMg+GHwRipTPIq1If3ELkflAM6QJwL7TritwtWzWXfAfoZ3KALdPTiFzJAKyQfFvSwWbfXqAgqZIbLjlzSgOJ4QKWD6CBksU7b4Oky6hr/+R+ZihzQLtWKkk/8cklEG/NJlknS2vPRG8xRRF7/C+vSFPrCkmsakPc8c1iGfai8J3Vc09Pg0UeShJDWkSQ6QP165ub6LEL5nz0Qzp0CD1sSQu5re5/M5ef9V69L2pdYhEj0RaZ241DF5efzYAgLI8SvMr5TcTr06+8= diff --git a/ObsidianWrapper/ObsidianWrapper.jsx b/ObsidianWrapper/ObsidianWrapper.jsx index a5fff78..ced3c9a 100644 --- a/ObsidianWrapper/ObsidianWrapper.jsx +++ b/ObsidianWrapper/ObsidianWrapper.jsx @@ -1,35 +1,61 @@ import * as React from "https://esm.sh/react@18"; import LFUCache from '../src/Browser/lfuBrowserCache.js'; import LRUCache from '../src/Browser/lruBrowserCache.js'; +import WTinyLFUCache from "../src/Browser/wTinyLFUBrowserCache.js"; import { insertTypenames } from '../src/Browser/insertTypenames.js'; +import { sha256 } from 'https://denopkg.com/chiefbiiko/sha256@v1.0.0/mod.ts'; const cacheContext = React.createContext(); function ObsidianWrapper(props) { - const { algo, capacity } = props - const [cache, setCache] = React.useState(new LFUCache(Number(capacity || 2000))); - if(algo === 'LRU') setCache(new LRUCache(Number(capacity || 2000))); // You have to put your Google Chrome Obsidian 
developer tool extension id to connect Obsidian Wrapper with dev tool - const chromeExtensionId = 'apcpdmmbhhephobnmnllbklplpaoiemo'; - // initialice cache in local storage - //window.localStorage.setItem('cache', JSON.stringify(cache)); + // props to be inputted by user when using the Obsdian Wrapper + const { algo, capacity, searchTerms, useCache, persistQueries } = props; + // if useCache hasn't been set, default caching to true + let caching = true; + // if it has been set to false, turn client-side caching off + if (useCache === false) caching = false; + + // algo defaults to LFU, capacity defaults to 2000 + const setAlgoCap = (algo, capacity) => { + let cache; + if(caching && algo === 'LRU'){ + cache = new LRUCache(Number(capacity || 2000)) + } else if (caching && algo === 'W-TinyLFU'){ + cache = new WTinyLFUCache(Number(capacity || 2000)) + } else if (caching) { + cache = new LFUCache(Number(capacity || 2000)) + } + return cache; + } + + // once cache is initialized, cannot setCache + // state for cache is initialized based on developer settings in wrapper + // to successfully change between algo types for testing, kill the server, change the algo type in wrapper, then restart server + const [cache, setCache] = React.useState(setAlgoCap(algo, capacity)); + + // FOR DEVTOOL - listening for message from content.js to be able to send algo type and capacity to devtool + window.addEventListener('message', msg => { + if(msg.data.type === 'algocap'){ + window.postMessage({ + algo: algo ? algo : 'LFU', + capacity: capacity ? 
capacity : 2000 + }) + } + }); async function query(query, options = {}) { - // dev tool messages + // FOR DEVTOOL - startTime is used to calculate the performance of the cache + // startDate is to find out when query was made, this data is passed to devtools const startTime = Date.now(); - /* - chrome.runtime.sendMessage(chromeExtensionId, { query: query }); - chrome.runtime.sendMessage(chromeExtensionId, { - cache: window.localStorage.getItem('cache'), - }); - */ + const startDate = new Date(Date.now()); // set the options object default properties if not provided const { endpoint = '/graphql', - cacheRead = true, - cacheWrite = true, + cacheRead = !caching ? false : true, + cacheWrite = !caching ? false : true, pollInterval = null, - wholeQuery = true, + wholeQuery = false, //Note: logic for true is currently nonfunctional } = options; // when pollInterval is not null the query will be sent to the server every inputted number of milliseconds @@ -45,70 +71,101 @@ function ObsidianWrapper(props) { return interval; } - // when cacheRead set to true - if (cacheRead) { + // when cacheRead set to true & we are utilizing client side caching + if (cacheRead && caching) { let resObj; // when the developer decides to only utilize whole query for cache - if (!wholeQuery) resObj = await cache.readWholeQuery(query); + if (wholeQuery) resObj = await cache.readWholeQuery(query); + // attempt to read from the cache else resObj = await cache.read(query); // check if query is stored in cache if (resObj) { // returning cached response as a promise const cacheHitResponseTime = Date.now() - startTime; - // Allow for access of the response time - // const cacheCopy = {...cache}; - // cacheCopy.callTime = cacheHitResponseTime; - // setCache(cacheCopy); - resObj['time'] = cacheHitResponseTime + // FOR DEVTOOL - sends message to content.js with query metrics when query is a hit + window.postMessage({ + type: 'query', + time: cacheHitResponseTime, + date: 
startDate.toDateString().slice(0, 24), + query: query, + hit: true + }); - console.log( - "From cacheRead: Here's the response time on the front end: ", - cacheHitResponseTime - ); - /*chrome.runtime.sendMessage(chromeExtensionId, { - cacheHitResponseTime: cacheHitResponseTime, - });*/ return new Promise((resolve, reject) => resolve(resObj)); } // execute graphql fetch request if cache miss return new Promise((resolve, reject) => resolve(hunt(query))); - // when cacheRead set to false } - if (!cacheRead) { + // when cacheRead set to false & not using client-side cache + if (!cacheRead || !caching) { return new Promise((resolve, reject) => resolve(hunt(query))); } - // when cache miss or on intervals + // function to be called on cache miss or on intervals or not looking in the cache async function hunt(query) { - if (wholeQuery) query = insertTypenames(query); + if (!wholeQuery) query = insertTypenames(query); try { - // send fetch request with query - const resJSON = await fetch(endpoint, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - Accept: 'application/json', - }, - body: JSON.stringify({ query }), - }); + let resJSON; + // IF WE ARE USING PERSIST QUERIES + if (persistQueries) { + // SEND THE HASH + const hash = sha256(query, 'utf8', 'hex'); + resJSON = await fetch(endpoint, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Accept: 'application/json', + }, + body: JSON.stringify({ hash }), + }); + + // IF HASH WAS NOT FOUND IN HASH TABLE + if (resJSON.status === 204) { + // SEND NEW REQUEST WITH HASH AND QUERY + resJSON = await fetch(endpoint, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Accept: 'application/json', + }, + body: JSON.stringify({ hash, query }), + }); + + } + + // IF WE ARE NOT USING PERSIST QUERIES + } else { + // JUST SEND THE QUERY ONLY + resJSON = await fetch(endpoint, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Accept: 
'application/json', + }, + body: JSON.stringify({ query }), + }); + } + const resObj = await resJSON.json(); const deepResObj = { ...resObj }; // update result in cache if cacheWrite is set to true - if (cacheWrite && resObj.data[Object.keys(resObj.data)[0]] !== null) { - if (!wholeQuery) cache.writeWholeQuery(query, deepResObj); + if (cacheWrite && caching && resObj.data[Object.keys(resObj.data)[0]] !== null) { + if (wholeQuery) cache.writeWholeQuery(query, deepResObj); else if(resObj.data[Object.keys(resObj.data)[0]].length > cache.capacity) console.log('Please increase cache capacity'); - else cache.write(query, deepResObj); + else cache.write(query, deepResObj, searchTerms); } const cacheMissResponseTime = Date.now() - startTime; - /*chrome.runtime.sendMessage(chromeExtensionId, { - cacheMissResponseTime: cacheMissResponseTime, - });*/ - resObj['time'] = cacheMissResponseTime - console.log( - "After the hunt: Here's the response time on the front end: ", - cacheMissResponseTime - ); + + // FOR DEVTOOL - sends message to content.js when query is a miss + window.postMessage({ + type: 'query', + time: cacheMissResponseTime, + date: startDate.toDateString().slice(0, 24), + query: query, + hit: false + }); + return resObj; } catch (e) { console.log(e); @@ -121,20 +178,19 @@ function ObsidianWrapper(props) { cache.cacheClear(); } + // NOTE - FOR DEVTOOL - no messages are currently being passed for mutations + // so some logic in content.js and background.js may be missing to handle mutations + // breaking out writethrough logic vs. non-writethrough logic async function mutate(mutation, options = {}) { - // dev tool messages - // chrome.runtime.sendMessage(chromeExtensionId, { - // mutation: mutation, - // }); const startTime = Date.now(); mutation = insertTypenames(mutation); const { endpoint = '/graphql', - cacheWrite = true, + cacheWrite = !caching ? 
false : true, toDelete = false, update = null, - writeThrough = true, // not true + writeThrough = true, // unsure if boolean is symantically backwards or not } = options; try { if (!writeThrough) { @@ -147,9 +203,6 @@ function ObsidianWrapper(props) { endpoint ); const deleteMutationResponseTime = Date.now() - startTime; - chrome.runtime.sendMessage(chromeExtensionId, { - deleteMutationResponseTime: deleteMutationResponseTime, - }); return responseObj; } else { // for add mutation @@ -168,15 +221,9 @@ function ObsidianWrapper(props) { // GQL call to make changes and synchronize database console.log('WriteThrough - false ', responseObj); const addOrUpdateMutationResponseTime = Date.now() - startTime; - chrome.runtime.sendMessage(chromeExtensionId, { - addOrUpdateMutationResponseTime: addOrUpdateMutationResponseTime, - }); return responseObj; } } else { - // copy-paste mutate logic from 4. - - // use cache.write instead of cache.writeThrough const responseObj = await fetch(endpoint, { method: 'POST', headers: { @@ -185,10 +232,10 @@ function ObsidianWrapper(props) { }, body: JSON.stringify({ query: mutation }), }).then((resp) => resp.json()); - if (!cacheWrite) return responseObj; + if (!cacheWrite || !caching) return responseObj; // first behaviour when delete cache is set to true if (toDelete) { - cache.write(mutation, responseObj, true); + cache.write(mutation, responseObj, searchTerms, true); return responseObj; } // second behaviour if update function provided @@ -196,7 +243,7 @@ function ObsidianWrapper(props) { update(cache, responseObj); } - if(!responseObj.errors) cache.write(mutation, responseObj); + if(!responseObj.errors) cache.write(mutation, responseObj, searchTerms); // third behaviour just for normal update (no-delete, no update function) console.log('WriteThrough - true ', responseObj); return responseObj; diff --git a/README.md b/README.md index 22ea69e..224f019 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ 
-![Obsidian](./assets/logoSilver.jpg) +![Obsidian](./assets/bannerfull_gradient.png)
from Lascaux
-
@@ -22,9 +20,12 @@
## Features
-- (New!) Server-side cache invalidation only on affected entries
-- (New!) Flexible cache responds with only data requested from selected fields
-- (New!) Developer tool for Obsidian is now updated to Manifest version 3 and invalid Bootstrap module imports were also fixed along with CodeMirror dependencies
+- (New!) Support for W-TinyLFU client-side cache that brings great hit-ratio performance with minimal memory overhead
+- (New!) Option to provide Obsidian with the search types your application uses, allowing data cached from complete dataset pulls to be accessible later on in searches for individual items
+- (New!) Refactored server-side caching with Redis
+- (New!) Rebuilt developer tool for Obsidian 8.0 for testing and analytics related to the new client caching options
+- (New!) Option for persistent queries, allowing only a smaller hash to be sent to the server on client-side cache misses, minimizing the cost of queries. Note that this will increase the overall performance for frequent, repeat queries.
+- Flexible cache responds only with data requested from selected fields
- GraphQL query abstraction and caching improving the performance of your app
- SSR React wrapper, allowing you to cache in browser
- Configurable caching options, giving you complete control over your cache
@@ -60,21 +61,37 @@ const GraphQLRouter =
(await ObsidianRouter) <
ObsRouter >
{
- Router,
- typeDefs: types,
- resolvers: resolvers,
- redisPort: 6379, //Desired redis port
- useCache: true, //Boolean to toggle all cache functionality
- usePlayground: true, //Boolean to allow for graphQL playground
- useQueryCache: true, //Boolean to toogle full query cache
- useRebuildCache: true, //Boolean to toggle rebuilding from normalized data
- customIdentifier: ['id', '__typename'],
- mutationTableMap = {}, //Object where keys are add mutation types and value is an array of affected tables (e.g. {addPlants: ['plants'], addMovie: ['movies']})
+ Router, // your router in deno
+ typeDefs: types, // graphQL typeDefs
+ resolvers: resolvers, // graphQL resolvers
};
-// attach the graphql routers routes to our app
+// attach the graphql router's routes to your deno app
app.use(GraphQLRouter.routes(), GraphQLRouter.allowedMethods());
```
+## Selecting options for the Router
+```javascript
+const GraphQLRouter =
+ (await ObsidianRouter) <
+ ObsRouter >
+ {
+ Router, // Router that is initialized by server.
+ path: '/graphql', // endpoint for graphQL queries, default to '/graphql'
+ typeDefs: types, // graphQL typeDefs
+ resolvers: resolvers, // graphQL resolvers
+ usePlayground: true, // Boolean to allow for graphQL playground, default to false
+ useCache: true, // Boolean to toggle all cache functionality, default to true
+ redisPort: 6379, // Desired redis port, default to 6379
+ policy: 'allkeys-lru', // Option to select your Redis policy, default to allkeys-lru
+ maxmemory: '2000mb', // Option to select Redis capacity, default to 2000mb
+ searchTerms: [], // Optional array to allow broad queries to store according to search fields so individual searches are found in cache
+ persistQueries: true, //Boolean to toggle the use of persistent queries, default to false - NOTE: if using, must also be enabled in client wrapper
+ hashTableSize: 16, // Size of hash table for persistent queries, default to 16
+ maxQueryDepth: 0, // Maximum depth of query, default to 0
+ customIdentifier: ['__typename', '_id'], // keys to be used to identify and normalize object
+ mutationTableMap: {}, //Object where keys are add mutation types and value is an array of affected tables (e.g. {addPlants: ['plants'], addMovie: ['movies']})
+ };
+```
## Creating the Wrapper
@@ -90,6 +107,20 @@ const App = () => {
};
```
+## Selecting options for the Wrapper
+
+```javascript
+
-works with Obsidian 5.0
-[oslabs-beta/obsidian-developer-tool](https://github.com/oslabs-beta/obsidian-developer-tool)
+works with Obsidian 8.0
+[open-source-labs/obsidian-developer-tool](https://github.com/open-source-labs/obsidian-developer-tool)
-## Obsidian 5.0 Demo
+## Obsidian 8.0 Demo
Github for a demo with some example code to play with:
-[oslabs-beta/obsidian-demo-5.0](https://github.com/oslabs-beta/obsidian-demo-5.0)
-
-## Dockerized Demo
-
-Working demo to install locally in docker:
-[oslabs-beta/obsidian-demo-docker](https://github.com/oslabs-beta/obsidian-demo-docker)
+[oslabs-beta/obsidian-8.0-demo](https://github.com/oslabs-beta/obsidian-8.0-demo)
## Features In Progress
-- Ability to query the database for only those fields missing from the cache
-- Developer Tool Settings component, fully functioning Playground component
+- Server-side caching improvements
+- More comprehensive mutation support
+- searchTerms option optimization
+- Ability to store/read only the whole query
+- Hill Climber optimization for W-TinyLFU cache size allocation
+- Developer Tool server-side cache integration
+- Developer Tool View Cache component, and Playground component
## Authors
-
-[Alex Lopez](https://github.com/AlexLopez7)
-[Kevin Huang](https://github.com/kevin-06-huang)
-[Matthew Weisker](https://github.com/mweisker)
-[Ryan Ranjbaran](https://github.com/ranjrover)
+[David Kim](https://github.com/davidtoyoukim)
+[David Norman](https://github.com/DavidMNorman)
+[Eileen Cho](https://github.com/exlxxn)
+[Joan Manto](https://github.com/JoanManto)
+[Alex Lopez](https://github.com/AlexLopez7)
+[Kevin Huang](https://github.com/kevin-06-huang)
+[Matthew Weisker](https://github.com/mweisker)
+[Ryan Ranjbaran](https://github.com/ranjrover)
[Derek Okuno](https://github.com/okunod)
[Liam Johnson](https://github.com/liamdimitri)
[Josh Reed](https://github.com/joshreed104)
diff --git a/assets/Obsidian_New.png b/assets/Obsidian_New.png
new file mode 100644
index 0000000..d6ebfd1
Binary files /dev/null and b/assets/Obsidian_New.png differ
diff --git a/assets/Obsidian_New_nobg.png b/assets/Obsidian_New_nobg.png
new file mode 100644
index 0000000..dc5557a
Binary files /dev/null and b/assets/Obsidian_New_nobg.png differ
diff --git a/assets/banner_black.png b/assets/banner_black.png
new file mode 100644
index 0000000..6360a18
Binary files /dev/null and b/assets/banner_black.png differ
diff --git a/assets/banner_gradient.png b/assets/banner_gradient.png
new file mode 100644
index 0000000..67a75a4
Binary files /dev/null and b/assets/banner_gradient.png differ
diff --git a/assets/bannerfull_gradient.png b/assets/bannerfull_gradient.png
new file mode 100644
index 0000000..6472b07
Binary files /dev/null and b/assets/bannerfull_gradient.png differ
diff --git a/assets/bannerfull_mutegradient.png b/assets/bannerfull_mutegradient.png
new file mode 100644
index 0000000..7cccf81
Binary files /dev/null and b/assets/bannerfull_mutegradient.png differ
diff --git a/assets/full_black.png b/assets/full_black.png
new file mode 100644
index 0000000..cad9aa5
Binary files /dev/null and b/assets/full_black.png differ
diff --git a/assets/logoSilver.jpg b/assets/logoSilver.jpg
deleted file mode 100644
index 215115e..0000000
Binary files a/assets/logoSilver.jpg and /dev/null differ
diff --git a/assets/logo_whitebg.png b/assets/logo_whitebg.png
new file mode 100644
index 0000000..b0890b4
Binary files /dev/null and b/assets/logo_whitebg.png differ
diff --git a/documentation/browserCache/cache-documentation.js b/documentation/browserCache/cache-documentation.js
deleted file mode 100644
index 9fbca09..0000000
--- a/documentation/browserCache/cache-documentation.js
+++ /dev/null
@@ -1,689 +0,0 @@
-// SCHEMA EXAMPLE =====================================================
-// sample schema for examples
-
-const typeDefs = gql`
- enum MovieGenre {
- ACTION
- SCIFI
- DRAMA
- }
- enum releaseYearOrder {
- LATESTFIRST
- EARLIESTFIRST
- }
-
- enum alphabeticalOrder {
- ASCENDING
- DESCENDING
- }
-
- type Movie {
- id: ID!
- title: String!
- releaseYear: Int!
- actors: [Actor]
- genre: MovieGenre!
- isFavorite: Boolean!
- }
-
- type Actor {
- id: ID!
- firstName: String!
- lastName: String!
- age: Int!
- films: [Movie]!
- isFavorite: Boolean!
- }
-
- input MovieInput {
- genre: MovieGenre
- order: releaseYearOrder
- }
- input ActorInput {
- orderFirstName: alphabeticalOrder
- orderLastName: alphabeticalOrder
- }
- input newMovieInput {
- title: String!
- releaseYear: Int!
- genre: MovieGenre!
- }
-
- type Query {
- movie(id: ID!): Movie!
- movies(input: MovieInput): [Movie]!
- actor(id: ID!): Actor!
- actors(input: ActorInput): [Actor]!
- }
-
- type Mutation {
- addMovie(input: NewMovieInput): Movie!
- favoriteMovie(id: ID!): Movie!
- favoriteActor(id: ID!): Actor!
- }
-`;
-
-// QUERY EXAMPLES =================================================================
-
-// EXAMPLE 1
-// ================================================================================
-// sample query to show how the cachee stores basic queries
-
-const ALL_MOVIES = gql`
- query AllMovies {
- movies {
- id
- title
- actors {
- id
- firstName
- }
- }
- }
-`;
-const respAllMovies = {
- data: {
- movies: [
- {
- id: '1',
- title: 'Indiana Jones and the Last Crusade',
- actors: [
- { id: '1', firstName: 'Harrison' },
- { id: '2', firstName: 'Sean' },
- ],
- },
- {
- id: '2',
- title: 'Empire Strikes Back',
- actors: [
- { id: '1', firstName: 'Harrison' },
- { id: '3', firstName: 'Mark' },
- ],
- },
- {
- id: '3',
- title: 'Witness',
- actors: [
- { id: '1', firstName: 'Harrison' },
- { id: '4', firstName: 'Patti' },
- ],
- },
- {
- id: '4',
- title: 'Air Force One',
- actors: [
- { id: '1', firstName: 'Harrison' },
- { id: '5', firstName: 'Gary' },
- ],
- },
- ],
- },
-};
-
-const cache = {
- ROOT_QUERY: {
- movies: ['Movie~1', 'Movie~2', 'Movie~3', 'Movie~4'],
- },
- 'Movie~1': {
- id: '1',
- title: 'Indiana Jones and the Last Crusade',
- actors: ['Actor~1', 'Actor~2'],
- },
- 'Movie~2': {
- id: '2',
- title: 'Empire Strikes Back',
- actors: ['Actor~1', 'Actor~3'],
- },
- 'Movie~3': { id: '3', title: 'Witness', actors: ['Actor~1', 'Actor~4'] },
- 'Movie~4': {
- id: '4',
- title: 'Air Force One',
- actors: ['Actor~1', 'Actor~5'],
- },
- 'Actor~1': { id: '1', firstName: 'Harrison' },
- 'Actor~2': { id: '2', firstName: 'Sean' },
- 'Actor~3': { id: '3', firstName: 'Mark' },
- 'Actor~4': { id: '4', firstName: 'Patti' },
- 'Actor~5': { id: '4', firstName: 'Gary' },
-};
-
-// EXAMPLE 2
-// ================================================================================
-// sample query to show how the cache stores queries with arguments
-
-const ALL_ACTION_MOVIES = gql`
- query AllActionMovies {
- movies(input: { genre: ACTION }) {
- id
- title
- genre
- releaseYear
- }
- }
-`;
-
-const respAllActionMovies = {
- data: {
- movies: [
- {
- id: '1',
- title: 'Indiana Jones and the Last Crusade',
- genre: 'ACTION',
- releaseYear: 1989,
- },
- {
- id: '4',
- title: 'Air Force One',
- genre: 'ACTION',
- releaseYear: 1997,
- },
- ],
- },
-};
-
-const cache2 = {
- ROOT_QUERY: {
- movies: ['Movie~1', 'Movie~2', 'Movie~3', 'Movie~4'],
- 'movies(input:{genre:ACTION})': ['Movie~1', 'Movie~4'], // Added
- },
- 'Movie~1': {
- id: '1',
- title: 'Indiana Jones and the Last Crusade',
- actors: ['Actor~1', 'Actor~2'],
- genre: 'ACTION', // Added
- releaseYear: 1989, // Added
- },
- 'Movie~2': {
- id: '2',
- title: 'Empire Strikes Back',
- actors: ['Actor~1', 'Actor~3'],
- },
- 'Movie~3': { id: '3', title: 'Witness', actors: ['Actor~1', 'Actor~4'] },
- 'Movie~4': {
- id: '4',
- title: 'Air Force One',
- actors: ['Actor~1', 'Actor~5'],
- genre: 'ACTION', // Added
- releaseYear: 1997, // Added
- },
- 'Actor~1': { id: '1', firstName: 'Harrison' },
- 'Actor~2': { id: '2', firstName: 'Sean' },
- 'Actor~3': { id: '3', firstName: 'Mark' },
- 'Actor~4': { id: '4', firstName: 'Patti' },
- 'Actor~5': { id: '5', firstName: 'Gary' },
-};
-
-// EXAMPLE 3
-// ================================================================================
-// Another sample query to show how the cacbe stores queries with arguments and preserves order of response data
-
-const ALL_MOVIES_CHRONOLOGICAL = gql`
- query AllMoviesChronological {
- movies(input: { order: EARLIESTFIRST }) {
- id
- title
- releaseYear
- }
- }
-`;
-
-const respAllMoviesChronological = {
- data: {
- movies: [
- {
- id: '2',
- title: 'Empire Strikes Back',
- releaseYear: 1980,
- },
- {
- id: '3',
- title: 'Witness',
- releaseYear: 1985,
- },
- {
- id: '1',
- title: 'Indiana Jones and the Last Crusade',
- releaseYear: 1989,
- },
- {
- id: '4',
- title: 'Air Force One',
- releaseYear: 1997,
- },
- ],
- },
-};
-
-const cache3 = {
- ROOT_QUERY: {
- movies: ['Movie~1', 'Movie~2', 'Movie~3', 'Movie~4'],
- 'movies(input:{genre:ACTION})': ['Movie~1', 'Movie~4'],
- // added
- 'movies(input:{order:EARLIESTFIRST})': [
- 'Movie~2',
- 'Movie~3',
- 'Movie~1',
- 'Movie~4',
- ],
- },
- 'Movie~1': {
- id: '1',
- title: 'Indiana Jones and the Last Crusade',
- actors: ['Actor~1', 'Actor~2'],
- genre: 'ACTION',
- releaseYear: 1989,
- },
- 'Movie~2': {
- id: '2',
- title: 'Empire Strikes Back',
- actors: ['Actor~1', 'Actor~3'],
- releaseYear: 1980, // added
- },
- 'Movie~3': {
- id: '3',
- title: 'Witness',
- actors: ['Actor~1', 'Actor~4'],
- releaseYear: 1985, // added
- },
- 'Movie~4': {
- id: '4',
- title: 'Air Force One',
- actors: ['Actor~1', 'Actor~5'],
- genre: 'ACTION',
- releaseYear: 1997,
- },
- 'Actor~1': { id: '1', firstName: 'Harrison' },
- 'Actor~2': { id: '2', firstName: 'Sean' },
- 'Actor~3': { id: '3', firstName: 'Mark' },
- 'Actor~4': { id: '4', firstName: 'Patti' },
- 'Actor~5': { id: '5', firstName: 'Gary' },
-};
-
-// EXAMPLE 4
-// ================================================================================
-// Another sample query to show how the cacbe stores queries with arguments and preserves order of response data
-
-const ALL_ACTORS_ALPHABETICAL_LAST_NAME = gql`
- query AllActorsAlphabeticalLastName {
- actors(input: { orderLastName: DESCENDING }) {
- id
- firstName
- LastName
- }
- }
-`;
-
-const respAllActorsAlphabeticalLastName = {
- data: {
- actors: [
- {
- id: '2',
- firstName: 'Sean',
- lastName: 'Connery',
- },
- {
- id: '1',
- firstName: 'Harrion',
- lastName: 'Ford',
- },
- {
- id: '3',
- firstName: 'Mark',
- lastName: 'Hamill',
- },
- {
- id: '4',
- firstName: 'Patti',
- lastName: 'LuPone',
- },
- {
- id: '5',
- firstName: 'Gary',
- lastName: 'Oldman',
- },
- ],
- },
-};
-
-const cache4 = {
- ROOT_QUERY: {
- movies: ['Movie~1', 'Movie~2', 'Movie~3', 'Movie~4'],
- 'movies(input:{genre:ACTION})': ['Movie~1', 'Movie~4'],
- 'movies(input:{order:EARLIESTFIRST})': [
- 'Movie~2',
- 'Movie~3',
- 'Movie~1',
- 'Movie~4',
- ],
- // added
- 'actors(input:{ orderLastName:DESCENDING})': [
- 'Actor~2',
- 'Actor~1',
- 'Actor~3',
- 'Actor~4',
- 'Actor~5',
- ],
- },
- 'Movie~1': {
- id: '1',
- title: 'Indiana Jones and the Last Crusade',
- actors: ['Actor~1', 'Actor~2'],
- genre: 'ACTION',
- releaseYear: 1989,
- },
- 'Movie~2': {
- id: '2',
- title: 'Empire Strikes Back',
- actors: ['Actor~1', 'Actor~3'],
- releaseYear: 1980,
- },
- 'Movie~3': {
- id: '3',
- title: 'Witness',
- actors: ['Actor~1', 'Actor~4'],
- releaseYear: 1985,
- },
- 'Movie~4': {
- id: '4',
- title: 'Air Force One',
- actors: ['Actor~1', 'Actor~5'],
- genre: 'ACTION',
- releaseYear: 1997,
- },
- 'Actor~1': { id: '1', firstName: 'Harrison', lastName: 'Ford' }, //added lastName to actors
- 'Actor~2': { id: '2', firstName: 'Sean', lastName: 'Connery' },
- 'Actor~3': { id: '3', firstName: 'Mark', lastName: 'Hamill' },
- 'Actor~4': { id: '4', firstName: 'Patti', lastName: 'LuPone' },
- 'Actor~5': { id: '5', firstName: 'Gary', lastName: 'Oldman' },
-};
-
-// EXAMPLE 5
-// ================================================================================
-// A sample query by id that we might want to create soecial logic for to save network requests
-
-const GET_ACTOR_BY_ID = gql`
- query getActorById {
- actor(id: 1) {
- id
- firstName
- LastName
- }
- }
-`;
-
-const respGetActorById = {
- data: {
- actor: [
- {
- id: '1',
- firstName: 'Harrion',
- lastName: 'Ford',
- },
- ],
- },
-};
-
-// is there any way to stop this request from going to the server and just serve from the cache if we have all the information???
-// do we ant to hard code specialized check for arguments that are just a single id????
-
-const cache5 = {
- ROOT_QUERY: {
- 'actor(id:1)': 'Actor~1', // Added CAN WE STOP IT?
- movies: ['Movie~1', 'Movie~2', 'Movie~3', 'Movie~4'],
- 'movies(input:{genre:ACTION})': ['Movie~1', 'Movie~4'],
- 'movies(input:{order:EARLIESTFIRST})': [
- 'Movie~2',
- 'Movie~3',
- 'Movie~1',
- 'Movie~4',
- ],
- 'actors(input:{ orderLastName:DESCENDING})': [
- 'Actor~2',
- 'Actor~1',
- 'Actor~3',
- 'Actor~4',
- 'Actor~5',
- ],
- },
- 'Movie~1': {
- id: '1',
- title: 'Indiana Jones and the Last Crusade',
- actors: ['Actor~1', 'Actor~2'],
- genre: 'ACTION',
- releaseYear: 1989,
- },
- 'Movie~2': {
- id: '2',
- title: 'Empire Strikes Back',
- actors: ['Actor~1', 'Actor~3'],
- releaseYear: 1980,
- },
- 'Movie~3': {
- id: '3',
- title: 'Witness',
- actors: ['Actor~1', 'Actor~4'],
- releaseYear: 1985,
- },
- 'Movie~4': {
- id: '4',
- title: 'Air Force One',
- actors: ['Actor~1', 'Actor~5'],
- genre: 'ACTION',
- releaseYear: 1997,
- },
- 'Actor~1': { id: '1', firstName: 'Harrison', lastName: 'Ford' },
- 'Actor~2': { id: '2', firstName: 'Sean', lastName: 'Connery' },
- 'Actor~3': { id: '3', firstName: 'Mark', lastName: 'Hamill' },
- 'Actor~4': { id: '4', firstName: 'Patti', lastName: 'LuPone' },
- 'Actor~5': { id: '5', firstName: 'Gary', lastName: 'Oldman' },
-};
-
-// EXAMPLE 6
-// ================================================================================
-// The following queries should be able to be served from the cache without making a network request
-
-const ALL_MOVIES_WITH_RELEASE_YEAR = gql`
- query AllMoviesWithReleaseYear {
- movies {
- id
- title
- releaseYear
- }
- }
-`;
-
-const ALL_MOVIES_WITH_ACTOR_LAST_NAMES = gql`
- query AllMoviesWithActorLastNames {
- movies {
- id
- title
- actors {
- id
- lastName
- }
- }
- }
-`;
-
-// MUTATIONS
-
-// EXAMPLE 7
-// ================================================================================
-// simple update example the cache would automatically update
-
-const ADD_FAVORITE_MOVIE = gql`
- mutation AddFavoriteMovie {
- favoriteMovie(id: 2) {
- id
- isFavorite
- }
- }
-`;
-
-const respAddFavoriteMovie = {
- data: {
- favoriteMovie: [
- {
- id: '2',
- isFavorite: true,
- },
- ],
- },
-};
-
-const cache6 = {
- ROOT_QUERY: {
- 'actor(id:1)': 'Actor~1',
- movies: ['Movie~1', 'Movie~2', 'Movie~3', 'Movie~4'],
- 'movies(input:{genre:ACTION})': ['Movie~1', 'Movie~4'],
- 'movies(input:{order:EARLIESTFIRST})': [
- 'Movie~2',
- 'Movie~3',
- 'Movie~1',
- 'Movie~4',
- ],
- 'actors(input:{ orderLastName:DESCENDING})': [
- 'Actor~2',
- 'Actor~1',
- 'Actor~3',
- 'Actor~4',
- 'Actor~5',
- ],
- },
- ROOT_MUTATION: {
- 'favoriteMovie(id:2)': 'Movie~2', // Added
- },
-
- 'Movie~1': {
- id: '1',
- title: 'Indiana Jones and the Last Crusade',
- actors: ['Actor~1', 'Actor~2'],
- genre: 'ACTION',
- releaseYear: 1989,
- },
- 'Movie~2': {
- id: '2',
- title: 'Empire Strikes Back',
- actors: ['Actor~1', 'Actor~3'],
- releaseYear: 1980,
- isFavorite: true, // Added
- },
- 'Movie~3': {
- id: '3',
- title: 'Witness',
- actors: ['Actor~1', 'Actor~4'],
- releaseYear: 1985,
- },
- 'Movie~4': {
- id: '4',
- title: 'Air Force One',
- actors: ['Actor~1', 'Actor~5'],
- genre: 'ACTION',
- releaseYear: 1997,
- },
- 'Actor~1': { id: '1', firstName: 'Harrison', lastName: 'Ford' },
- 'Actor~2': { id: '2', firstName: 'Sean', lastName: 'Connery' },
- 'Actor~3': { id: '3', firstName: 'Mark', lastName: 'Hamill' },
- 'Actor~4': { id: '4', firstName: 'Patti', lastName: 'LuPone' },
- 'Actor~5': { id: '5', firstName: 'Gary', lastName: 'Oldman' },
-};
-
-// EXAMPLE 8
-// ================================================================================
-// add movie mutation example: the returned data would automically be cached.
-// but the developer would have to assist in adding the movie to the appropriate spot in the root queries.
-
-const ADD_MOVIE = gql`
- mutation AddMovie {
- addMovie(input: {title: 'The Fugitive', releaseYear: 1993, genre: ACTION }) {
- id
- title
- releaseYear
- genre
- isFavorite
- }
- }
-`;
-
-const respAddMovie = {
- data: {
- addMovie: [
- {
- id: '5',
- title: 'The Fugitive',
- releaseYear: 1993,
- genre: 'ACTION',
- isFavorite: false,
- },
- ],
- },
-};
-
-const cache7 = {
- ROOT_QUERY: {
- 'actor(id:1)': 'Actor~1',
- movies: ['Movie~1', 'Movie~2', 'Movie~3', 'Movie~4', 'Movie~5'], // Added added new movie with help from developer
- 'movies(input:{genre:ACTION})': ['Movie~1', 'Movie~4', 'Movie~5'], // Added added new movie with help from developer
- // Added new movie with help from developer
- 'movies(input:{order:EARLIESTFIRST})': [
- 'Movie~2',
- 'Movie~3',
- 'Movie~1',
- 'Movie~5',
- 'Movie~4',
- ],
- 'actors(input:{ orderLastName:DESCENDING})': [
- 'Actor~2',
- 'Actor~1',
- 'Actor~3',
- 'Actor~4',
- 'Actor~5',
- ],
- },
- ROOT_MUTATION: {
- 'favoriteMovie(id:2)': 'Movie~2',
- "addMovie(input: {title: 'The Fugitive', releaseYear: 1993, genre: ACTION })":
- 'Movie~5',
- },
-
- 'Movie~1': {
- id: '1',
- title: 'Indiana Jones and the Last Crusade',
- actors: ['Actor~1', 'Actor~2'],
- genre: 'ACTION',
- releaseYear: 1989,
- },
- 'Movie~2': {
- id: '2',
- title: 'Empire Strikes Back',
- actors: ['Actor~1', 'Actor~3'],
- releaseYear: 1980,
- isFavorite: true,
- },
- 'Movie~3': {
- id: '3',
- title: 'Witness',
- actors: ['Actor~1', 'Actor~4'],
- releaseYear: 1985,
- },
- 'Movie~4': {
- id: '4',
- title: 'Air Force One',
- actors: ['Actor~1', 'Actor~5'],
- genre: 'ACTION',
- releaseYear: 1997,
- },
- // Added
- 'Movie~5': {
- id: '5',
- title: 'The Fugitive',
- genre: 'ACTION',
- releaseYear: 1993,
- isFavorite: false,
- },
- 'Actor~1': { id: '1', firstName: 'Harrison', lastName: 'Ford' },
- 'Actor~2': { id: '2', firstName: 'Sean', lastName: 'Connery' },
- 'Actor~3': { id: '3', firstName: 'Mark', lastName: 'Hamill' },
- 'Actor~4': { id: '4', firstName: 'Patti', lastName: 'LuPone' },
- 'Actor~5': { id: '5', firstName: 'Gary', lastName: 'Oldman' },
-};
diff --git a/documentation/browserCache/cache-refactor-doc.js b/documentation/browserCache/cache-refactor-doc.js
deleted file mode 100644
index 19b975c..0000000
--- a/documentation/browserCache/cache-refactor-doc.js
+++ /dev/null
@@ -1,94 +0,0 @@
-/**
- * GOALS
- * 1. refactor all cache related functionality as methods on Cache object
- * 2. Create a one source of truth for the naming of those different methods and defining their agruments, return, other functionality
- * 3. Create a template for refactoring our cache interaction so that it will work for both redis cache and the object cache
- *
- */
-
-// CLIENT-SIDE OBJECT CACHE VS SERVER-SIDE REDIS CACHE
-/**
- * NOTES:
- * 1. Any direct reads/writes of the cache will be replaced with the cacheRead and cacheWrite methods.
- * 2. These methods will be dual purpose functions that will read and write key value pairs from the client/side Cache
- * 3. Both will first have a check to see if we are in the client side cache or server side cache
- * 4. Client-Side: will read/write to cache.storage via normal object lookup/assignment
- * 5. Server-Side: will read/write to redis a JSON.stringified version of the value using redis methods
- * 6. See Obsidian 1.0's dbOps for reference/inspiration
- * 7. Reading or writing to the Root_Query or Root_Mutation will now be a 2 part process via cacheRead, cacheWrite:
- * first retrieving the entire ROOT_QUERY object from the cache and then reading values from that object.
- * 8. We can no longer pass the cache in as an argument, clone the cache or update the cache with Object.assign
- *
- */
-
-// Cache constructor
-
-class Cache {
- constructor(
- cache = {
- ROOT_QUERY: {},
- ROOT_MUTATION: {},
- }
- ) {
- this.storage = cache;
- this.context = window.deno ? 'server' : 'client';
- }
-
- // Main functionality methods
- read(qryStr) {
- // readCache; returns gql response object || undefined
- }
- write(qryStr, respObj) {
- // writeCache; updates cache with all data from response object
- }
- delete(qryStr, respObj) {
- // deleteCache; sets any top level hashed values of response object to 'DELETE'
- }
- gc() {
- // garbageCollection; garbage collection: removes any inaccessible hashes from the cache
- }
-
- // cache read/write helper methods
- cacheRead(hash) {
- // returns value from either object cache or redis cache || 'DELETED' || undefined
- if (this.context === 'client') {
- return this.storage[hash];
- } else {
- throw Error('Redis functionality has not been implemented');
- }
- }
- cacheWrite(hash, value) {
- // writes value to object cache or JSON.stringified value to redis cache
- if (this.context === 'client') {
- this.storage[hash] = value;
- } else {
- throw Error('Redis functionality has not been implemented');
- }
- }
- cacheDelete(hash) {
- // deletes the hash/value pair on either object cache or redis cache
- if (this.context === 'client') {
- delete this.storage[hash];
- } else {
- throw Error('Redis functionality has not been implemented');
- }
- }
- cacheClear(hash) {
- // erases either object cache or redis cache
- if (this.context === 'client') {
- this.storage = {};
- } else {
- throw Error('Redis functionality has not been implemented');
- }
- }
-}
-
-/**
- * OPEN QUESTIONS
- * (We think no) 1. Do we need any more than two functions for cache read/write. why does Obsidian 1.0 have 4???
- * (yes, maybe) 2. Can we utilize helper methods in our main methods (will we have to worry about binding context?)
- * (could maybe explore 2020) 3. We will be exposing all these methods, when we probably only want to expose some to the devleloper. Is this an issue?
- * 4. Will we run into any issues with live incremental updates of the cache object (always the same reference)?
- * (how will React know the cache has been updated?)
- *
- */
diff --git a/documentation/browserCache/destructure-documentation.js b/documentation/browserCache/destructure-documentation.js
deleted file mode 100644
index de8f99f..0000000
--- a/documentation/browserCache/destructure-documentation.js
+++ /dev/null
@@ -1,256 +0,0 @@
-// DESTRUCTURE
-
-destructureQueries(query, ObsidianSchema, cache);
-
-// INPUT:
-
-const query = gql`
- {
- Country(_id: "4425") {
- _id
- name
- population
- flag {
- _id
-
- emoji
- }
-
- borders {
- _id
- name
- capital
- }
- }
- }
-`;
-
-const obsidianSchema = {
- returnTypes: {
- Country: {
- kind: 'NamedType',
- type: 'Country',
- },
- },
- argTypes: {
- Country: { _id: 'ID' },
- },
- obsidianTypeSchema: {
- Country: {
- borders: { type: 'Country', scalar: false },
- capital: { type: 'String', scalar: true },
- flag: { type: 'Flag', scalar: false },
- name: { type: 'String', scalar: true },
- population: { type: 'Int', scalar: true },
- _id: { type: 'ID', scalar: true },
- },
- Flag: {
- emoji: { type: 'String', scalar: true },
- _id: { type: 'ID', scalar: true },
- },
- },
-};
-
-const cache = {
- 'Country(_id:"4425"){_idnamepopulationflag{_idemoji}borders{_idnamecapital}}': {
- 'Country~4425~name': true,
- 'Country~4425~population': true,
- 'Country~4425~flag': true,
- 'Country~4425~borders': true,
- },
- 'Country~860~capital': 'Ottawa',
- 'Country~860~name': 'Canada',
- 'Country~2741~capital': 'Mexico City',
- 'Country~2741~name': 'Mexico',
- 'Country~4425~borders': { 'Country~2741': true, 'Country~860': true },
- 'Country~4425~flag': 'Flag~4440',
- 'Country~4425~name': 'United States of America',
- 'Country~4425~population': 323947000,
- 'Flag~4440~emoji': '🇺🇸',
-};
-
-// PROCESS:
-// finds the specific queries if there is more than one
-findSpecificQueries;
-// checks to see if the fields on each querie are currerently stored in the cache
-createQueryObj;
-// converts the query string into a query object for reference
-// creates a hash array from the keys on queryHashes
-buildResultsObject;
-// attempts to build result object by comparing the cache, queryObj, and hashes
-
-// OUTPUT:
-
-// if any part of the query string is a mutation return 'mutation'???
-
-// if everything is not found in cache return undefined;
-
-// if everything is found in cache
-const obsidianReturn = {
- data: {
- Country: [
- {
- borders: [
- { _id: '2741', name: 'Mexico', capital: 'Mexico City' },
- { _id: '860', name: 'Canada', capital: 'Ottawa' },
- ],
- flag: {
- emoji: '',
- _id: '4440',
- },
- _id: '4425',
- name: 'United States of America',
- population: 323947000,
- },
- ],
- },
-};
-
-// =====================================================================
-queryHashes = findSpecificQueries(query, obsidianSchema, cache);
-// INPUT: nothing new
-
-// PROCESS:
-findqueryname; // finds the first query name
-// iterate through the rest of the query to find all of the other querie names and hash them onto the queryHashes object
-specificQueryParser;
-// this is what query hashes looks like now
-queryHashes = {
- Country:
- 'Country(_id:"4425"){_idnamepopulationflag{_idemoji}borders{_idnamecapital}}',
-};
-checkAndRetrieveHash;
-
-// OUTPUT:
-const queryHashes = { Country: undefined }; // if cache does not have everything
-// if cache has everything
-const queryHashes = {
- Country: {
- 'Country~4425~borders': true,
- 'Country~4425~flag': true,
- 'Country~4425~name': true,
- 'Country~4425~population': true,
- },
-};
-
-// =====================================================================
-const nameOfQuery = findQueryName(query);
-
-// INPUT: nothing new
-
-// PROCESS:
-// parsing function to find the name of the query
-
-// OUTPUT:
-const nameOfQuery = 'Country';
-// will return 'mutation' if it's a mutation query; this will casue the destucture to break out
-
-// ==========================================================
-const next = specificQueryParser(startIndexOfName, query);
-
-// INPUT: startIndexOfName is where we left off in the query parsing
-const startIndexOfName = 9;
-// PROCESS:
-// parses individual queries into minified strings and finds the end index
-// OUTPUT:
-const next = {
- output:
- 'Country(_id:"4425"){_idnamepopulationflag{_idemoji}borders{_idnamecapital}}',
- endIdx: 256,
-};
-
-// ========================================================================
-
-redisResults[queryHash] = checkAndRetrieveQuery(queryHashes[queryHash], cache);
-
-// INPUT: individual query hash for one query and the cache
-
-// PROCESS:
-// checks to see if the minified query string is stored in cache, if so return corresp[ponding object
-
-// OUTPUT:
-// if cache doesn't have everything return undefined
-
-// if cache has everything
-const redisResults = {
- 'Country~4425~name': true,
- 'Country~4425~population': true,
- 'Country~4425~flag': true,
- 'Country~4425~borders': true,
-};
-
-// =======================================================================
-const queryObj = createQueryObj(queryName, query, obsidianSchema);
-
-// INPUT: nothing special
-
-// PROCESS:
-// parses query string and converts it to query object
-// NOTES:
-// this seems to be the first place they actually use the schema. do they need the schema?
-// There is a note here about not supporting any paramters other than ID??
-// We should look at this closer later and see if there is abetter way;
-// OUTPUT:
-
-const queryObj = {
- queryName: 'Country',
- paramaters: { _id: '"4425"' },
- properties: {
- _id: true,
- name: true,
- population: true,
- flag: { _id: true, emoji: true },
- borders: { _id: true, name: true, capital: true },
- },
-};
-
-// ==========================================================================
-result.data[queryName] = await buildResultsObject(
- hashes,
- obsidianSchema,
- queryObj,
- cache
-);
-
-// INPUT:
-const hashes = [
- 'Country~4425~borders',
- 'Country~4425~flag',
- 'Country~4425~name',
- 'Country~4425~population',
-];
-
-// PROCESS:
-// parsing to store the 3 parts of a hash as variables
-retrieveScalar;
-restrieveComplex;
-batchHash;
-nestedPropertyHashConstructor;
-// finds the corresponding values in the cache and stores them as appropriate on the passed in results object
-// Notes:
-// this is also using the schema? is there a better way?
-// this is complex parsing? should definitly take another look at some point
-// OUTPUT:
-
-// if not able to find everything return undefined
-
-// if able to find everything in cache
-const obsidianReturn = {
- data: {
- Country: [
- {
- borders: [
- { _id: '2741', name: 'Mexico', capital: 'Mexico City' },
- { _id: '860', name: 'Canada', capital: 'Ottawa' },
- ],
- flag: {
- emoji: '',
- _id: '4440',
- },
- _id: '4425',
- name: 'United States of America',
- population: 323947000,
- },
- ],
- },
-};
diff --git a/documentation/browserCache/garbage-collection-doc.js b/documentation/browserCache/garbage-collection-doc.js
deleted file mode 100644
index b3dad76..0000000
--- a/documentation/browserCache/garbage-collection-doc.js
+++ /dev/null
@@ -1,123 +0,0 @@
-/**
- * NOTES:
- * 1. is a method on the cache object that removes all references to DELETED and inaccessible hashes
- * 2. a reference is considered inaccessible if there is no way to access it from any of the root queries
- * 3. How/when should this be called?
- * after every query/mutation?
- * on some sort of time interval?
- * only when the developer asks for it?
- * when the cache reaches a certain size?
- */
-class Cache {
- gc() {
- // where the magic happens
- }
-}
-
-/**
- * Possible high-level approach
- * 1. iterate through all the hashes and generate a Set of all the deleted hashes.
- * 2. delete those hashes
- * 3. iterate through all of the non-wholeQuery ROOT_QUERIES
- * - remove any hash reference that is a member of the deleted hash Set
- * - for any hash reference that has not been deleted
- * - add that hash to a Set of accessible hashes
- * - recursively trace that hash and continue removing any deleted hash references and updating the Set of accesible hashes
- * 4. remove any hashes that are not a member of the accessible hash Set
- */
-
-// EXAMPLE ===========================================================================================
-const cacheBeforeGC = {
- ROOT_QUERY: {
- 'actor(id:1)': 'Actor~1',
- favoriteMovies: ['Movie~1', 'Movie~2', 'Movie~3'], // includes reference to deleted hash
- 'movies(input:{genre:ACTION})': ['Movie~1', 'Movie~3', 'Movie~5'], // includes reference to deleted hash
- },
- ROOT_MUTATION: {
- 'favoriteMovie(id:2)': 'Movie~2',
- "addMovie(input: {title: 'The Fugitive', releaseYear: 1993, genre: ACTION })":
- 'Movie~5',
- 'deleteMovie(id:3)': 'Movie~3',
- },
-
- 'Movie~1': {
- id: '1',
- title: 'Indiana Jones and the Last Crusade',
- actors: ['Actor~1', 'Actor~2'],
- genre: 'ACTION',
- releaseYear: 1989,
- },
- 'Movie~2': {
- id: '2',
- title: 'Empire Strikes Back',
- actors: ['Actor~1', 'Actor~3'],
- releaseYear: 1980,
- isFavorite: true,
- },
- // DELETED
- 'Movie~3': 'DELETED',
- 'Movie~5': {
- id: '5',
- title: 'The Fugitive',
- genre: 'ACTION',
- releaseYear: 1993,
- isFavorite: false,
- },
- 'Actor~1': {
- id: '1',
- firstName: 'Harrison',
- lastName: 'Ford',
- films: ['Movie~1', 'Movie~2', 'Movie~3', 'Movie~5'], // includes reference to deleted hash
- },
- 'Actor~2': { id: '2', firstName: 'Sean', lastName: 'Connery' },
- 'Actor~3': { id: '3', firstName: 'Mark', lastName: 'Hamill' },
- 'Actor~4': { id: '4', firstName: 'Patti', lastName: 'LuPone' }, // INACCESSIBLE
- 'Actor~5': { id: '5', firstName: 'Gary', lastName: 'Oldman' }, // INACCESSIBLE
-};
-
-const cacheAfterGC = {
- ROOT_QUERY: {
- 'actor(id:1)': 'Actor~1',
- favoriteMovies: ['Movie~1', 'Movie~2'], // deleted reference removed
- 'movies(input:{genre:ACTION})': ['Movie~1', 'Movie~5'], // deleted reference removed
- },
- ROOT_MUTATION: {
- 'favoriteMovie(id:2)': 'Movie~2',
- "addMovie(input: {title: 'The Fugitive', releaseYear: 1993, genre: ACTION })":
- 'Movie~5',
- // 'deleteMovie(id:4)': 'Movie~4', // mistake?
- 'deleteMovie(id:3)': 'Movie~3',
- },
-
- 'Movie~1': {
- id: '1',
- title: 'Indiana Jones and the Last Crusade',
- actors: ['Actor~1', 'Actor~2'],
- genre: 'ACTION',
- releaseYear: 1989,
- },
- 'Movie~2': {
- id: '2',
- title: 'Empire Strikes Back',
- actors: ['Actor~1', 'Actor~3'],
- releaseYear: 1980,
- isFavorite: true,
- },
- // deleted hash removed
- 'Movie~5': {
- id: '5',
- title: 'The Fugitive',
- genre: 'ACTION',
- releaseYear: 1993,
- isFavorite: false,
- },
- 'Actor~1': {
- id: '1',
- firstName: 'Harrison',
- lastName: 'Ford',
- films: ['Movie~1', 'Movie~2', 'Movie~5'], // deleted reference removed
- },
- 'Actor~2': { id: '2', firstName: 'Sean', lastName: 'Connery' },
- 'Actor~3': { id: '3', firstName: 'Mark', lastName: 'Hamill' },
- // inaccessible hashes removed
-};
diff --git a/documentation/browserCache/mutation-documentation.js b/documentation/browserCache/mutation-documentation.js
deleted file mode 100644
index dbe2ab4..0000000
--- a/documentation/browserCache/mutation-documentation.js
+++ /dev/null
@@ -1,310 +0,0 @@
-/**
- * NOTES:
- * 1. This implementation does not handle variables currently
- * 2. This function will always send the inputted mutation operation string to the inputted endpoint
- * 3. Once receiving a response object it will have three different behaviors depending on what is passed
- * into the options object:
- * 1. Will update fields for any elements that we find if the hash is present and not set to 'DELETE'.
- * - will not do anything with any fields associated with unknown hashes.
- * 2. If the delete flag is set to true, the function will set the value of every top level hash that currently exists to 'DELETE'
- * - cache.read() will need to be updated to ignore any hashes with the value 'DELETE' (not treat as cache miss)
- * 3. If the update property is set to a function. That function will be executed causing a cache update as specified by the developer.
- * - the cache object and respObj will automatically be passed into the update object as arguments
- * 4. After implementing garbage collection: This function would invoke gc() every time a mutation is made except when an update function is provided by the developer.
- * 5. This implementation would update the cache only if the flag cache is set to true.
- * 6. This function takes in a mutation string and an optional options object and returns the response object from the request made.
- */
-
-function mutate(mutation, options) {
- // where the magic happens
-}
-
-// options object
-const options = {
- endpoint: '/graphql', // the endpoint where the post request with mutation string will be sent; DEFAULT: '/graphql'
- cache: true, // flag to enable automatic cache updates; DEFAULT: 'true'
- delete: false, // flag the developer can set to indicate delete mutation; DEFAULT: 'false'
- update: updateFunc(cache, respObj), // optional update function to customize cache updating behavior; DEFAULT: null
-};
-
-// EXAMPLES
-
-// EXAMPLE 1: SIMPLE UPDATE ===================================================================================================
-
-const cachePreMut = {
- ROOT_QUERY: {
- 'movies(input:{genre:ACTION})': ['Movie~1', 'Movie~4'],
- },
- ROOT_MUTATION: {},
-
- 'Movie~1': {
- id: '1',
- title: 'Indiana Jones and the Last Crusade',
- genre: 'ACTION',
- releaseYear: 1989,
- isFavorite: false,
- },
- 'Movie~4': {
- id: '4',
- title: 'Air Force One',
- genre: 'ACTION',
- releaseYear: 1997,
- isFavorite: false,
- },
-};
-
-const ADD_FAVORITE_MOVIE = gql`
- mutation AddFavoriteMovie {
- favoriteMovie(id: 4) {
- __typename
- id
- isFavorite
- }
- }
-`;
-
-mutate(ADD_FAVORITE_MOVIE); // we don't need an options object since we are using /graphql endpoint
-
-const respObj = {
- data: {
- favoriteMovie: {
- __typename: 'Movie',
- id: '4',
- isFavorite: true,
- },
- },
-};
-
-const cachePostMut = {
- ROOT_QUERY: {
- 'movies(input:{genre:ACTION})': ['Movie~1', 'Movie~4'],
- },
- ROOT_MUTATION: {
- 'favoriteMovie(id: 4)': 'Movie~4',
- },
-
- 'Movie~1': {
- id: '1',
- title: 'Indiana Jones and the Last Crusade',
- genre: 'ACTION',
- releaseYear: 1989,
- isFavorite: false,
- },
- 'Movie~4': {
- id: '4',
- title: 'Air Force One',
- genre: 'ACTION',
- releaseYear: 1997,
- isFavorite: true, // updated value
- },
-};
-
-// SPECIAL NOTE: this mutation string would result in no cache change because Movie~2 is not currently cached
-const ADD_FAVORITE_MOVIE = gql`
- mutation AddFavoriteMovie {
- favoriteMovie(id: 2) {
- id
- isFavorite
- }
- }
-`;
-
-// EXAMPLE 2: SIMPLE DELETE ===================================================================================================
-
-const cachePreMut = {
- ROOT_QUERY: {
- 'movies(input:{genre:ACTION})': ['Movie~1', 'Movie~4'],
- },
- ROOT_MUTATION: {},
-
- 'Movie~1': {
- id: '1',
- title: 'Indiana Jones and the Last Crusade',
- genre: 'ACTION',
- releaseYear: 1989,
- isFavorite: false,
- },
- 'Movie~4': {
- id: '4',
- title: 'Air Force One',
- genre: 'ACTION',
- releaseYear: 1997,
- isFavorite: false,
- },
-};
-
-const DELETE_MOVIE = gql`
- mutation DeleteMovie {
- deleteMovie(id: 4) {
- __typename
- id
- }
- }
-`;
-
-mutate(DELETE_MOVIE, { delete: true });
-
-const respObj = {
- data: {
- deleteMovie: {
- __typename: 'Movie',
- id: '4',
- },
- },
-};
-
-const cachePostMut = {
- ROOT_QUERY: {
- 'movies(input:{genre:ACTION})': ['Movie~1', 'Movie~4'],
- },
- ROOT_MUTATION: {
- 'deleteMovie(id:4)': 'Movie~4',
- },
-
- 'Movie~1': {
- id: '1',
- title: 'Indiana Jones and the Last Crusade',
- genre: 'ACTION',
- releaseYear: 1989,
- isFavorite: false,
- },
- 'Movie~4': 'DELETED', // Movie~4 set to DELETED
-};
-
-// SPECIAL NOTE: DELETED hashes will be ignored in future queries and not throw a cache miss
-
-const ALL_MOVIES = gql`
- query movies(input: {genre: ACTION}) {
- movies {
- __typename
- id
- title
- }
- }
-`;
-
-gather(ALL_MOVIES);
-
-// this will be the response object served for the above query from the cache
-const respObj = {
- data: {
- movies: [
- {
- __typename,
- id: '1',
- title: 'Indiana Jones and the Last Crusade',
- },
- ],
- },
-};
-
-// EXAMPLE 3: SIMPLE CREATE ===================================================================================================
-
-const ALL_MOVIES_BY_RELEASE_DATE = gql`
- query AllMoviesByDate {
- movies(sort: { release: ASC }) {
- __typename
- id
- title
- releaseYear
- genre
- isFavorite
- }
- }
-`;
-
-// cache after the above query
-const cachePreMut = {
- ROOT_QUERY: {
- 'movies(sort:{release:ASC})': ['Movie~1', 'Movie~4'],
- },
- ROOT_MUTATION: {},
-
- 'Movie~1': {
- id: '1',
- title: 'Indiana Jones and the Last Crusade',
- genre: 'ACTION',
- releaseYear: 1989,
- isFavorite: false,
- },
- 'Movie~4': {
- id: '4',
- title: 'Air Force One',
- genre: 'ACTION',
- releaseYear: 1997,
- isFavorite: false,
- },
-};
-
-const ADD_MOVIE = gql`
- mutation AddMovie {
- addMovie(input: {title: 'The Fugitive', releaseYear: 1993, genre: ACTION }) {
- __typename
- id
- title
- releaseYear
- genre
- isFavorite
- }
- }
-`;
-
-// developer defined update function to correctly add movies into ALL_MOVIES_BY_RELEASE_DATE query
-function movieUpdate(cache, respObj) {
- const result = cache.read(ALL_MOVIES_BY_RELEASE_DATE);
- const { movies } = result.data;
- const newMovie = respObj.data.addMovie;
- const updatedMovieArr = movies.push(newMovie).sort((movie1, movie2) => {
- return movie1.releaseYear - movie2.releaseYear;
- });
- const updatedRespObj = { data: { movies: updatedMovieArr } };
- cache.write(ALL_MOVIES_BY_RELEASE_DATE, updatedRespObj);
-}
-
-mutate(ADD_MOVIE, { update: movieUpdate });
-
-const respAddMovie = {
- data: {
- addMovie: {
- __typename: 'Movie',
- id: '5',
- title: 'The Fugitive',
- releaseYear: 1993,
- genre: 'ACTION',
- isFavorite: false,
- },
- },
-};
-
-const cachePostMut = {
- ROOT_QUERY: {
- 'movies(sort:{release:ASC})': ['Movie~1', 'Movie~5', 'Movie~4'], // Movie~5 is slotted into the appropriate place
- },
- ROOT_MUTATION: {
- "addMovie(input:{title:'TheFugitive',releaseYear:1993,genre:ACTION})":
- 'Movie~5',
- },
-
- 'Movie~1': {
- id: '1',
- title: 'Indiana Jones and the Last Crusade',
- genre: 'ACTION',
- releaseYear: 1989,
- isFavorite: false,
- },
- 'Movie~4': {
- id: '4',
- title: 'Air Force One',
- genre: 'ACTION',
- releaseYear: 1997,
- isFavorite: false,
- },
- // Movie~5 added
- 'Movie~5': {
- id: '5',
- title: 'The Fugitive',
- genre: 'ACTION',
- releaseYear: 1993,
- isFavorite: false,
- },
-};
diff --git a/documentation/browserCache/normalize-documentation.js b/documentation/browserCache/normalize-documentation.js
deleted file mode 100644
index f6ef50a..0000000
--- a/documentation/browserCache/normalize-documentation.js
+++ /dev/null
@@ -1,370 +0,0 @@
-//*=========================================================================*//
-/*
- normalizeResult
-
- Description: Takes a query, a response object, an obsidianSchema, and a cache
- and 'normalizes' (flattens) the data, merges it into the cache, and returns
- the updated cache
-
- Summary:
- Breaks response object into various queries
- Creates an object of hashes and values from the response object
- {
- hash: queryStringHashed
- value: {
- dataHashes: true,
- ...,
- ...
- }
- }
- Recursively adds all base data to the cache
- Checks to see if the hash exists in the cache and returns a new cache
-*/
-
-normalizeResult(query, result, obsidianSchema, cache)
-
-// INPUT:
-const query = gql`
- {
- Country(_id: "4425") {
- _id
- name
- population
- flag {
- _id
- emoji
- }
- borders {
- _id
- name
- capital
- }
- }
- }
-`;
-
-const result = {
- data: {
- Country: [
- {
- _id: '4425',
- name: 'United States of America',
- population: 323947000,
- flag: {
- _id: '4440',
- emoji:'🇺🇸'
- },
- borders: [
- {
- capital: "Mexico City",
- name: "Mexico",
- _id: "2741"
- },
- {
- capital: "Ottawa",
- name: "Canada",
- _id: "860",
- }
- ],
- },
- ]
- }
-};
-
-const obsidianSchema = {
- returnTypes: {
- Country: {
- kind: 'NamedType',
- type: 'Country',
- },
- },
- argTypes: {
- Country: { _id: 'ID' },
- },
- obsidianTypeSchema: {
- Country: {
- borders: { type: 'Country', scalar: false },
- capital: { type: 'String', scalar: true },
- flag: { type: 'Flag', scalar: false },
- name: { type: 'String', scalar: true },
- population: { type: 'Int', scalar: true },
- _id: { type: 'ID', scalar: true },
- },
- Flag: {
- emoji: { type: 'String', scalar: true },
- _id: { type: 'ID', scalar: true },
- },
- },
-};
-
-const cache = {};
-
-// PROCESS:
-hashSpecificQuery; // CORE NORMALIZATION FLOW, stores base data and returns queryHash and data
-checkAndInsert; // checks if the query string is in the cache, then caches
-
-// OUTPUT:
-const obsidianReturn = [{
- data: {
- Country: [
- {
- borders: [
- { _id: '2741', name: 'Mexico', capital: 'Mexico City' },
- { _id: '860', name: 'Canada', capital: 'Ottawa' },
- ],
- flag: {
- emoji: '',
- _id: '4440',
- },
- _id: '4425',
- name: 'United States of America',
- population: 323947000,
- },
- ],
- },
-}];
-
-//*=========================================================================*//
-/*
- hashSpecificQuery
-
- Description: Takes a query and spits out an object composed of
- hashes for that query and values for that query
-*/
-async function hashSpecificQuery(queryType, fields, returnTypes, query, obsidianTypeSchema, cache)
-
-// INPUT:
-queryType = 'Country'
-
-fields = [
- {
- _id: '4425',
- name: 'United States of America',
- population: 323947000,
- flag: {
- _id: '4440',
- emoji:'🇺🇸'
- },
- borders: [
- {
- capital: "Mexico City",
- name: "Mexico",
- _id: "2741"
- },
- {
- capital: "Ottawa",
- name: "Canada",
- _id: "860",
- }
- ],
- },
-]
-returnTypes
-query
-obsidianTypeSchema
-cache
-
-// PROCESS:
-specificQueryParser;
-hashAndStoreFields;
-
-// OUTPUT:
-hashedQuery = {
- hash: "Country(_id:\"4425\"){_idnamepopulationflag{_idemoji}borders{_idnamecapital}}",
- value: {
- 'Country~4425~borders': true,
- 'Country~4425~flag': true,
- 'Country~4425~name': true,
- 'Country~4425~population': true
- }
-}
-
-//*=========================================================================*//
-/*
- checkAndInsert
-
- Description: Checks if the hash exists in the cache, if not then insert the
- hash and its value into the cache
-
- Doesn't appear to upidate previously hashed data
-*/
-async function checkAndInsert(hash, value, cache, expiration = 20)
-
-// INPUT
-hash
-value
-cache
-expiration
-
-// PROCESS
-connectFunc
-
-// OUTPUT
-cache = {
- "Country(_id:\"4425\"){_idnamepopulationflag{_idemoji}borders{_idnamecapital}}": {
- "Country~4425~borders": true,
- "Country~4425~flag": true,
- "Country~4425~name": true,
- "Country~4425~population": true
- },
- "Country~860~capital": "Ottawa",
- "Country~860~name": "Canada",
- "Country~2741~capital": "Mexico City",
- "Country~2741~name": "Mexico",
- "Country~4425~flag": "Flag~4440",
- "Country~4425~name": "United States of America",
- "Country~4425~population": 323947000,
- "Flag~4440~emoji": "🇺🇸",
- "Country~4425~borders": {
- "Country~2741": true, "Country~860": true
- },
-}
-
-//*=========================================================================*//
-/*
- specificQueryParser
-
- Description: takes a starting index and a query string and returns a
- minified query and end index
-*/
-specificQueryParser(startIdx, query).output;
-
-// INPUT
-startIdx // starting index of query
-query
-
-// PROCESS:
-
-// OUTPUT:
-output = "Country(_id:\"4425\"){_idnamepopulationflag{_idemoji}borders{_idnamecapital}}"
-
-//*=========================================================================*//
-/*
- hashAndStoreFields
-
- Description: Takes a set of fields, generates hashes with them, gives them a value of true and
- and outputs an object
-*/
-await hashAndStoreFields(queryType, fields, returnTypes, obsidianTypeSchema, cache);
-//INPUT
-queryType
-fields
-returnTypes
-obsidianTypeSchema
-cache
-
-//PROCESS
-hashAndStoreFieldsOfObject
-
-//OUTPUT
-output = {
- 'Country~4425~borders': true,
- 'Country~4425~flag': true,
- 'Country~4425~name': true,
- 'Country~4425~population': true
-}
-
-//*=========================================================================*//
-/*
- hashAndStoreFieldsOfObject
-
- Description: Takes in an object of properties and eventually
- returns an object of hashes with values true
-*/
-async function hashAndStoreFieldsOfObject(typeSchemaName, fields, obsidianTypeSchema, queryType, returnTypes, cache)
-
-// INPUT
-typeSchemaName
-fields = {
- _id: '4425',
- name: 'United States of America',
- population: 323947000,
- flag: {
- _id: '4440',
- emoji:'🇺🇸'
- },
- borders: [
- {
- capital: "Mexico City",
- name: "Mexico",
- _id: "2741"
- },
- {
- capital: "Ottawa",
- name: "Canada",
- _id: "860",
- }
- ],
-}
-obsidianTypeSchema
-queryType
-returnTypes
-cache
-
-// PROCESS
-oldReduce
-
-// OUTPUT
-hashes = {
- "Country~4425~name": true,
- "Country~4425~population": true,
- "Country~4425~flag": true,
- "Country~4425~borders": true
-}
-
-//*=========================================================================*//
-/*
- oldReduce
-
- Description: Takes a property, creates a hash, and enters it into the
- hash object
-*/
-async function oldReduce(property)
-
-// INPUT
-property = "name"
-
-// PROCESS
-hashGenerator // generates a hash
-hashAndStoreFields // hashes and stores values within the nested obj
-checkID // returns null or id of a named type (if it exists)
-checkAndInsert // hashes pieces of data with their values
-
-// OUTPUT
-// Adds an element to the hash object
-
-
-//*=========================================================================*//
-/*
- hashGenerator
-
- Description: takes various fields and creates a hash
-*/
-async function hashGenerator(typeSchemaName, id, property)
-
-// INPUT
-typeSchemaName
-id
-property
-
-// PROCESS
-
-// OUTPUT
-hash = "Country~4425~name"
-
-//*=========================================================================*//
-/*
- checkID
-
- Description: takes a the value associated with a property
- and returns null or an id
-*/
-
-function checkID(propObj)
-// INPUT
-propObj
-
-// PROCESS
-
-// OUTPUT
-newID = id || null
\ No newline at end of file
diff --git a/documentation/browserCache/query-documentation.js b/documentation/browserCache/query-documentation.js
deleted file mode 100644
index 858a72b..0000000
--- a/documentation/browserCache/query-documentation.js
+++ /dev/null
@@ -1,150 +0,0 @@
-/**
- * NOTES:
- * 1. This implementation does not handle variables or aliases currently
- * 2. Potential updates needed for implementation:
- * - gather_hunt.jsx will need to be updated to handle the combining of hunt and gather.
- * - making simple edits to gather_hunt should be sufficient for handling the cacheRead, cacheWrite, PollInterval flags;
- * - some combination of newNormalize, newDestructure, and the read and write methods on the cache
- * would need to be updated to account for wholeQuery caching
- * - it may make more sense to create dedicated cache methods for whole query caching
- */
-
-function query(query, options) {
- // where the magic happens
-}
-
-// options object
-const options = {
- endpoint: '/graphql', // the endpoint where the post request with mutation string will be sent; DEFAULT: '/graphql'
- cacheRead: true, // determines whether the cache should be checked before making a server request; DEFAULT: true
- cacheWrite: true, // determines whether the response from a server request should be written into the cache; DEFAULT: true
- pollInterval: null, // if non-null the query will be sent the server every inputted number of ms; DEFAULT: null
- wholeQuery: false, // for any cache reads or writes this will conduct wholeQuery writes or retrieval; DEFAULT: false
-};
-
-/**
- * cacheRead
- * - If set to false the query will always be sent to the server; the cache will not be checked.
- * - __typenames will still be inserted into this request
- */
-
-/**
- * cacheWrite
- * - If set to false, the cache will never be updated even if new data is retrieved from the server.
- */
-
-/**
- * pollInterval
- * - null disables this feature
- * - This same query will be sent to the server every inputed number of milliseconds
- * - This query will not check the client-side cache before being sent to the server
- * - The response from the server will be written into cache upon receipt
- */
-
-/**
- * wholeQuery
- * - if enabled the entire query and response will be stored in the cache as one key value pair
- * - a minified version of the entire query operation string will be stored as the hash key
- * - the response object will be stored as the value without any normalization
- * - the only way to retrieve a cached whole query is to make another query request with an identical
- * operation query string and the wholeQuery flag set to true
- * - if the WholeQuery flag is true the caheRead and cacheWrite flags will be ignored.
- * - __typenames will not get inserted into wholeQuery requests
- */
-
-// WHOLE QUERY EXAMPLE ======================================================================================
-
-const ALL_MOVIES = gql`
- query AllMovies {
- movies {
- id
- title
- actors {
- id
- firstName
- }
- }
- }
-`;
-const respAllMovies = {
- data: {
- movies: [
- {
- id: '1',
- title: 'Indiana Jones and the Last Crusade',
- actors: [
- { id: '1', firstName: 'Harrison' },
- { id: '2', firstName: 'Sean' },
- ],
- },
- {
- id: '2',
- title: 'Empire Strikes Back',
- actors: [
- { id: '1', firstName: 'Harrison' },
- { id: '3', firstName: 'Mark' },
- ],
- },
- {
- id: '3',
- title: 'Witness',
- actors: [
- { id: '1', firstName: 'Harrison' },
- { id: '4', firstName: 'Patti' },
- ],
- },
- {
- id: '4',
- title: 'Air Force One',
- actors: [
- { id: '1', firstName: 'Harrison' },
- { id: '5', firstName: 'Gary' },
- ],
- },
- ],
- },
-};
-
-const cache = {
- ROOT_QUERY: {
- 'queryAllMovies{movies{idtitleactors{idfirstName}}}': {
- data: {
- movies: [
- {
- id: '1',
- title: 'Indiana Jones and the Last Crusade',
- actors: [
- { id: '1', firstName: 'Harrison' },
- { id: '2', firstName: 'Sean' },
- ],
- },
- {
- id: '2',
- title: 'Empire Strikes Back',
- actors: [
- { id: '1', firstName: 'Harrison' },
- { id: '3', firstName: 'Mark' },
- ],
- },
- {
- id: '3',
- title: 'Witness',
- actors: [
- { id: '1', firstName: 'Harrison' },
- { id: '4', firstName: 'Patti' },
- ],
- },
- {
- id: '4',
- title: 'Air Force One',
- actors: [
- { id: '1', firstName: 'Harrison' },
- { id: '5', firstName: 'Gary' },
- ],
- },
- ],
- },
- },
- },
- ROOT_MUTATION: {},
-};
diff --git a/src/Browser/CacheClassBrowser.js b/src/Browser/CacheClassBrowser.js
deleted file mode 100644
index fb58f3f..0000000
--- a/src/Browser/CacheClassBrowser.js
+++ /dev/null
@@ -1,342 +0,0 @@
-/** @format */
-
-import normalizeResult from "./normalize.js";
-import destructureQueries from "./destructure.js";
-
-export default class BrowserCache {
- constructor(
- initialCache = {
- ROOT_QUERY: {},
- ROOT_MUTATION: {},
- // match resolvers to types in order to add them in write-through
- writeThroughInfo: {},
- },
- ) {
- this.storage = initialCache;
- this.context = "client";
- }
-
- // Main functionality methods
- async read(queryStr) {
- if (typeof queryStr !== "string") {
- throw TypeError("input should be a string");
- }
- // destructure the query string into an object
- const queries = destructureQueries(queryStr).queries;
- // breaks out of function if queryStr is a mutation
- if (!queries) return undefined;
- const responseObject = {};
- // iterate through each query in the input queries object
- for (const query in queries) {
- // get the entire str query from the name input query and arguments
- const queryHash = queries[query].name.concat(queries[query].arguments);
- const rootQuery = await this.cacheRead("ROOT_QUERY");
- // match in ROOT_QUERY
- if (rootQuery[queryHash]) {
- // get the hashs to populate from the existent query in the cache
- const arrayHashes = rootQuery[queryHash];
- // Determines responseObject property labels - use alias if applicable, otherwise use name
- const respObjProp = queries[query].alias ?? queries[query].name;
- // invoke populateAllHashes and add data objects to the response object for each input query
- responseObject[respObjProp] = await this.populateAllHashes(
- arrayHashes,
- queries[query].fields,
- );
- if (!responseObject[respObjProp]) return undefined;
-
- // no match with ROOT_QUERY return null or ...
- } else {
- return undefined;
- }
- }
- return { data: responseObject };
- }
-
- async writeThrough(queryStr, respObj, deleteFlag, endpoint) {
- try {
- const queryObj = destructureQueries(queryStr);
- const mutationName = queryObj.mutations[0].name;
- // check if it's a mutation
- if (queryObj.mutations) {
- // check to see if the mutation/type has been stored in the cache yet
- // if so, make the graphQL call
- if (!this.storage.writeThroughInfo.hasOwnProperty(mutationName)) {
- respObj = await fetch(endpoint, {
- method: "POST",
- headers: {
- "Content-Type": "application/json",
- Accept: "application/json",
- },
- body: JSON.stringify({ query: queryStr }),
- }).then((resp) => resp.json());
- // store the mutation/type in cache
- this.storage.writeThroughInfo[mutationName] = {};
- this.storage.writeThroughInfo[mutationName].type =
- respObj.data[mutationName].__typename;
- this.storage.writeThroughInfo[mutationName].lastId =
- respObj.data[mutationName].id;
- // below is for situations when the type is already stored
- } else {
- // construct the response object ourselves
- const dummyResponse = await fetch(endpoint, {
- method: "POST",
- headers: {
- "Content-Type": "application/json",
- Accept: "application/json",
- },
- body: JSON.stringify({ query: queryStr }),
- }).then((resp) => resp.json());
- this.constructResponseObject(queryObj, respObj, deleteFlag);
- }
- // same logic for both situations
- // normalize the result, invalidate the cache and return the appropriate object
- await this.write(queryStr, respObj, deleteFlag);
- return respObj;
- }
- } catch (e) {
- console.log(e);
- }
- }
-
- async write(queryStr, respObj, deleteFlag) {
- const queryObj = destructureQueries(queryStr);
- const resFromNormalize = normalizeResult(queryObj, respObj, deleteFlag);
- // update the original cache with same reference
- for (const hash in resFromNormalize) {
- const resp = await this.cacheRead(hash);
- if (resFromNormalize[hash] === "DELETED") {
- await this.cacheWrite(hash, "DELETED");
- } else if (resp) {
- const newObj = Object.assign(resp, resFromNormalize[hash]);
- await this.cacheWrite(hash, newObj);
- } else {
- await this.cacheWrite(hash, resFromNormalize[hash]);
- }
- }
- }
-
- constructResponseObject(queryObj, respObj, deleteFlag) {
- const mutationData = queryObj.mutations[0];
- const mutationName = mutationData.name;
- const __typename = this.storage.writeThroughInfo[mutationName].type;
- // this.storage.writeThroughInfo[mutationName].type;
- respObj.data = {};
- const obj = {};
- respObj.data[mutationName] = obj;
- obj.__typename = __typename;
- // delete logic
- if (deleteFlag) {
- // add id and value from the queryObj
- let idAndVal = mutationData.arguments;
- idAndVal = idAndVal.split(":");
- const id = idAndVal[0].substring(1);
- const val = idAndVal[1].substring(0, idAndVal[1].length - 1);
- obj[id] = val;
- // return out of this function so we don't continue
- // onto add/update logic
- return respObj;
- }
- // increment ID for ADD mutations only
- obj.id = (++this.storage.writeThroughInfo[mutationName].lastId).toString();
-
- // ADD mutation logic
- // grab arguments (which is a string)
- const argumentsStr = mutationData.arguments;
- this.addNonScalarFields(argumentsStr, respObj, mutationData);
- this.separateArguments(argumentsStr, respObj, mutationName);
- }
-
- separateArguments(str, respObj, mutationName) {
- const startIndex = str.indexOf("{");
- const slicedStr = str.slice(startIndex + 1, str.length - 2);
- const argumentPairs = slicedStr.split(",");
- for (const argumentPair of argumentPairs) {
- const argumentKeyAndValue = argumentPair.split(":");
- const argumentKey = argumentKeyAndValue[0];
- let argumentValue = Number(argumentKeyAndValue[1])
- ? Number(argumentKeyAndValue[1])
- : argumentKeyAndValue[1];
- if (typeof argumentValue === "string") {
- argumentValue = argumentValue.replace(/\"/g, "");
- }
- respObj.data[mutationName][argumentKey] = argumentValue;
- }
- }
-
- addNonScalarFields(respObj, mutationData) {
- for (const field in mutationData.fields) {
- if (
- mutationData.fields[field] !== "scalar" &&
- mutationData.fields[field] !== "meta"
- ) {
- respObj.data[mutationData.name][field] = [];
- }
- }
- }
-
- gc() {
- // garbageCollection; garbage collection: removes any inaccessible hashes from the cache
- const badHashes = getBadHashes();
- const goodHashes = rootQueryCleaner(badHashes);
- const goodHashes2 = getGoodHashes(badHashes, goodHashes);
- removeInaccessibleHashes(badHashes, goodHashes2);
- }
-
- // remove hashes that are flagged for deletion and store records of them in a set badHashes for removal inside root queries
- getBadHashes() {
- const badHashes = new Set();
- for (let key in this.storage) {
- if (key === "ROOT_QUERY" || key === "ROOT_MUTATION") continue;
- if (this.storage[key] === "DELETED") {
- badHashes.add(key);
- delete this.storage[key];
- }
- }
- return badHashes;
- }
-
- // go through root queries, remove all instances of bad hashes, add remaining hashes into goodHashes set
- rootQueryCleaner(badHashes) {
- const goodHashes = new Set();
- const rootQuery = this.storage["ROOT_QUERY"];
- for (let key in rootQuery) {
- if (Array.isArray(rootQuery[key])) {
- rootQuery[key] = rootQuery[key].filter((x) => !badHashes.has(x));
- if (rootQuery[key].length === 0) delete rootQuery[key];
- for (let el of rootQuery[key]) goodHashes.add(el);
- } else {
- badHashes.has(rootQuery[key])
- ? delete rootQuery[key]
- : goodHashes.add(rootQuery[key]);
- }
- }
- return goodHashes;
- }
-
- // Go through the cache, check good hashes for any nested hashes and add them to goodHashes set
- getGoodHashes(badHashes, goodHashes) {
- for (let key in this.storage) {
- if (key === "ROOT_QUERY" || key === "ROOT_MUTATION") continue;
- for (let i in this.storage[key]) {
- if (Array.isArray(this.storage[key][i])) {
- for (let el of this.storage[key][i]) {
- if (el.includes("~") && !badHashes.has(el)) {
- goodHashes.add(el);
- }
- }
- } else if (typeof this.storage[key][i] === "string") {
- if (
- this.storage[key][i].includes("~") &&
- !badHashes.has(this.storage[key][i])
- ) {
- goodHashes.add(this.storage[key][i]);
- }
- }
- }
- }
- return goodHashes;
- }
-
- // Remove inaccessible hashes by checking if they are in goodhashes set or not
- removeInaccessibleHashes(badHashes, goodHashes) {
- for (let key in this.storage) {
- if (key === "ROOT_QUERY" || key === "ROOT_MUTATION") continue;
- if (!goodHashes.has(key)) delete this.storage[key];
- for (let i in this.storage[key]) {
- if (Array.isArray(this.storage[key][i])) {
- this.storage[key][i] = this.storage[key][i].filter(
- (x) => !badHashes.has(x),
- );
- } else if (typeof this.storage[key][i] === "string") {
- if (
- this.storage[key][i].includes("~") &&
- badHashes.has(this.storage[key][i])
- ) {
- delete this.storage[key][i];
- }
- }
- }
- }
- }
-
- // cache read/write helper methods
- async cacheRead(hash) {
- return this.storage[hash];
- }
-
- async cacheWrite(hash, value) {
- this.storage[hash] = value;
- }
-
- async cacheDelete(hash) {
- delete this.storage[hash];
- }
-
- async cacheClear() {
- this.storage = {
- ROOT_QUERY: {},
- ROOT_MUTATION: {},
- };
- }
-
- // functionality to stop polling
- stopPollInterval(interval) {
- clearInterval(interval);
- }
-
- writeWholeQuery(queryStr, respObj) {
- const hash = queryStr.replace(/\s/g, "");
- this.cacheWrite(ROOT_QUERY[hash], respObj);
- return respObj;
- }
-
- readWholeQuery(queryStr) {
- const hash = queryStr.replace(/\s/g, "");
- const root = this.cacheRead("ROOT_QUERY");
- if (root[hash]) return { data: root[hash] };
- return undefined;
- }
-
- // specialized helper methods
- async populateAllHashes(allHashesFromQuery, fields) {
- // include the hashname for each hash
- if (!allHashesFromQuery.length) return [];
- const hyphenIdx = allHashesFromQuery[0].indexOf("~");
- const typeName = allHashesFromQuery[0].slice(0, hyphenIdx);
- return allHashesFromQuery.reduce(async (acc, hash) => {
- // for each hash from the input query, build the response object
- const readVal = await this.cacheRead(hash);
- // return undefine if hash has been garbage collected
- if (readVal === undefined) return undefined;
- if (readVal === "DELETED") return acc;
- const dataObj = {};
- for (const field in fields) {
- if (readVal[field] === "DELETED") continue;
- // for each field in the fields input query, add the corresponding value from the cache if the field is not another array of hashs
- if (readVal[field] === undefined && field !== "__typename") {
- return undefined;
- } else if (typeof fields[field] !== "object") {
- // add the typename for the type
- if (field === "__typename") {
- dataObj[field] = typeName;
- } else dataObj[field] = readVal[field];
- } else {
- // case where the field from the input query is an array of hashes, recursively invoke populateAllHashes
- dataObj[field] = await this.populateAllHashes(
- readVal[field],
- fields[field],
- );
- if (dataObj[field] === undefined) return undefined;
- }
- }
- // acc is an array within a Response object for each hash
- try {
- const resolvedProm = await Promise.resolve(acc);
- resolvedProm.push(dataObj);
- return resolvedProm;
- } catch (error) {
- return undefined;
- }
- }, []);
- }
-}
diff --git a/src/Browser/FrequencySketch.js b/src/Browser/FrequencySketch.js
new file mode 100644
index 0000000..92143df
--- /dev/null
+++ b/src/Browser/FrequencySketch.js
@@ -0,0 +1,163 @@
+export function FrequencySketch() {
+
+ const RESET_MASK = 0x77777777; // 0111 0111 0111 ... — clears the high bit of every 4-bit counter when halving
+ const ONE_MASK = 0x11111111; // 0001 0001 0001
+
+ let sampleSize, blockMask, size;
+ let table = [];
+
+ /**
+ * Initializes and increases the capacity of this FrequencySketch instance
+ * so it can accurately estimate the popularity of data given the maximum
+ * size of the cache. Frequency counts become zero when resizing.
+ *
+ * @param maxSize cache capacity
+ */
+ this.updateCapacity = function(maxSize) {
+ const max = Math.floor(maxSize); //to ensure it's an integer
+ if(table.length >= max) return;
+
+ table = Array(Math.max(nearestPowerOfTwo(max), 8)).fill().map(()=>Array(2).fill(0));
+ sampleSize = (maxSize === 0) ? 10 : (10*max);
+ blockMask = (table.length >>> 3) - 1;
+
+ if (sampleSize <= 0) sampleSize = Number.MAX_SAFE_INTEGER;
+ size = 0;
+ }
+ /**
+ * Returns true if the sketch has not been initialized, indicating updateCapacity
+ * needs to be called before tracking frequencies.
+ */
+ const isNotInitialized = () => {
+ return table.length === 0;
+ }
+ /**
+ * Returns the estimated frequency of an element, up to the maximum(15).
+ *
+ * @param el the element being counted
+ * @return the estimated frequency - required to be nonnegative
+ */
+
+ this.frequency = function(el) {
+ if(isNotInitialized()) return 0;
+ const count = Array(4);
+
+ const blockHash = supphash(hashCode(el));
+ const counterHash = rehash(blockHash);
+ const block = (blockHash & blockMask) << 3;
+
+ for (let i = 0; i < 4; i++) {
+ const h = counterHash >>> (i << 3);
+ const index = (h >>> 1) & 15;
+ const row = index % 2;
+ const offset = h & 1;
+ count[i] = ((table[block+offset+(i<<1)][row] >>> ((index >> 1) << 2)) & 15);
+ }
+ return Math.min(...count);
+ }
+
+ /**
+ * Increment the frequency of the element if it does not exceed the maximum(15)
+ * @param el element to add
+ */
+ this.increment = function(el) {
+ if (isNotInitialized()) return;
+
+ const index = Array(8);
+ const blockHash = supphash(hashCode(el));
+ const counterHash = rehash(blockHash);
+ const block = (blockHash & blockMask) << 3;
+ // NOTE: el is expected to be a string key; non-string values would hash as "[object Object]"
+
+ for (let i = 0; i < 4; i++) {
+ const h = counterHash >>> (i << 3);
+ index[i] = (h >>> 1) & 15;
+ const offset = h & 1;
+ index[i + 4] = block + offset + (i << 1);
+ }
+ const incremented =
+ incrementAt(index[4], index[0])
+ | incrementAt(index[5], index[1])
+ | incrementAt(index[6], index[2])
+ | incrementAt(index[7], index[3]);
+ if (incremented && (++size == sampleSize)) {
+ reset();
+ }
+
+ }
+
+ /**
+ * Increments the specified counter by 1 if it is not already at the maximum value (15).
+ *
+ * @param i the table index (16 counters)
+ * @param j the counter to increment
+ * @return if incremented
+ */
+ const incrementAt = (i,j) => {
+ const row = j % 2;
+ const offset = (j >> 1) << 2;
+ const mask = (15 << offset);
+ if ((table[i][row] & mask) != mask) { //if curr counter is not at maximum(15)
+ table[i][row] += (1 << offset);
+ return true;
+ }
+ return false;
+ }
+
+ /** Reduces every counter by half of its original value. */
+ const reset = () => {
+ let count = 0;
+ for (let i = 0; i < table.length; i++) {
+ count += bitCount(table[i][0] & ONE_MASK) + bitCount(table[i][1] & ONE_MASK);
+ table[i][0] = (table[i][0] >>> 1) & RESET_MASK;
+ table[i][1] = (table[i][1] >>> 1) & RESET_MASK;
+ }
+ size = (size - (count >>> 2)) >>> 1;
+ }
+ /** Applies a supplemental hash function for fewer collisions. */
+ const supphash = x => {
+ x ^= x >> 17;
+ x *= 0xed5ad4bb;
+ x ^= x >> 11;
+ x *= 0xac4c1b51;
+ x ^= x >> 15;
+ return x;
+}
+
+ /** Applies another round of hashing to achieve three-round hashing. */
+ const rehash = x => {
+ x *= 0x31848bab;
+ x ^= x >> 14;
+ return x;
+ }
+
+ const nearestPowerOfTwo = num => {
+ const exp = Math.floor(Math.log2(num));
+ if (Math.pow(2, exp) === num) return num;
+
+ return Math.pow(2, exp+1);
+ }
+
+ const hashCode = (input) => {
+ let hash, code;
+ hash = 0;
+ for (let i = 0; i < input.length; i++) {
+ code = input.charCodeAt(i);
+ hash = ((hash<<5)-hash)+code;
+ hash = hash & hash;
+ }
+ return hash;
+ }
+
+
+ /** bitcounting for 32-bit integers (reference: https://graphics.stanford.edu/~seander/bithacks.html) */
+
+ const bitCount = n => {
+ n = n - ((n >> 1) & 0x55555555);
+ n = (n & 0x33333333) + ((n >> 2) & 0x33333333);
+ const count = ((n + (n >> 4) & 0xF0F0F0F) * 0x1010101) >> 24;
+ return count;
+ }
+}
+
+FrequencySketch();
\ No newline at end of file
diff --git a/src/Browser/lfuBrowserCache.js b/src/Browser/lfuBrowserCache.js
index 814773c..172f9f8 100644
--- a/src/Browser/lfuBrowserCache.js
+++ b/src/Browser/lfuBrowserCache.js
@@ -1,5 +1,5 @@
/** @format */
-import { plural } from "https://deno.land/x/deno_plural/mod.ts";
+import { plural } from "https://deno.land/x/deno_plural@2.0.0/mod.ts";
import normalizeResult from "./normalize.js";
import destructureQueries from "./destructure.js";
@@ -29,14 +29,14 @@ class DoublyLinkedList {
}
removeNode(node) {
- let prev = node.prev;
- let next = node.next;
+ const prev = node.prev;
+ const next = node.next;
prev.next = next;
next.prev = prev;
}
removeTail() {
- let node = this.tail.prev;
+ const node = this.tail.prev;
this.removeNode(node);
return node.key;
}
@@ -152,7 +152,7 @@ LFUCache.prototype.read = async function (queryStr) {
return { data: responseObject };
};
-LFUCache.prototype.write = async function (queryStr, respObj, deleteFlag) {
+LFUCache.prototype.write = async function (queryStr, respObj, searchTerms, deleteFlag) {
let nullFlag = false;
let deleteMutation = "";
for(const query in respObj.data) {
@@ -195,6 +195,20 @@ LFUCache.prototype.write = async function (queryStr, respObj, deleteFlag) {
this.ROOT_QUERY[key].push(hash);
}
}
+ /****
+ * if search terms were provided in the wrapper and the query is an
+ * "all"-type query, build out queries in ROOT_QUERY that match the
+ * search terms for each item retrieved from the "all"-type query so
+ * that future single queries can be looked up directly from the cache
+ ****/
+ if (searchTerms && queryStr.slice(8, 11) === 'all'){
+ searchTerms.forEach(el => {
+ const elVal = resFromNormalize[hash][el].replaceAll(' ', '');
+ const hashKey = `one${typeName}(${el}:"${elVal}")`;
+ if (!this.ROOT_QUERY[hashKey]) this.ROOT_QUERY[hashKey] = [];
+ this.ROOT_QUERY[hashKey].push(hash);
+ });
+ }
}
}
}
@@ -205,13 +219,13 @@ function labelId(obj) {
return obj.__typename + "~" + id;
}
-LFUCache.prototype.cacheDelete = async function (hash) {
- let node = this.nodeHash.get(hash);
+LFUCache.prototype.cacheDelete = function (hash) {
+ const node = this.nodeHash.get(hash);
this.freqHash.get(node.freq).removeNode(node);
this.nodeHash.delete(hash);
};
-LFUCache.prototype.cacheClear = async function () {
+LFUCache.prototype.cacheClear = function () {
this.currentSize = 0;
this.leastFreq = 0;
this.ROOT_QUERY = {};
diff --git a/src/Browser/lruBrowserCache.js b/src/Browser/lruBrowserCache.js
index 42ebbbd..7da6dec 100644
--- a/src/Browser/lruBrowserCache.js
+++ b/src/Browser/lruBrowserCache.js
@@ -16,8 +16,10 @@ export default function LRUCache(capacity) {
this.currentSize = 0;
this.ROOT_QUERY = {};
this.ROOT_MUTATION = {};
+ // node hash for cache lookup and storage
this.nodeHash = new Map();
+ // doubly-linked list to keep track of recency and handle eviction
this.head = new Node('head', null);
this.tail = new Node('tail', null);
this.head.next = this.tail;
@@ -31,6 +33,7 @@ LRUCache.prototype.removeNode = function (node) {
next.prev = prev;
};
+
LRUCache.prototype.addNode = function (node) {
const tempTail = this.tail.prev;
tempTail.next = node;
@@ -61,15 +64,15 @@ LRUCache.prototype.put = function (key, value) {
this.addNode(newNode);
this.nodeHash.set(key, newNode);
- // check capacity - if over capacity, remove and reassign head node
- // if (Object.nodeHash[this.nodeHash].length > capacity)
+ // check capacity - if over capacity, remove and reassign head node
if (this.nodeHash.get(key).size > this.capacity){
const tempHead = this.head.next;
this.removeNode(tempHead);
- this.nodeHash.delete(tempTail.key);
+ this.nodeHash.delete(tempHead.key);
}
}
+// read from the cache and generate a response object to be populated with values from cache
LRUCache.prototype.read = async function (queryStr) {
if (typeof queryStr !== "string") throw TypeError("input should be a string");
// destructure the query string into an object
@@ -105,7 +108,7 @@ LRUCache.prototype.read = async function (queryStr) {
return { data: responseObject };
};
-LRUCache.prototype.write = async function (queryStr, respObj, deleteFlag) {
+LRUCache.prototype.write = async function (queryStr, respObj, searchTerms, deleteFlag) {
let nullFlag = false;
let deleteMutation = "";
for(const query in respObj.data) {
@@ -148,6 +151,20 @@ LRUCache.prototype.write = async function (queryStr, respObj, deleteFlag) {
this.ROOT_QUERY[key].push(hash);
}
}
+ /****
+ * if search terms were provided in the wrapper and the query is an
+ * "all"-type query, build out queries in ROOT_QUERY that match the
+ * search terms for each item retrieved from the "all"-type query so
+ * that future single queries can be looked up directly from the cache
+ ****/
+ if (searchTerms && queryStr.slice(8, 11) === 'all'){
+ searchTerms.forEach(el => {
+ const elVal = resFromNormalize[hash][el].replaceAll(' ', '');
+ const hashKey = `one${typeName}(${el}:"${elVal}")`;
+ if (!this.ROOT_QUERY[hashKey]) this.ROOT_QUERY[hashKey] = [];
+ this.ROOT_QUERY[hashKey].push(hash);
+ });
+ }
}
}
}
@@ -158,6 +175,7 @@ function labelId(obj) {
return obj.__typename + "~" + id;
}
+// fills in placeholder data in response object with values found in cache
LRUCache.prototype.populateAllHashes = function (
allHashesFromQuery,
fields
diff --git a/src/Browser/normalize.js b/src/Browser/normalize.js
index b14ec50..6c8c04f 100644
--- a/src/Browser/normalize.js
+++ b/src/Browser/normalize.js
@@ -15,7 +15,7 @@ export default function normalizeResult(queryObj, resultObj, deleteFlag) {
);
//iterate thru the different response objects that were mutated
-4
+4 // Please do not disturb the mysterious, load-bearing 4. This is its home.
const obj = resultObj.data;
//checks if the current element is an array
if (Array.isArray(obj)) {
diff --git a/src/Browser/wTinyLFU Sub-Caches/lruSub-cache.js b/src/Browser/wTinyLFU Sub-Caches/lruSub-cache.js
new file mode 100644
index 0000000..5781d5c
--- /dev/null
+++ b/src/Browser/wTinyLFU Sub-Caches/lruSub-cache.js
@@ -0,0 +1,98 @@
+import { plural } from "https://deno.land/x/deno_plural@2.0.0/mod.ts";
+
+class Node {
+ constructor (key, value) {
+ this.key = key;
+ this.value = value;
+ this.next = this.prev = null;
+ }
+}
+
+export default function LRUCache(capacity) {
+ this.capacity = capacity;
+ this.currentSize = 0;
+ // node hash for cache lookup and storage
+ this.nodeHash = new Map();
+
+ // doubly-linked list to keep track of recency and handle eviction
+ this.head = new Node('head', null);
+ this.tail = new Node('tail', null);
+ this.head.next = this.tail;
+ this.tail.prev = this.head;
+}
+
+LRUCache.prototype.removeNode = function (node) {
+ const prev = node.prev;
+ const next = node.next;
+ prev.next = next;
+ next.prev = prev;
+};
+
+
+LRUCache.prototype.addNode = function (node) {
+ const tempTail = this.tail.prev;
+ tempTail.next = node;
+
+ this.tail.prev = node;
+ node.next = this.tail;
+ node.prev = tempTail;
+}
+
+// Like get, but doesn't update anything
+LRUCache.prototype.peek = function(key) {
+ const node = this.nodeHash.get(key);
+ if (!node) return null;
+ return node.value;
+}
+
+// Like removeNode, but takes key and deletes from hash
+LRUCache.prototype.delete = function (key) {
+ const node = this.nodeHash.get(key);
+ const prev = node.prev;
+ const next = node.next;
+ prev.next = next;
+ next.prev = prev;
+ this.nodeHash.delete(key);
+}
+
+LRUCache.prototype.get = function(key) {
+ const node = this.nodeHash.get(key);
+
+ // check if node does not exist in nodeHash obj
+ if (!node) return null;
+ // update position to most recent in list
+ this.removeNode(node);
+ this.addNode(node);
+ return node.value;
+}
+
+// used by wTinyLFU to get SLRU eviction candidates for TinyLFU decision
+LRUCache.prototype.getCandidate = function () {
+ const tempHead = this.head.next;
+ this.removeNode(tempHead);
+ this.nodeHash.delete(tempHead.key);
+ return {key: tempHead.key, value: tempHead.value};
+}
+
+LRUCache.prototype.put = function (key, value) {
+ // create a new node
+ const newNode = new Node(key, value);
+
+ // remove node from old position
+ const node = this.nodeHash.get(key);
+ if (node) this.removeNode(node);
+
+ // add new node to tail
+ this.addNode(newNode);
+ this.nodeHash.set(key, newNode);
+
+ // check capacity - if over capacity, remove and reassign head node
+ if (this.nodeHash.size > this.capacity){
+ const tempHead = this.head.next;
+ this.removeNode(tempHead);
+ this.nodeHash.delete(tempHead.key);
+ // return tempHead for use in w-TinyLFU's SLRU cache
+ return {key: tempHead.key, value: tempHead.value};
+ }
+}
+
diff --git a/src/Browser/wTinyLFU Sub-Caches/slruSub-cache.js b/src/Browser/wTinyLFU Sub-Caches/slruSub-cache.js
new file mode 100644
index 0000000..62bbb8f
--- /dev/null
+++ b/src/Browser/wTinyLFU Sub-Caches/slruSub-cache.js
@@ -0,0 +1,58 @@
+import LRUCache from './lruSub-cache.js';
+
+/*****
+* Main SLRU Cache
+*****/
+export default function SLRUCache(capacity) {
+ // Probationary LRU Cache using existing LRU structure in lruBrowserCache.js
+ this.probationaryLRU = new LRUCache(capacity * .20);
+ // Protected LRU Cache
+ this.protectedLRU = new LRUCache(capacity * .80);
+}
+
+// Get item from cache, updates last access,
+// and promotes existing items to protected
+SLRUCache.prototype.get = function (key) {
+ // get the item from the protectedLRU
+ const protectedItem = this.protectedLRU.get(key);
+ // check to see if the item is in the probationaryLRU
+ const probationaryItem = this.probationaryLRU.peek(key);
+
+ // If the item is in neither segment, return undefined
+ if (protectedItem === null && probationaryItem === null) return;
+
+ // If the item only exists in the protected segment, return that item
+ if (protectedItem !== null) return protectedItem;
+
+ // If the item only exists in the probationary segment, promote to protected and return item
+ // if adding an item to the protectedLRU results in ejection, demote ejected node
+ this.probationaryLRU.delete(key);
+ this.putAndDemote(key, probationaryItem);
+ return probationaryItem;
+}
+
+// add or update item in cache
+SLRUCache.prototype.put = function (key, node) {
+ // if the item is in the protected segment, update it
+ if (this.protectedLRU.nodeHash.get(key)) this.putAndDemote(key, node);
+ else if (this.probationaryLRU.nodeHash(key)) {
+ // if the item is in the probationary segment,
+ // promote and update it
+ this.probationaryLRU.delete(key);
+ this.putAndDemote(key, node);
+ }
+ // if in neither, add item to the probationary segment
+ else this.probationaryLRU.put(key, node)
+}
+
+// Check to see if the item exists in the cache without updating access
+SLRUCache.prototype.has = function (key) {
+ return this.protectedLRU.nodeHash.get(key) || this.probationaryLRU.nodeHash.get(key);
+}
+
+// Adds a node to the protectedLRU
+SLRUCache.prototype.putAndDemote = function (key, value) {
+ // if adding an item to the protectedLRU results in ejection, demote ejected node
+ const demoted = this.protectedLRU.put(key, value);
+ if (demoted) this.probationaryLRU.put(demoted.key, demoted.value);
+}
\ No newline at end of file
diff --git a/src/Browser/wTinyLFUBrowserCache.js b/src/Browser/wTinyLFUBrowserCache.js
new file mode 100644
index 0000000..1b54e5f
--- /dev/null
+++ b/src/Browser/wTinyLFUBrowserCache.js
@@ -0,0 +1,222 @@
+import { plural } from "https://deno.land/x/deno_plural@2.0.0/mod.ts";
+
+import normalizeResult from "./normalize.js";
+import destructureQueries from "./destructure.js";
+import SLRUCache from "./wTinyLFU%20Sub-Caches/slruSub-cache.js"
+import LRUCache from "./wTinyLFU%20Sub-Caches/lruSub-cache.js";
+import { FrequencySketch } from './FrequencySketch.js';
+
/*****
* Overall w-TinyLFU Cache
*****/
// Composes a small "window" LRU (~1% of capacity) in front of a
// segmented main LRU (~99%), with one frequency sketch shared by
// every segment for the TinyLFU admission policy.
export default function WTinyLFUCache (capacity) {
  this.capacity = capacity;
  this.ROOT_QUERY = {};
  this.ROOT_MUTATION = {};

  const sharedSketch = new FrequencySketch();
  this.sketch = sharedSketch;

  // window cache receives all new writes first
  const windowCache = new LRUCache(capacity * .01);
  windowCache.sketch = sharedSketch;
  this.WLRU = windowCache;

  // segmented main cache (probationary + protected)
  const mainCache = new SLRUCache(capacity * .99);
  mainCache.probationaryLRU.sketch = sharedSketch;
  mainCache.protectedLRU.sketch = sharedSketch;
  this.SLRU = mainCache;
}
+
// Insert into the window cache; when that insertion evicts a node,
// run the TinyLFU admission policy to decide whether the window
// evictee or the probationary LRU candidate enters the main cache.
WTinyLFUCache.prototype.putAndPromote = async function (key, value) {
  const windowEvictee = this.WLRU.put(key, value);
  // no eviction from the window cache — nothing more to do
  if (!windowEvictee) return;

  const probationary = this.SLRU.probationaryLRU;
  let admitted = windowEvictee;
  // only consult TinyLFU when the probationary segment is full
  if (probationary.nodeHash.size >= Math.floor(probationary.capacity)) {
    // pit the window evictee against the probationary LRU candidate
    // and admit whichever should improve the hit ratio more
    const mainCandidate = probationary.getCandidate();
    admitted = await this.TinyLFU(windowEvictee, mainCandidate);
  }
  // winner joins the probationary segment of the main cache
  probationary.put(admitted.key, admitted.value);
}
+
/**
 * Fills in placeholder data in the response object with values found in
 * the cache. Returns a promise resolving to an array of response objects,
 * or undefined when any required hash/field is missing from the cache.
 * @param {string[]} allHashesFromQuery - cache keys of the form "Type~id"
 * @param {object} fields - fields requested for each hash (nested for sub-queries)
 */
WTinyLFUCache.prototype.populateAllHashes = function (
  allHashesFromQuery,
  fields
) {
  if (!allHashesFromQuery.length) return [];
  // cache keys look like "<TypeName>~<id>"; isolate the type name before '~'
  const separatorIdx = allHashesFromQuery[0].indexOf("~");
  const typeName = allHashesFromQuery[0].slice(0, separatorIdx);
  const reduction = allHashesFromQuery.reduce(async (acc, hash) => {
    // resolve the accumulator from the previous async iteration first;
    // bug fix: if an earlier hash was a cache miss the callback returned
    // undefined, and the original then crashed with a TypeError by calling
    // .push on undefined — propagate the miss instead
    const responseObjects = await acc;
    if (responseObjects === undefined) return undefined;
    // for each hash from the input query, build the response object
    // first, check the SLRU cache
    let readVal = await this.SLRU.get(hash);
    // if the hash is not in the SLRU, check the WLRU
    if (!readVal) readVal = await this.WLRU.get(hash);
    if (readVal === "DELETED") return responseObjects;
    if (readVal) this.sketch.increment(JSON.stringify(readVal));
    // cache miss for this hash: the whole query cannot be served from cache
    if (!readVal) return undefined;
    const dataObj = {};
    for (const field in fields) {
      if (readVal[field] === "DELETED") continue;
      // a scalar field missing from the cache (other than __typename)
      // also makes the query unservable
      if (readVal[field] === undefined && field !== "__typename") {
        return undefined;
      }
      if (typeof fields[field] !== "object") {
        // add the typename for the type
        if (field === "__typename") {
          dataObj[field] = typeName;
        } else dataObj[field] = readVal[field]; // assign the value from the cache to the key in the response
      } else {
        // field is an array of hashes: recursively populate the sub-objects
        dataObj[field] = await this.populateAllHashes(
          readVal[field],
          fields[field]
        );
        if (dataObj[field] === undefined) return undefined;
      }
    }
    // accumulate one response object per hash
    responseObjects.push(dataObj);
    return responseObjects;
  }, []);
  return reduction;
};
+
// Read from the cache: parse the query string, look each query up in
// ROOT_QUERY, and populate a response object from cached values.
// Returns { data }, null when a query has no ROOT_QUERY entry, or
// undefined when the cached data is incomplete (or input is a mutation).
WTinyLFUCache.prototype.read = async function (queryStr) {
  if (typeof queryStr !== "string") throw TypeError("input should be a string");
  // parse the query string into an object of queries
  const { queries } = destructureQueries(queryStr);
  // mutations produce no queries object — nothing to read
  if (!queries) return undefined;
  const responseObject = {};
  for (const query in queries) {
    const queryData = queries[query];
    // ROOT_QUERY key is the query name concatenated with its arguments
    const queryHash = queryData.name.concat(queryData.arguments);
    // no ROOT_QUERY entry means this query has never been cached
    if (!this.ROOT_QUERY[queryHash]) return null;
    // hashes previously stored for this query
    const arrayHashes = this.ROOT_QUERY[queryHash];
    // response property label: the alias when present, otherwise the name
    const respObjProp = queryData.alias ?? queryData.name;
    // build the data objects for every hash of this query
    responseObject[respObjProp] = await this.populateAllHashes(
      arrayHashes,
      queryData.fields
    );
    if (!responseObject[respObjProp]) return undefined;
  }
  return { data: responseObject };
};
+
/**
 * Write a server response into the cache.
 * Normalizes the response, updates ROOT_QUERY/ROOT_MUTATION, updates
 * existing entries in whichever segment holds them, and inserts new
 * entries through the window cache (putAndPromote).
 * @param {string} queryStr - the original query/mutation string
 * @param {object} respObj - the response object from the server
 * @param {string[]} [searchTerms] - wrapper-configured fields used to build
 *   direct single-item lookups from "all"-type queries
 * @param {boolean} [deleteFlag] - forwarded to normalizeResult
 */
WTinyLFUCache.prototype.write = async function (queryStr, respObj, searchTerms, deleteFlag) {
  let nullFlag = false;
  let deleteMutation = "";
  // scan the response: a null payload skips caching entirely; a
  // delete-mutation records the label of the deleted item
  for(const query in respObj.data) {
    if(respObj.data[query] === null) nullFlag = true
    // NOTE(review): labelId is defined elsewhere in this module; it appears
    // to build the "Type~id" label for the deleted item — confirm
    else if(query.toLowerCase().includes('delete')) deleteMutation = labelId(respObj.data[query]);
  }
  if(!nullFlag) {
    const queryObj = destructureQueries(queryStr);
    const resFromNormalize = normalizeResult(queryObj, respObj, deleteFlag);
    // update the original cache with same reference
    for (const hash in resFromNormalize) {
      // bug fix: track which segment held this hash, reset for every hash.
      // The original declared wasFoundIn once outside the loop, so a hit
      // for an earlier hash misdirected writes for later hashes.
      let wasFoundIn = null;
      // first check SLRU
      let resp = await this.SLRU.get(hash);
      // next, check the window LRU
      if (resp) wasFoundIn = 'SLRU'
      if (!resp) resp = await this.WLRU.get(hash);
      if (resp && !wasFoundIn) wasFoundIn = 'WLRU';
      if (resp) this.sketch.increment(JSON.stringify(resp));
      if (hash === "ROOT_QUERY" || hash === "ROOT_MUTATION") {
        if(deleteMutation === "") {
          // merge new query/mutation entries into the root maps in place
          this[hash] = Object.assign(this[hash], resFromNormalize[hash]);
        } else {
          // a delete-mutation: remove the deleted hash from every
          // plural ("all"-type) ROOT_QUERY entry of the same type
          const typeName = deleteMutation.slice(0, deleteMutation.indexOf('~'));
          for(const key in this.ROOT_QUERY) {
            if(key.includes(typeName + 's') || key.includes(plural(typeName))) {
              for(let i = 0; i < this.ROOT_QUERY[key].length; i++) {
                if(this.ROOT_QUERY[key][i] === deleteMutation) {
                  this.ROOT_QUERY[key].splice(i, 1);
                  i--; // account for the removed element
                }
              }
            }
          }
        }
      } else if (resFromNormalize[hash] === "DELETED") {
        // Should we delete directly or do we still need to flag as DELETED
        if (wasFoundIn === 'SLRU') await this.SLRU.put(hash, "DELETED");
        else if (wasFoundIn === 'WLRU') await this.WLRU.put(hash, "DELETED");
      } else if (resp) {
        // existing entry: merge new fields over the cached object
        const newObj = Object.assign(resp, resFromNormalize[hash]);
        // write to the appropriate cache
        if (wasFoundIn === 'SLRU') await this.SLRU.put(hash, newObj);
        else if (wasFoundIn === 'WLRU') await this.WLRU.put(hash, newObj);
      } else {
        // brand-new entry: admit via the window cache, then register the
        // hash on every matching plural ROOT_QUERY entry
        const typeName = hash.slice(0, hash.indexOf('~'));
        await this.putAndPromote(hash, resFromNormalize[hash]);
        for(const key in this.ROOT_QUERY) {
          if(key.includes(typeName + 's') || key.includes(plural(typeName))) {
            this.ROOT_QUERY[key].push(hash);
          }
        }
        /****
         * if search terms were provided in the wrapper and the query is an
         * "all"-type query, build out queries in ROOT_QUERY that match the
         * search terms for each item retrieved from the "all"-type query so
         * that future single queries can be looked up directly from the cache
         ****/
        if (searchTerms && queryStr.slice(8, 11) === 'all'){
          searchTerms.forEach(el => {
            const elVal = resFromNormalize[hash][el].replaceAll(' ', '');
            const hashKey = `one${typeName}(${el}:"${elVal}")`;
            if (!this.ROOT_QUERY[hashKey]) this.ROOT_QUERY[hashKey] = [];
            this.ROOT_QUERY[hashKey].push(hash);
          });
        }
      }
    }
  }
};
+
// Note: WholeQuery is not a currently-functioning option in Obsidian Wrapper
// NOTE(review): WTinyLFUCache defines no `put` method of its own (writes go
// through putAndPromote), so this call would throw if ever reached; the
// argument order (ROOT_QUERY value as key) also looks suspect — revisit
// before enabling the wholeQuery option.
WTinyLFUCache.prototype.writeWholeQuery = function (queryStr, respObj) {
  // cache key is the query string with all whitespace stripped
  const hash = queryStr.replace(/\s/g, "");
  this.put(this.ROOT_QUERY[hash], respObj);
  return respObj;
};
+
// Note: WholeQuery is not a currently-functioning option in Obsidian Wrapper
// NOTE(review): WTinyLFUCache defines no `get` method of its own (reads go
// through SLRU/WLRU), so this call would throw if ever reached — revisit
// before enabling the wholeQuery option.
WTinyLFUCache.prototype.readWholeQuery = function (queryStr) {
  // cache key is the query string with all whitespace stripped
  const hash = queryStr.replace(/\s/g, "");
  if (this.ROOT_QUERY[hash]) return this.get(this.ROOT_QUERY[hash]);
  // no cached entry for this query
  return undefined;
};
+
/*****
* TinyLFU Admission Policy
*****/
// Decide which candidate is admitted to the main cache by comparing
// estimated access frequencies from the shared sketch.
WTinyLFUCache.prototype.TinyLFU = async function (WLRUCandidate, SLRUCandidate) {
  // frequency estimates are keyed by the serialized candidate values
  const windowFreq = await this.sketch.frequency(JSON.stringify(WLRUCandidate.value));
  const mainFreq = await this.sketch.frequency(JSON.stringify(SLRUCandidate.value));
  // ties (>=) go to the window candidate, favoring fresher entries
  return windowFreq >= mainFreq ? WLRUCandidate : SLRUCandidate;
}
\ No newline at end of file
diff --git a/src/Obsidian.ts b/src/Obsidian.ts
index 08f9de2..ea12e66 100644
--- a/src/Obsidian.ts
+++ b/src/Obsidian.ts
@@ -2,14 +2,12 @@ import { graphql } from 'https://cdn.pika.dev/graphql@15.0.0';
import { renderPlaygroundPage } from 'https://deno.land/x/oak_graphql@0.6.2/graphql-playground-html/render-playground-html.ts';
import { makeExecutableSchema } from 'https://deno.land/x/oak_graphql@0.6.2/graphql-tools/schema/makeExecutableSchema.ts';
import { Cache } from './quickCache.js';
-import LFUCache from './Browser/lfuBrowserCache.js';
import queryDepthLimiter from './DoSSecurity.ts';
import { restructure } from './restructure.ts';
-import { rebuildFromQuery } from './rebuild.js';
import { normalizeObject } from './normalize.ts';
-import { transformResponse, detransformResponse } from './transformResponse.ts';
import { isMutation, invalidateCache } from './invalidateCacheCheck.ts';
import { mapSelectionSet } from './mapSelections.js';
+import { HashTable } from './queryHash.js';
interface Constructable