@@ -102,10 +102,10 @@
class="results-progress"
v-show="runningQuery"
>
-
@@ -133,12 +133,14 @@ import debounce from '@/modules/debounce'
import accordion from '@/components/accordion.vue'
import yasr from '@/components/explorer/yasr'
import yasqe from '@/components/explorer/yasqe'
+import spinner from '@/components/Spinner'
export default {
components: {
accordion,
yasqe,
- yasr
+ yasr,
+ spinner
},
data () {
return {
diff --git a/app/src/router/module/explorer.js b/app/src/router/module/explorer.js
index eb5a4a78..af940501 100644
--- a/app/src/router/module/explorer.js
+++ b/app/src/router/module/explorer.js
@@ -78,6 +78,23 @@ const explorerRoutes = [
}
]
},
+ {
+ path: 'curate/validlist',
+ component: () => import('@/pages/explorer/curate/CurateBase.vue'),
+ meta: { requiresAuth: true },
+ children: [
+ {
+ path: '',
+ name: 'validList',
+ component: () => import('@/pages/explorer/curate/validlist/XlsList.vue')
+ },
+ {
+ path: 'update',
+ name: 'xlsUpdate',
+ component: () => import('@/pages/explorer/curate/validlist/UpdateXlsList.vue')
+ }
+ ]
+ },
// {
// path: 'stepper',
// name: 'CurateStepper',
@@ -171,6 +188,15 @@ const explorerRoutes = [
'@/pages/explorer/parameterized-query/parameterized-query-page.vue'
),
meta: { requiresAuth: true }
+ },
+ {
+ path: 'sparql',
+ name: 'Sparql',
+ component: () =>
+ import(
+ '@/pages/explorer/Sparql.vue'
+ ),
+ meta: { requiresAuth: true }
}
]
diff --git a/app/src/store/modules/auth/actions.js b/app/src/store/modules/auth/actions.js
index 0c66a06b..351cdb6d 100644
--- a/app/src/store/modules/auth/actions.js
+++ b/app/src/store/modules/auth/actions.js
@@ -53,13 +53,13 @@ export default {
router.push('/nm')
context.commit('setSnackbar', {
message: 'Authenticating...',
- duration: 1000
+ duration: 3000
}, { root: true })
const token = res.token ?? null
const userId = res.userId ?? null
const displayName = res.displayName ?? null
- const expiresIn = 450 * 1000
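+      // 9000 * 60 * 60 ms = 32,400,000 ms, i.e. a 9-hour session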
+ const expiresIn = 9000 * 60 * 60
const expirationDate = new Date().getTime() + expiresIn
if (token && userId && displayName) {
diff --git a/app/tests/unit/components/drawer.spec.js b/app/tests/unit/components/drawer.spec.js
index 845c7711..2c6a1f61 100644
--- a/app/tests/unit/components/drawer.spec.js
+++ b/app/tests/unit/components/drawer.spec.js
@@ -14,15 +14,26 @@ describe('Drawer.vue', () => {
}, false)
})
- it('renders curate and its children on the drawer', () => {
- expect.assertions(6)
+  it('renders the right number of dropdowns on the drawer when not authenticated', () => {
+ expect.assertions(7)
expect(wrapper.html()).toContain('Home')
expect(wrapper.html()).toContain('About')
expect(wrapper.html()).toContain('Visualize')
- expect(wrapper.html()).toContain('Curate')
expect(wrapper.html()).toContain('Tools')
- // TODO: Rewrite this test (for when auth enabled/auth disabled)
- // expect(wrapper.html()).toContain('Log out')
expect(wrapper.html()).toContain('Login')
+ expect(wrapper.html()).not.toContain('Curate')
+ expect(wrapper.html()).not.toContain('Log out')
+ })
+
+ it('renders curate and its children on the drawer', async () => {
+ expect.assertions(7)
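+    // Simulate an authenticated session so the auth-gated drawer items render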
+ await wrapper.vm.$store.commit('auth/setUser', { token: 'Randomtoken', userId: 'userId', displayName: 'Test user' })
+ expect(wrapper.html()).toContain('Home')
+ expect(wrapper.html()).toContain('About')
+ expect(wrapper.html()).toContain('Visualize')
+ expect(wrapper.html()).toContain('Curate')
+ expect(wrapper.html()).toContain('Tools')
+ expect(wrapper.html()).toContain('Log out')
+ expect(wrapper.html()).not.toContain('Login')
})
})
diff --git a/app/tests/unit/components/explorer/Header.spec.js b/app/tests/unit/components/explorer/Header.spec.js
index 409c4fd4..4e646b17 100644
--- a/app/tests/unit/components/explorer/Header.spec.js
+++ b/app/tests/unit/components/explorer/Header.spec.js
@@ -24,6 +24,6 @@ describe('Header.vue', () => {
it('renders Menu tabs correctly', async () => {
expect.assertions(1)
const menuItems = wrapper.findAll('._menutabs')
- expect(menuItems.length).toBe(4)
+ expect(menuItems.length).toBe(5)
})
})
diff --git a/app/tests/unit/components/explorer/yasgui.spec.js b/app/tests/unit/components/explorer/yasgui.spec.js
new file mode 100644
index 00000000..b236ca8d
--- /dev/null
+++ b/app/tests/unit/components/explorer/yasgui.spec.js
@@ -0,0 +1,24 @@
+import createWrapper from '../../../jest/script/wrapper'
+import YasguiWrapper from '@/components/explorer/yasgui.vue'
+
+document.body.createTextRange = (elem) => {
+ const textRange = {
+ getBoundingClientRect: () => 1,
+ getClientRects: () => 1
+ }
+ return textRange
+}
+
+describe('yasgui.vue', () => {
+ it('renders yasgui in correct place', () => {
+ const wrapper = createWrapper(YasguiWrapper, {}, true)
+ const yasguiComponent = wrapper.findComponent('#YASGUI > .yasgui')
+ expect(yasguiComponent.exists()).toBeTruthy()
+ })
+ it('renders yasgui in correct place with custom id prop', () => {
+ const id = 'yasguiTest'
+ const wrapper = createWrapper(YasguiWrapper, { props: { id } }, true)
+ const yasguiComponent = wrapper.findComponent('#yasguiTest > .yasgui')
+ expect(yasguiComponent.exists()).toBeTruthy()
+ })
+})
diff --git a/app/tests/unit/pages/explorer/Curate.spec.js b/app/tests/unit/pages/explorer/Curate.spec.js
index 6f6d5dab..5aa48346 100644
--- a/app/tests/unit/pages/explorer/Curate.spec.js
+++ b/app/tests/unit/pages/explorer/Curate.spec.js
@@ -1,5 +1,5 @@
import createWrapper from '../../../jest/script/wrapper'
-import { enableAutoDestroy } from '@vue/test-utils'
+import { enableAutoDestroy, RouterLinkStub } from '@vue/test-utils'
import ExplorerCurate from '@/pages/explorer/Curate.vue'
describe('Curate.vue', () => {
@@ -16,11 +16,33 @@ describe('Curate.vue', () => {
expect(catHeader.exists()).toBe(true)
})
+ it('renders the right page layout', () => {
+ expect.assertions(4)
+ const sections = wrapper.findAll('div > .section_teams > .curate > div')
+ expect(sections.length).toEqual(3)
+ expect(sections.at(0).attributes().class).toBeUndefined()
+ expect(sections.at(1).attributes().class).toEqual('u_margin-top-med')
+ expect(sections.at(2).attributes().class).toEqual('u_margin-top-med')
+ })
+
it('renders curate category headers', () => {
- expect.assertions(3)
+ expect.assertions(4)
const catHeaders = wrapper.findAll('.visualize_header-h1')
- expect(catHeaders.length).toEqual(2)
+ expect(catHeaders.length).toEqual(3)
expect(catHeaders.at(0).text()).toBe('Curate')
- expect(catHeaders.at(1).text()).toBe('Create Visualization')
+ expect(catHeaders.at(1).text()).toBe('Valid Curation List Entry')
+ expect(catHeaders.at(2).text()).toBe('Create Visualization')
+ })
+
+ it('renders valid curation list Item', () => {
+ expect.assertions(6)
+ const section = wrapper.findAll('div > .section_teams > .curate > div').at(1)
+ const sectionItem = section.findAll('.md-layout-item.md-layout-item_card')
+ expect(section.find('h2.visualize_header-h1.metamine_footer-ref-header').text()).toEqual('Valid Curation List Entry')
+ expect(section.find('.md-layout.md-layout-responsive').exists()).toBeTruthy()
+ expect(sectionItem.length).toBe(2)
+ expect(sectionItem.at(0).attributes().class).toEqual('md-layout-item md-size-30 md-medium-size-50 md-medium-size-100 md-layout-item_card')
+ expect(sectionItem.at(0).findComponent(RouterLinkStub).props().to).toEqual('/explorer/curate/validList')
+ expect(sectionItem.at(1).findComponent(RouterLinkStub).props().to).toEqual('/explorer/curate/validList/update')
})
})
diff --git a/app/tests/unit/pages/explorer/SparqlUI.spec.js b/app/tests/unit/pages/explorer/SparqlUI.spec.js
new file mode 100644
index 00000000..ff7c17aa
--- /dev/null
+++ b/app/tests/unit/pages/explorer/SparqlUI.spec.js
@@ -0,0 +1,22 @@
+import createWrapper from '../../../jest/script/wrapper'
+import SparqlUI from '@/pages/explorer/Sparql.vue'
+
+document.body.createTextRange = (elem) => {
+ const textRange = {
+ getBoundingClientRect: () => 1,
+ getClientRects: () => 1
+ }
+ return textRange
+}
+
+describe('Sparql.vue', () => {
+ it('contains yasgui component', () => {
+ const wrapper = createWrapper(SparqlUI, {}, true)
+ const yasguiComponent = wrapper.findComponent('.yasgui')
+ expect(yasguiComponent.exists()).toBeTruthy()
+ })
+ it('has a header', () => {
+ const wrapper = createWrapper(SparqlUI, { }, true)
+ expect(wrapper.text()).toContain('Sparql Query')
+ })
+})
diff --git a/app/tests/unit/pages/explorer/XlsList.spec.js b/app/tests/unit/pages/explorer/XlsList.spec.js
new file mode 100644
index 00000000..ac061fa3
--- /dev/null
+++ b/app/tests/unit/pages/explorer/XlsList.spec.js
@@ -0,0 +1,57 @@
+import createWrapper from '../../../jest/script/wrapper'
+import { enableAutoDestroy } from '@vue/test-utils'
+import XlsList from '@/pages/explorer/curate/validlist/XlsList.vue'
+
+describe('Spreadsheet List Form.vue', () => {
+ let wrapper
+ beforeEach(async () => {
+ wrapper = await createWrapper(XlsList, {
+ stubs: {
+        // Minimal stub markup; the tests below only assert the md-field-stub wrapper
+        MdField: { template: '<div class="md-field-stub"><slot /></div>' },
+        MdInput: { template: '<input />' }
+ }
+ }, false)
+ })
+
+ enableAutoDestroy(afterEach)
+
+ it('renders header tab correctly', () => {
+ expect.assertions(2)
+ const catHeader = wrapper.find('.visualize_header-h1.article_title.u_centralize_text')
+ expect(catHeader.exists()).toBe(true)
+ expect(catHeader.html()).toMatch('Spreadsheet List Form')
+ })
+
+ it('renders page structure properly', () => {
+ expect.assertions(4)
+ expect(wrapper.find('.section_teams').exists()).toBe(true)
+ expect(wrapper.find('.md-layout.md-gutter.md-alignment-top-center').exists()).toBe(true)
+ expect(wrapper.find('.md-layout-item.md-size-50.md-medium-size-70.md-small-size-85.md-xsmall-size-95').exists()).toBe(true)
+ expect(wrapper.findAll('.section_teams > div > div > .md-layout > .md-layout-item').length).toBe(1)
+ })
+
+ it('renders page input properly', () => {
+ expect.assertions(4)
+ const field = wrapper.findAll('.md-field-stub')
+ expect(field.length).toBe(2)
+ const name = field.at(0)
+ expect(name.find('p').text()).toBe('FieldName:')
+ expect(name.find('span').text()).toBe('Section::Subsection::Unit')
+ const value = field.at(1)
+ expect(value.find('p').text()).toBe('Value:')
+ })
+
+ it('renders buttons properly', () => {
+ expect.assertions(7)
+ expect(wrapper.find('.form__group.search_box_form-item-2.explorer_page-nav.u_margin-top-med').exists()).toBe(true)
+ expect(wrapper.findAll('.btn.btn--noradius.search_box_form_btn.mid-first-li.display-text.u--margin-pos').length).toBe(2)
+ const button = wrapper.findAll('button')
+ expect(button.length).toBe(2)
+ expect(button.at(0).attributes().class).toContain('btn--tertiary')
+ expect(button.at(0).text()).toContain('Add more')
+ expect(button.at(1).attributes().class).toContain('btn--primary')
+ expect(button.at(1).text()).toContain('Submit')
+ })
+
+ // Todo: Test for the tables
+})
diff --git a/docker-compose.yml b/docker-compose.yml
index 9bc63305..63ddd53f 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -90,6 +90,7 @@ services:
- MM_AUTH_SURNAME_HEADER=${MM_AUTH_SURNAME_HEADER}
- MM_AUTH_EMAIL_HEADER=${MM_AUTH_EMAIL_HEADER}
- MM_AUTH_USER_HEADER=${MM_AUTH_USER_HEADER}
+ - KNOWLEDGE_ADDRESS=${WHYIS_ADDRESS}
ports:
- '3001:3001'
client:
@@ -106,65 +107,65 @@ services:
volumes:
- /app/node_modules
- ./app:/app
-# redis:
-# image: "redis:alpine"
-# command: redis-server
-# volumes:
-# - ./mockDB/redis-data:/var/lib/redis
-# celery:
-# depends_on:
-# - redis
-# - fuseki
-# volumes:
-# - ./mockDB/fuseki:/app/run
-# - ./whyis/materialsmine:/app
-# command: /opt/venv/bin/celery -A wsgi.celery worker -l INFO -c 4 --uid=nobody --gid=nogroup
-# environment:
-# - CHOKIDAR_USEPOLLING=true
-# build: whyis
-# celerybeat:
-# depends_on:
-# - redis
-# - fuseki
-# volumes:
-# - ./mockDB/fuseki:/app/run
-# - ./whyis/materialsmine:/app
-# command: /opt/venv/bin/celery -A wsgi.celery beat -l INFO --uid=nobody --gid=nogroup
-# environment:
-# - CHOKIDAR_USEPOLLING=true
-# build: whyis
-# whyis:
-# depends_on:
-# - redis
-# - fuseki
-# stdin_open: true
-# command: /opt/venv/bin/gunicorn wsgi:application --workers ${WEB_CONCURRENCY:-8} --timeout 0 -b :8000
-# environment:
-# - CHOKIDAR_USEPOLLING=true
-# build: whyis
-# volumes:
-# - ./whyis/materialsmine:/app
-# - ./mockDB/fuseki:/app/run
-# ports:
-# - '8000:8000'
-# - '5000:5000'
-# fuseki:
-# build: whyis
-# command: /opt/venv/bin/fuseki-server --mem /ds
-# # Needs fuseki 4.3.2 to use our full text search, stain is still at 4.0.0.
-# # image: 'stain/jena-fuseki:4.0.0
-# restart: always
-# mem_limit: 5G
-# mem_reservation: 5G
-# environment:
-# # - ADMIN_USER=admin
-# # - ADMIN_PASSWORD=${ADMIN_PASSWORD}
-# - JVM_ARGS=-Xmx4g
-# - FUSEKI_BASE=/fuseki
-# ports:
-# - '3030:3030'
-# volumes:
-# - ./mockDB/fuseki:/fuseki
+ redis:
+ image: "redis:alpine"
+ command: redis-server
+ volumes:
+ - ./mockDB/redis-data:/var/lib/redis
+ celery:
+ depends_on:
+ - redis
+ - fuseki
+ volumes:
+ - ./mockDB/fuseki:/app/run
+ - ./whyis/materialsmine:/app
+ command: /opt/venv/bin/celery -A wsgi.celery worker -l INFO -c 4 --uid=nobody --gid=nogroup
+ environment:
+ - CHOKIDAR_USEPOLLING=true
+ build: whyis
+ celerybeat:
+ depends_on:
+ - redis
+ - fuseki
+ volumes:
+ - ./mockDB/fuseki:/app/run
+ - ./whyis/materialsmine:/app
+ command: /opt/venv/bin/celery -A wsgi.celery beat -l INFO --uid=nobody --gid=nogroup
+ environment:
+ - CHOKIDAR_USEPOLLING=true
+ build: whyis
+ whyis:
+ container_name: whyis
+ depends_on:
+ - redis
+ - fuseki
+ stdin_open: true
+ command: /opt/venv/bin/gunicorn wsgi:application --workers ${WEB_CONCURRENCY:-8} --timeout 0 -b :8000
+ environment:
+ - CHOKIDAR_USEPOLLING=true
+ build: whyis
+ volumes:
+ - ./whyis/materialsmine:/app
+ - ./mockDB/fuseki:/app/run
+ ports:
+ - '8000:8000'
+ fuseki:
+ build: whyis
+ command: /opt/venv/bin/fuseki-server --mem /ds
+# Needs fuseki 4.3.2 to use our full text search, stain is still at 4.0.0.
+# image: 'stain/jena-fuseki:4.0.0
+ restart: always
+ # mem_limit: 9G
+ # mem_reservation: 9G
+ environment:
+# - ADMIN_USER=admin
+# - ADMIN_PASSWORD=${ADMIN_PASSWORD}
+ - JVM_ARGS=-Xmx10g
+ - FUSEKI_BASE=/fuseki
+ ports:
+ - '3030:3030'
+ volumes:
+ - ./mockDB/fuseki:/fuseki
volumes:
mockDB:
diff --git a/resfulservice/.nycrc.json b/resfulservice/.nycrc.json
index ace035be..ea6f65bb 100644
--- a/resfulservice/.nycrc.json
+++ b/resfulservice/.nycrc.json
@@ -3,7 +3,7 @@
"include": ["src/"],
"exclude": ["**/*.spec.js"],
"check-coverage": true,
- "statements": 31,
+ "statements": 30,
"branches": 22,
"functions": 21,
"lines": 31
diff --git a/resfulservice/config/constant.js b/resfulservice/config/constant.js
index 609e8e60..07fa9d1c 100644
--- a/resfulservice/config/constant.js
+++ b/resfulservice/config/constant.js
@@ -1,7 +1,9 @@
+
module.exports = {
- samples: 'https://materialsmine.org/wi/about?view=instances&uri=http://materialsmine.org/ns/PolymerNanocomposite',
- articles: 'https://materialsmine.org/wi/about?view=instances&uri=http%3A%2F%2Fmaterialsmine.org%2Fns%2FResearchArticle',
- images: 'https://materialsmine.org/wi/about?view=instances&uri=http://semanticscience.org/resource/Image',
- charts: 'https://materialsmine.org/wi/about?view=instances&uri=http://semanticscience.org/resource/Chart',
+ samples: 'about?view=instances&uri=http://materialsmine.org/ns/PolymerNanocomposite',
+ articles: 'about?view=instances&uri=http%3A%2F%2Fmaterialsmine.org%2Fns%2FResearchArticle',
+ images: 'about?view=instances&uri=http://semanticscience.org/resource/Image',
+ charts: 'about?view=instances&uri=http://semanticscience.org/resource/Chart',
+ sparql: 'sparql',
supportedBrowser: ['Firefox', 'Chrome', 'Canary', 'Safari', 'Opera', 'IE']
};
diff --git a/resfulservice/package.json b/resfulservice/package.json
index 7deaff98..ef02e99f 100644
--- a/resfulservice/package.json
+++ b/resfulservice/package.json
@@ -16,7 +16,6 @@
"apollo-server-express": "^3.7.0",
"axios": "^0.26.1",
"bcryptjs": "^2.4.3",
- "body-parser": "1.19.2",
"csvtojson": "^2.0.10",
"express": "^4.17.1",
"express-validator": "^6.14.0",
@@ -31,6 +30,7 @@
"multer": "^1.4.4",
"node-schedule": "^2.1.0",
"nodemailer": "^6.7.2",
+ "read-excel-file": "^5.6.1",
"sinon": "^14.0.0",
"swagger-ui-express": "^4.2.0",
"ua-parser-js": "^1.0.33",
diff --git a/resfulservice/script/getTiffImage.js b/resfulservice/script/getTiffImage.js
index eb828a5a..d36417b0 100644
--- a/resfulservice/script/getTiffImage.js
+++ b/resfulservice/script/getTiffImage.js
@@ -1,22 +1,17 @@
const mongoose = require('mongoose');
-const storedFiles = require('../src/models/fsFiles');
+// const storedFiles = require('../src/models/fsFiles');
const env = process.env;
/**
* Ensure the environment variable is set
*/
-const db = mongoose.connection
+const db = mongoose.connection;
async function getImagesFromMongo () {
- try {
- await mongoose.connect(`mongodb://${env.MM_MONGO_USER}:${env.MM_MONGO_PWD}@localhost:${env.MONGO_PORT}/${env.MM_DB}`, {keepAlive: true, keepAliveInitialDelay: 300000});
- db.on('error', () => console.log('An error occurred'));
- db.once('open', () => console.log('Open successfully'));
- console.log('ran out')
- } catch (err) {
- throw err
- }
+ await mongoose.connect(`mongodb://${env.MM_MONGO_USER}:${env.MM_MONGO_PWD}@localhost:${env.MONGO_PORT}/${env.MM_DB}`, { keepAlive: true, keepAliveInitialDelay: 300000 });
+ db.on('error', () => console.log('An error occurred'));
+ db.once('open', () => console.log('Open successfully'));
// mongoose
// .connect(`mongodb://${env.MM_MONGO_USER}:${env.MM_MONGO_PWD}@localhost:${env.MONGO_PORT}/${env.MM_DB}`, {
// useNewUrlParser: true, useUnifiedTopology: true
@@ -30,4 +25,4 @@ async function getImagesFromMongo () {
// })
}
-getImagesFromMongo();
\ No newline at end of file
+getImagesFromMongo();
diff --git a/resfulservice/spec/graphql/resolver/dataset.spec.js b/resfulservice/spec/graphql/resolver/dataset.spec.js
index 14a6000a..80c7fadb 100644
--- a/resfulservice/spec/graphql/resolver/dataset.spec.js
+++ b/resfulservice/spec/graphql/resolver/dataset.spec.js
@@ -23,9 +23,9 @@ describe('Dataset Resolver Unit Tests:', function () {
originalname: 'Hopetoun_falls.jpg',
encoding: '7bit',
mimetype: 'image/jpeg',
- destination: 'mm_fils',
+ destination: 'mm_files',
filename: 'comparative_aphid_agathe-2022-08-18T10:00:40.910Z-Hopetoun_falls.jpg',
- path: 'mm_fils/comparative_aphid_agathe-2022-08-18T10:00:40.910Z-Hopetoun_falls.jpg',
+ path: 'mm_files/comparative_aphid_agathe-2022-08-18T10:00:40.910Z-Hopetoun_falls.jpg',
size: 2954043,
},
{
@@ -33,9 +33,9 @@ describe('Dataset Resolver Unit Tests:', function () {
originalname: 'flowers-276014__340.jpg',
encoding: '7bit',
mimetype: 'image/jpeg',
- destination: 'mm_fils',
+ destination: 'mm_files',
filename: 'comparative_aphid_agathe-2022-08-18T10:00:41.018Z-flowers-276014__340.jpg',
- path: 'mm_fils/comparative_aphid_agathe-2022-08-18T10:00:41.018Z-flowers-276014__340.jpg',
+ path: 'mm_files/comparative_aphid_agathe-2022-08-18T10:00:41.018Z-flowers-276014__340.jpg',
size: 56575,
},
],
diff --git a/resfulservice/spec/graphql/resolver/material_template.spec.js b/resfulservice/spec/graphql/resolver/material_template.spec.js
new file mode 100644
index 00000000..0f36a1af
--- /dev/null
+++ b/resfulservice/spec/graphql/resolver/material_template.spec.js
@@ -0,0 +1,193 @@
+const chai = require('chai');
+const sinon = require('sinon');
+const MaterialTemplate = require('../../../src/models/xlsxCurationList')
+const graphQlSchema = require('../../../src/graphql');
+const { Mutation: { createXlsxCurationList, updateXlsxCurationList }, Query: { getXlsxCurationList }} = require('../../../src/graphql/resolver');
+
+
+const { expect } = chai;
+const user = {
+ _id: 'ai094oja09aw40-o',
+ displayName: "test"
+}
+const mockColumn = {
+ field: "Flight_width::Uniter",
+ values: [
+ "nm",
+ "um",
+ "mm",
+ "cm",
+ "m"
+ ]
+}
+
+const mockColumnsInput = {
+ columns: [
+ {
+ field: "Flight_width::Units",
+ values: [
+ "nm",
+ "um",
+ "mm",
+ "cm",
+ "m"
+ ]
+ },
+ {
+ field: "Origins",
+ values: [
+ "experiments",
+ "informatics (data science)",
+ "simulations",
+ "theory"
+ ]
+ }
+ ]
+}
+
+const mockDBColumn = {
+ _id: 'kas2344nlkla',
+ ...mockColumn,
+ lean: () => this
+}
+
+const mockConflictError = {
+ writeErrors: [
+ {
+ err: {
+ index: 0,
+ code: 11000,
+ errmsg: 'E11000 duplicate key error collection: mgi.materialtemplates index: field_1 dup key: { field: "Flight_width::Units" }',
+ }
+ },
+ {
+ err: {
+ index: 1,
+ code: 11000,
+ errmsg: 'E11000 duplicate key error collection: mgi.materialtemplates index: field_1 dup key: { field: "Origins" }',
+ }
+ }
+ ]
+}
+
+describe('Material Template Resolver Unit Tests:', function () {
+
+ afterEach(() => sinon.restore());
+
+ const req = { logger: { info: (message) => { }, error: (message) => { } } }
+
+ context('createXlsxCurationList', () => {
+ const input = {
+ ...mockColumnsInput
+ }
+
+ it('should have createXlsxCurationList(...) as a Mutation resolver', async function () {
+ const { createXlsxCurationList } = graphQlSchema.getMutationType().getFields();
+ expect(createXlsxCurationList.name).to.equal('createXlsxCurationList');
+ });
+
+    it('should create new material columns', async () => {
+ sinon.stub(MaterialTemplate, 'insertMany').returns(true);
+ // sinon.stub(MaterialTemplate.prototype, 'save').callsFake(() => ({...input, _id: 'b39ak9qna'}))
+ const columns = await createXlsxCurationList({}, { input }, { user, req, isAuthenticated: true });
+
+ expect(columns).to.have.property('columns');
+ });
+
+ it('should return a 401 unauthenticated error', async () => {
+
+ const result = await createXlsxCurationList({}, { input: { } }, { user, req, isAuthenticated: false });
+
+ expect(result).to.have.property('extensions');
+ expect(result.extensions.code).to.be.equal(401);
+ });
+
+ it('should return a 409 conflict error', async () => {
+ sinon.stub(MaterialTemplate, 'insertMany').throws(mockConflictError);
+ const result = await createXlsxCurationList({}, { input }, { user, req, isAuthenticated: true });
+
+ expect(result).to.have.property('extensions');
+ expect(result.extensions.code).to.be.equal(409);
+ });
+
+    it('should return MaterialColumns! datatype for createXlsxCurationList(...) mutation', () => {
+ const { createXlsxCurationList } = graphQlSchema.getMutationType().getFields();
+ expect(createXlsxCurationList.type.toString()).to.equal('MaterialColumns!');
+ });
+ })
+
+ context('updateXlsxCurationList', () => {
+ const input = { ...mockColumn }
+ it("should throw a 401, not authenticated error", async () => {
+
+ const error = await updateXlsxCurationList({}, { input }, { user, req, isAuthenticated: false });
+ expect(error).to.have.property('extensions');
+ expect(error.extensions.code).to.be.equal(401);
+ });
+
+ it("should return a 404 error if column doesn't exist", async () => {
+ sinon.stub(MaterialTemplate, 'findOne').returns(null);
+ const error = await updateXlsxCurationList({}, { input }, { user, req, isAuthenticated: true });
+
+ expect(error).to.have.property('extensions');
+ expect(error.extensions.code).to.be.equal(404);
+ });
+
+ it("should update column if column exists", async () => {
+ sinon.stub(MaterialTemplate, 'findOne').returns(mockDBColumn);
+ sinon.stub(MaterialTemplate, 'findOneAndUpdate').returns({...mockDBColumn, ...input, user});
+
+ const result = await updateXlsxCurationList({}, { input }, { user, req, isAuthenticated: true });
+
+ expect(result).to.have.property('field');
+ expect(result).to.have.property('values');
+ });
+
+ it("should throw a 500, server error", async () => {
+ sinon.stub(MaterialTemplate, 'findOne').throws();
+ const error = await updateXlsxCurationList({}, { input }, { user, req, isAuthenticated: true });
+
+ expect(error).to.have.property('extensions');
+ expect(error.extensions.code).to.be.equal(500);
+ });
+ });
+
+ context('getXlsxCurationList', () => {
+ const input = { ...mockColumn, pageNumber: 1, pageSize: 10 }
+ it("should throw a 401, not authenticated error", async () => {
+
+ const error = await getXlsxCurationList({}, { input }, { user, req, isAuthenticated: false });
+ expect(error).to.have.property('extensions');
+ expect(error.extensions.code).to.be.equal(401);
+ });
+
+ it("should return paginated lists of columns", async () => {
+ sinon.stub(MaterialTemplate, 'countDocuments').returns(2);
+ sinon.stub(MaterialTemplate, 'find').returns(mockColumnsInput.columns);
+ const result = await getXlsxCurationList({}, { input }, { user, req, isAuthenticated: true });
+
+ expect(result).to.have.property('columns');
+ expect(result.columns).to.be.an('Array');
+ });
+
+    it("should return paginated lists of columns when only a field filter is given", async () => {
+ const input = { field: mockColumn.field };
+ sinon.stub(MaterialTemplate, 'countDocuments').returns(2)
+ sinon.stub(MaterialTemplate, 'find').returns(mockColumnsInput.columns);
+
+ const result = await getXlsxCurationList({}, { input }, { user, req, isAuthenticated: true });
+
+ expect(result).to.have.property('columns');
+ expect(result.columns).to.be.an('Array');
+ });
+
+ it("should throw a 500, server error", async () => {
+ sinon.stub(MaterialTemplate, 'countDocuments').returns(2)
+ sinon.stub(MaterialTemplate, 'find').throws();
+ const error = await getXlsxCurationList({}, { input }, { user, req, isAuthenticated: true });
+
+ expect(error).to.have.property('extensions');
+ expect(error.extensions.code).to.be.equal(500);
+ });
+ });
+});
diff --git a/resfulservice/src/controllers/adminController.js b/resfulservice/src/controllers/adminController.js
index 6e7daabb..51e19a16 100644
--- a/resfulservice/src/controllers/adminController.js
+++ b/resfulservice/src/controllers/adminController.js
@@ -49,15 +49,24 @@ const _loadBulkElasticSearch = async (req, res, next) => {
try {
const total = data.length;
let rejected = 0;
+
+ // Delete existing docs in this index type
+ log.info(`_loadBulkElasticSearch(): Deleting existing ${type} indices`);
+ if (total) await elasticSearch.deleteIndexDocs(type);
+ log.info(`_loadBulkElasticSearch(): Successfully deleted ${type} indices`);
+
for (const item of data) {
const response = await elasticSearch.indexDocument(req, type, item);
+
if (!response) {
log.debug(`_loadBulkElasticSearch()::error: rejected - ${response.statusText}`);
rejected = rejected + 1;
}
}
+
await elasticSearch.refreshIndices(req, type);
successWriter(req, 'success', '_loadBulkElasticSearch');
+
return res.status(200).json({
total,
rejected
@@ -76,19 +85,21 @@ const _loadBulkElasticSearch = async (req, res, next) => {
*/
exports.loadElasticSearch = async (req, res, next) => {
const log = req.logger;
- log.info('loadElasticSearch(): Function entry');
const body = JSON.parse(req?.body);
const type = body?.type;
const doc = body?.doc;
+ log.info('loadElasticSearch(): Function entry');
if (!type || !doc) {
return next(errorWriter(req, 'Category type or doc is missing', 'loadElasticSearch', 422));
}
try {
const response = await elasticSearch.indexDocument(req, type, doc);
+
await elasticSearch.refreshIndices(req, type);
successWriter(req, 'success', 'loadElasticSearch');
+
return res.status(200).json({
response
});
@@ -119,7 +130,8 @@ exports.pingElasticSearch = async (req, res, next) => {
};
/**
- * Data dump into ES
+ * Fetch data from the knowledge graph and dump it into ES
+ * NOTE: It overwrites the index
* @param {*} req
* @param {*} res
* @param {*} next
@@ -139,6 +151,10 @@ exports.dataDump = async (req, res, next) => {
}
};
+/** Uploads already-fetched data into ES. It will NOT call the
+ * knowledge graph, as it assumes the user already has the data.
+ * NOTE: It overwrites the index.
+ */
exports.bulkElasticSearchImport = (req, res, next) => {
const log = req.logger;
log.info('bulkElasticSearchImport(): Function entry');
diff --git a/resfulservice/src/controllers/fileController.js b/resfulservice/src/controllers/fileController.js
index 241405e9..07e8b04b 100644
--- a/resfulservice/src/controllers/fileController.js
+++ b/resfulservice/src/controllers/fileController.js
@@ -1,6 +1,10 @@
const mongoose = require('mongoose');
+const { PassThrough } = require('stream');
+const fsFiles = require('../models/fsFiles');
const { errorWriter, successWriter } = require('../utils/logWriter');
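+// Tiny placeholder PNG streamed back when the requested file id does not exist.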
+const _createEmptyStream = () => new PassThrough().end(Buffer.from('iVBORw0KGgoAAAANSUhEUgAAAAgAAAAIAQMAAAD+wSzIAAAABlBMVEX///+/v7+jQ3Y5AAAADklEQVQI12P4AIX8EAgALgAD/aNpbtEAAAAASUVORK5CYII', 'base64'));
+
exports.imageMigration = async (req, res, next) => {
const { imageType } = req.params;
@@ -29,6 +33,11 @@ exports.fileContent = async (req, res, next) => {
try {
const _id = new mongoose.Types.ObjectId(fileId);
+ const exist = await fsFiles.findById(_id).limit(1);
+ if (!exist) {
+ res.setHeader('Content-Type', 'image/png');
+ return _createEmptyStream().pipe(res);
+ }
const downloadStream = bucket.openDownloadStream(_id);
downloadStream.pipe(res);
} catch (error) {
diff --git a/resfulservice/src/controllers/kgWrapperController.js b/resfulservice/src/controllers/kgWrapperController.js
index dcaab7e3..8e65de2c 100644
--- a/resfulservice/src/controllers/kgWrapperController.js
+++ b/resfulservice/src/controllers/kgWrapperController.js
@@ -12,23 +12,47 @@ const _outboundRequest = async (req, next) => {
const log = req.logger;
log.info('_outboundRequest(): Function entry');
+ if (!req.env.KNOWLEDGE_ADDRESS) {
+ return next(errorWriter(req, 'Knowledge endpoint address missing', '_outboundRequest', 422));
+ }
+
const query = req?.query;
const type = query?.type;
- const uri = query?.uri || constant[type];
+ let url = query?.uri;
+ let altMethod;
+
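+  // No explicit URI supplied: fall back to the configured knowledge-graph address
+  // for the requested category and force a GET request.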
+ if (!url) {
+ url = `${req.env.KNOWLEDGE_ADDRESS}/${constant[type]}`;
+ altMethod = 'get';
+ }
- if (!type) {
+ if (!query?.uri && !type) {
return next(errorWriter(req, 'Category type is missing', '_outboundRequest', 422));
}
- if (!uri) {
+ if (!url) {
return next(errorWriter(req, 'URI is missing in the request body', '_outboundRequest', 422));
}
- const response = await axios({
- method: 'get',
- url: uri,
- httpsAgent: new https.Agent(httpsAgent)
- });
+ const preparedRequest = {
+ method: altMethod ?? req.method,
+ httpsAgent: new https.Agent(httpsAgent),
+ url
+ };
+
+ if (query?.queryString) {
+ preparedRequest.params = {
+ query: query.queryString
+ };
+ }
+
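+  // Internal (backend) calls and explicit JSON requests ask the endpoint for
+  // SPARQL results in JSON rather than the default response format.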
+ if (req.isBackendCall || req.query?.responseType === 'json') {
+ preparedRequest.headers = {
+ accept: 'application/sparql-results+json'
+ };
+ }
+
+ const response = await axios(preparedRequest);
return {
type,
@@ -82,6 +106,36 @@ exports.getKnowledge = async (req, res, next) => {
}
};
+/**
+ * getSparql - Retrieves data from the KG via SPARQL query
+ * @param {*} req
+ * @param {*} res
+ * @param {*} next
+ * @returns {*} response.data
+ */
+exports.getSparql = async (req, res, next) => {
+ try {
+ if (!req.env.KNOWLEDGE_ADDRESS) {
+ return next(errorWriter(req, 'Knowledge endpoint address missing', 'getSparql', 422));
+ }
+
+ req.query.queryString = req?.body?.query ?? req?.query?.query;
+ req.query.uri = `${req.env.KNOWLEDGE_ADDRESS}/${constant.sparql}`;
+
+ successWriter(req, { message: 'sending request' }, 'getSparql');
+ const response = await _outboundRequest(req, next);
+ successWriter(req, { message: 'success' }, 'getSparql');
+
+    // The `isBackendCall` flag marks internal calls so the response is returned
+    // to the calling function instead of being written to `res`.
+ if (req.isBackendCall) return response?.data;
+
+ return res.status(200).json({ ...response?.data });
+ } catch (err) {
+ next(errorWriter(req, err, 'getSparql'));
+ }
+};
+
/**
* Load chart gallery from elastic function
* @param {*} req
diff --git a/resfulservice/src/graphql/resolver/index.js b/resfulservice/src/graphql/resolver/index.js
index 754708c8..49cf3e8e 100644
--- a/resfulservice/src/graphql/resolver/index.js
+++ b/resfulservice/src/graphql/resolver/index.js
@@ -7,11 +7,13 @@ const contactMutation = require('./contact/mutation');
const datasetMutation = require('./dataset/mutation');
const datasetQuery = require('./dataset/query');
const pixelatedDataExplorerQuery = require('./pixelated/query');
+const materialMutation = require('./material_template/mutation');
+const materialQuery = require('./material_template/query');
const { filesetsUnionResolveType } = require('./dataset/field-resolver');
const resolvers = {
- Query: Object.assign({}, userQuery, imageExplorerQuery, contactQuery, datasetQuery, pixelatedDataExplorerQuery),
- Mutation: Object.assign({}, userMutation, apiAccessMutation, contactMutation, datasetMutation),
+ Query: Object.assign({}, userQuery, imageExplorerQuery, contactQuery, datasetQuery, pixelatedDataExplorerQuery, materialQuery),
+ Mutation: Object.assign({}, userMutation, apiAccessMutation, contactMutation, datasetMutation, materialMutation),
Filesets: { __resolveType: filesetsUnionResolveType }
};
diff --git a/resfulservice/src/graphql/resolver/material_template/input.graphql b/resfulservice/src/graphql/resolver/material_template/input.graphql
new file mode 100644
index 00000000..3e425bca
--- /dev/null
+++ b/resfulservice/src/graphql/resolver/material_template/input.graphql
@@ -0,0 +1,31 @@
+input columnsInput {
+ "Column field name"
+ field: String!
+
+"List of possible values for column field"
+ values: [String!]!
+}
+
+input materialsInput {
+ "list of material column template to store"
+ columns: [columnsInput!]!
+}
+
+input materialQueryInput {
+ "Column field name"
+ field: String
+
+ """
+  Pagination option selecting which page of columns to return.
+  This defaults to 1 if no value is specified.
+  The columns are grouped into pages based on the specified pageSize (defaults to 20 if not specified).
+ """
+ pageNumber: Int
+
+ """
+ Number of columns per page.
+ Pagination option for specifying how the server should group the columns.
+  Defaults to 20 if none is specified.
+ """
+ pageSize: Int
+}
diff --git a/resfulservice/src/graphql/resolver/material_template/mutation.js b/resfulservice/src/graphql/resolver/material_template/mutation.js
new file mode 100644
index 00000000..bace86e2
--- /dev/null
+++ b/resfulservice/src/graphql/resolver/material_template/mutation.js
@@ -0,0 +1,44 @@
+const MaterialTemplate = require('../../../models/xlsxCurationList');
+const errorFormater = require('../../../utils/errorFormater');
+
+const materialMutation = {
+ createXlsxCurationList: async (_, { input }, { user, req, isAuthenticated }) => {
+ req.logger?.info('[createMaterialColumn] Function Entry:');
+ if (!isAuthenticated) {
+ req.logger?.error('[createMaterialColumn]: User not authenticated to create Material column');
+ return errorFormater('not authenticated', 401);
+ }
+ const { columns } = input;
+ const curatedList = columns.map(column => ({ ...column, user: user._id }));
+ const result = await insertMany(curatedList);
+ if (result) return errorFormater(result, 409);
+ return { columns: curatedList };
+ },
+
+ updateXlsxCurationList: async (_, { input }, { user, req, isAuthenticated }) => {
+ req.logger?.info('[updateMaterialColumn] Function Entry:');
+ if (!isAuthenticated) {
+ req.logger?.error('[updateMaterialColumn]: User not authenticated to view contact listing');
+ return errorFormater('not authenticated', 401);
+ }
+ const { field } = input;
+ try {
+ const columnExists = await MaterialTemplate.findOne({ field });
+ if (!columnExists) return errorFormater('column not found', 404);
+ const column = await MaterialTemplate.findOneAndUpdate({ field }, { $set: { ...input, user: user._id } }, { new: true, lean: true, populate: { path: 'user', select: 'displayName' } });
+ return { ...column, user: column.user.displayName };
+ } catch (error) {
+ return errorFormater(error.message, 500);
+ }
+ }
+};
+
+async function insertMany (columns) {
+ try {
+ await MaterialTemplate.insertMany(columns, { ordered: false, rawResult: true, lean: true });
+ } catch (e) {
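+    // Return only the duplicate-key portion of each Mongo E11000 error message
+    // (e.g. ' { field: "Origins" }') so the resolver can report a 409 conflict.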
+ return e.writeErrors.map(({ err: { errmsg } }) => errmsg.split('key:')[1]);
+ }
+};
+
+module.exports = materialMutation;
diff --git a/resfulservice/src/graphql/resolver/material_template/query.js b/resfulservice/src/graphql/resolver/material_template/query.js
new file mode 100644
index 00000000..0a4ce806
--- /dev/null
+++ b/resfulservice/src/graphql/resolver/material_template/query.js
@@ -0,0 +1,24 @@
+const MaterialTemplate = require('../../../models/xlsxCurationList');
+const errorFormater = require('../../../utils/errorFormater');
+const paginator = require('../../../utils/paginator');
+
+const materialQuery = {
+ getXlsxCurationList: async (_, { input }, { req, isAuthenticated }) => {
+ req.logger?.info('[getMaterialColumns] Function Entry:');
+ if (!isAuthenticated) {
+ req.logger?.error('[getMaterialColumns]: User not authenticated to view material column listing');
+ return errorFormater('not authenticated', 401);
+ }
+ const { field, pageSize, pageNumber } = input;
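+    // Case-insensitive partial match when a field filter is supplied; otherwise list everything.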
+ const filter = field ? { field: { $regex: new RegExp(field.toString(), 'gi') } } : {};
+ try {
+ const pagination = pageSize || pageNumber ? paginator(await MaterialTemplate.countDocuments(filter), pageNumber, pageSize) : paginator(await MaterialTemplate.countDocuments(filter));
+ const curatedList = await MaterialTemplate.find(filter, null, { lean: true, populate: { path: 'user', select: 'displayName' } });
+ return Object.assign(pagination, { columns: curatedList.map((list) => ({ ...list, user: list?.user?.displayName })) });
+ } catch (error) {
+ return errorFormater(error.message, 500);
+ }
+ }
+};
+
+module.exports = materialQuery;
diff --git a/resfulservice/src/graphql/resolver/material_template/type.graphql b/resfulservice/src/graphql/resolver/material_template/type.graphql
new file mode 100644
index 00000000..81933eb9
--- /dev/null
+++ b/resfulservice/src/graphql/resolver/material_template/type.graphql
@@ -0,0 +1,53 @@
+type MaterialColumns {
+ "list of Columns data returned from the server"
+ columns: [Column]
+}
+
+type Column {
+ "Column field name"
+ field: String
+
+ "List of possible values for column field"
+ values: [String]
+
+ user: String
+}
+
+type SuccessMessage {
+ "Success message to notify successfully parsing the excel sheet"
+ message: String!
+}
+
+type MaterialTemplates {
+ "Total number of columns that exists"
+ totalItems: Int!
+
+ """
+ Number of columns per page.
+ Pagination option for specifying how the server should group the columns.
+  Defaults to 20 if none is specified.
+ """
+ pageSize: Int!
+
+ """
+  Pagination option selecting which page of columns to return.
+  This defaults to 1 if no value is specified.
+  The columns are grouped into pages based on the specified pageSize (defaults to 20 if not specified).
+ """
+ pageNumber: Int!
+
+ """
+ Total number of pages that are available.
+ This value is based on the pageSize and totalItems
+ """
+ totalPages: Int!
+
+  "Boolean value specifying if the currently returned page has a previous page"
+  hasPreviousPage: Boolean!
+
+  "Boolean value specifying if the currently returned page has a next page"
+ hasNextPage: Boolean!
+
+ "list of Columns data returned from the server"
+ columns: [Column!]!
+}
diff --git a/resfulservice/src/graphql/schema.graphql b/resfulservice/src/graphql/schema.graphql
index 55edc6b6..1d5b3d55 100644
--- a/resfulservice/src/graphql/schema.graphql
+++ b/resfulservice/src/graphql/schema.graphql
@@ -10,6 +10,7 @@ type Query {
getUserDataset(input: datasetStatusInput): UserDataset!
getFilesets(input: filesetQueryInput!): Filesets!
pixelData(input: pixelDataQueryInput!): PixelatedData!
+ getXlsxCurationList(input: materialQueryInput): MaterialTemplates!
}
type Mutation {
@@ -19,5 +20,8 @@ type Mutation {
deleteUser(input: userIdInput): User!
submitContact(input: createContactInput!): Contact!
createDataset(input: datasetInput): CreatedDataset!
+ createXlsxCurationList(input: materialsInput): MaterialColumns!
+ updateXlsxCurationList(input: columnsInput): Column!
+ parseTemplateSheet: SuccessMessage!
createDatasetId: Datasets!
}
\ No newline at end of file
diff --git a/resfulservice/src/middlewares/index.js b/resfulservice/src/middlewares/index.js
index 9f015de6..b16b4cc4 100644
--- a/resfulservice/src/middlewares/index.js
+++ b/resfulservice/src/middlewares/index.js
@@ -15,7 +15,7 @@
// app.use(bodyParser.json());
// app.use((req, res, next) => logParser(log, req, next));
// app.use(fileMgr);
-// app.use('/mm_fils', fileServer);
+// app.use('/mm_files', fileServer);
// app.use(acceptedHeaders);
// app.use(getEnv);
// };
@@ -29,7 +29,7 @@
// globalMiddleWare
// };
-const bodyParser = require('body-parser');
+const express = require('express');
const acceptedHeaders = require('./accept');
const getEnv = require('./parseEnv');
const { fileMgr, fileServer } = require('./fileStorage');
@@ -43,10 +43,11 @@ const log = mmLogger();
* @param {*} app Express app object
*/
const globalMiddleWare = async (app) => {
- app.use(bodyParser.json());
+ app.use(express.json());
+ app.use(express.urlencoded({ extended: true }));
app.use((req, res, next) => logParser(log, req, next));
app.use(fileMgr);
- app.use('/mm_fils', fileServer);
+ app.use('/mm_files', fileServer);
app.use(acceptedHeaders);
app.use(getEnv);
};
diff --git a/resfulservice/src/models/xlsxCurationList.js b/resfulservice/src/models/xlsxCurationList.js
new file mode 100644
index 00000000..02c8902d
--- /dev/null
+++ b/resfulservice/src/models/xlsxCurationList.js
@@ -0,0 +1,22 @@
+const mongoose = require('mongoose');
+const Schema = mongoose.Schema;
+
+const xlsxCurationListSchema = new Schema({
+ field: {
+ type: String,
+ required: true,
+ unique: true,
+ dropDups: true
+ },
+ values: {
+ type: [String],
+ required: true
+ },
+ user: {
+ type: Schema.Types.ObjectId,
+ ref: 'User',
+ required: true
+ }
+}, { timestamps: true });
+
+module.exports = mongoose.model('xlsxCurationList', xlsxCurationListSchema);
diff --git a/resfulservice/src/routes/admin.js b/resfulservice/src/routes/admin.js
index b172660d..6db29863 100644
--- a/resfulservice/src/routes/admin.js
+++ b/resfulservice/src/routes/admin.js
@@ -6,8 +6,6 @@ const { getInternal } = require('../middlewares/isInternal');
router
.route('/es/bulkinsert')
- // .get(getInternal, AdminController.pingElasticSearch)
- // .post(getInternal, AdminController.initializeElasticSearch);
.post(AdminController.bulkElasticSearchImport)
.put(AdminController.dataDump);
@@ -20,11 +18,10 @@ router.route('/populate-datasets')
router
.route('/es')
- // .get(getInternal, AdminController.pingElasticSearch)
- // .post(getInternal, AdminController.initializeElasticSearch);
.get(AdminController.pingElasticSearch)
.post(AdminController.initializeElasticSearch)
.put(getInternal, AdminController.loadElasticSearch);
+// Note: Not in use. Deprecated in favor of the authService.js route.
router.route('/login').post(loginController.login);
module.exports = router;
diff --git a/resfulservice/src/routes/kg-wrapper.js b/resfulservice/src/routes/kg-wrapper.js
index eb4477d6..56107f81 100644
--- a/resfulservice/src/routes/kg-wrapper.js
+++ b/resfulservice/src/routes/kg-wrapper.js
@@ -1,6 +1,6 @@
const express = require('express');
const router = express.Router();
-const { getKnowledge, getFacetValues, getAllCharts } = require('../controllers/kgWrapperController');
+const { getKnowledge, getFacetValues, getAllCharts, getSparql } = require('../controllers/kgWrapperController');
const isAuth = require('../middlewares/isAuth');
const { getInternal } = require('../middlewares/isInternal');
@@ -25,4 +25,8 @@ router.route('/facets')
router.route('/charts')
.get(getAllCharts);
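+// Accepts the SPARQL query either as a POST body (`query`) or as a GET query parameter.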
+router.route('/sparql')
+ .post(getSparql)
+ .get(getSparql);
+
module.exports = router;
diff --git a/resfulservice/src/utils/elasticSearch.js b/resfulservice/src/utils/elasticSearch.js
index 4e63c972..3efdd41a 100644
--- a/resfulservice/src/utils/elasticSearch.js
+++ b/resfulservice/src/utils/elasticSearch.js
@@ -49,19 +49,20 @@ class ElasticSearch {
}
/**
- * Deletes a type group and all its docs
+ * Deletes all documents of an index
* @param {String} type
- * @returns {Object} response
+ * @returns response
*/
- async deleteAType (type) {
- const configResponse = await axios({
- method: 'delete',
- url: `http://${env.ESADDRESS}/${type}`,
- headers: {
- 'Content-Type': 'application/json'
- }
+ async deleteIndexDocs (type) {
+ return this.client.deleteByQuery({
+ index: type,
+ body: {
+ query: {
+ match_all: {}
+ }
+ },
+ timeout: '5m' // Todo: Increase when data becomes larger
});
- return configResponse;
}
async _putMappings (type, schema) {
@@ -74,21 +75,34 @@ class ElasticSearch {
});
}
+ async _getExistingIndices () {
+ return await this.client.cat.indices({ format: 'json' });
+ }
+
async initES (req) {
const log = req.logger;
log.info('elasticsearch.initES(): Function entry');
- try {
- const allSchemas = {
- articles: configPayload.articles,
- samples: configPayload.samples,
- charts: configPayload.charts,
- images: configPayload.images
- };
+ // Check and ignore existing indexes before create
+ const existingIndexes = await this._getExistingIndices();
+ if (existingIndexes.length >= Object.keys(configPayload).length) {
+ log.info('elasticsearch.initES(): All indexes exist in Elastic search');
+ return;
+ }
+
+ // Remove elastic search index config from list of keys
+ let preparedKeys = Object.keys(configPayload)?.filter(e => e !== 'config');
+ if (existingIndexes.length) {
+ preparedKeys = preparedKeys.filter(preppedKey => !existingIndexes.some(existingIndex => (existingIndex?.index === preppedKey)));
+ log.info(`elasticsearch.initES(): Adding the following missing index(es) ${preparedKeys.join(',')}`);
+ }
- Object.entries(allSchemas).forEach(async ([key, value]) => {
- await this._createConfig(key);
- await this._putMappings(key, value);
+ try {
+ Object.entries(configPayload).forEach(async ([key, value]) => {
+ if (preparedKeys.includes(key)) {
+ await this._createConfig(key);
+ await this._putMappings(key, value);
+ }
});
return {
diff --git a/resfulservice/src/utils/iterator.js b/resfulservice/src/utils/iterator.js
index 7edbb199..50b2d919 100644
--- a/resfulservice/src/utils/iterator.js
+++ b/resfulservice/src/utils/iterator.js
@@ -35,6 +35,7 @@ exports.generateMongoUrl = (req) => {
* @returns {Promise}
*/
exports.iteration = (arr, iterationFn, batchSize) => new Promise((resolve, reject) => {
+ // const chunks: Buffer[] = arr;
let pendingPromises = [];
const pausePromises = async () => {
try {