Skip to content
This repository was archived by the owner on Jan 14, 2020. It is now read-only.

Commit 19526f0

Browse files
author
Franck
authored
Apollo GraphQL server skeleton (#335)
* checkpoint * Tweaks * Fix comments * Typo fix * Re-structured directories. Added support for DB migration * Update readme to use preformatted code sections. * Postgres wasn’t returning name/description/price for listings * DB inserting should be idempotent. It should be fine to rerun the listener and reindex all listings.
1 parent 849d157 commit 19526f0

14 files changed

+1290
-98
lines changed
File renamed without changes.

daemon/indexing/.npmignore

+4
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
# See https://docs.npmjs.com/misc/developers#keeping-files-out-of-your-package for more about ignoring files.
2+
3+
# dependencies
4+
/node_modules

daemon/indexing/README.md

+47
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,47 @@
1+
This directory contains code for indexing servers:
2+
- listener: Server that listens to events emitted by Origin contracts and indexes them.
3+
- apollo: GraphQL server for indexed data
4+
- lib: library for indexing data in various backends. Currently Postgres and Elasticsearch are supported.
5+
6+
To start the listener:
7+
======================
8+
9+
Use origin-box to start an origin-js container.
10+
11+
docker-compose up origin-js
12+
13+
If you want to index data in Postgres:
14+
15+
docker-compose up postgres # start the postgres container.
16+
17+
# create the postgres DB schema:
18+
docker exec -ti -w /app/daemon/indexing origin-js node node_modules/db-migrate/bin/db-migrate -e origin-box-genesis db:create indexing
19+
docker exec -ti -w /app/daemon/indexing origin-js node node_modules/db-migrate/bin/db-migrate up
20+
21+
If you want to index data in Elasticsearch, start the elasticsearch container.
22+
23+
docker-compose up elasticsearch
24+
25+
Start the listener in the origin-js container. Use the --elasticsearch and/or --db options to pick the indexer(s).
26+
27+
docker exec -ti origin-js node daemon/indexing/listener/listener.js --elasticsearch --db
28+
29+
You should see messages in the console indicating events are being indexed.
30+
31+
32+
To start the Apollo GraphQL server:
33+
===================================
34+
35+
You will need to update origin-box's docker-compose.yml: for the origin-js image, publish container port 4000 to host port 4000 so the Apollo server is reachable. [TODO: update origin-box config]
36+
37+
Use origin-box to start an origin-js container.
38+
39+
docker-compose up origin-js
40+
41+
Start the Apollo server in the origin-js container
42+
43+
docker exec -ti origin-js node daemon/indexing/apollo/index.js
44+
45+
The server should start and you can point your browser to http://localhost:4000 to access the GraphQL playground.
46+
47+

daemon/indexing/apollo/index.js

+48
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
1+
const { ApolloServer, gql } = require('apollo-server')

var search = require('../lib/search.js')
var db = require('../lib/db.js')

/*
 * Implementation of a GraphQL server using the Apollo framework.
 * See https://www.apollographql.com/server
 *
 * Serves indexed listing data: full-text queries are answered from
 * Elasticsearch, unfiltered listing dumps from the Postgres index.
 */

// Type definitions define the "shape" of the data and specify
// which ways the data can be fetched from the GraphQL server.
const typeDefs = gql`
  type Listing {
    id: String
    name: String
    description: String
    price: Float
  }

  # The "Query" type is the root of all GraphQL queries.
  type Query {
    Listings(query: String): [Listing]
  }
`

// Resolvers define the technique for fetching the types in the schema.
const resolvers = {
  Query: {
    // If a search string was supplied, use the Elasticsearch backend;
    // otherwise return every listing from the Postgres index.
    Listings(root, args, context, info) {
      if (args.query) {
        return search.Listing.search(args.query)
      } else {
        return db.Listing.all()
      }
    },
  },
}

// Start ApolloServer by passing type definitions (typeDefs) and the resolvers
// responsible for fetching the data for those types.
const server = new ApolloServer({ typeDefs, resolvers })

// The `listen` method launches a web-server. Handle startup failures
// (e.g. port already bound) explicitly instead of leaving an
// unhandled promise rejection.
server
  .listen()
  .then(({ url }) => {
    console.log(`Apollo server ready at ${url}`)
  })
  .catch((err) => {
    console.error('Failed to start Apollo server:', err)
    process.exit(1)
  })

daemon/indexing/database.json

+17
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
{
2+
"defaultEnv": "origin-box",
3+
"sql-file": true,
4+
"origin-box-genesis": {
5+
"driver": "pg",
6+
"user": "origin",
7+
"password": "origin",
8+
"host": "postgres"
9+
},
10+
"origin-box": {
11+
"driver": "pg",
12+
"user": "origin",
13+
"password": "origin",
14+
"host": "postgres",
15+
"database": "indexing"
16+
}
17+
}

daemon/lib/db.js renamed to daemon/indexing/lib/db.js

+18-8
Original file line numberDiff line numberDiff line change
@@ -4,15 +4,13 @@ const { Pool } = require('pg')
44
Module to interface with Postgres database.
55
*/
66

7-
const dbName = 'graphql'
8-
97
// TODO(franck): dynamically configure client.
108
const pool = new Pool(
119
{
12-
host: 'localhost',
13-
database: dbName,
14-
//user: 'franck',
15-
//password: 'franck',
10+
host: 'postgres',
11+
database: 'indexing',
12+
user: 'origin',
13+
password: 'origin',
1614
})
1715

1816

@@ -36,7 +34,17 @@ class Listing {
3634
*/
3735
static async all() {
3836
const res = await pool.query(`SELECT * FROM ${Listing.table}`, [])
39-
return res.rows
37+
// Match the format of the data coming from elasticsearch
38+
const results = res.rows.map((row)=>{
39+
const json = JSON.parse(row.data)
40+
return {
41+
id: row.id,
42+
name: json.name,
43+
description: json.description,
44+
price: json.price
45+
}
46+
})
47+
return results
4048
}
4149

4250
/*
@@ -47,8 +55,10 @@ class Listing {
4755
* @returns The listingId indexed.
4856
*/
4957
static async insert(listingId, listing) {
58+
// TODO: Check that we are not replacing new data with old
5059
const res = await pool.query(
51-
`INSERT INTO ${Listing.table}(id, data) VALUES($1, $2)`, [listingId, listing])
60+
`INSERT INTO ${Listing.table}(id, data) VALUES($1, $2)
61+
ON CONFLICT (id) DO UPDATE SET data = excluded.data`, [listingId, listing])
5262
console.log(`Added row ${listingId} to listing table.`)
5363
return listingId
5464
}

daemon/lib/search.js renamed to daemon/indexing/lib/search.js

+1-1
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ var elasticsearch = require('elasticsearch')
88
// TODO(franck): dynamically configure client.
99
var client = new elasticsearch.Client({
1010
hosts: [
11-
'localhost:9200/'
11+
'elasticsearch:9200/'
1212
]
1313
})
1414

daemon/listener/listener.js renamed to daemon/indexing/listener/listener.js

+16-3
Original file line numberDiff line numberDiff line change
@@ -2,13 +2,18 @@ const http = require('http')
22
const urllib = require('url')
33
const Web3 = require('web3')
44

5-
const Origin = require('../../dist/index') // FIXME: replace with origin-js package
5+
const Origin = require('../../../dist/index') // FIXME: replace with origin-js package
66
const search = require ('../lib/search.js')
77
const db = require('../lib//db.js')
88

99
const web3Provider = new Web3.providers.HttpProvider('http://localhost:8545')
1010
const web3 = new Web3(web3Provider)
11-
const o = new Origin({ web3 })
11+
const o = new Origin({
12+
ipfsDomain: 'origin-js',
13+
ipfsGatewayProtocol: 'http',
14+
ipfsGatewayPort: 8080,
15+
web3,
16+
})
1217

1318
// Origin Listener
1419
// ---------------
@@ -49,9 +54,15 @@ const generateOfferId = log => {
4954
].join('-')
5055
}
5156
const getListingDetails = async log => {
57+
const listingId = generateListingId(log)
58+
console.log("CALLING getListing for ID ", listingId)
59+
const listing = await o.marketplace.getListing(listingId)
5260
return {
53-
listing: await o.marketplace.getListing(generateListingId(log))
61+
listing: listing,
5462
}
63+
//return {
64+
// listing: await o.marketplace.getListing(generateListingId(log))
65+
//}
5566
}
5667
const getOfferDetails = async log => {
5768
return {
@@ -174,6 +185,8 @@ async function runBatch(opts, context) {
174185
// handleLog - annotates, runs rule, and ouputs a particular log
175186
async function handleLog(log, rule, contractVersion, context) {
176187
console.log(`Processing log blockNumber=${log.blockNumber} transactionIndex=${log.transactionIndex}`)
188+
console.log("LOG=", log)
189+
177190
log.decoded = web3.eth.abi.decodeLog(
178191
rule.eventAbi.inputs,
179192
log.data,
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,53 @@
1+
'use strict';
2+
3+
var dbm;
4+
var type;
5+
var seed;
6+
var fs = require('fs');
7+
var path = require('path');
8+
var Promise;
9+
10+
/**
11+
* We receive the dbmigrate dependency from dbmigrate initially.
12+
* This enables us to not have to rely on NODE_PATH.
13+
*/
14+
exports.setup = function(options, seedLink) {
15+
dbm = options.dbmigrate;
16+
type = dbm.dataType;
17+
seed = seedLink;
18+
Promise = options.Promise;
19+
};
20+
21+
exports.up = function(db) {
22+
var filePath = path.join(__dirname, 'sqls', '20180815061230-add-listing-table-up.sql');
23+
return new Promise( function( resolve, reject ) {
24+
fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){
25+
if (err) return reject(err);
26+
console.log('received data: ' + data);
27+
28+
resolve(data);
29+
});
30+
})
31+
.then(function(data) {
32+
return db.runSql(data);
33+
});
34+
};
35+
36+
exports.down = function(db) {
37+
var filePath = path.join(__dirname, 'sqls', '20180815061230-add-listing-table-down.sql');
38+
return new Promise( function( resolve, reject ) {
39+
fs.readFile(filePath, {encoding: 'utf-8'}, function(err,data){
40+
if (err) return reject(err);
41+
console.log('received data: ' + data);
42+
43+
resolve(data);
44+
});
45+
})
46+
.then(function(data) {
47+
return db.runSql(data);
48+
});
49+
};
50+
51+
exports._meta = {
52+
"version": 1
53+
};
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
-- Reverse migration: removes the listing table created by the "up" step.
DROP TABLE listing;
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
-- Forward migration: table holding indexed marketplace listings.
CREATE TABLE listing (
    -- Listing identifier generated by the listener; primary key.
    id VARCHAR(32) PRIMARY KEY,
    -- Listing payload stored as a JSON string (lib/db.js JSON.parses it
    -- to extract name/description/price).
    data VARCHAR(4096)
);

0 commit comments

Comments
 (0)