This repository has been archived by the owner on Aug 31, 2021. It is now read-only.

Merge pull request #86 from vulcanize/vdb-371-recheck-queued-storage
(VDB-371) recheck queued storage
rmulhol authored May 1, 2019
2 parents c93e6ad + 6716c3b commit 782e3fd
Showing 27 changed files with 733 additions and 236 deletions.
16 changes: 11 additions & 5 deletions cmd/composeAndExecute.go
@@ -16,16 +16,20 @@
package cmd

import (
"os"
"plugin"
syn "sync"
"time"

log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"

"github.com/vulcanize/vulcanizedb/libraries/shared/fetcher"
"github.com/vulcanize/vulcanizedb/libraries/shared/watcher"
"github.com/vulcanize/vulcanizedb/pkg/fs"
p2 "github.com/vulcanize/vulcanizedb/pkg/plugin"
"github.com/vulcanize/vulcanizedb/pkg/plugin/helpers"
"github.com/vulcanize/vulcanizedb/utils"
"os"
"plugin"
syn "sync"
)

// composeAndExecuteCmd represents the composeAndExecute command
@@ -170,7 +174,8 @@ func composeAndExecute() {

if len(ethStorageInitializers) > 0 {
tailer := fs.FileTailer{Path: storageDiffsPath}
sw := watcher.NewStorageWatcher(tailer, &db)
storageFetcher := fetcher.NewCsvTailStorageFetcher(tailer)
sw := watcher.NewStorageWatcher(storageFetcher, &db)
sw.AddTransformers(ethStorageInitializers)
wg.Add(1)
go watchEthStorage(&sw, &wg)
@@ -187,5 +192,6 @@

func init() {
rootCmd.AddCommand(composeAndExecuteCmd)
composeAndExecuteCmd.Flags().BoolVar(&recheckHeadersArg, "recheckHeaders", false, "checks headers that are already checked for each transformer.")
composeAndExecuteCmd.Flags().BoolVarP(&recheckHeadersArg, "recheck-headers", "r", false, "whether to re-check headers for watched events")
composeAndExecuteCmd.Flags().DurationVarP(&queueRecheckInterval, "queue-recheck-interval", "q", 5 * time.Minute, "how often to recheck queued storage diffs")
}
12 changes: 9 additions & 3 deletions cmd/execute.go
@@ -26,6 +26,8 @@ import (
"github.com/spf13/cobra"

"github.com/vulcanize/vulcanizedb/libraries/shared/constants"
"github.com/vulcanize/vulcanizedb/libraries/shared/fetcher"
storageUtils "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
"github.com/vulcanize/vulcanizedb/libraries/shared/transformer"
"github.com/vulcanize/vulcanizedb/libraries/shared/watcher"
"github.com/vulcanize/vulcanizedb/pkg/fs"
@@ -118,7 +120,8 @@ func execute() {

if len(ethStorageInitializers) > 0 {
tailer := fs.FileTailer{Path: storageDiffsPath}
sw := watcher.NewStorageWatcher(tailer, &db)
storageFetcher := fetcher.NewCsvTailStorageFetcher(tailer)
sw := watcher.NewStorageWatcher(storageFetcher, &db)
sw.AddTransformers(ethStorageInitializers)
wg.Add(1)
go watchEthStorage(&sw, &wg)
@@ -135,7 +138,8 @@

func init() {
rootCmd.AddCommand(executeCmd)
executeCmd.Flags().BoolVar(&recheckHeadersArg, "recheckHeaders", false, "checks headers that are already checked for each transformer.")
executeCmd.Flags().BoolVarP(&recheckHeadersArg, "recheck-headers", "r", false, "whether to re-check headers for watched events")
executeCmd.Flags().DurationVarP(&queueRecheckInterval, "queue-recheck-interval", "q", 5 * time.Minute, "how often to recheck queued storage diffs")
}

type Exporter interface {
@@ -166,7 +170,9 @@ func watchEthStorage(w *watcher.StorageWatcher, wg *syn.WaitGroup) {
ticker := time.NewTicker(pollingInterval)
defer ticker.Stop()
for range ticker.C {
w.Execute()
errs := make(chan error)
rows := make(chan storageUtils.StorageDiffRow)
w.Execute(rows, errs, queueRecheckInterval)
}
}
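
The updated `StorageWatcher.Execute` now takes the rows channel, the errors channel, and a `queueRecheckInterval`, but `libraries/shared/watcher/storage_watcher.go` itself is not among the files shown here. The following is only a hedged sketch of the contract that signature suggests: diffs and errors stream in from the fetcher while a ticker periodically retries queued diffs. The package name and the `processRow`/`processQueue` helpers are hypothetical stand-ins.

```go
// Hedged sketch only; this is not the actual StorageWatcher implementation.
package watcher_sketch

import (
	"log"
	"time"

	"github.com/vulcanize/vulcanizedb/libraries/shared/fetcher"
	"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
)

// processRow and processQueue are hypothetical stand-ins for transformer
// dispatch and queued-diff handling.
func processRow(row utils.StorageDiffRow) error { return nil }

func processQueue() {}

// executeSketch streams diffs and errors from the fetcher while a ticker
// periodically retries diffs that were queued because no transformer
// recognized them yet.
func executeSketch(storageFetcher fetcher.IStorageFetcher, rows chan utils.StorageDiffRow, errs chan error, queueRecheckInterval time.Duration) {
	go storageFetcher.FetchStorageDiffs(rows, errs)
	ticker := time.NewTicker(queueRecheckInterval)
	defer ticker.Stop()
	for {
		select {
		case row := <-rows:
			if err := processRow(row); err != nil {
				// diffs that cannot be transformed yet would be queued for recheck
				log.Println("queueing unrecognized storage diff:", err)
			}
		case err := <-errs:
			log.Println("storage fetcher error:", err)
		case <-ticker.C:
			// retry queued diffs, per --queue-recheck-interval
			processQueue()
		}
	}
}
```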

5 changes: 3 additions & 2 deletions cmd/root.go
@@ -41,6 +41,7 @@ var (
genConfig config.Plugin
ipc string
levelDbPath string
queueRecheckInterval time.Duration
startingBlockNumber int64
storageDiffsPath string
syncAll bool
@@ -49,8 +50,8 @@ var (
)

const (
pollingInterval = 7 * time.Second
validationWindow = 15
pollingInterval = 7 * time.Second
validationWindow = 15
)

var rootCmd = &cobra.Command{
14 changes: 14 additions & 0 deletions documentation/composeAndExecute.md
@@ -36,6 +36,20 @@ composeAndExecute:

`./vulcanizedb composeAndExecute --config=./environments/config_name.toml`

## Flags

The `compose` and `composeAndExecute` commands can be passed optional flags to specify the operation of the watchers:

- `--recheck-headers`/`-r` - specifies whether to re-check headers for watched events even after a header has already been queried for logs.
Can be useful for redundancy if you suspect that your node is not always returning all desired logs on every query.
Argument is expected to be a boolean: e.g. `-r=true`.
Defaults to `false`.

- `--queue-recheck-interval`/`-q` - specifies the interval for re-checking storage diffs that have been queued for later processing
(by default, the storage watcher queues storage diffs when transformer execution fails, on the assumption that data subsequently derived from the event transformers may make it possible to decode storage keys that are not yet recognized).
Argument is expected to be a duration: e.g. `-q=10m30s` (for 10-minute, 30-second intervals).
Defaults to `5m` (5 minutes).
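
As an illustrative example (reusing the config invocation shown above), both flags can be combined in a single run:

`./vulcanizedb composeAndExecute --config=./environments/config_name.toml --recheck-headers=true --queue-recheck-interval=10m30s`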

## Configuration
A .toml config file is specified when executing the commands.
The config provides information for composing a set of transformers from external repositories:
12 changes: 6 additions & 6 deletions libraries/shared/fetcher/log_fetcher.go
@@ -24,22 +24,22 @@ import (
"github.com/vulcanize/vulcanizedb/pkg/core"
)

type LogFetcher interface {
type ILogFetcher interface {
FetchLogs(contractAddresses []common.Address, topics []common.Hash, missingHeader core.Header) ([]types.Log, error)
}

type Fetcher struct {
type LogFetcher struct {
blockChain core.BlockChain
}

func NewFetcher(blockchain core.BlockChain) *Fetcher {
return &Fetcher{
func NewLogFetcher(blockchain core.BlockChain) *LogFetcher {
return &LogFetcher{
blockChain: blockchain,
}
}

// Checks all topic0s, on all addresses, fetching matching logs for the given header
func (fetcher Fetcher) FetchLogs(addresses []common.Address, topic0s []common.Hash, header core.Header) ([]types.Log, error) {
func (logFetcher LogFetcher) FetchLogs(addresses []common.Address, topic0s []common.Hash, header core.Header) ([]types.Log, error) {
blockHash := common.HexToHash(header.Hash)
query := ethereum.FilterQuery{
BlockHash: &blockHash,
@@ -48,7 +48,7 @@ func (fetcher Fetcher) FetchLogs(addresses []common.Address, topic0s []common.Ha
Topics: [][]common.Hash{topic0s},
}

logs, err := fetcher.blockChain.GetEthLogsWithCustomQuery(query)
logs, err := logFetcher.blockChain.GetEthLogsWithCustomQuery(query)
if err != nil {
// TODO review aggregate fetching error handling
return []types.Log{}, err
12 changes: 6 additions & 6 deletions libraries/shared/fetcher/log_fetcher_test.go
@@ -22,16 +22,16 @@ import (
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"

fetch "github.com/vulcanize/vulcanizedb/libraries/shared/fetcher"
"github.com/vulcanize/vulcanizedb/libraries/shared/fetcher"
"github.com/vulcanize/vulcanizedb/pkg/core"
"github.com/vulcanize/vulcanizedb/pkg/fakes"
)

var _ = Describe("Fetcher", func() {
var _ = Describe("LogFetcher", func() {
Describe("FetchLogs", func() {
It("fetches logs based on the given query", func() {
blockChain := fakes.NewMockBlockChain()
fetcher := fetch.NewFetcher(blockChain)
logFetcher := fetcher.NewLogFetcher(blockChain)
header := fakes.FakeHeader

addresses := []common.Address{
@@ -41,7 +41,7 @@

topicZeros := []common.Hash{common.BytesToHash([]byte{1, 2, 3, 4, 5})}

_, err := fetcher.FetchLogs(addresses, topicZeros, header)
_, err := logFetcher.FetchLogs(addresses, topicZeros, header)

address1 := common.HexToAddress("0xfakeAddress")
address2 := common.HexToAddress("0xanotherFakeAddress")
@@ -59,9 +59,9 @@
It("returns an error if fetching the logs fails", func() {
blockChain := fakes.NewMockBlockChain()
blockChain.SetGetEthLogsWithCustomQueryErr(fakes.FakeError)
fetcher := fetch.NewFetcher(blockChain)
logFetcher := fetcher.NewLogFetcher(blockChain)

_, err := fetcher.FetchLogs([]common.Address{}, []common.Hash{}, core.Header{})
_, err := logFetcher.FetchLogs([]common.Address{}, []common.Hash{}, core.Header{})

Expect(err).To(HaveOccurred())
Expect(err).To(MatchError(fakes.FakeError))
50 changes: 50 additions & 0 deletions libraries/shared/fetcher/storage_fetcher.go
@@ -0,0 +1,50 @@
// VulcanizeDB
// Copyright © 2019 Vulcanize

// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.

// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.

package fetcher

import (
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
"github.com/vulcanize/vulcanizedb/pkg/fs"
"strings"
)

type IStorageFetcher interface {
FetchStorageDiffs(chan<- utils.StorageDiffRow, chan<- error)
}

type CsvTailStorageFetcher struct {
tailer fs.Tailer
}

func NewCsvTailStorageFetcher(tailer fs.Tailer) CsvTailStorageFetcher {
return CsvTailStorageFetcher{tailer: tailer}
}

func (storageFetcher CsvTailStorageFetcher) FetchStorageDiffs(out chan<- utils.StorageDiffRow, errs chan<- error) {
t, tailErr := storageFetcher.tailer.Tail()
if tailErr != nil {
errs <- tailErr
}
for line := range t.Lines {
row, parseErr := utils.FromStrings(strings.Split(line.Text, ","))
if parseErr != nil {
errs <- parseErr
} else {
out <- row
}
}
}
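
A minimal usage sketch of the new fetcher, assuming the same wiring as `cmd/composeAndExecute.go` above; the CSV path and the logging are illustrative, and in vulcanizedb these channels are consumed by the storage watcher's `Execute` loop rather than logged directly.

```go
// Usage sketch: wire a FileTailer into CsvTailStorageFetcher and stream parsed diffs.
package main

import (
	"log"

	"github.com/vulcanize/vulcanizedb/libraries/shared/fetcher"
	"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
	"github.com/vulcanize/vulcanizedb/pkg/fs"
)

func main() {
	tailer := fs.FileTailer{Path: "/tmp/storage_diffs.csv"} // illustrative path
	storageFetcher := fetcher.NewCsvTailStorageFetcher(tailer)

	rows := make(chan utils.StorageDiffRow)
	errs := make(chan error)
	go storageFetcher.FetchStorageDiffs(rows, errs)

	// Log whatever arrives; the real consumer is the StorageWatcher.
	for {
		select {
		case row := <-rows:
			log.Printf("parsed storage diff row: %+v", row)
		case err := <-errs:
			log.Printf("storage fetcher error: %v", err)
		}
	}
}
```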
99 changes: 99 additions & 0 deletions libraries/shared/fetcher/storage_fetcher_test.go
@@ -0,0 +1,99 @@
// VulcanizeDB
// Copyright © 2019 Vulcanize

// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.

// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.

package fetcher_test

import (
"fmt"
"strings"
"time"

"github.com/ethereum/go-ethereum/common"
"github.com/hpcloud/tail"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"

"github.com/vulcanize/vulcanizedb/libraries/shared/fetcher"
"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
"github.com/vulcanize/vulcanizedb/pkg/fakes"
)

var _ = Describe("Csv Tail Storage Fetcher", func() {
var (
errorsChannel chan error
mockTailer *fakes.MockTailer
rowsChannel chan utils.StorageDiffRow
storageFetcher fetcher.CsvTailStorageFetcher
)

BeforeEach(func() {
errorsChannel = make(chan error)
rowsChannel = make(chan utils.StorageDiffRow)
mockTailer = fakes.NewMockTailer()
storageFetcher = fetcher.NewCsvTailStorageFetcher(mockTailer)
})

It("adds error to errors channel if tailing file fails", func(done Done) {
mockTailer.TailErr = fakes.FakeError

go storageFetcher.FetchStorageDiffs(rowsChannel, errorsChannel)

Expect(<-errorsChannel).To(MatchError(fakes.FakeError))
close(done)
})

It("adds parsed csv row to rows channel for storage diff", func(done Done) {
line := getFakeLine()

go storageFetcher.FetchStorageDiffs(rowsChannel, errorsChannel)
mockTailer.Lines <- line

expectedRow, err := utils.FromStrings(strings.Split(line.Text, ","))
Expect(err).NotTo(HaveOccurred())
Expect(<-rowsChannel).To(Equal(expectedRow))
close(done)
})

It("adds error to errors channel if parsing csv fails", func(done Done) {
line := &tail.Line{Text: "invalid"}

go storageFetcher.FetchStorageDiffs(rowsChannel, errorsChannel)
mockTailer.Lines <- line

Expect(<-errorsChannel).To(HaveOccurred())
select {
case <-rowsChannel:
Fail("value passed to rows channel on error")
default:
Succeed()
}
close(done)
})
})

func getFakeLine() *tail.Line {
address := common.HexToAddress("0x1234567890abcdef")
blockHash := []byte{4, 5, 6}
blockHeight := int64(789)
storageKey := []byte{9, 8, 7}
storageValue := []byte{6, 5, 4}
return &tail.Line{
Text: fmt.Sprintf("%s,%s,%d,%s,%s", common.Bytes2Hex(address.Bytes()), common.Bytes2Hex(blockHash),
blockHeight, common.Bytes2Hex(storageKey), common.Bytes2Hex(storageValue)),
Time: time.Time{},
Err: nil,
}
}
4 changes: 2 additions & 2 deletions libraries/shared/mocks/converter.go
@@ -53,6 +53,6 @@ func (converter *MockConverter) SetToEntityConverterError(err error) {
converter.entityConverterError = err
}

func (c *MockConverter) SetToModelConverterError(err error) {
c.modelConverterError = err
func (converter *MockConverter) SetToModelConverterError(err error) {
converter.modelConverterError = err
}
39 changes: 39 additions & 0 deletions libraries/shared/mocks/storage_fetcher.go
@@ -0,0 +1,39 @@
// VulcanizeDB
// Copyright © 2019 Vulcanize

// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.

// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.

package mocks

import "github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"

type MockStorageFetcher struct {
RowsToReturn []utils.StorageDiffRow
ErrsToReturn []error
}

func NewMockStorageFetcher() *MockStorageFetcher {
return &MockStorageFetcher{}
}

func (fetcher *MockStorageFetcher) FetchStorageDiffs(out chan<- utils.StorageDiffRow, errs chan<- error) {
defer close(out)
defer close(errs)
for _, err := range fetcher.ErrsToReturn {
errs <- err
}
for _, row := range fetcher.RowsToReturn {
out <- row
}
}
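
A hedged sketch of how this mock might be driven in a watcher-style test; the actual watcher test changes are not shown here, so the package placement, the empty `fakeRow` value, and the assertions below are assumptions rather than the project's own tests.

```go
package watcher_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"

	"github.com/vulcanize/vulcanizedb/libraries/shared/mocks"
	"github.com/vulcanize/vulcanizedb/libraries/shared/storage/utils"
	"github.com/vulcanize/vulcanizedb/pkg/fakes"
)

var _ = Describe("MockStorageFetcher (usage sketch)", func() {
	It("replays configured errors and rows over the fetcher channels", func(done Done) {
		fakeRow := utils.StorageDiffRow{} // hypothetical empty row; real tests would populate fields
		mockFetcher := mocks.NewMockStorageFetcher()
		mockFetcher.ErrsToReturn = []error{fakes.FakeError}
		mockFetcher.RowsToReturn = []utils.StorageDiffRow{fakeRow}

		rows := make(chan utils.StorageDiffRow)
		errs := make(chan error)
		go mockFetcher.FetchStorageDiffs(rows, errs)

		// The mock sends errors first, then rows, then closes both channels.
		Expect(<-errs).To(MatchError(fakes.FakeError))
		Expect(<-rows).To(Equal(fakeRow))
		close(done)
	})
})
```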
