
Commit ecd85b4

Author: Chris Elder (committed)
[FAB-6645] update LTE to add JSON to CouchDB tests
The CouchDB benchmarks currently use binary values only. This change provides the option of benchmarking CouchDB with JSON values.

Change-Id: I9f49331b80607fda0ac8b45790ed3bd4beb4b936
Signed-off-by: Chris Elder <chris.elder@us.ibm.com>
1 parent d79520f commit ecd85b4


9 files changed: 166 additions, 34 deletions


test/tools/LTE/README.md

Lines changed: 7 additions & 1 deletion
````diff
@@ -111,13 +111,19 @@ By default, the tests use golveldb as the state database. Fabric provides the
 option of using CouchDB as a pluggable state database. To run the existing
 tests with CouchDB, use the parameter file `parameters_couchdb_daily_CI.sh`:
 ```
-./runbenchmark.sh -f parameters_couchdb_daily_CI.sh all
+./runbenchmarks.sh -f parameters_couchdb_daily_CI.sh all
 ```
 Note that this parameter file (`parameters_couchdb_daily_CI.sh`) contains the
 following line, which is required to run the tests with CouchDB:
 ```
 export useCouchDB="yes"
 ```
+CouchDB can store values in JSON or binary formats. The following option in
+`parameters_couchdb_daily_CI.sh` is used to switch between JSON and binary
+values:
+```
+UseJSONFormat="true"
+```
 
 ## How to View the Test Results
 
````
test/tools/LTE/experiments/conf.go

Lines changed: 21 additions & 9 deletions
```diff
@@ -15,19 +15,24 @@ import (
 // txConf captures the transaction related configurations
 // numTotalTxs specifies the total transactions that should be executed and committed across chains
 // numParallelTxsPerChain specifies the parallel transactions on each of the chains
-// numKeysInEachTx specifies the number of keys that each of transactions should operate
+// numWritesPerTx specifies the number of keys to write in each transaction
+// numReadsPerTx specifies the number of keys to read in each transaction, Note: this parameters
+// match the numWritesPerTx for normal benchmarks. This can be set to zero to make batch update measurements.
 type txConf struct {
 	numTotalTxs            int
 	numParallelTxsPerChain int
-	numKeysInEachTx        int
+	numWritesPerTx         int
+	numReadsPerTx          int
 }
 
 // dataConf captures the data related configurations
 // numKVs specifies number of total key-values across chains
 // kvSize specifies the size of a key-value (in bytes)
+// useJSON specifies if the value stored is in JSON format
 type dataConf struct {
-	numKVs int
-	kvSize int
+	numKVs  int
+	kvSize  int
+	useJSON bool
 }
 
 // configuration captures all the configurations for an experiment
@@ -44,8 +49,8 @@ func defaultConf() *configuration {
 	conf := &configuration{}
 	conf.chainMgrConf = &chainmgmt.ChainMgrConf{DataDir: "/tmp/fabric/ledgerPerfTests", NumChains: 1}
 	conf.batchConf = &chainmgmt.BatchConf{BatchSize: 10, SignBlock: false}
-	conf.txConf = &txConf{numTotalTxs: 100000, numParallelTxsPerChain: 100, numKeysInEachTx: 4}
-	conf.dataConf = &dataConf{numKVs: 100000, kvSize: 200}
+	conf.txConf = &txConf{numTotalTxs: 100000, numParallelTxsPerChain: 100, numWritesPerTx: 4, numReadsPerTx: 4}
+	conf.dataConf = &dataConf{numKVs: 100000, kvSize: 200, useJSON: false}
 	return conf
 }
 
@@ -76,8 +81,11 @@ func confFromTestParams(testParams []string) *configuration {
 	numTotalTxs := flags.Int("NumTotalTx",
 		conf.txConf.numTotalTxs, "Number of total transactions")
 
-	numKeysInEachTx := flags.Int("NumKeysInEachTx",
-		conf.txConf.numKeysInEachTx, "number of keys operated upon in each Tx")
+	numWritesPerTx := flags.Int("NumWritesPerTx",
+		conf.txConf.numWritesPerTx, "number of keys written in each Tx")
+
+	numReadsPerTx := flags.Int("NumReadsPerTx",
+		conf.txConf.numReadsPerTx, "number of keys to read in each Tx")
 
 	// batchConf
 	batchSize := flags.Int("BatchSize",
@@ -90,15 +98,19 @@ func confFromTestParams(testParams []string) *configuration {
 	kvSize := flags.Int("KVSize",
 		conf.dataConf.kvSize, "size of the key-value in bytes")
 
+	useJSON := flags.Bool("UseJSONFormat", conf.dataConf.useJSON, "should CouchDB use JSON for values")
+
 	flags.Parse(testParams)
 
 	conf.chainMgrConf.DataDir = *dataDir
 	conf.chainMgrConf.NumChains = *numChains
 	conf.txConf.numParallelTxsPerChain = *numParallelTxsPerChain
 	conf.txConf.numTotalTxs = *numTotalTxs
-	conf.txConf.numKeysInEachTx = *numKeysInEachTx
+	conf.txConf.numWritesPerTx = *numWritesPerTx
+	conf.txConf.numReadsPerTx = *numReadsPerTx
 	conf.batchConf.BatchSize = *batchSize
 	conf.dataConf.numKVs = *numKVs
 	conf.dataConf.kvSize = *kvSize
+	conf.dataConf.useJSON = *useJSON
 	return conf
 }
```
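
For reference, a minimal, self-contained sketch of the flag-parsing pattern that `confFromTestParams` follows for the new options; the parameter values below are illustrative only and are not taken from the benchmark scripts:

```go
package main

import (
	"flag"
	"fmt"
)

func main() {
	// Illustrative parameter list, in the same -Name=value form that the
	// benchmark scripts pass through to the Go test binary.
	testParams := []string{"-NumWritesPerTx=4", "-NumReadsPerTx=0", "-UseJSONFormat=true"}

	flags := flag.NewFlagSet("testParams", flag.ExitOnError)
	numWritesPerTx := flags.Int("NumWritesPerTx", 4, "number of keys written in each Tx")
	numReadsPerTx := flags.Int("NumReadsPerTx", 4, "number of keys to read in each Tx")
	useJSON := flags.Bool("UseJSONFormat", false, "should CouchDB use JSON for values")
	flags.Parse(testParams)

	// Setting NumReadsPerTx to zero turns the read/write benchmark into a
	// batch-update-only measurement, as noted in the txConf comments above.
	fmt.Println("writes:", *numWritesPerTx, "reads:", *numReadsPerTx, "json:", *useJSON)
}
```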

test/tools/LTE/experiments/insert_txs_test.go

Lines changed: 10 additions & 4 deletions
```diff
@@ -65,16 +65,22 @@ func runInsertClientsForChain(chain *chainmgmt.Chain) {
 }
 
 func runInsertClient(chain *chainmgmt.Chain, startKey, endKey int, wg *sync.WaitGroup) {
-	numKeysPerTx := conf.txConf.numKeysInEachTx
+	numWritesPerTx := conf.txConf.numWritesPerTx
 	kvSize := conf.dataConf.kvSize
+	useJSON := conf.dataConf.useJSON
 
 	currentKey := startKey
 	for currentKey <= endKey {
 		simulator, err := chain.NewTxSimulator(util.GenerateUUID())
 		common.PanicOnError(err)
-		for i := 0; i < numKeysPerTx; i++ {
-			common.PanicOnError(simulator.SetState(
-				chaincodeName, constructKey(currentKey), constructValue(currentKey, kvSize)))
+		for i := 0; i < numWritesPerTx; i++ {
+			if useJSON {
+				common.PanicOnError(simulator.SetState(
+					chaincodeName, constructKey(currentKey), constructJSONValue(currentKey, kvSize)))
+			} else {
+				common.PanicOnError(simulator.SetState(
+					chaincodeName, constructKey(currentKey), constructValue(currentKey, kvSize)))
+			}
 			currentKey++
 			if currentKey > endKey {
 				break
```

test/tools/LTE/experiments/readwrite_txs_test.go

Lines changed: 25 additions & 6 deletions
```diff
@@ -66,19 +66,38 @@ func runReadWriteClientsForChain(chain *chainmgmt.Chain) {
 }
 
 func runReadWriteClient(chain *chainmgmt.Chain, rand *rand.Rand, numTx int, wg *sync.WaitGroup) {
-	numKeysPerTx := conf.txConf.numKeysInEachTx
+	numWritesPerTx := conf.txConf.numWritesPerTx
+	numReadsPerTx := conf.txConf.numReadsPerTx
 	maxKeyNumber := calculateShare(conf.dataConf.numKVs, conf.chainMgrConf.NumChains, int(chain.ID))
+	kvSize := conf.dataConf.kvSize
+	useJSON := conf.dataConf.useJSON
+	var value []byte
 
 	for i := 0; i < numTx; i++ {
 		simulator, err := chain.NewTxSimulator(util.GenerateUUID())
 		common.PanicOnError(err)
-		for i := 0; i < numKeysPerTx; i++ {
+		for i := 0; i < numWritesPerTx; i++ {
 			keyNumber := rand.Intn(maxKeyNumber)
 			key := constructKey(keyNumber)
-			value, err := simulator.GetState(chaincodeName, key)
-			common.PanicOnError(err)
-			if !verifyValue(keyNumber, value) {
-				panic(fmt.Errorf("Value %s is not expected for key number %d", value, keyNumber))
+			// check to see if the number of reads is exceeded
+			if i < numReadsPerTx-1 {
+				value, err = simulator.GetState(chaincodeName, key)
+				common.PanicOnError(err)
+				if useJSON {
+					if !verifyJSONValue(keyNumber, value) {
+						panic(fmt.Errorf("Value %s is not expected for key number %d", value, keyNumber))
+					}
+				} else {
+					if !verifyValue(keyNumber, value) {
+						panic(fmt.Errorf("Value %s is not expected for key number %d", value, keyNumber))
+					}
+				}
+			} else {
+				if useJSON {
+					value = []byte(constructJSONValue(keyNumber, kvSize))
+				} else {
+					value = []byte(constructValue(keyNumber, kvSize))
+				}
 			}
 			common.PanicOnError(simulator.SetState(chaincodeName, key, value))
 		}
```
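
A toy sketch (not part of the commit) of how the `i < numReadsPerTx-1` guard above splits each transaction's key slots between read-then-write work and write-only work, assuming illustrative values for the two settings:

```go
package main

import "fmt"

func main() {
	// Illustrative settings; the daily CI parameter files use 4 and 4.
	numWritesPerTx, numReadsPerTx := 4, 2

	for i := 0; i < numWritesPerTx; i++ {
		if i < numReadsPerTx-1 {
			// These slots read and verify the existing value before rewriting the key.
			fmt.Printf("slot %d: read existing value, then write\n", i)
		} else {
			// The remaining slots construct a fresh value (JSON or binary) and only write.
			fmt.Printf("slot %d: write a freshly constructed value\n", i)
		}
	}
	// With NumReadsPerTx=0 no slot takes the read branch, which is how the
	// batch-update-only measurement mentioned in conf.go is obtained.
}
```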

test/tools/LTE/experiments/util.go

Lines changed: 86 additions & 0 deletions
```diff
@@ -18,24 +18,90 @@ package experiments
 
 import (
 	"bytes"
+	"encoding/json"
 	"fmt"
 	"math/rand"
+	"strconv"
 
 	logging "github.com/op/go-logging"
 )
 
 var logger = logging.MustGetLogger("experiments")
 
+type marbleRecord struct {
+	ID          string `json:"_id,omitempty"`
+	Rev         string `json:"_rev,omitempty"`
+	Prefix      string `json:"prefix,omitempty"`
+	AssetType   string `json:"asset_type,omitempty"`
+	AssetName   string `json:"asset_name,omitempty"`
+	Color       string `json:"color,omitempty"`
+	Size        int    `json:"size,omitempty"`
+	Owner       string `json:"owner,omitempty"`
+	DataPadding string `json:"datapadding,omitempty"`
+}
+
+var colors = []string{
+	"red",
+	"green",
+	"purple",
+	"yellow",
+	"white",
+	"black",
+}
+
+var owners = []string{
+	"fred",
+	"jerry",
+	"tom",
+	"alice",
+	"kim",
+	"angela",
+	"john",
+}
+
+//TestValue is a struct for holding the test value
+type TestValue struct {
+	Value string
+}
+
 func constructKey(keyNumber int) string {
 	return fmt.Sprintf("%s%09d", "key_", keyNumber)
 }
 
 func constructValue(keyNumber int, kvSize int) []byte {
 	prefix := constructValuePrefix(keyNumber)
 	randomBytes := constructRandomBytes(kvSize - len(prefix))
+
 	return append(prefix, randomBytes...)
 }
 
+func constructJSONValue(keyNumber int, kvSize int) []byte {
+
+	prefix := constructValuePrefix(keyNumber)
+
+	rand.Seed(int64(keyNumber))
+	color := colors[rand.Intn(len(colors))]
+	size := rand.Intn(len(colors))*10 + 10
+	owner := owners[rand.Intn(len(owners))]
+	assetName := "marble" + strconv.Itoa(keyNumber)
+
+	testRecord := marbleRecord{Prefix: string(prefix), AssetType: "marble", AssetName: assetName, Color: color, Size: size, Owner: owner}
+
+	jsonValue, _ := json.Marshal(testRecord)
+
+	if kvSize > len(jsonValue) {
+		randomJSONBytes := constructRandomBytes(kvSize - len(jsonValue))
+
+		//add in extra bytes
+		testRecord.DataPadding = string(randomJSONBytes)
+
+		jsonValue, _ = json.Marshal(testRecord)
+	}
+
+	return jsonValue
+
+}
+
 func constructValuePrefix(keyNumber int) []byte {
 	return []byte(fmt.Sprintf("%s%09d", "value_", keyNumber))
 }
@@ -45,7 +111,27 @@ func verifyValue(keyNumber int, value []byte) bool {
 	if len(value) < len(prefix) {
 		return false
 	}
+
 	return bytes.Equal(value[:len(prefix)], prefix)
+
+}
+
+func verifyJSONValue(keyNumber int, value []byte) bool {
+	prefix := constructValuePrefix(keyNumber)
+	if len(value) < len(prefix) {
+		return false
+	}
+
+	var marble marbleRecord
+
+	json.Unmarshal(value, &marble)
+
+	if len(value) < len(prefix) {
+		return false
+	}
+
+	valuePrefix := []byte(marble.Prefix)
+	return bytes.Equal(valuePrefix, prefix)
 }
 
 func disableLogging() {
```
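
As a rough illustration of the value shape this produces (not part of the commit), here is a standalone sketch that builds a marble record with the same prefix scheme and pads it toward a target size; the inline letter generator stands in for `constructRandomBytes`, whose implementation is not shown in this diff:

```go
package main

import (
	"encoding/json"
	"fmt"
	"math/rand"
)

// Trimmed copy of the marbleRecord fields that matter for this sketch.
type marbleRecord struct {
	Prefix      string `json:"prefix,omitempty"`
	AssetType   string `json:"asset_type,omitempty"`
	AssetName   string `json:"asset_name,omitempty"`
	DataPadding string `json:"datapadding,omitempty"`
}

func main() {
	keyNumber, kvSize := 42, 200
	// Same prefix scheme as constructValuePrefix, so verifyJSONValue can match it.
	prefix := fmt.Sprintf("%s%09d", "value_", keyNumber)

	record := marbleRecord{Prefix: prefix, AssetType: "marble", AssetName: fmt.Sprintf("marble%d", keyNumber)}
	jsonValue, _ := json.Marshal(record)

	// Pad the record toward kvSize, mirroring constructJSONValue. Random
	// letters stand in for constructRandomBytes here. Note that the final
	// value ends up a little over kvSize, since the datapadding field name
	// and quotes are marshalled in after the padding length is computed.
	if kvSize > len(jsonValue) {
		padding := make([]byte, kvSize-len(jsonValue))
		for i := range padding {
			padding[i] = byte('a' + rand.Intn(26))
		}
		record.DataPadding = string(padding)
		jsonValue, _ = json.Marshal(record)
	}

	fmt.Printf("len=%d value=%s\n", len(jsonValue), jsonValue)
}
```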

test/tools/LTE/scripts/benchmarks.sh

Lines changed: 2 additions & 2 deletions
```diff
@@ -22,7 +22,7 @@ source ./common.sh
 PKG_NAME="github.com/hyperledger/fabric/test/tools/LTE/experiments"
 
 function setCommonTestParams {
-  TEST_PARAMS="-DataDir=$DataDir, -NumChains=$NumChains, -NumParallelTxPerChain=$NumParallelTxPerChain, -NumKeysInEachTx=$NumKeysInEachTx, -BatchSize=$BatchSize, -NumKVs=$NumKVs, -KVSize=$KVSize"
+  TEST_PARAMS="-DataDir=$DataDir, -NumChains=$NumChains, -NumParallelTxPerChain=$NumParallelTxPerChain, -NumWritesPerTx=$NumWritesPerTx, -NumReadsPerTx=$NumReadsPerTx, -BatchSize=$BatchSize, -NumKVs=$NumKVs, -KVSize=$KVSize, -UseJSONFormat=$UseJSONFormat"
   RESULTANT_DIRS="$DataDir/ledgersData/chains/chains $DataDir/ledgersData/chains/index $DataDir/ledgersData/stateLeveldb $DataDir/ledgersData/historyLeveldb"
 }
 
@@ -36,7 +36,7 @@ function runReadWriteTxs {
   FUNCTION_NAME="BenchmarkReadWriteTxs"
   if [ "$CLEAR_OS_CACHE" == "true" ]; then
     clearOSCache
-  fi
+  fi
   setCommonTestParams
   TEST_PARAMS="$TEST_PARAMS, -NumTotalTx=$NumTotalTx"
   executeTest
```

test/tools/LTE/scripts/parameters_couchdb_daily_CI.sh

Lines changed: 5 additions & 3 deletions
```diff
@@ -6,12 +6,14 @@
 #
 
 export useCouchDB="yes"
+UseJSONFormat="true"
 DataDir="/tmp/fabric/test/tools/LTE/data"
 NumChains=10
 NumParallelTxPerChain=10
 NumKVs=10000
 NumTotalTx=10000
-NumKeysInEachTx=4
+NumWritesPerTx=4
+NumReadsPerTx=4
 BatchSize=50
 KVSize=200
 
@@ -25,12 +27,12 @@ KVSize=200
 # NumParallelTxPerChain=10
 # NumKVs=10000
 # NumTotalTx=10000
-# NumKeysInEachTx=4
+# NumWritesPerTx=4
 # BatchSize=50
 # KVSize=200
 ArrayNumParallelTxPerChain=(1 5 10 20 50 100)
 ArrayNumChains=(1 5 10 20 50)
-ArrayNumKeysInEachTx=(1 2 5 10 20)
+ArrayNumWritesPerTx=(1 2 5 10 20)
 ArrayKVSize=(100 200 500 1000 2000)
 ArrayBatchSize=(10 20 100 500)
 ArrayNumParallelTxWithSingleChain=(1 5 10 20 50 100)
```

test/tools/LTE/scripts/parameters_daily_CI.sh

Lines changed: 3 additions & 2 deletions
```diff
@@ -9,7 +9,8 @@ NumChains=10
 NumParallelTxPerChain=10
 NumKVs=100000
 NumTotalTx=100000
-NumKeysInEachTx=4
+NumWritesPerTx=4
+NumReadsPerTx=4
 BatchSize=50
 KVSize=200
 
@@ -28,7 +29,7 @@ KVSize=200
 # KVSize=200
 ArrayNumParallelTxPerChain=(1 5 10 20 50 100 500 2000)
 ArrayNumChains=(1 5 10 20 50 100 500 2000)
-ArrayNumKeysInEachTx=(1 2 5 10 20)
+ArrayNumWritesPerTx=(1 2 5 10 20)
 ArrayKVSize=(100 200 500 1000 2000)
 ArrayBatchSize=(10 20 100 500)
 ArrayNumParallelTxWithSingleChain=(1 5 10 20 50 100 500 2000)
```

test/tools/LTE/scripts/runbenchmarks.sh

Lines changed: 7 additions & 7 deletions
```diff
@@ -27,10 +27,10 @@ function varyNumChains {
     done
 }
 
-function varyNumKeysInEachTx {
-    for v in "${ArrayNumKeysInEachTx[@]}"
+function varyNumWritesPerTx {
+    for v in "${ArrayNumWritesPerTx[@]}"
     do
-        NumKeysInEachTx=$v
+        NumWritesPerTx=$v
         rm -rf $DataDir;runInsertTxs;runReadWriteTxs
     done
 }
@@ -113,7 +113,7 @@ varyNumParallelTxPerChain
 varyNumChains
 varyNumParallelTxWithSingleChain
 varyNumChainsWithNoParallelism
-varyNumKeysInEachTx
+varyNumWritesPerTx
 varyKVSize
 varyBatchSize
 varyNumTxs
@@ -158,8 +158,8 @@ case $1 in
         varyNumParallelTxWithSingleChain ;;
     varyNumChainsWithNoParallelism)
         varyNumChainsWithNoParallelism ;;
-    varyNumKeysInEachTx)
-        varyNumKeysInEachTx ;;
+    varyNumWritesPerTx)
+        varyNumWritesPerTx ;;
    varyKVSize)
        varyKVSize ;;
    varyBatchSize)
@@ -176,7 +176,7 @@ case $1 in
         varyNumChains
         varyNumParallelTxWithSingleChain
         varyNumChainsWithNoParallelism
-        varyNumKeysInEachTx
+        varyNumWritesPerTx
         varyKVSize
         varyBatchSize
         varyNumTxs
```
