
Commit

Address more concerns highlighted by linters
These changes remove dead code, add error checks, and assign unused variables to the blank identifier `_`.

Signed-off-by: Matthew Sykes <sykesmat@us.ibm.com>
sykesm authored and ale-linux committed May 25, 2020
1 parent c799dcc commit 445b8f6
Showing 87 changed files with 193 additions and 247 deletions.
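
The same three lint fixes recur throughout the files shown below: errors that were previously discarded are now checked, values that are intentionally unused are assigned to the blank identifier `_`, and dead code (trailing bare `return` statements, unused struct fields, unused helpers) is deleted. The sketch below is a minimal, self-contained illustration of those patterns; the file name, types, and functions in it are hypothetical and are not taken from this diff.

// lint_fixes_example.go: an illustrative sketch only; nothing here comes from
// the commit itself.
package main

import (
	"encoding/json"
	"fmt"
)

type metadata struct {
	Path  string `json:"path"`
	Label string `json:"label"`
}

// encodeMetadata shows the "add error checks" pattern: the error returned by
// json.Marshal is checked instead of being silently discarded.
func encodeMetadata(md metadata) ([]byte, error) {
	b, err := json.Marshal(md)
	if err != nil {
		return nil, fmt.Errorf("could not marshal metadata: %w", err)
	}
	return b, nil
}

// reportEncoding shows the other two patterns: a value that is not needed is
// assigned to the blank identifier `_`, and the bare `return` that a linter
// would flag at the end of a void function is simply omitted.
func reportEncoding(md metadata) {
	_, err := encodeMetadata(md) // only the error matters here
	if err != nil {
		fmt.Println("encode failed:", err)
	}
}

func main() {
	reportEncoding(metadata{Path: "testdata/example.json", Label: "sample"})
}

Each hunk below is one of these patterns applied to the repository's own sources.
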
2 changes: 1 addition & 1 deletion bccsp/idemix/bridge/bridge_test.go
@@ -258,7 +258,6 @@ var _ = Describe("Idemix Bridge", func() {
})

Context("public nym import", func() {

It("success", func() {
npk := handlers.NewNymPublicKey(&bridge.Ecp{
E: FP256BN.NewECPbigs(FP256BN.NewBIGint(10), FP256BN.NewBIGint(20)),
@@ -268,6 +267,7 @@ var _ = Describe("Idemix Bridge", func() {
Expect(raw).NotTo(BeNil())

npk2, err := User.NewPublicNymFromBytes(raw)
Expect(err).NotTo(HaveOccurred())
raw2, err := npk2.Bytes()
Expect(err).NotTo(HaveOccurred())
Expect(raw2).NotTo(BeNil())
1 change: 1 addition & 0 deletions cmd/common/signer/signer_test.go
@@ -35,6 +35,7 @@ func TestSigner(t *testing.T) {
assert.NoError(t, err)

r, s, err := utils.UnmarshalECDSASignature(sig)
assert.NoError(t, err)
ecdsa.Verify(&signer.key.PublicKey, util.ComputeSHA256(msg), r, s)
}

3 changes: 3 additions & 0 deletions cmd/configtxgen/main.go
@@ -114,6 +114,9 @@ func doOutputAnchorPeersUpdate(conf *genesisconfig.Profile, channelID string, ou
}

updateTx, err := protoutil.CreateSignedEnvelope(cb.HeaderType_CONFIG_UPDATE, channelID, nil, newConfigUpdateEnv, 0, 0)
if err != nil {
return errors.WithMessage(err, "could not create signed envelope")
}

logger.Info("Writing anchor peer update")
err = writeFile(outputAnchorPeersUpdate, protoutil.MarshalOrPanic(updateTx), 0640)
3 changes: 3 additions & 0 deletions common/ledger/blkstorage/blockindex.go
@@ -310,6 +310,9 @@ func (index *blockIndex) exportUniqueTxIDs(dir string, newHashFunc snapshot.NewH
return nil, err
}
metadataHash, err := metadataFile.Done()
if err != nil {
return nil, err
}

return map[string][]byte{
snapshotDataFileName: dataHash,
8 changes: 7 additions & 1 deletion common/ledger/blkstorage/reset_test.go
@@ -73,6 +73,7 @@ func TestResetToGenesisBlkMultipleBlkFiles(t *testing.T) {

ledgerDir := (&Conf{blockStorageDir: blockStoreRootDir}).getLedgerBlockDir("ledger1")
files, err := ioutil.ReadDir(ledgerDir)
require.NoError(t, err)
require.Len(t, files, 5)
resetToGenesisBlk(ledgerDir)
assertBlocksDirOnlyFileWithGenesisBlock(t, ledgerDir, blocks[0])
@@ -108,6 +109,7 @@ func TestResetBlockStore(t *testing.T) {
// test load and clear preResetHeight for ledger1 and ledger2
ledgerIDs := []string{"ledger1", "ledger2"}
h, err := LoadPreResetHeight(blockStoreRootDir, ledgerIDs)
require.NoError(t, err)
require.Equal(t,
map[string]uint64{
"ledger1": 20,
@@ -125,6 +127,7 @@ func TestResetBlockStore(t *testing.T) {

require.NoError(t, ClearPreResetHeight(blockStoreRootDir, ledgerIDs))
h, err = LoadPreResetHeight(blockStoreRootDir, ledgerIDs)
require.NoError(t, err)
require.Equal(t,
map[string]uint64{},
h,
@@ -134,6 +137,7 @@ func TestResetBlockStore(t *testing.T) {
require.NoError(t, ResetBlockStore(blockStoreRootDir))
ledgerIDs = []string{"ledger2"}
h, err = LoadPreResetHeight(blockStoreRootDir, ledgerIDs)
require.NoError(t, err)
require.Equal(t,
map[string]uint64{
"ledger2": 40,
@@ -143,6 +147,7 @@ func TestResetBlockStore(t *testing.T) {
require.NoError(t, ClearPreResetHeight(blockStoreRootDir, ledgerIDs))
// verify that the preResetHeight file for ledger1 is not deleted
h, err = LoadPreResetHeight(blockStoreRootDir, []string{"ledger1", "ledger2"})
require.NoError(t, err)
require.Equal(t,
map[string]uint64{
"ledger1": 20,
@@ -195,6 +200,7 @@ func TestRecordHeight(t *testing.T) {

func assertBlocksDirOnlyFileWithGenesisBlock(t *testing.T, ledgerDir string, genesisBlock *common.Block) {
files, err := ioutil.ReadDir(ledgerDir)
require.NoError(t, err)
require.Len(t, files, 2)
require.Equal(t, "__backupGenesisBlockBytes", files[0].Name())
require.Equal(t, "blockfile_000000", files[1].Name())
@@ -225,7 +231,7 @@ func assertBlockStorePostReset(t *testing.T, store *BlockStore, originallyCommit
require.NoError(t, err)
require.Equal(t, originallyCommittedBlocks[0], blk)

blk, err = store.RetrieveBlockByNumber(1)
_, err = store.RetrieveBlockByNumber(1)
require.Error(t, err)
require.Equal(t, err, ErrNotFoundInIndex)

5 changes: 4 additions & 1 deletion common/ledger/snapshot/file_test.go
@@ -33,9 +33,9 @@ func TestFileCreateAndRead(t *testing.T) {

// create file and encode some data
fileCreator, err := CreateFile(path.Join(testDir, "dataFile"), byte(5), testNewHashFunc)
require.NoError(t, err)
defer fileCreator.Close()

require.NoError(t, err)
require.NoError(t, fileCreator.EncodeString("Hi there"))
require.NoError(t, fileCreator.EncodeString("How are you?"))
require.NoError(t, fileCreator.EncodeString("")) // zero length string
@@ -61,6 +61,7 @@

// open the file and verify the reads
fileReader, err := OpenFile(path.Join(testDir, "dataFile"), byte(5))
require.NoError(t, err)
defer fileReader.Close()

str, err := fileReader.DecodeString()
@@ -115,6 +116,7 @@ func TestFileCreatorErrorPropagation(t *testing.T) {
// Mimic the errors by setting the writer to an error returning writer
dataFilePath := path.Join(testPath, "data-file")
fileCreator, err := CreateFile(dataFilePath, byte(1), testNewHashFunc)
require.NoError(t, err)
defer fileCreator.Close()

fileCreator.multiWriter = &errorCausingWriter{err: errors.New("error-from-EncodeUVarint")}
@@ -160,6 +162,7 @@ func TestFileReaderErrorPropagation(t *testing.T) {
// a file with mismatched format info causes an error
unexpectedFormatFile := path.Join(testPath, "wrong-data-format-file")
fw, err := CreateFile(unexpectedFormatFile, byte(1), testNewHashFunc)
require.NoError(t, err)
require.NoError(t, fw.EncodeString("Hello there"))
_, err = fw.Done()
require.NoError(t, err)
2 changes: 0 additions & 2 deletions core/chaincode/lifecycle/event_broker.go
@@ -78,7 +78,6 @@ func (b *EventBroker) ProcessInstallEvent(localChaincode *LocalChaincode) {
b.invokeDoneOnListeners(channelID, true)
}
}
return
}

// ProcessApproveOrDefineEvent gets invoked by an event that makes approve and define to be true
@@ -104,7 +103,6 @@ func (b *EventBroker) ProcessApproveOrDefineEvent(channelID string, chaincodeNam
}
b.invokeListeners(channelID, ccdef, dbArtifacts)
b.defineCallbackStatus.Store(channelID, struct{}{})
return
}

// ApproveOrDefineCommitted gets invoked after the commit of state updates that triggered the invocation of
3 changes: 3 additions & 0 deletions core/chaincode/persistence/chaincode_package.go
@@ -87,6 +87,9 @@ func (fpl *FallbackPackageLocator) GetChaincodePackage(packageID string) (*Chain
}

mdBytes, err := json.Marshal(md)
if err != nil {
return nil, nil, nil, errors.WithMessagef(err, "could not marshal metadata for chaincode package '%s'", packageID)
}

return md,
mdBytes,
1 change: 1 addition & 0 deletions core/committer/txvalidator/v14/txvalidator_test.go
@@ -148,6 +148,7 @@ func TestBlockValidationDuplicateTXId(t *testing.T) {

envs := []*common.Envelope{}
env, _, err := testutil.ConstructTransaction(t, pubSimulationResBytes, "", true)
assert.NoError(t, err)
envs = append(envs, env)
envs = append(envs, env)
block := testutil.NewBlock(envs, 1, gbHash)
2 changes: 2 additions & 0 deletions core/committer/txvalidator/v14/validator_test.go
@@ -154,6 +154,7 @@ func createRWset(t *testing.T, ccnames ...string) []byte {
rwset, err := rwsetBuilder.GetTxSimulationResults()
assert.NoError(t, err)
rwsetBytes, err := rwset.GetPubSimulationBytes()
assert.NoError(t, err)
return rwsetBytes
}

@@ -717,6 +718,7 @@ func TestParallelValidation(t *testing.T) {
rwset, err := rwsetBuilder.GetTxSimulationResults()
assert.NoError(t, err)
rwsetBytes, err := rwset.GetPubSimulationBytes()
assert.NoError(t, err)
tx := getEnvWithSigner(ccID, nil, rwsetBytes, sig, t)
blockData = append(blockData, protoutil.MarshalOrPanic(tx))
}
1 change: 1 addition & 0 deletions core/committer/txvalidator/v20/txvalidator_test.go
@@ -139,6 +139,7 @@ func TestBlockValidationDuplicateTXId(t *testing.T) {

envs := []*common.Envelope{}
env, _, err := testutil.ConstructTransaction(t, pubSimulationResBytes, "", true)
assert.NoError(t, err)
envs = append(envs, env)
envs = append(envs, env)
block := testutil.NewBlock(envs, 1, []byte("Water, water everywhere and all the boards did shrink"))
2 changes: 2 additions & 0 deletions core/committer/txvalidator/v20/validator_test.go
@@ -70,6 +70,7 @@ func createRWset(t *testing.T, ccnames ...string) []byte {
rwset, err := rwsetBuilder.GetTxSimulationResults()
assert.NoError(t, err)
rwsetBytes, err := rwset.GetPubSimulationBytes()
assert.NoError(t, err)
return rwsetBytes
}

@@ -553,6 +554,7 @@ func TestParallelValidation(t *testing.T) {
rwset, err := rwsetBuilder.GetTxSimulationResults()
assert.NoError(t, err)
rwsetBytes, err := rwset.GetPubSimulationBytes()
assert.NoError(t, err)
tx := getEnvWithSigner(ccID, nil, rwsetBytes, sig, t)
blockData = append(blockData, protoutil.MarshalOrPanic(tx))
}
1 change: 1 addition & 0 deletions core/common/ccprovider/ccprovider_test.go
@@ -113,6 +113,7 @@ func TestInstalledCCs(t *testing.T) {

func TestSetGetChaincodeInstallPath(t *testing.T) {
tempDir, err := ioutil.TempDir("", "ccprovider")
assert.NoError(t, err)
defer os.RemoveAll(tempDir)

cryptoProvider, err := sw.NewDefaultSecurityLevelWithKeystore(sw.NewDummyKeyStore())
5 changes: 2 additions & 3 deletions core/common/ccprovider/cdspackage.go
@@ -127,8 +127,7 @@ func (ccpack *CDSPackage) getCDSData(cds *pb.ChaincodeDeploymentSpec) ([]byte, [
panic("nil cds")
}

b, err := proto.Marshal(cds)
if err != nil {
if _, err := proto.Marshal(cds); err != nil {
return nil, nil, nil, err
}

@@ -153,7 +152,7 @@

cdsdata.MetaDataHash = hash.Sum(nil)

b, err = proto.Marshal(cdsdata)
b, err := proto.Marshal(cdsdata)
if err != nil {
return nil, nil, nil, err
}
5 changes: 0 additions & 5 deletions core/common/validation/msgvalidation_test.go
@@ -8,7 +8,6 @@ package validation

import (
"fmt"
"strconv"
"testing"

"github.com/golang/protobuf/proto"
@@ -135,7 +134,3 @@ func TestCheckSignatureFromCreator(t *testing.T) {
assert.Error(t, err)
assert.Contains(t, err.Error(), "MSP error: channel doesn't exist")
}

func ToHex(q uint64) string {
return "0x" + strconv.FormatUint(q, 16)
}
1 change: 0 additions & 1 deletion core/common/validation/statebased/v20.go
@@ -73,7 +73,6 @@ type CollectionResources interface {

type policyCheckerV20 struct {
someEPChecked bool
vpmgr KeyLevelValidationParameterManager
policySupport validation.PolicyEvaluator
ccEP []byte
nsEPChecked map[string]bool
1 change: 0 additions & 1 deletion core/common/validation/statebased/validator_keylevel.go
@@ -52,7 +52,6 @@ func (p *baseEvaluator) checkSBAndCCEP(cc, coll, key string, blockNum, txNum uin
// when performing its side of the validation.
case *ledger.CollConfigNotDefinedError, *ledger.InvalidCollNameError:
logger.Warningf(errors.WithMessage(err, "skipping key-level validation").Error())
err = nil
// 3) any other type of error should return an execution failure which will
// lead to halting the processing on this channel. Note that any non-categorized
// deterministic error would be caught by the default and would lead to
2 changes: 1 addition & 1 deletion core/container/externalbuilder/tar_test.go
@@ -43,8 +43,8 @@ var _ = Describe("Tar", func() {
Context("when the archive conains an odd file type", func() {
It("returns an error", func() {
file, err := os.Open("testdata/archive_with_symlink.tar.gz")
defer file.Close()
Expect(err).NotTo(HaveOccurred())
defer file.Close()
err = externalbuilder.Untar(file, dst)
Expect(err).To(MatchError("invalid file type '50' contained in archive for file 'c.file'"))
})
4 changes: 2 additions & 2 deletions core/handlers/auth/auth_test.go
@@ -23,7 +23,7 @@ func TestChainFilters(t *testing.T) {
binary.BigEndian.PutUint32(initialProposal.ProposalBytes, 0)

firstFilter := ChainFilters(endorser, filters...)
firstFilter.ProcessProposal(nil, initialProposal)
firstFilter.ProcessProposal(context.Background(), initialProposal)
for i := 0; i < iterations; i++ {
assert.Equal(t, uint32(i), filters[i].(*mockAuthFilter).sequence,
"Expected filters to be invoked in the provided sequence")
@@ -35,7 +35,7 @@
// Test with no filters
binary.BigEndian.PutUint32(initialProposal.ProposalBytes, 0)
firstFilter = ChainFilters(endorser)
firstFilter.ProcessProposal(nil, initialProposal)
firstFilter.ProcessProposal(context.Background(), initialProposal)
assert.Equal(t, uint32(0), endorser.sequence,
"Expected endorser to be invoked first")
}
2 changes: 1 addition & 1 deletion core/handlers/auth/filter/filter_test.go
@@ -27,6 +27,6 @@ func TestFilter(t *testing.T) {
auth := NewFilter()
nextEndorser := &mockEndorserServer{}
auth.Init(nextEndorser)
auth.ProcessProposal(nil, nil)
auth.ProcessProposal(context.Background(), nil)
assert.True(t, nextEndorser.invoked)
}
2 changes: 1 addition & 1 deletion core/handlers/auth/plugin/filter_test.go
@@ -28,6 +28,6 @@ func TestFilter(t *testing.T) {
auth := NewFilter()
nextEndorser := &mockEndorserServer{}
auth.Init(nextEndorser)
auth.ProcessProposal(nil, nil)
auth.ProcessProposal(context.Background(), nil)
assert.True(t, nextEndorser.invoked)
}
2 changes: 1 addition & 1 deletion core/handlers/library/registry_plugin_test.go
@@ -60,7 +60,7 @@ func TestLoadAuthPlugin(t *testing.T) {
assert.Len(t, testReg.filters, 1, "Expected filter to be registered")

testReg.filters[0].Init(endorser)
testReg.filters[0].ProcessProposal(nil, nil)
testReg.filters[0].ProcessProposal(context.TODO(), nil)
assert.True(t, endorser.invoked, "Expected filter to invoke endorser on invoke")
}

3 changes: 3 additions & 0 deletions core/ledger/confighistory/mgr.go
@@ -220,6 +220,9 @@ func (r *Retriever) ExportConfigHistory(dir string, newHashFunc snapshot.NewHash
return nil, err
}
metadataHash, err := metadataFileWriter.Done()
if err != nil {
return nil, err
}

return map[string][]byte{
snapshotDataFileName: dataHash,
2 changes: 1 addition & 1 deletion core/ledger/confighistory/mgr_test.go
@@ -163,7 +163,7 @@ func TestMgr(t *testing.T) {
assert.NoError(t, err)
assert.Nil(t, retrievedConfig)

retrievedConfig, err = retriever.CollectionConfigAt(5000, chaincodeName)
_, err = retriever.CollectionConfigAt(5000, chaincodeName)
typedErr, ok := err.(*ledger.ErrCollectionConfigNotYetAvailable)
assert.True(t, ok)
assert.Equal(t, maxBlockNumberInLedger, typedErr.MaxBlockNumCommitted)
2 changes: 1 addition & 1 deletion core/ledger/kvledger/hashcheck_pvtdata_test.go
@@ -113,7 +113,7 @@ func TestConstructValidInvalidBlocksPvtData(t *testing.T) {
assert.Len(t, hashMismatched, 0)

// construct pvtData from missing data in tx7 with wrong pvtData
wrongPvtDataBlk1Tx7, pubSimResBytesBlk1Tx7 = produceSamplePvtdata(t, 7, []string{"ns-1:coll-2"}, [][]byte{v6})
wrongPvtDataBlk1Tx7, _ = produceSamplePvtdata(t, 7, []string{"ns-1:coll-2"}, [][]byte{v6})
pvtdata = []*ledger.ReconciledPvtdata{
{
BlockNum: 1,
5 changes: 2 additions & 3 deletions core/ledger/kvledger/kv_ledger_provider.go
@@ -644,9 +644,8 @@ func (s *idStore) getLedgerMetadata(ledgerID string) (*msgs.LedgerMetadata, erro

func (s *idStore) ledgerIDExists(ledgerID string) (bool, error) {
key := s.encodeLedgerKey(ledgerID, ledgerKeyPrefix)
val := []byte{}
err := error(nil)
if val, err = s.db.Get(key); err != nil {
val, err := s.db.Get(key)
if err != nil {
return false, err
}
return val != nil, nil
1 change: 1 addition & 0 deletions core/ledger/kvledger/kv_ledger_provider_test.go
@@ -310,6 +310,7 @@ func TestRecovery(t *testing.T) {
// now create the genesis block
genesisBlock, _ := configtxtest.MakeGenesisBlock(constructTestLedgerID(1))
ledger, err := provider1.open(constructTestLedgerID(1))
assert.NoError(t, err)
ledger.CommitLegacy(&lgr.BlockAndPvtData{Block: genesisBlock}, &lgr.CommitOptions{})
ledger.Close()

15 changes: 0 additions & 15 deletions core/ledger/kvledger/kv_ledger_test.go
@@ -978,21 +978,6 @@ func btlPolicyForSampleData() pvtdatapolicy.BTLPolicy {
)
}

func prepareNextBlockWithMissingPvtDataForTest(t *testing.T, l lgr.PeerLedger, bg *testutil.BlockGenerator,
txid string, pubKVs map[string]string, pvtKVs map[string]string) (*lgr.BlockAndPvtData, *lgr.TxPvtData) {

blockAndPvtData := prepareNextBlockForTest(t, l, bg, txid, pubKVs, pvtKVs)

blkMissingDataInfo := make(lgr.TxMissingPvtDataMap)
blkMissingDataInfo.Add(0, "ns", "coll", true)
blockAndPvtData.MissingPvtData = blkMissingDataInfo

pvtData := blockAndPvtData.PvtData[0]
delete(blockAndPvtData.PvtData, 0)

return blockAndPvtData, pvtData
}

func prepareNextBlockForTest(t *testing.T, l lgr.PeerLedger, bg *testutil.BlockGenerator,
txid string, pubKVs map[string]string, pvtKVs map[string]string) *lgr.BlockAndPvtData {
simulator, _ := l.NewTxSimulator(txid)
1 change: 1 addition & 0 deletions core/ledger/kvledger/rwset_backward_compatibility_test.go
@@ -61,6 +61,7 @@ func testGenerateSampleRWSet(t *testing.T) []byte {
// perform a range query with a significantly larger scan so that the merkle tree building kicks in
// each level contains max 50 nodes per the current configuration
simulator, err := ledger.NewTxSimulator(txid)
require.NoError(t, err)
for i := 0; i < 10011; i++ {
simulator.SetState("ns1", fmt.Sprintf("key-%000d", i), []byte(fmt.Sprintf("value-%000d", i)))
}