Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .changeset/cool-dancers-rescue.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
"chainlink": patch
---

#bugfix Use correct data id in secure mint aggregator
33 changes: 33 additions & 0 deletions core/capabilities/integration_tests/keystone/contracts_setup.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ import (
"github.com/ethereum/go-ethereum/common"
"github.com/stretchr/testify/require"

data_feeds_cache "github.com/smartcontractkit/chainlink-evm/gethwrappers/data-feeds/generated/data_feeds_cache"
feeds_consumer "github.com/smartcontractkit/chainlink-evm/gethwrappers/keystone/generated/feeds_consumer_1_0_0"
forwarder "github.com/smartcontractkit/chainlink-evm/gethwrappers/keystone/generated/forwarder_1_0_0"
"github.com/smartcontractkit/chainlink/v2/core/capabilities/integration_tests/framework"
Expand Down Expand Up @@ -47,3 +48,35 @@ func SetupConsumerContract(t *testing.T, backend *framework.EthBlockchain,

return addr, consumer
}

// SetupDataFeedsCacheContract deploys a DataFeedsCache contract on the given backend and
// configures it to accept secure-mint decimal reports coming from the given forwarder,
// workflow owner, and workflow name. It returns the deployed address and binding.
func SetupDataFeedsCacheContract(t *testing.T, backend *framework.EthBlockchain,
	forwarderAddress common.Address, workflowOwner string, workflowName string) (common.Address, *data_feeds_cache.DataFeedsCache) {
	addr, _, dataFeedsCache, err := data_feeds_cache.DeployDataFeedsCache(backend.TransactionOpts(), backend.Client())
	require.NoError(t, err)
	backend.Commit()

	// The on-chain workflow name is a fixed-size [10]byte field; longer names are truncated.
	var nameBytes [10]byte
	copy(nameBytes[:], workflowName)

	ownerAddr := common.HexToAddress(workflowOwner)

	// Grant the deploying account feed-admin rights so it may set feed configs below.
	_, err = dataFeedsCache.SetFeedAdmin(backend.TransactionOpts(), backend.TransactionOpts().From, true)
	require.NoError(t, err)
	backend.Commit()

	// Data ID for secure mint report for chain selector 16015286601757825753 (ethereum-testnet-sepolia)
	var feedIDBytes [16]byte
	copy(feedIDBytes[:], common.FromHex("0x04de41ba4fc9d91ad900000000000000"))

	workflowMeta := []data_feeds_cache.DataFeedsCacheWorkflowMetadata{
		{
			AllowedSender:        forwarderAddress,
			AllowedWorkflowOwner: ownerAddr,
			AllowedWorkflowName:  nameBytes,
		},
	}
	_, err = dataFeedsCache.SetDecimalFeedConfigs(backend.TransactionOpts(), [][16]byte{feedIDBytes}, []string{"securemint"}, workflowMeta)
	require.NoError(t, err)
	backend.Commit()

	return addr, dataFeedsCache
}
46 changes: 41 additions & 5 deletions core/capabilities/integration_tests/keystone/keystone_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ import (
"github.com/smartcontractkit/chainlink-common/pkg/capabilities/datastreams"
"github.com/smartcontractkit/chainlink-common/pkg/logger"
"github.com/smartcontractkit/chainlink-common/pkg/values"
data_feeds_cache "github.com/smartcontractkit/chainlink-evm/gethwrappers/data-feeds/generated/data_feeds_cache"
feeds_consumer "github.com/smartcontractkit/chainlink-evm/gethwrappers/keystone/generated/feeds_consumer_1_0_0"
fwd "github.com/smartcontractkit/chainlink-evm/gethwrappers/keystone/generated/forwarder_1_0_0"
"github.com/smartcontractkit/chainlink/v2/core/capabilities/integration_tests/framework"
Expand Down Expand Up @@ -46,7 +47,7 @@ func testTransmissionSchedule(t *testing.T, deltaStage string, schedule string)

// mercury-style reports
triggerSink := framework.NewTriggerSink(t, "streams-trigger", "1.0.0")
workflowDon, consumer, _ := setupKeystoneDons(ctx, t, lggr, workflowDonConfiguration, triggerDonConfiguration,
workflowDon, consumer, _, _ := setupKeystoneDons(ctx, t, lggr, workflowDonConfiguration, triggerDonConfiguration,
targetDonConfiguration, triggerSink)

feedCount := 3
Expand Down Expand Up @@ -124,7 +125,7 @@ func waitForConsumerReports(t *testing.T, consumer *feeds_consumer.KeystoneFeeds
}

// trackErrorsOnForwarder watches the forwarder contract for report processed events and fails the test if the report is not forwarded to the consumer
func trackErrorsOnForwarder(t *testing.T, forwarder *fwd.KeystoneForwarder, consumerAddress common.Address) {
func trackErrorsOnForwarder(t *testing.T, forwarder *fwd.KeystoneForwarder, dfCacheAddress common.Address) {
t.Helper()

reportsProcessed := make(chan *fwd.KeystoneForwarderReportProcessed, 1000)
Expand All @@ -150,16 +151,51 @@ func trackErrorsOnForwarder(t *testing.T, forwarder *fwd.KeystoneForwarder, cons
return
case report := <-reportsProcessed:
t.Logf("Forwarder received report: %+v", report)

transmissionInfo, err := forwarder.GetTransmissionInfo(nil, dfCacheAddress, report.WorkflowExecutionId, report.ReportId)
assert.NoError(t, err)
if !report.Result { // if the report is not forwarded to the consumer, we need to get the transmission info to see why
transmissionInfo, err := forwarder.GetTransmissionInfo(nil, consumerAddress, report.WorkflowExecutionId, report.ReportId)
assert.NoError(t, err)
t.Errorf("Report not forwarded to consumer: %+v", transmissionInfo)
t.Errorf("Report not forwarded to DataFeeds Cache: %+v", transmissionInfo)
} else {
t.Logf("Report successfully forwarded to DataFeeds Cache: %+v", transmissionInfo)
}
}
}
}()
}

// trackInvalidPermissionEventsOnDFCache watches the DF Cache contract for
// InvalidUpdatePermission events for the lifetime of the test and logs each one.
// These events indicate the cache rejected a report because the sender, owner,
// or workflow name did not match its configured permissions.
func trackInvalidPermissionEventsOnDFCache(t *testing.T, dataFeedsCache *data_feeds_cache.DataFeedsCache) {
	t.Helper()

	invalidPermissionEvents := make(chan *data_feeds_cache.DataFeedsCacheInvalidUpdatePermission, 1000)
	invalidPermissionSub, err := dataFeedsCache.WatchInvalidUpdatePermission(nil, invalidPermissionEvents, nil)
	require.NoError(t, err)

	ctx, cancel := context.WithCancel(t.Context())
	done := make(chan struct{})
	closeFunc := func() {
		// Release the event subscription explicitly; previously it was leaked and
		// kept streaming after the test finished.
		invalidPermissionSub.Unsubscribe()
		cancel()
		<-done
	}
	t.Cleanup(closeFunc)

	go func() {
		defer close(done)
		for {
			select {
			case <-ctx.Done():
				return
			case err := <-invalidPermissionSub.Err():
				// A nil error means the subscription closed normally (e.g. via Unsubscribe).
				assert.NoError(t, err)
				return
			case evt := <-invalidPermissionEvents:
				t.Logf("DF Cache received invalid permission event: %+v", evt)
			}
		}
	}()
}

type streamsV1Handler struct {
mu sync.Mutex
expected map[string]*datastreams.FeedReport
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ func Test_runLLOWorkflow(t *testing.T) {
targetDonConfiguration, err := framework.NewDonConfiguration(framework.NewDonConfigurationParams{Name: "Target", NumNodes: 4, F: 1})
require.NoError(t, err)

workflowDon, consumer, _ := setupKeystoneDons(ctx, t, lggr, workflowDonConfiguration, triggerDonConfiguration,
workflowDon, consumer, _, _ := setupKeystoneDons(ctx, t, lggr, workflowDonConfiguration, triggerDonConfiguration,
targetDonConfiguration, triggerSink)

// generate a wf job with 10 feeds
Expand Down
Original file line number Diff line number Diff line change
@@ -1,17 +1,20 @@
package keystone

import (
"context"
"encoding/json"
"fmt"
"math/big"
"testing"
"time"

"github.com/ethereum/go-ethereum/accounts/abi/bind"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"

commoncap "github.com/smartcontractkit/chainlink-common/pkg/capabilities"
"github.com/smartcontractkit/chainlink-common/pkg/logger"
feeds_consumer "github.com/smartcontractkit/chainlink-evm/gethwrappers/keystone/generated/feeds_consumer_1_0_0"
data_feeds_cache "github.com/smartcontractkit/chainlink-evm/gethwrappers/data-feeds/generated/data_feeds_cache"

"github.com/smartcontractkit/libocr/offchainreporting2plus/ocr3types"
ocr2types "github.com/smartcontractkit/libocr/offchainreporting2plus/types"
Expand All @@ -25,7 +28,7 @@ import (
func Test_runSecureMintWorkflow(t *testing.T) {
ctx := t.Context()
lggr := logger.Test(t)
chainID := chainSelector(1337)
chainID := chainSelector(16015286601757825753)
seqNr := uint64(1)

// setup the trigger sink that will receive the trigger event in the securemint-specific format
Expand All @@ -39,22 +42,25 @@ func Test_runSecureMintWorkflow(t *testing.T) {
targetDonConfiguration, err := framework.NewDonConfiguration(framework.NewDonConfigurationParams{Name: "Target", NumNodes: 4, F: 1})
require.NoError(t, err)

workflowDon, consumer, forwarder := setupKeystoneDons(ctx, t, lggr, workflowDonConfiguration, triggerDonConfiguration,
workflowDon, _, dataFeedsCache, forwarder := setupKeystoneDons(ctx, t, lggr, workflowDonConfiguration, triggerDonConfiguration,
targetDonConfiguration, triggerSink)
t.Logf("Consumer contract address: %s", consumer.Address().String())
t.Logf("Forwarder contract address: %s", forwarder.Address().String())
t.Logf("DataFeedsCache contract address: %s", dataFeedsCache.Address().String())

// make sure we know about forwarder errors in case they happen
trackErrorsOnForwarder(t, forwarder, consumer.Address())
trackErrorsOnForwarder(t, forwarder, dataFeedsCache.Address())

// track invalid update permission events on the DF Cache
trackInvalidPermissionEventsOnDFCache(t, dataFeedsCache)

// generate a wf job
job := createSecureMintWorkflowJob(t, workflowName, workflowOwnerID, int64(chainID), consumer.Address())
job := createSecureMintWorkflowJob(t, workflowName, workflowOwnerID, uint64(chainID), dataFeedsCache.Address())
err = workflowDon.AddJob(ctx, &job)
require.NoError(t, err)

// create the test trigger event in the format expected by the secure mint transmitter
mintableAmount := big.NewInt(99)
blockNumber := uint64(10)
blockNumber := big.NewInt(10)
triggerEvent := createSecureMintTriggerEvent(t, chainID, seqNr, mintableAmount, blockNumber)

t.Logf("Sending triggerEvent: %+v", triggerEvent)
Expand All @@ -68,23 +74,23 @@ func Test_runSecureMintWorkflow(t *testing.T) {
// The price is packed from Mintable (99) and block number (10)
expectedUpdates := []secureMintUpdate{
{
feedID: "0x0400000000000005390000000000000000000000000000000000000000000000",
dataID: "0x04de41ba4fc9d91ad900000000000000", // 0x4 + 16015286601757825753 as bytes + right padded with 0s
mintableAmount: mintableAmount,
blockNumber: blockNumber,
},
}
h := newSecureMintHandler(expectedUpdates, uint32(blockNumber)) // currently the secure mint aggregator uses the block number as timestamp
waitForConsumerReports(t, consumer, h)
h := newSecureMintHandler(expectedUpdates, blockNumber) // currently the secure mint aggregator uses the block number as timestamp
waitForDataFeedsCacheReports(t, dataFeedsCache, h)
}

// secureMintUpdate describes one expected decimal-report update produced by the
// secure mint workflow and written to the DataFeedsCache contract.
type secureMintUpdate struct {
	dataID         string   // hex-encoded bytes16 data ID, e.g. "0x04de41ba4fc9d91ad900000000000000"
	mintableAmount *big.Int // expected mintable amount, packed into the low 128 bits of the on-chain answer
	blockNumber    *big.Int // expected block number, packed into bits 128-191 of the on-chain answer
}

// chainSelector identifies the target chain of a secure mint report. It is
// mimicked after the por plugin, which mimics it from the chain-selectors repo.
type chainSelector uint64

// secureMintReport is mimicked after the report type of the por plugin, see its repo for more details
type secureMintReport struct {
Expand Down Expand Up @@ -114,7 +120,7 @@ type secureMintReport struct {
// triggerResponse := capabilities.TriggerResponse{
// Event: event,
// }
func createSecureMintTriggerEvent(t *testing.T, chainID chainSelector, seqNr uint64, mintable *big.Int, blockNumber uint64) *values.Map {
func createSecureMintTriggerEvent(t *testing.T, chainID chainSelector, seqNr uint64, mintable *big.Int, blockNumber *big.Int) *values.Map {
// Create mock signatures (in a real scenario, these would be actual OCR signatures)
sigs := []commoncap.OCRAttributedOnchainSignature{
{
Expand All @@ -131,7 +137,7 @@ func createSecureMintTriggerEvent(t *testing.T, chainID chainSelector, seqNr uin
secureMintReport := &secureMintReport{
ConfigDigest: ocr2types.ConfigDigest(configDigest),
SeqNr: seqNr,
Block: blockNumber,
Block: blockNumber.Uint64(),
Mintable: mintable,
}

Expand Down Expand Up @@ -161,14 +167,14 @@ func createSecureMintTriggerEvent(t *testing.T, chainID chainSelector, seqNr uin
// secureMintHandler checks decimal-report updates against the updates expected to be
// produced by a workflow using the secure mint trigger and aggregator.
type secureMintHandler struct {
	expected []secureMintUpdate  // all updates the test expects to observe
	ts       *big.Int            // expected report timestamp (the aggregator uses the block number)
	found    map[string]struct{} // data IDs not yet observed; entries are deleted as matching events arrive
}

func newSecureMintHandler(expected []secureMintUpdate, ts uint32) *secureMintHandler {
func newSecureMintHandler(expected []secureMintUpdate, ts *big.Int) *secureMintHandler {
found := make(map[string]struct{})
for _, update := range expected {
found[update.feedID] = struct{}{}
found[update.dataID] = struct{}{}
}
return &secureMintHandler{
expected: expected,
Expand All @@ -177,47 +183,78 @@ func newSecureMintHandler(expected []secureMintUpdate, ts uint32) *secureMintHan
}
}

// Implement the feedReceivedHandler interface
// to handle the received feeds
func (h *secureMintHandler) handleFeedReceived(t *testing.T, event *feeds_consumer.KeystoneFeedsConsumerFeedReceived) (done bool) {
t.Logf("handling event for feedID %x: %+v", event.FeedId[:], event)
// Implement the dataFeedsCacheHandler interface
// to handle the received feeds from DataFeedsCache
func (h *secureMintHandler) handleDecimalReportUpdated(t *testing.T, event *data_feeds_cache.DataFeedsCacheDecimalReportUpdated) (done bool) {
t.Logf("handling event for dataID %x: %+v", event.DataId[:], event)

// Convert feed ID to string for comparison
feedIDStr := fmt.Sprintf("0x%x", event.FeedId[:])
// Convert data ID to string for comparison (DataFeedsCache uses bytes16 dataId instead of bytes32 feedId)
dataIDStr := fmt.Sprintf("0x%x", event.DataId[:])

// Find the expected update for this feed ID
// Find the expected update for this data ID
var expectedUpdate *secureMintUpdate
for _, update := range h.expected {
if update.feedID == feedIDStr {
if update.dataID == dataIDStr {
expectedUpdate = &update
break
}
}

require.NotNil(t, expectedUpdate, "feedID %s not found in expected updates", feedIDStr)
require.NotNil(t, expectedUpdate, "dataID %s not found in expected updates", dataIDStr)

mintableMask := new(big.Int).Sub(new(big.Int).Lsh(big.NewInt(1), 128), big.NewInt(1))
extractedMintable := new(big.Int).And(event.Price, mintableMask)
extractedMintable := new(big.Int).And(event.Answer, mintableMask)
t.Logf("extractedMintable: %d", extractedMintable)
assert.Equalf(t, expectedUpdate.mintableAmount, extractedMintable, "mintable amount mismatch: expected %d, got %d", expectedUpdate.mintableAmount, extractedMintable)

// Extract block number from bits 128-191
blockNumberMask := new(big.Int).Lsh(new(big.Int).Sub(new(big.Int).Lsh(big.NewInt(1), 64), big.NewInt(1)), 128)
extractedBlockNumber := new(big.Int).And(event.Price, blockNumberMask)
extractedBlockNumber := new(big.Int).And(event.Answer, blockNumberMask)
extractedBlockNumber = new(big.Int).Rsh(extractedBlockNumber, 128)
t.Logf("extractedBlockNumber: %d", extractedBlockNumber)
assert.Equalf(t, expectedUpdate.blockNumber, extractedBlockNumber.Uint64(), "block number mismatch: expected %d, got %d", expectedUpdate.blockNumber, extractedBlockNumber.Uint64())
assert.Equalf(t, expectedUpdate.blockNumber, extractedBlockNumber, "block number mismatch: expected %d, got %d", expectedUpdate.blockNumber, extractedBlockNumber.Uint64())

assert.Equalf(t, h.ts, event.Timestamp, "timestamp mismatch: expected %d, got %d", h.ts, event.Timestamp)

// Mark this feed as found
delete(h.found, expectedUpdate.feedID)
delete(h.found, expectedUpdate.dataID)

// Return true if all expected feeds have been found
t.Logf("found %d of %d expected feeds, left: %+v, expected: %+v", len(h.expected)-len(h.found), len(h.expected), h.found, h.expected)
return len(h.found) == 0
}

// handleDone is called when waiting for reports finishes (success or timeout);
// it fails the test if any expected feed update was never observed.
func (h *secureMintHandler) handleDone(t *testing.T) {
	t.Logf("found %d of %d expected feeds", len(h.expected)-len(h.found), len(h.expected))
	require.Empty(t, h.found, "not all expected feeds were received")
}

// dataFeedsCacheHandler handles DecimalReportUpdated events streamed from the
// DataFeedsCache contract while waiting for workflow reports.
type dataFeedsCacheHandler interface {
	// handleDecimalReportUpdated processes one event and returns true when all
	// expected updates have been seen and waiting can stop.
	handleDecimalReportUpdated(t *testing.T, event *data_feeds_cache.DataFeedsCacheDecimalReportUpdated) (done bool)
	// handleDone is invoked when waiting ends, to assert on any missing updates.
	handleDone(t *testing.T)
}

// waitForDataFeedsCacheReports subscribes to DecimalReportUpdated events from the
// DataFeedsCache contract and feeds them to h until the handler reports it is done,
// the subscription fails, or a 30s timeout elapses (which fails the test).
func waitForDataFeedsCacheReports(t *testing.T, dataFeedsCache *data_feeds_cache.DataFeedsCache, h dataFeedsCacheHandler) {
	reportsReceived := make(chan *data_feeds_cache.DataFeedsCacheDecimalReportUpdated, 1000)
	reportsSub, err := dataFeedsCache.WatchDecimalReportUpdated(&bind.WatchOpts{}, reportsReceived, nil, nil, nil)
	require.NoError(t, err)
	// Release the subscription on every exit path; previously it was leaked.
	defer reportsSub.Unsubscribe()

	ctx, cancel := context.WithTimeout(t.Context(), 30*time.Second)
	defer cancel()
	for {
		select {
		case <-ctx.Done():
			h.handleDone(t)
			t.Fatalf("timed out waiting for data feeds cache reports")
		case err, ok := <-reportsSub.Err():
			// A closed Err channel yields (nil, false) on every receive; without the ok
			// check the old code spun hot on nil errors until the timeout fired.
			require.True(t, ok, "data feeds cache subscription closed unexpectedly")
			require.NoError(t, err)
		case report := <-reportsReceived:
			if h.handleDecimalReportUpdated(t, report) {
				return
			}
		}
	}
}
6 changes: 4 additions & 2 deletions core/capabilities/integration_tests/keystone/setup.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ import (
"github.com/smartcontractkit/chainlink-common/pkg/logger"
"github.com/smartcontractkit/chainlink-common/pkg/services/servicetest"
v3 "github.com/smartcontractkit/chainlink-common/pkg/types/mercury/v3"
data_feeds_cache "github.com/smartcontractkit/chainlink-evm/gethwrappers/data-feeds/generated/data_feeds_cache"
feeds_consumer "github.com/smartcontractkit/chainlink-evm/gethwrappers/keystone/generated/feeds_consumer_1_0_0"
forwarder "github.com/smartcontractkit/chainlink-evm/gethwrappers/keystone/generated/forwarder_1_0_0"

Expand All @@ -35,13 +36,14 @@ func setupKeystoneDons(ctx context.Context, t *testing.T, lggr logger.Logger,
workflowDonInfo framework.DonConfiguration,
triggerDonInfo framework.DonConfiguration,
targetDonInfo framework.DonConfiguration,
trigger framework.TriggerFactory) (workflowDon *framework.DON, consumer *feeds_consumer.KeystoneFeedsConsumer, forwarder *forwarder.KeystoneForwarder) {
trigger framework.TriggerFactory) (workflowDon *framework.DON, consumer *feeds_consumer.KeystoneFeedsConsumer, dataFeedsCache *data_feeds_cache.DataFeedsCache, forwarder *forwarder.KeystoneForwarder) {
donContext := framework.CreateDonContext(ctx, t)

workflowDon = createKeystoneWorkflowDon(ctx, t, lggr, workflowDonInfo, triggerDonInfo, targetDonInfo, donContext)

forwarderAddr, forwarder := SetupForwarderContract(t, workflowDon, donContext.EthBlockchain)
_, consumer = SetupConsumerContract(t, donContext.EthBlockchain, forwarderAddr, workflowOwnerID, workflowName)
_, dataFeedsCache = SetupDataFeedsCacheContract(t, donContext.EthBlockchain, forwarderAddr, workflowOwnerID, workflowName)

writeTargetDon := createKeystoneWriteTargetDon(ctx, t, lggr, targetDonInfo, donContext, forwarderAddr)

Expand All @@ -53,7 +55,7 @@ func setupKeystoneDons(ctx context.Context, t *testing.T, lggr logger.Logger,

donContext.WaitForCapabilitiesToBeExposed(t, workflowDon, triggerDon, writeTargetDon)

return workflowDon, consumer, forwarder
return workflowDon, consumer, dataFeedsCache, forwarder
}

func createKeystoneTriggerDon(ctx context.Context, t *testing.T, lggr logger.Logger, triggerDonInfo framework.DonConfiguration,
Expand Down
Loading
Loading