From 3fe298e0c25ee434e190dca306d3868b99af0ea6 Mon Sep 17 00:00:00 2001 From: incrypto32 Date: Thu, 11 Jul 2024 15:18:36 +0530 Subject: [PATCH 01/21] graph: Add support for subgraph datasource in manifest --- core/src/subgraph/context/instance/mod.rs | 47 +-- core/src/subgraph/instance_manager.rs | 2 +- core/src/subgraph/registrar.rs | 1 - graph/src/blockchain/mod.rs | 1 + graph/src/components/subgraph/instance.rs | 1 + graph/src/data/subgraph/api_version.rs | 3 + graph/src/data/subgraph/mod.rs | 14 +- graph/src/data_source/mod.rs | 83 +++++ graph/src/data_source/subgraph.rs | 305 ++++++++++++++++++ runtime/wasm/src/host.rs | 2 + runtime/wasm/src/module/mod.rs | 1 + .../tests/chain/ethereum/manifest.rs | 46 ++- .../subgraph-data-sources/abis/Contract.abi | 15 + .../subgraph-data-sources/package.json | 13 + .../subgraph-data-sources/schema.graphql | 6 + .../subgraph-data-sources/src/mapping.ts | 15 + .../subgraph-data-sources/subgraph.yaml | 19 ++ tests/runner-tests/yarn.lock | 98 +++++- tests/tests/runner_tests.rs | 18 ++ 19 files changed, 657 insertions(+), 33 deletions(-) create mode 100644 graph/src/data_source/subgraph.rs create mode 100644 tests/runner-tests/subgraph-data-sources/abis/Contract.abi create mode 100644 tests/runner-tests/subgraph-data-sources/package.json create mode 100644 tests/runner-tests/subgraph-data-sources/schema.graphql create mode 100644 tests/runner-tests/subgraph-data-sources/src/mapping.ts create mode 100644 tests/runner-tests/subgraph-data-sources/subgraph.yaml diff --git a/core/src/subgraph/context/instance/mod.rs b/core/src/subgraph/context/instance/mod.rs index ed242836a28..5a805f34095 100644 --- a/core/src/subgraph/context/instance/mod.rs +++ b/core/src/subgraph/context/instance/mod.rs @@ -138,35 +138,38 @@ where ); } - let is_onchain = data_source.is_onchain(); let Some(host) = self.new_host(logger.clone(), data_source)? else { return Ok(None); }; // Check for duplicates and add the host. 
- if is_onchain { - // `onchain_hosts` will remain ordered by the creation block. - // See also 8f1bca33-d3b7-4035-affc-fd6161a12448. - ensure!( - self.onchain_hosts - .last() - .and_then(|h| h.creation_block_number()) - <= host.data_source().creation_block(), - ); - - if self.onchain_hosts.contains(&host) { - Ok(None) - } else { - self.onchain_hosts.push(host.cheap_clone()); - Ok(Some(host)) + match host.data_source() { + DataSource::Onchain(_) => { + // `onchain_hosts` will remain ordered by the creation block. + // See also 8f1bca33-d3b7-4035-affc-fd6161a12448. + ensure!( + self.onchain_hosts + .last() + .and_then(|h| h.creation_block_number()) + <= host.data_source().creation_block(), + ); + + if self.onchain_hosts.contains(&host) { + Ok(None) + } else { + self.onchain_hosts.push(host.cheap_clone()); + Ok(Some(host)) + } } - } else { - if self.offchain_hosts.contains(&host) { - Ok(None) - } else { - self.offchain_hosts.push(host.cheap_clone()); - Ok(Some(host)) + DataSource::Offchain(_) => { + if self.offchain_hosts.contains(&host) { + Ok(None) + } else { + self.offchain_hosts.push(host.cheap_clone()); + Ok(Some(host)) + } } + DataSource::Subgraph(_) => Ok(None), } } diff --git a/core/src/subgraph/instance_manager.rs b/core/src/subgraph/instance_manager.rs index c98641539d9..75b0b86f81f 100644 --- a/core/src/subgraph/instance_manager.rs +++ b/core/src/subgraph/instance_manager.rs @@ -318,7 +318,7 @@ impl SubgraphInstanceManager { let start_blocks: Vec = data_sources .iter() - .filter_map(|d| d.as_onchain().map(|d: &C::DataSource| d.start_block())) + .filter_map(|d| d.start_block()) .collect(); let end_blocks: BTreeSet = manifest diff --git a/core/src/subgraph/registrar.rs b/core/src/subgraph/registrar.rs index fe80d118457..e8ad83315e0 100644 --- a/core/src/subgraph/registrar.rs +++ b/core/src/subgraph/registrar.rs @@ -629,7 +629,6 @@ async fn create_subgraph_version( ) .map_err(SubgraphRegistrarError::ResolveError) .await?; - // Determine if the graft_base 
should be validated. // Validate the graft_base if there is a pending graft, ensuring its presence. // If the subgraph is new (indicated by DeploymentNotFound), the graft_base should be validated. diff --git a/graph/src/blockchain/mod.rs b/graph/src/blockchain/mod.rs index 73cac816728..1b897440b9b 100644 --- a/graph/src/blockchain/mod.rs +++ b/graph/src/blockchain/mod.rs @@ -489,6 +489,7 @@ impl FromStr for BlockchainKind { "cosmos" => Ok(BlockchainKind::Cosmos), "substreams" => Ok(BlockchainKind::Substreams), "starknet" => Ok(BlockchainKind::Starknet), + "subgraph" => Ok(BlockchainKind::Ethereum), // TODO(krishna): We should detect the blockchain kind from the source subgraph _ => Err(anyhow!("unknown blockchain kind {}", s)), } } diff --git a/graph/src/components/subgraph/instance.rs b/graph/src/components/subgraph/instance.rs index 470e50334d3..889690c3916 100644 --- a/graph/src/components/subgraph/instance.rs +++ b/graph/src/components/subgraph/instance.rs @@ -20,6 +20,7 @@ impl From<&DataSourceTemplate> for InstanceDSTemplate { match value { DataSourceTemplate::Onchain(ds) => Self::Onchain(ds.info()), DataSourceTemplate::Offchain(ds) => Self::Offchain(ds.clone()), + DataSourceTemplate::Subgraph(_) => todo!(), // TODO(krishna) } } } diff --git a/graph/src/data/subgraph/api_version.rs b/graph/src/data/subgraph/api_version.rs index e626e9f1dbc..43ee639007c 100644 --- a/graph/src/data/subgraph/api_version.rs +++ b/graph/src/data/subgraph/api_version.rs @@ -54,6 +54,9 @@ pub const SPEC_VERSION_1_1_0: Version = Version::new(1, 1, 0); // Enables eth call declarations and indexed arguments(topics) filtering in manifest pub const SPEC_VERSION_1_2_0: Version = Version::new(1, 2, 0); +// Enables subgraphs as datasource +pub const SPEC_VERSION_1_3_0: Version = Version::new(1, 3, 0); + // The latest spec version available pub const LATEST_VERSION: &Version = &SPEC_VERSION_1_2_0; diff --git a/graph/src/data/subgraph/mod.rs b/graph/src/data/subgraph/mod.rs index 
52b0f4dfed1..941764b9a12 100644 --- a/graph/src/data/subgraph/mod.rs +++ b/graph/src/data/subgraph/mod.rs @@ -33,7 +33,7 @@ use web3::types::Address; use crate::{ bail, - blockchain::{BlockPtr, Blockchain, DataSource as _}, + blockchain::{BlockPtr, Blockchain}, components::{ link_resolver::LinkResolver, store::{StoreError, SubgraphStore}, @@ -140,6 +140,10 @@ impl DeploymentHash { link: format!("/ipfs/{}", self), } } + + pub fn to_bytes(&self) -> Vec { + self.0.as_bytes().to_vec() + } } impl Deref for DeploymentHash { @@ -719,7 +723,7 @@ impl UnvalidatedSubgraphManifest { .0 .data_sources .iter() - .filter_map(|d| Some(d.as_onchain()?.network()?.to_string())) + .filter_map(|d| Some(d.network()?.to_string())) .collect::>(); networks.sort(); networks.dedup(); @@ -765,11 +769,9 @@ impl SubgraphManifest { max_spec_version: semver::Version, ) -> Result { let unresolved = UnresolvedSubgraphManifest::parse(id, raw)?; - let resolved = unresolved .resolve(resolver, logger, max_spec_version) .await?; - Ok(resolved) } @@ -777,14 +779,14 @@ impl SubgraphManifest { // Assume the manifest has been validated, ensuring network names are homogenous self.data_sources .iter() - .find_map(|d| Some(d.as_onchain()?.network()?.to_string())) + .find_map(|d| Some(d.network()?.to_string())) .expect("Validated manifest does not have a network defined on any datasource") } pub fn start_blocks(&self) -> Vec { self.data_sources .iter() - .filter_map(|d| Some(d.as_onchain()?.start_block())) + .filter_map(|d| d.start_block()) .collect() } diff --git a/graph/src/data_source/mod.rs b/graph/src/data_source/mod.rs index a38148b25fe..38a166710e0 100644 --- a/graph/src/data_source/mod.rs +++ b/graph/src/data_source/mod.rs @@ -1,6 +1,8 @@ pub mod causality_region; pub mod offchain; +pub mod subgraph; +pub use self::DataSource as DataSourceEnum; pub use causality_region::CausalityRegion; #[cfg(test)] @@ -17,6 +19,7 @@ use crate::{ store::{BlockNumber, StoredDynamicDataSource}, }, 
data_source::offchain::OFFCHAIN_KINDS, + data_source::subgraph::SUBGRAPH_DS_KIND, prelude::{CheapClone as _, DataSourceContext}, schema::{EntityType, InputSchema}, }; @@ -35,6 +38,7 @@ use thiserror::Error; pub enum DataSource { Onchain(C::DataSource), Offchain(offchain::DataSource), + Subgraph(subgraph::DataSource), } #[derive(Error, Debug)] @@ -89,6 +93,23 @@ impl DataSource { match self { Self::Onchain(ds) => Some(ds), Self::Offchain(_) => None, + Self::Subgraph(_) => None, + } + } + + pub fn as_subgraph(&self) -> Option<&subgraph::DataSource> { + match self { + Self::Onchain(_) => None, + Self::Offchain(_) => None, + Self::Subgraph(ds) => Some(ds), + } + } + + pub fn is_chain_based(&self) -> bool { + match self { + Self::Onchain(_) => true, + Self::Offchain(_) => false, + Self::Subgraph(_) => true, } } @@ -96,6 +117,23 @@ impl DataSource { match self { Self::Onchain(_) => None, Self::Offchain(ds) => Some(ds), + Self::Subgraph(_) => None, + } + } + + pub fn network(&self) -> Option<&str> { + match self { + DataSourceEnum::Onchain(ds) => ds.network(), + DataSourceEnum::Offchain(_) => None, + DataSourceEnum::Subgraph(ds) => ds.network(), + } + } + + pub fn start_block(&self) -> Option { + match self { + DataSourceEnum::Onchain(ds) => Some(ds.start_block()), + DataSourceEnum::Offchain(_) => None, + DataSourceEnum::Subgraph(ds) => Some(ds.source.start_block), } } @@ -111,6 +149,7 @@ impl DataSource { match self { Self::Onchain(ds) => ds.address().map(ToOwned::to_owned), Self::Offchain(ds) => ds.address(), + Self::Subgraph(ds) => ds.address(), } } @@ -118,6 +157,7 @@ impl DataSource { match self { Self::Onchain(ds) => ds.name(), Self::Offchain(ds) => &ds.name, + Self::Subgraph(ds) => &ds.name, } } @@ -125,6 +165,7 @@ impl DataSource { match self { Self::Onchain(ds) => ds.kind().to_owned(), Self::Offchain(ds) => ds.kind.to_string(), + Self::Subgraph(ds) => ds.kind.clone(), } } @@ -132,6 +173,7 @@ impl DataSource { match self { Self::Onchain(ds) => 
ds.min_spec_version(), Self::Offchain(ds) => ds.min_spec_version(), + Self::Subgraph(ds) => ds.min_spec_version(), } } @@ -139,6 +181,7 @@ impl DataSource { match self { Self::Onchain(ds) => ds.end_block(), Self::Offchain(_) => None, + Self::Subgraph(_) => None, } } @@ -146,6 +189,7 @@ impl DataSource { match self { Self::Onchain(ds) => ds.creation_block(), Self::Offchain(ds) => ds.creation_block, + Self::Subgraph(ds) => ds.creation_block, } } @@ -153,6 +197,7 @@ impl DataSource { match self { Self::Onchain(ds) => ds.context(), Self::Offchain(ds) => ds.context.clone(), + Self::Subgraph(ds) => ds.context.clone(), } } @@ -160,6 +205,7 @@ impl DataSource { match self { Self::Onchain(ds) => ds.api_version(), Self::Offchain(ds) => ds.mapping.api_version.clone(), + Self::Subgraph(ds) => ds.mapping.api_version.clone(), } } @@ -167,6 +213,7 @@ impl DataSource { match self { Self::Onchain(ds) => ds.runtime(), Self::Offchain(ds) => Some(ds.mapping.runtime.cheap_clone()), + Self::Subgraph(ds) => Some(ds.mapping.runtime.cheap_clone()), } } @@ -176,6 +223,7 @@ impl DataSource { // been enforced. 
Self::Onchain(_) => EntityTypeAccess::Any, Self::Offchain(ds) => EntityTypeAccess::Restriced(ds.mapping.entities.clone()), + Self::Subgraph(_) => EntityTypeAccess::Any, } } @@ -183,6 +231,7 @@ impl DataSource { match self { Self::Onchain(ds) => ds.handler_kinds(), Self::Offchain(ds) => vec![ds.handler_kind()].into_iter().collect(), + Self::Subgraph(ds) => vec![ds.handler_kind()].into_iter().collect(), } } @@ -190,6 +239,7 @@ impl DataSource { match self { Self::Onchain(ds) => ds.has_declared_calls(), Self::Offchain(_) => false, + Self::Subgraph(_) => false, } } @@ -209,6 +259,7 @@ impl DataSource { } (Self::Onchain(_), TriggerData::Offchain(_)) | (Self::Offchain(_), TriggerData::Onchain(_)) => Ok(None), + (Self::Subgraph(_), _) => todo!(), // TODO(krishna) } } @@ -224,6 +275,7 @@ impl DataSource { match self { Self::Onchain(ds) => ds.as_stored_dynamic_data_source(), Self::Offchain(ds) => ds.as_stored_dynamic_data_source(), + Self::Subgraph(_) => todo!(), // TODO(krishna) } } @@ -240,6 +292,7 @@ impl DataSource { offchain::DataSource::from_stored_dynamic_data_source(template, stored) .map(DataSource::Offchain) } + DataSourceTemplate::Subgraph(_) => todo!(), // TODO(krishna) } } @@ -247,6 +300,7 @@ impl DataSource { match self { Self::Onchain(ds) => ds.validate(spec_version), Self::Offchain(_) => vec![], + Self::Subgraph(_) => vec![], // TODO(krishna) } } @@ -254,6 +308,7 @@ impl DataSource { match self { Self::Onchain(_) => CausalityRegion::ONCHAIN, Self::Offchain(ds) => ds.causality_region, + Self::Subgraph(_) => CausalityRegion::ONCHAIN, } } } @@ -262,6 +317,7 @@ impl DataSource { pub enum UnresolvedDataSource { Onchain(C::UnresolvedDataSource), Offchain(offchain::UnresolvedDataSource), + Subgraph(subgraph::UnresolvedDataSource), } impl UnresolvedDataSource { @@ -276,6 +332,10 @@ impl UnresolvedDataSource { .resolve(resolver, logger, manifest_idx) .await .map(DataSource::Onchain), + Self::Subgraph(unresolved) => unresolved + .resolve(resolver, logger, 
manifest_idx) + .await + .map(DataSource::Subgraph), Self::Offchain(_unresolved) => { anyhow::bail!( "static file data sources are not yet supported, \\ @@ -299,6 +359,7 @@ pub struct DataSourceTemplateInfo { pub enum DataSourceTemplate { Onchain(C::DataSourceTemplate), Offchain(offchain::DataSourceTemplate), + Subgraph(subgraph::DataSourceTemplate), } impl DataSourceTemplate { @@ -306,6 +367,7 @@ impl DataSourceTemplate { match self { DataSourceTemplate::Onchain(template) => template.info(), DataSourceTemplate::Offchain(template) => template.clone().into(), + DataSourceTemplate::Subgraph(template) => template.clone().into(), } } @@ -313,6 +375,7 @@ impl DataSourceTemplate { match self { Self::Onchain(ds) => Some(ds), Self::Offchain(_) => None, + Self::Subgraph(_) => todo!(), // TODO(krishna) } } @@ -320,6 +383,7 @@ impl DataSourceTemplate { match self { Self::Onchain(_) => None, Self::Offchain(t) => Some(t), + Self::Subgraph(_) => todo!(), // TODO(krishna) } } @@ -327,6 +391,7 @@ impl DataSourceTemplate { match self { Self::Onchain(ds) => Some(ds), Self::Offchain(_) => None, + Self::Subgraph(_) => todo!(), // TODO(krishna) } } @@ -334,6 +399,7 @@ impl DataSourceTemplate { match self { Self::Onchain(ds) => &ds.name(), Self::Offchain(ds) => &ds.name, + Self::Subgraph(ds) => &ds.name, } } @@ -341,6 +407,7 @@ impl DataSourceTemplate { match self { Self::Onchain(ds) => ds.api_version(), Self::Offchain(ds) => ds.mapping.api_version.clone(), + Self::Subgraph(ds) => ds.mapping.api_version.clone(), } } @@ -348,6 +415,7 @@ impl DataSourceTemplate { match self { Self::Onchain(ds) => ds.runtime(), Self::Offchain(ds) => Some(ds.mapping.runtime.clone()), + Self::Subgraph(ds) => Some(ds.mapping.runtime.clone()), } } @@ -355,6 +423,7 @@ impl DataSourceTemplate { match self { Self::Onchain(ds) => ds.manifest_idx(), Self::Offchain(ds) => ds.manifest_idx, + Self::Subgraph(ds) => ds.manifest_idx, } } @@ -362,6 +431,7 @@ impl DataSourceTemplate { match self { Self::Onchain(ds) => 
ds.kind().to_string(), Self::Offchain(ds) => ds.kind.to_string(), + Self::Subgraph(ds) => ds.kind.clone(), } } } @@ -370,6 +440,7 @@ impl DataSourceTemplate { pub enum UnresolvedDataSourceTemplate { Onchain(C::UnresolvedDataSourceTemplate), Offchain(offchain::UnresolvedDataSourceTemplate), + Subgraph(subgraph::UnresolvedDataSourceTemplate), } impl Default for UnresolvedDataSourceTemplate { @@ -395,6 +466,10 @@ impl UnresolvedDataSourceTemplate { .resolve(resolver, logger, manifest_idx, schema) .await .map(DataSourceTemplate::Offchain), + Self::Subgraph(ds) => ds + .resolve(resolver, logger, manifest_idx) + .await + .map(DataSourceTemplate::Subgraph), } } } @@ -490,6 +565,7 @@ impl TriggerData { pub enum MappingTrigger { Onchain(C::MappingTrigger), Offchain(offchain::TriggerData), + Subgraph(subgraph::TriggerData), } impl MappingTrigger { @@ -497,6 +573,7 @@ impl MappingTrigger { match self { Self::Onchain(trigger) => Some(trigger.error_context()), Self::Offchain(_) => None, // TODO: Add error context for offchain triggers + Self::Subgraph(_) => None, // TODO(krishna) } } @@ -504,6 +581,7 @@ impl MappingTrigger { match self { Self::Onchain(trigger) => Some(trigger), Self::Offchain(_) => None, + Self::Subgraph(_) => todo!(), // TODO(krishna) } } } @@ -515,6 +593,7 @@ macro_rules! clone_data_source { match self { Self::Onchain(ds) => Self::Onchain(ds.clone()), Self::Offchain(ds) => Self::Offchain(ds.clone()), + Self::Subgraph(ds) => Self::Subgraph(ds.clone()), } } } @@ -541,6 +620,10 @@ macro_rules! 
deserialize_data_source { offchain::$t::deserialize(map.into_deserializer()) .map_err(serde::de::Error::custom) .map($t::Offchain) + } else if SUBGRAPH_DS_KIND == kind { + subgraph::$t::deserialize(map.into_deserializer()) + .map_err(serde::de::Error::custom) + .map($t::Subgraph) } else if (&C::KIND.to_string() == kind) || C::ALIASES.contains(&kind) { C::$t::deserialize(map.into_deserializer()) .map_err(serde::de::Error::custom) diff --git a/graph/src/data_source/subgraph.rs b/graph/src/data_source/subgraph.rs new file mode 100644 index 00000000000..dba43786438 --- /dev/null +++ b/graph/src/data_source/subgraph.rs @@ -0,0 +1,305 @@ +use crate::{ + blockchain::{Block, Blockchain}, + components::{ + link_resolver::LinkResolver, + store::{BlockNumber, Entity}, + }, + data::{subgraph::SPEC_VERSION_1_3_0, value::Word}, + data_source, + prelude::{DataSourceContext, DeploymentHash, Link}, +}; +use anyhow::{Context, Error}; +use serde::Deserialize; +use slog::{info, Logger}; +use std::{fmt, sync::Arc}; + +use super::{DataSourceTemplateInfo, TriggerWithHandler}; + +pub const SUBGRAPH_DS_KIND: &str = "subgraph"; + +const ENTITY_HANDLER_KINDS: &str = "entity"; + +#[derive(Debug, Clone)] +pub struct DataSource { + pub kind: String, + pub name: String, + pub network: String, + pub manifest_idx: u32, + pub source: Source, + pub mapping: Mapping, + pub context: Arc>, + pub creation_block: Option, +} + +impl DataSource { + pub fn new( + kind: String, + name: String, + network: String, + manifest_idx: u32, + source: Source, + mapping: Mapping, + context: Arc>, + creation_block: Option, + ) -> Self { + Self { + kind, + name, + network, + manifest_idx, + source, + mapping, + context, + creation_block, + } + } + + pub fn min_spec_version(&self) -> semver::Version { + SPEC_VERSION_1_3_0 + } + + pub fn handler_kind(&self) -> &str { + ENTITY_HANDLER_KINDS + } + + pub fn network(&self) -> Option<&str> { + Some(&self.network) + } + + pub fn match_and_decode( + &self, + block: &Arc, + 
trigger: &TriggerData, + ) -> Option>> { + if self.source.address != trigger.source { + return None; + } + + let trigger_ref = self.mapping.handlers.iter().find_map(|handler| { + if handler.entity != trigger.entity_type { + return None; + } + + Some(TriggerWithHandler::new( + data_source::MappingTrigger::Subgraph(trigger.clone()), + handler.handler.clone(), + block.ptr(), + block.timestamp(), + )) + }); + + return trigger_ref; + } + + pub fn address(&self) -> Option> { + Some(self.source.address().to_bytes()) + } + + pub fn source_subgraph(&self) -> DeploymentHash { + self.source.address() + } +} + +pub type Base64 = Word; + +#[derive(Clone, Debug, Default, Hash, Eq, PartialEq, Deserialize)] +pub struct Source { + pub address: DeploymentHash, + #[serde(default)] + pub start_block: BlockNumber, +} + +impl Source { + /// The concept of an address may or not make sense for a subgraph data source, but graph node + /// will use this in a few places where some sort of not necessarily unique id is useful: + /// 1. This is used as the value to be returned to mappings from the `dataSource.address()` host + /// function, so changing this is a breaking change. + /// 2. This is used to match with triggers with hosts in `fn hosts_for_trigger`, so make sure + /// the `source` of the data source is equal the `source` of the `TriggerData`. 
+ pub fn address(&self) -> DeploymentHash { + self.address.clone() + } +} + +#[derive(Clone, Debug)] +pub struct Mapping { + pub language: String, + pub api_version: semver::Version, + pub entities: Vec, + pub handlers: Vec, + pub runtime: Arc>, + pub link: Link, +} + +#[derive(Clone, Debug, Hash, Eq, PartialEq, Deserialize)] +pub struct EntityHandler { + pub handler: String, + pub entity: String, +} + +#[derive(Clone, Debug, Default, Eq, PartialEq, Deserialize)] +pub struct UnresolvedDataSource { + pub kind: String, + pub name: String, + pub network: String, + pub source: UnresolvedSource, + pub mapping: UnresolvedMapping, +} + +#[derive(Clone, Debug, Default, Hash, Eq, PartialEq, Deserialize)] +pub struct UnresolvedSource { + address: DeploymentHash, + #[serde(default)] + start_block: BlockNumber, +} + +#[derive(Clone, Debug, Default, Hash, Eq, PartialEq, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct UnresolvedMapping { + pub api_version: String, + pub language: String, + pub file: Link, + pub handlers: Vec, + pub entities: Vec, +} + +impl UnresolvedDataSource { + #[allow(dead_code)] + pub(super) async fn resolve( + self, + resolver: &Arc, + logger: &Logger, + manifest_idx: u32, + ) -> Result { + info!(logger, "Resolve subgraph data source"; + "name" => &self.name, + "kind" => &self.kind, + "source" => format_args!("{:?}", &self.source), + ); + + let kind = self.kind; + let source = Source { + address: self.source.address, + start_block: self.source.start_block, + }; + + Ok(DataSource { + manifest_idx, + kind, + name: self.name, + network: self.network, + source, + mapping: self.mapping.resolve(resolver, logger).await?, + context: Arc::new(None), + creation_block: None, + }) + } +} + +impl UnresolvedMapping { + pub async fn resolve( + self, + resolver: &Arc, + logger: &Logger, + ) -> Result { + info!(logger, "Resolve subgraph ds mapping"; "link" => &self.file.link); + + Ok(Mapping { + language: self.language, + api_version: 
semver::Version::parse(&self.api_version)?, + entities: self.entities, + handlers: self.handlers, + runtime: Arc::new(resolver.cat(logger, &self.file).await?), + link: self.file, + }) + } +} + +#[derive(Clone, Debug, Deserialize)] +pub struct UnresolvedDataSourceTemplate { + pub kind: String, + pub network: Option, + pub name: String, + pub mapping: UnresolvedMapping, +} + +#[derive(Clone, Debug)] +pub struct DataSourceTemplate { + pub kind: String, + pub network: Option, + pub name: String, + pub manifest_idx: u32, + pub mapping: Mapping, +} + +impl Into for DataSourceTemplate { + fn into(self) -> DataSourceTemplateInfo { + let DataSourceTemplate { + kind, + network: _, + name, + manifest_idx, + mapping, + } = self; + + DataSourceTemplateInfo { + api_version: mapping.api_version.clone(), + runtime: Some(mapping.runtime), + name, + manifest_idx: Some(manifest_idx), + kind: kind.to_string(), + } + } +} + +impl UnresolvedDataSourceTemplate { + pub async fn resolve( + self, + resolver: &Arc, + logger: &Logger, + manifest_idx: u32, + ) -> Result { + let kind = self.kind; + + let mapping = self + .mapping + .resolve(resolver, logger) + .await + .with_context(|| format!("failed to resolve data source template {}", self.name))?; + + Ok(DataSourceTemplate { + kind, + network: self.network, + name: self.name, + manifest_idx, + mapping, + }) + } +} + +#[derive(Clone, PartialEq, Eq)] +pub struct TriggerData { + pub source: DeploymentHash, + pub entity: Entity, + pub entity_type: String, +} + +impl TriggerData { + pub fn new(source: DeploymentHash, entity: Entity, entity_type: String) -> Self { + Self { + source, + entity, + entity_type, + } + } +} + +impl fmt::Debug for TriggerData { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!( + f, + "TriggerData {{ source: {:?}, entity: {:?} }}", + self.source, self.entity, + ) + } +} diff --git a/runtime/wasm/src/host.rs b/runtime/wasm/src/host.rs index 3ecee7ba753..ebf107fb3ec 100644 --- a/runtime/wasm/src/host.rs 
+++ b/runtime/wasm/src/host.rs @@ -366,6 +366,7 @@ impl RuntimeHostTrait for RuntimeHost { match self.data_source() { DataSource::Onchain(_) => None, DataSource::Offchain(ds) => ds.done_at(), + DataSource::Subgraph(_) => None, } } @@ -373,6 +374,7 @@ impl RuntimeHostTrait for RuntimeHost { match self.data_source() { DataSource::Onchain(_) => {} DataSource::Offchain(ds) => ds.set_done_at(block), + DataSource::Subgraph(_) => {} } } diff --git a/runtime/wasm/src/module/mod.rs b/runtime/wasm/src/module/mod.rs index ffe4f7aba8e..ee19cd173aa 100644 --- a/runtime/wasm/src/module/mod.rs +++ b/runtime/wasm/src/module/mod.rs @@ -81,6 +81,7 @@ where match self { MappingTrigger::Onchain(trigger) => trigger.to_asc_ptr(heap, gas), MappingTrigger::Offchain(trigger) => trigger.to_asc_ptr(heap, gas), + MappingTrigger::Subgraph(_) => todo!(), // TODO(krishna) } } } diff --git a/store/test-store/tests/chain/ethereum/manifest.rs b/store/test-store/tests/chain/ethereum/manifest.rs index 9089ec4f572..34eaf110f77 100644 --- a/store/test-store/tests/chain/ethereum/manifest.rs +++ b/store/test-store/tests/chain/ethereum/manifest.rs @@ -11,10 +11,10 @@ use graph::data::store::Value; use graph::data::subgraph::schema::SubgraphError; use graph::data::subgraph::{ Prune, LATEST_VERSION, SPEC_VERSION_0_0_4, SPEC_VERSION_0_0_7, SPEC_VERSION_0_0_8, - SPEC_VERSION_0_0_9, SPEC_VERSION_1_0_0, SPEC_VERSION_1_2_0, + SPEC_VERSION_0_0_9, SPEC_VERSION_1_0_0, SPEC_VERSION_1_2_0, SPEC_VERSION_1_3_0, }; use graph::data_source::offchain::OffchainDataSourceKind; -use graph::data_source::DataSourceTemplate; +use graph::data_source::{DataSourceEnum, DataSourceTemplate}; use graph::entity; use graph::env::ENV_VARS; use graph::prelude::web3::types::H256; @@ -166,10 +166,52 @@ specVersion: 0.0.7 let data_source = match &manifest.templates[0] { DataSourceTemplate::Offchain(ds) => ds, DataSourceTemplate::Onchain(_) => unreachable!(), + DataSourceTemplate::Subgraph(_) => unreachable!(), }; assert_eq!(data_source.kind, 
OffchainDataSourceKind::Ipfs); } +#[tokio::test] +async fn subgraph_ds_manifest() { + let yaml = " +schema: + file: + /: /ipfs/Qmschema +dataSources: + - name: SubgraphSource + kind: subgraph + entities: + - Gravatar + network: mainnet + source: + address: 'QmUVaWpdKgcxBov1jHEa8dr46d2rkVzfHuZFu4fXJ4sFse' + startBlock: 0 + mapping: + apiVersion: 0.0.6 + language: wasm/assemblyscript + entities: + - TestEntity + file: + /: /ipfs/Qmmapping + handlers: + - handler: handleEntity + entity: User +specVersion: 1.3.0 +"; + + let manifest = resolve_manifest(yaml, SPEC_VERSION_1_3_0).await; + + assert_eq!("Qmmanifest", manifest.id.as_str()); + assert_eq!(manifest.data_sources.len(), 1); + let data_source = &manifest.data_sources[0]; + match data_source { + DataSourceEnum::Subgraph(ds) => { + assert_eq!(ds.name, "SubgraphSource"); + } + _ => panic!("Expected a subgraph data source"), + } +} + #[tokio::test] async fn graft_manifest() { const YAML: &str = " diff --git a/tests/runner-tests/subgraph-data-sources/abis/Contract.abi b/tests/runner-tests/subgraph-data-sources/abis/Contract.abi new file mode 100644 index 00000000000..9d9f56b9263 --- /dev/null +++ b/tests/runner-tests/subgraph-data-sources/abis/Contract.abi @@ -0,0 +1,15 @@ +[ + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "string", + "name": "testCommand", + "type": "string" + } + ], + "name": "TestEvent", + "type": "event" + } +] diff --git a/tests/runner-tests/subgraph-data-sources/package.json b/tests/runner-tests/subgraph-data-sources/package.json new file mode 100644 index 00000000000..87537290ad2 --- /dev/null +++ b/tests/runner-tests/subgraph-data-sources/package.json @@ -0,0 +1,13 @@ +{ + "name": "subgraph-data-sources", + "version": "0.1.0", + "scripts": { + "codegen": "graph codegen --skip-migrations", + "create:test": "graph create test/subgraph-data-sources --node $GRAPH_NODE_ADMIN_URI", + "deploy:test": "graph deploy test/subgraph-data-sources --version-label v0.0.1 
--ipfs $IPFS_URI --node $GRAPH_NODE_ADMIN_URI" + }, + "devDependencies": { + "@graphprotocol/graph-cli": "0.79.0-alpha-20240711124603-49edf22", + "@graphprotocol/graph-ts": "0.31.0" + } +} diff --git a/tests/runner-tests/subgraph-data-sources/schema.graphql b/tests/runner-tests/subgraph-data-sources/schema.graphql new file mode 100644 index 00000000000..6f97fa65c43 --- /dev/null +++ b/tests/runner-tests/subgraph-data-sources/schema.graphql @@ -0,0 +1,6 @@ +type Data @entity { + id: ID! + foo: String + bar: Int + isTest: Boolean +} diff --git a/tests/runner-tests/subgraph-data-sources/src/mapping.ts b/tests/runner-tests/subgraph-data-sources/src/mapping.ts new file mode 100644 index 00000000000..3446d1f83c4 --- /dev/null +++ b/tests/runner-tests/subgraph-data-sources/src/mapping.ts @@ -0,0 +1,15 @@ +import { BigInt, dataSource, ethereum, log } from "@graphprotocol/graph-ts"; +import { Data } from "../generated/schema"; + +export function handleBlock(block: ethereum.Block): void { + let foo = dataSource.context().getString("foo"); + let bar = dataSource.context().getI32("bar"); + let isTest = dataSource.context().getBoolean("isTest"); + if (block.number == BigInt.fromI32(0)) { + let data = new Data("0"); + data.foo = foo; + data.bar = bar; + data.isTest = isTest; + data.save(); + } +} diff --git a/tests/runner-tests/subgraph-data-sources/subgraph.yaml b/tests/runner-tests/subgraph-data-sources/subgraph.yaml new file mode 100644 index 00000000000..b1a3fcbb486 --- /dev/null +++ b/tests/runner-tests/subgraph-data-sources/subgraph.yaml @@ -0,0 +1,19 @@ +specVersion: 1.3.0 +schema: + file: ./schema.graphql +dataSources: + - kind: subgraph + name: Contract + network: test + source: + address: 'QmHash' + startBlock: 6082461 + mapping: + apiVersion: 0.0.7 + language: wasm/assemblyscript + entities: + - Gravatar + handlers: + - handler: handleBlock + entity: Gravatar + file: ./src/mapping.ts diff --git a/tests/runner-tests/yarn.lock b/tests/runner-tests/yarn.lock index 
50e0c2b471f..9f3bdae834d 100644 --- a/tests/runner-tests/yarn.lock +++ b/tests/runner-tests/yarn.lock @@ -349,6 +349,40 @@ which "2.0.2" yaml "1.10.2" +"@graphprotocol/graph-cli@0.79.0-alpha-20240711124603-49edf22": + version "0.79.0-alpha-20240711124603-49edf22" + resolved "https://registry.yarnpkg.com/@graphprotocol/graph-cli/-/graph-cli-0.79.0-alpha-20240711124603-49edf22.tgz#4e3f6201932a0b68ce64d6badd8432cf2bead3c2" + integrity sha512-fZrdPiFbbbBVMnvsjfKA+j48WzzquaHQIpozBqnUKRPCV1n1NenIaq2nH16mlMwovRIS7AAIVCpa0QYQuPzw7Q== + dependencies: + "@float-capital/float-subgraph-uncrashable" "^0.0.0-alpha.4" + "@oclif/core" "2.8.6" + "@oclif/plugin-autocomplete" "^2.3.6" + "@oclif/plugin-not-found" "^2.4.0" + "@whatwg-node/fetch" "^0.8.4" + assemblyscript "0.19.23" + binary-install-raw "0.0.13" + chalk "3.0.0" + chokidar "3.5.3" + debug "4.3.4" + docker-compose "0.23.19" + dockerode "2.5.8" + fs-extra "9.1.0" + glob "9.3.5" + gluegun "5.1.6" + graphql "15.5.0" + immutable "4.2.1" + ipfs-http-client "55.0.0" + jayson "4.0.0" + js-yaml "3.14.1" + open "8.4.2" + prettier "3.0.3" + semver "7.4.0" + sync-request "6.1.0" + tmp-promise "3.0.3" + web3-eth-abi "1.7.0" + which "2.0.2" + yaml "1.10.2" + "@graphprotocol/graph-ts@0.30.0": version "0.30.0" resolved "https://registry.npmjs.org/@graphprotocol/graph-ts/-/graph-ts-0.30.0.tgz" @@ -1473,6 +1507,11 @@ defaults@^1.0.3: dependencies: clone "^1.0.2" +define-lazy-prop@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" + integrity sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og== + delay@^5.0.0: version "5.0.0" resolved "https://registry.npmjs.org/delay/-/delay-5.0.0.tgz" @@ -1545,6 +1584,13 @@ ejs@3.1.6: dependencies: jake "^10.6.1" +ejs@3.1.8: + version "3.1.8" + resolved "https://registry.yarnpkg.com/ejs/-/ejs-3.1.8.tgz#758d32910c78047585c7ef1f92f9ee041c1c190b" + integrity 
sha512-/sXZeMlhS0ArkfX2Aw780gJzXSMPnKjtspYZv+f3NiKLlubezAHDU5+9xz6gd3/NhG3txQCo6xlglmTS+oTGEQ== + dependencies: + jake "^10.8.5" + ejs@^3.1.8: version "3.1.9" resolved "https://registry.npmjs.org/ejs/-/ejs-3.1.9.tgz" @@ -1996,6 +2042,42 @@ gluegun@5.1.2: which "2.0.2" yargs-parser "^21.0.0" +gluegun@5.1.6: + version "5.1.6" + resolved "https://registry.yarnpkg.com/gluegun/-/gluegun-5.1.6.tgz#74ec13193913dc610f5c1a4039972c70c96a7bad" + integrity sha512-9zbi4EQWIVvSOftJWquWzr9gLX2kaDgPkNR5dYWbM53eVvCI3iKuxLlnKoHC0v4uPoq+Kr/+F569tjoFbA4DSA== + dependencies: + apisauce "^2.1.5" + app-module-path "^2.2.0" + cli-table3 "0.6.0" + colors "1.4.0" + cosmiconfig "7.0.1" + cross-spawn "7.0.3" + ejs "3.1.8" + enquirer "2.3.6" + execa "5.1.1" + fs-jetpack "4.3.1" + lodash.camelcase "^4.3.0" + lodash.kebabcase "^4.1.1" + lodash.lowercase "^4.3.0" + lodash.lowerfirst "^4.3.1" + lodash.pad "^4.5.1" + lodash.padend "^4.6.1" + lodash.padstart "^4.6.1" + lodash.repeat "^4.1.0" + lodash.snakecase "^4.1.1" + lodash.startcase "^4.4.0" + lodash.trim "^4.5.1" + lodash.trimend "^4.5.1" + lodash.trimstart "^4.5.1" + lodash.uppercase "^4.3.0" + lodash.upperfirst "^4.3.1" + ora "4.0.2" + pluralize "^8.0.0" + semver "7.3.5" + which "2.0.2" + yargs-parser "^21.0.0" + graceful-fs@^4.1.6, graceful-fs@^4.2.0: version "4.2.11" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" @@ -2282,7 +2364,7 @@ is-binary-path@~2.1.0: dependencies: binary-extensions "^2.0.0" -is-docker@^2.0.0: +is-docker@^2.0.0, is-docker@^2.1.1: version "2.2.1" resolved "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz" integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== @@ -2922,6 +3004,15 @@ onetime@^5.1.0, onetime@^5.1.2: dependencies: mimic-fn "^2.1.0" +open@8.4.2: + version "8.4.2" + resolved "https://registry.yarnpkg.com/open/-/open-8.4.2.tgz#5b5ffe2a8f793dcd2aad73e550cb87b59cb084f9" + 
integrity sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ== + dependencies: + define-lazy-prop "^2.0.0" + is-docker "^2.1.1" + is-wsl "^2.2.0" + ora@4.0.2: version "4.0.2" resolved "https://registry.npmjs.org/ora/-/ora-4.0.2.tgz" @@ -3042,6 +3133,11 @@ prettier@1.19.1: resolved "https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz" integrity sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew== +prettier@3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-3.0.3.tgz#432a51f7ba422d1469096c0fdc28e235db8f9643" + integrity sha512-L/4pUDMxcNa8R/EthV08Zt42WBO4h1rarVtK0K+QJG0X187OLo7l699jWw0GKuwzkPQ//jMFA/8Xm6Fh3J/DAg== + process-nextick-args@~2.0.0: version "2.0.1" resolved "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz" diff --git a/tests/tests/runner_tests.rs b/tests/tests/runner_tests.rs index 7da707ac7cd..169836f2390 100644 --- a/tests/tests/runner_tests.rs +++ b/tests/tests/runner_tests.rs @@ -1077,6 +1077,24 @@ async fn parse_data_source_context() { ); } +#[tokio::test] +async fn subgraph_data_sources() { + let RunnerTestRecipe { stores, test_info } = + RunnerTestRecipe::new("subgraph-data-sources", "subgraph-data-sources").await; + + let blocks = { + let block_0 = genesis(); + let block_1 = empty_block(block_0.ptr(), test_ptr(1)); + let block_2 = empty_block(block_1.ptr(), test_ptr(2)); + vec![block_0, block_1, block_2] + }; + let stop_block = blocks.last().unwrap().block.ptr(); + let chain = chain(&test_info.test_name, blocks, &stores, None).await; + + let ctx = fixture::setup(&test_info, &stores, &chain, None, None).await; + ctx.start_and_sync_to(stop_block).await; +} + #[tokio::test] async fn retry_create_ds() { let RunnerTestRecipe { stores, test_info } = From a45daf3cd3e0ca7e9bac5d0694c2ccfed21ceb96 Mon Sep 17 00:00:00 2001 From: incrypto32 Date: Fri, 12 Jul 2024 16:03:47 +0530 Subject: [PATCH 02/21] 
graph: wrap TriggerFilter with TriggerFilterWrapper --- chain/arweave/src/chain.rs | 9 ++++-- chain/cosmos/src/chain.rs | 9 ++++-- chain/ethereum/src/chain.rs | 9 +++--- chain/near/src/chain.rs | 8 ++--- chain/starknet/src/chain.rs | 6 ++-- chain/substreams/src/chain.rs | 6 ++-- core/src/subgraph/context/mod.rs | 4 +-- core/src/subgraph/runner.rs | 17 ++++++---- core/src/subgraph/stream.rs | 6 ++-- graph/src/blockchain/mock.rs | 53 +++++++++++++++++++++++--------- graph/src/blockchain/mod.rs | 18 +++++++++-- tests/tests/runner_tests.rs | 10 +++--- 12 files changed, 102 insertions(+), 53 deletions(-) diff --git a/chain/arweave/src/chain.rs b/chain/arweave/src/chain.rs index 8d40408a463..70c03d832dd 100644 --- a/chain/arweave/src/chain.rs +++ b/chain/arweave/src/chain.rs @@ -3,7 +3,7 @@ use graph::blockchain::client::ChainClient; use graph::blockchain::firehose_block_ingestor::FirehoseBlockIngestor; use graph::blockchain::{ BasicBlockchainBuilder, Block, BlockIngestor, BlockchainBuilder, BlockchainKind, - EmptyNodeCapabilities, NoopDecoderHook, NoopRuntimeAdapter, + EmptyNodeCapabilities, NoopDecoderHook, NoopRuntimeAdapter, TriggerFilterWrapper, }; use graph::cheap_clone::CheapClone; use graph::components::adapter::ChainId; @@ -119,7 +119,7 @@ impl Blockchain for Chain { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, start_blocks: Vec, - filter: Arc, + filter: Arc<&TriggerFilterWrapper>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { let adapter = self @@ -135,7 +135,10 @@ impl Blockchain for Chain { .subgraph_logger(&deployment) .new(o!("component" => "FirehoseBlockStream")); - let firehose_mapper = Arc::new(FirehoseMapper { adapter, filter }); + let firehose_mapper = Arc::new(FirehoseMapper { + adapter, + filter: filter.filter.clone(), + }); Ok(Box::new(FirehoseBlockStream::new( deployment.hash, diff --git a/chain/cosmos/src/chain.rs b/chain/cosmos/src/chain.rs index 955aa7efc3c..b21acb3a8e6 100644 --- 
a/chain/cosmos/src/chain.rs +++ b/chain/cosmos/src/chain.rs @@ -1,5 +1,5 @@ use graph::blockchain::firehose_block_ingestor::FirehoseBlockIngestor; -use graph::blockchain::{BlockIngestor, NoopDecoderHook}; +use graph::blockchain::{BlockIngestor, NoopDecoderHook, TriggerFilterWrapper}; use graph::components::adapter::ChainId; use graph::env::EnvVars; use graph::prelude::MetricsRegistry; @@ -113,7 +113,7 @@ impl Blockchain for Chain { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, start_blocks: Vec, - filter: Arc, + filter: Arc<&TriggerFilterWrapper>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { let adapter = self @@ -129,7 +129,10 @@ impl Blockchain for Chain { .subgraph_logger(&deployment) .new(o!("component" => "FirehoseBlockStream")); - let firehose_mapper = Arc::new(FirehoseMapper { adapter, filter }); + let firehose_mapper = Arc::new(FirehoseMapper { + adapter, + filter: filter.filter.clone(), + }); Ok(Box::new(FirehoseBlockStream::new( deployment.hash, diff --git a/chain/ethereum/src/chain.rs b/chain/ethereum/src/chain.rs index 1def8c483cc..458265c9b87 100644 --- a/chain/ethereum/src/chain.rs +++ b/chain/ethereum/src/chain.rs @@ -3,7 +3,8 @@ use anyhow::{Context, Error}; use graph::blockchain::client::ChainClient; use graph::blockchain::firehose_block_ingestor::{FirehoseBlockIngestor, Transforms}; use graph::blockchain::{ - BlockIngestor, BlockTime, BlockchainKind, ChainIdentifier, TriggersAdapterSelector, + BlockIngestor, BlockTime, BlockchainKind, ChainIdentifier, TriggerFilterWrapper, + TriggersAdapterSelector, }; use graph::components::adapter::ChainId; use graph::components::store::DeploymentCursorTracker; @@ -409,7 +410,7 @@ impl Blockchain for Chain { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, start_blocks: Vec, - filter: Arc, + filter: Arc<&TriggerFilterWrapper>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { let current_ptr = store.block_ptr(); @@ -421,7 
+422,7 @@ impl Blockchain for Chain { deployment, start_blocks, current_ptr, - filter, + filter.filter.clone(), unified_api_version, ) .await @@ -434,7 +435,7 @@ impl Blockchain for Chain { store.firehose_cursor(), start_blocks, current_ptr, - filter, + filter.filter.clone(), unified_api_version, ) .await diff --git a/chain/near/src/chain.rs b/chain/near/src/chain.rs index 283552e7f33..962978304a9 100644 --- a/chain/near/src/chain.rs +++ b/chain/near/src/chain.rs @@ -4,7 +4,7 @@ use graph::blockchain::firehose_block_ingestor::FirehoseBlockIngestor; use graph::blockchain::substreams_block_stream::SubstreamsBlockStream; use graph::blockchain::{ BasicBlockchainBuilder, BlockIngestor, BlockchainBuilder, BlockchainKind, NoopDecoderHook, - NoopRuntimeAdapter, + NoopRuntimeAdapter, TriggerFilterWrapper, }; use graph::cheap_clone::CheapClone; use graph::components::adapter::ChainId; @@ -230,7 +230,7 @@ impl Blockchain for Chain { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, start_blocks: Vec, - filter: Arc, + filter: Arc<&TriggerFilterWrapper>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { if self.prefer_substreams { @@ -242,7 +242,7 @@ impl Blockchain for Chain { deployment, store.firehose_cursor(), store.block_ptr(), - filter, + filter.filter.clone(), ) .await; } @@ -254,7 +254,7 @@ impl Blockchain for Chain { store.firehose_cursor(), start_blocks, store.block_ptr(), - filter, + filter.filter.clone(), unified_api_version, ) .await diff --git a/chain/starknet/src/chain.rs b/chain/starknet/src/chain.rs index cd10af5f965..442474f507b 100644 --- a/chain/starknet/src/chain.rs +++ b/chain/starknet/src/chain.rs @@ -11,7 +11,7 @@ use graph::{ firehose_block_stream::FirehoseBlockStream, BasicBlockchainBuilder, Block, BlockIngestor, BlockPtr, Blockchain, BlockchainBuilder, BlockchainKind, EmptyNodeCapabilities, IngestorError, NoopDecoderHook, NoopRuntimeAdapter, - RuntimeAdapter as RuntimeAdapterTrait, + RuntimeAdapter as 
RuntimeAdapterTrait, TriggerFilterWrapper, }, cheap_clone::CheapClone, components::{ @@ -115,7 +115,7 @@ impl Blockchain for Chain { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, start_blocks: Vec, - filter: Arc, + filter: Arc<&TriggerFilterWrapper>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { self.block_stream_builder @@ -125,7 +125,7 @@ impl Blockchain for Chain { store.firehose_cursor(), start_blocks, store.block_ptr(), - filter, + filter.filter.clone(), unified_api_version, ) .await diff --git a/chain/substreams/src/chain.rs b/chain/substreams/src/chain.rs index 28ef4bdc38b..b027edd3351 100644 --- a/chain/substreams/src/chain.rs +++ b/chain/substreams/src/chain.rs @@ -4,7 +4,7 @@ use anyhow::Error; use graph::blockchain::client::ChainClient; use graph::blockchain::{ BasicBlockchainBuilder, BlockIngestor, BlockTime, EmptyNodeCapabilities, NoopDecoderHook, - NoopRuntimeAdapter, + NoopRuntimeAdapter, TriggerFilterWrapper, }; use graph::components::adapter::ChainId; use graph::components::store::DeploymentCursorTracker; @@ -140,7 +140,7 @@ impl Blockchain for Chain { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, _start_blocks: Vec, - filter: Arc, + filter: Arc<&TriggerFilterWrapper>, _unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { self.block_stream_builder @@ -150,7 +150,7 @@ impl Blockchain for Chain { deployment, store.firehose_cursor(), store.block_ptr(), - filter, + filter.filter.clone(), ) .await } diff --git a/core/src/subgraph/context/mod.rs b/core/src/subgraph/context/mod.rs index 6ffc5a5aa12..7b7686a04fb 100644 --- a/core/src/subgraph/context/mod.rs +++ b/core/src/subgraph/context/mod.rs @@ -6,7 +6,7 @@ use crate::polling_monitor::{ use anyhow::{self, Error}; use bytes::Bytes; use graph::{ - blockchain::{BlockTime, Blockchain}, + blockchain::{BlockTime, Blockchain, TriggerFilterWrapper}, components::{ store::{DeploymentId, SubgraphFork}, subgraph::{HostMetrics, 
MappingError, RuntimeHost as _, SharedProofOfIndexing}, @@ -73,7 +73,7 @@ where pub(crate) instance: SubgraphInstance, pub instances: SubgraphKeepAlive, pub offchain_monitor: OffchainMonitor, - pub filter: Option, + pub filter: Option>, pub(crate) trigger_processor: Box>, pub(crate) decoder: Box>, } diff --git a/core/src/subgraph/runner.rs b/core/src/subgraph/runner.rs index cd341ce2f99..28b82410276 100644 --- a/core/src/subgraph/runner.rs +++ b/core/src/subgraph/runner.rs @@ -7,7 +7,9 @@ use atomic_refcell::AtomicRefCell; use graph::blockchain::block_stream::{ BlockStreamError, BlockStreamEvent, BlockWithTriggers, FirehoseCursor, }; -use graph::blockchain::{Block, BlockTime, Blockchain, DataSource as _, TriggerFilter as _}; +use graph::blockchain::{ + Block, BlockTime, Blockchain, DataSource as _, TriggerFilter as _, TriggerFilterWrapper, +}; use graph::components::store::{EmptyStore, GetScope, ReadStore, StoredDynamicDataSource}; use graph::components::subgraph::InstanceDSTemplate; use graph::components::{ @@ -116,7 +118,7 @@ where self.inputs.static_filters || self.ctx.hosts_len() > ENV_VARS.static_filters_threshold } - fn build_filter(&self) -> C::TriggerFilter { + fn build_filter(&self) -> TriggerFilterWrapper { let current_ptr = self.inputs.store.block_ptr(); let static_filters = self.is_static_filters_enabled(); @@ -130,8 +132,11 @@ where // if static_filters is not enabled we just stick to the filter based on all the data sources. 
if !static_filters { - return C::TriggerFilter::from_data_sources( - self.ctx.onchain_data_sources().filter(end_block_filter), + return TriggerFilterWrapper::new( + C::TriggerFilter::from_data_sources( + self.ctx.onchain_data_sources().filter(end_block_filter), + ), + None, ); } @@ -158,11 +163,11 @@ where filter.extend_with_template(templates.iter().filter_map(|ds| ds.as_onchain()).cloned()); - filter + TriggerFilterWrapper::new(filter, None) } #[cfg(debug_assertions)] - pub fn build_filter_for_test(&self) -> C::TriggerFilter { + pub fn build_filter_for_test(&self) -> TriggerFilterWrapper { self.build_filter() } diff --git a/core/src/subgraph/stream.rs b/core/src/subgraph/stream.rs index c1d767e3fcf..b71d5e908ae 100644 --- a/core/src/subgraph/stream.rs +++ b/core/src/subgraph/stream.rs @@ -1,13 +1,13 @@ use crate::subgraph::inputs::IndexingInputs; use anyhow::bail; use graph::blockchain::block_stream::{BlockStream, BufferedBlockStream}; -use graph::blockchain::Blockchain; +use graph::blockchain::{Blockchain, TriggerFilterWrapper}; use graph::prelude::{CheapClone, Error, SubgraphInstanceMetrics}; use std::sync::Arc; pub async fn new_block_stream( inputs: &IndexingInputs, - filter: &C::TriggerFilter, + filter: &TriggerFilterWrapper, metrics: &SubgraphInstanceMetrics, ) -> Result>, Error> { let is_firehose = inputs.chain.chain_client().is_firehose(); @@ -18,7 +18,7 @@ pub async fn new_block_stream( inputs.deployment.clone(), inputs.store.cheap_clone(), inputs.start_blocks.clone(), - Arc::new(filter.clone()), + Arc::new(filter), inputs.unified_api_version.clone(), ) .await diff --git a/graph/src/blockchain/mock.rs b/graph/src/blockchain/mock.rs index c89eca95727..6b1eaba4ce7 100644 --- a/graph/src/blockchain/mock.rs +++ b/graph/src/blockchain/mock.rs @@ -14,10 +14,7 @@ use serde::Deserialize; use std::{collections::HashSet, convert::TryFrom, sync::Arc}; use super::{ - block_stream::{self, BlockStream, FirehoseCursor}, - client::ChainClient, - BlockIngestor, BlockTime, 
EmptyNodeCapabilities, HostFn, IngestorError, MappingTriggerTrait, - NoopDecoderHook, TriggerWithHandler, + block_stream::{self, BlockStream, FirehoseCursor}, client::ChainClient, BlockIngestor, BlockTime, EmptyNodeCapabilities, HostFn, IngestorError, MappingTriggerTrait, NoopDecoderHook, TriggerFilterWrapper, TriggerWithHandler }; use super::{ @@ -218,31 +215,37 @@ impl UnresolvedDataSourceTemplate for MockUnresolvedDataSource pub struct MockTriggersAdapter; #[async_trait] -impl TriggersAdapter for MockTriggersAdapter { +impl TriggersAdapter for MockTriggersAdapter { async fn ancestor_block( &self, _ptr: BlockPtr, _offset: BlockNumber, _root: Option, - ) -> Result, Error> { + ) -> Result, Error> { todo!() } async fn scan_triggers( &self, - _from: crate::components::store::BlockNumber, - _to: crate::components::store::BlockNumber, - _filter: &C::TriggerFilter, - ) -> Result<(Vec>, BlockNumber), Error> { - todo!() + from: crate::components::store::BlockNumber, + to: crate::components::store::BlockNumber, + filter: &MockTriggerFilter, + ) -> Result< + ( + Vec>, + BlockNumber, + ), + Error, + > { + blocks_with_triggers(from, to, filter).await } async fn triggers_in_block( &self, _logger: &slog::Logger, - _block: C::Block, - _filter: &C::TriggerFilter, - ) -> Result, Error> { + _block: MockBlock, + _filter: &MockTriggerFilter, + ) -> Result, Error> { todo!() } @@ -255,6 +258,26 @@ impl TriggersAdapter for MockTriggersAdapter { } } +async fn blocks_with_triggers( + _from: crate::components::store::BlockNumber, + to: crate::components::store::BlockNumber, + _filter: &MockTriggerFilter, +) -> Result< + ( + Vec>, + BlockNumber, + ), + Error, +> { + Ok(( + vec![BlockWithTriggers { + block: MockBlock { number: 0 }, + trigger_data: vec![MockTriggerData], + }], + to, + )) +} + #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)] pub struct MockTriggerData; @@ -347,7 +370,7 @@ impl Blockchain for MockBlockchain { _deployment: DeploymentLocator, _store: impl 
DeploymentCursorTracker, _start_blocks: Vec, - _filter: Arc, + _filter: Arc<&TriggerFilterWrapper>, _unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { todo!() diff --git a/graph/src/blockchain/mod.rs b/graph/src/blockchain/mod.rs index 1b897440b9b..62b93e61413 100644 --- a/graph/src/blockchain/mod.rs +++ b/graph/src/blockchain/mod.rs @@ -26,7 +26,7 @@ use crate::{ }, data::subgraph::{UnifiedMappingApiVersion, MIN_SPEC_VERSION}, data_source::{self, DataSourceTemplateInfo}, - prelude::DataSourceContext, + prelude::{DataSourceContext, DeploymentHash}, runtime::{gas::GasCounter, AscHeap, HostExportError}, }; use crate::{ @@ -189,7 +189,7 @@ pub trait Blockchain: Debug + Sized + Send + Sync + Unpin + 'static { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, start_blocks: Vec, - filter: Arc, + filter: Arc<&TriggerFilterWrapper>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error>; @@ -247,6 +247,20 @@ impl From for IngestorError { } } +pub struct TriggerFilterWrapper { + pub filter: Arc, + _subgraph_filter: Option, +} + +impl TriggerFilterWrapper { + pub fn new(filter: C::TriggerFilter, subgraph_filter: Option) -> Self { + Self { + filter: Arc::new(filter), + _subgraph_filter: subgraph_filter, + } + } +} + pub trait TriggerFilter: Default + Clone + Send + Sync { fn from_data_sources<'a>( data_sources: impl Iterator + Clone, diff --git a/tests/tests/runner_tests.rs b/tests/tests/runner_tests.rs index 169836f2390..0ff9f40d537 100644 --- a/tests/tests/runner_tests.rs +++ b/tests/tests/runner_tests.rs @@ -500,10 +500,10 @@ async fn substreams_trigger_filter_construction() -> anyhow::Result<()> { let runner = ctx.runner_substreams(test_ptr(0)).await; let filter = runner.build_filter_for_test(); - assert_eq!(filter.module_name(), "graph_out"); - assert_eq!(filter.modules().as_ref().unwrap().modules.len(), 2); - assert_eq!(filter.start_block().unwrap(), 0); - assert_eq!(filter.data_sources_len(), 1); + 
assert_eq!(filter.filter.module_name(), "graph_out"); + assert_eq!(filter.filter.modules().as_ref().unwrap().modules.len(), 2); + assert_eq!(filter.filter.start_block().unwrap(), 0); + assert_eq!(filter.filter.data_sources_len(), 1); Ok(()) } @@ -525,7 +525,7 @@ async fn end_block() -> anyhow::Result<()> { let runner = ctx.runner(block_ptr.clone()).await; let runner = runner.run_for_test(false).await.unwrap(); let filter = runner.context().filter.as_ref().unwrap(); - let addresses = filter.log().contract_addresses().collect::>(); + let addresses = filter.filter.log().contract_addresses().collect::>(); if should_contain_addr { assert!(addresses.contains(&addr)); From af3468c448e96be97d7b71a69636af552b89430f Mon Sep 17 00:00:00 2001 From: incrypto32 Date: Fri, 12 Jul 2024 16:21:18 +0530 Subject: [PATCH 03/21] graph,chain: add build_subgraph_block_stream method --- chain/ethereum/src/chain.rs | 26 ++++++++++++++++++++++++++ chain/near/src/chain.rs | 12 ++++++++++++ chain/starknet/src/chain.rs | 12 ++++++++++++ chain/substreams/src/block_stream.rs | 14 +++++++++++++- graph/src/blockchain/block_stream.rs | 12 +++++++++++- graph/src/blockchain/mod.rs | 4 ++-- tests/src/fixture/mod.rs | 26 +++++++++++++++++++++++++- 7 files changed, 101 insertions(+), 5 deletions(-) diff --git a/chain/ethereum/src/chain.rs b/chain/ethereum/src/chain.rs index 458265c9b87..fb2c5fafa59 100644 --- a/chain/ethereum/src/chain.rs +++ b/chain/ethereum/src/chain.rs @@ -122,6 +122,17 @@ impl BlockStreamBuilder for EthereumStreamBuilder { unimplemented!() } + async fn build_subgraph_block_stream( + &self, + _chain: &Chain, + _deployment: DeploymentLocator, + _start_blocks: Vec, + _subgraph_current_block: Option, + _filter: Arc<&TriggerFilterWrapper>, + _unified_api_version: UnifiedMappingApiVersion, + ) -> Result>> { + unimplemented!() + } async fn build_polling( &self, chain: &Chain, @@ -414,6 +425,21 @@ impl Blockchain for Chain { unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> 
{ let current_ptr = store.block_ptr(); + + if filter.subgraph_filter.is_some() { + return self + .block_stream_builder + .build_subgraph_block_stream( + self, + deployment, + start_blocks, + current_ptr, + filter, + unified_api_version, + ) + .await; + } + match self.chain_client().as_ref() { ChainClient::Rpc(_) => { self.block_stream_builder diff --git a/chain/near/src/chain.rs b/chain/near/src/chain.rs index 962978304a9..a5be2d260c3 100644 --- a/chain/near/src/chain.rs +++ b/chain/near/src/chain.rs @@ -109,6 +109,18 @@ impl BlockStreamBuilder for NearStreamBuilder { ))) } + async fn build_subgraph_block_stream( + &self, + _chain: &Chain, + _deployment: DeploymentLocator, + _start_blocks: Vec, + _subgraph_current_block: Option, + _filter: Arc<&TriggerFilterWrapper>, + _unified_api_version: UnifiedMappingApiVersion, + ) -> Result>> { + unimplemented!() + } + async fn build_firehose( &self, chain: &Chain, diff --git a/chain/starknet/src/chain.rs b/chain/starknet/src/chain.rs index 442474f507b..2419538435f 100644 --- a/chain/starknet/src/chain.rs +++ b/chain/starknet/src/chain.rs @@ -196,6 +196,18 @@ impl BlockStreamBuilder for StarknetStreamBuilder { unimplemented!() } + async fn build_subgraph_block_stream( + &self, + _chain: &Chain, + _deployment: DeploymentLocator, + _start_blocks: Vec, + _subgraph_current_block: Option, + _filter: Arc<&TriggerFilterWrapper>, + _unified_api_version: UnifiedMappingApiVersion, + ) -> Result>> { + unimplemented!() + } + async fn build_firehose( &self, chain: &Chain, diff --git a/chain/substreams/src/block_stream.rs b/chain/substreams/src/block_stream.rs index 8844df0610e..a2a233961d2 100644 --- a/chain/substreams/src/block_stream.rs +++ b/chain/substreams/src/block_stream.rs @@ -7,7 +7,7 @@ use graph::{ BlockStream, BlockStreamBuilder as BlockStreamBuilderTrait, FirehoseCursor, }, substreams_block_stream::SubstreamsBlockStream, - Blockchain, + Blockchain, TriggerFilterWrapper, }, components::store::DeploymentLocator, 
data::subgraph::UnifiedMappingApiVersion, @@ -99,6 +99,18 @@ impl BlockStreamBuilderTrait for BlockStreamBuilder { unimplemented!() } + async fn build_subgraph_block_stream( + &self, + _chain: &Chain, + _deployment: DeploymentLocator, + _start_blocks: Vec, + _subgraph_current_block: Option, + _filter: Arc<&TriggerFilterWrapper>, + _unified_api_version: UnifiedMappingApiVersion, + ) -> Result>> { + unimplemented!() + } + async fn build_polling( &self, _chain: &Chain, diff --git a/graph/src/blockchain/block_stream.rs b/graph/src/blockchain/block_stream.rs index 25a923dd502..3f792b3688e 100644 --- a/graph/src/blockchain/block_stream.rs +++ b/graph/src/blockchain/block_stream.rs @@ -12,7 +12,7 @@ use thiserror::Error; use tokio::sync::mpsc::{self, Receiver, Sender}; use super::substreams_block_stream::SubstreamsLogData; -use super::{Block, BlockPtr, BlockTime, Blockchain}; +use super::{Block, BlockPtr, BlockTime, Blockchain, TriggerFilterWrapper}; use crate::anyhow::Result; use crate::components::store::{BlockNumber, DeploymentLocator}; use crate::data::subgraph::UnifiedMappingApiVersion; @@ -148,6 +148,16 @@ pub trait BlockStreamBuilder: Send + Sync { filter: Arc, unified_api_version: UnifiedMappingApiVersion, ) -> Result>>; + + async fn build_subgraph_block_stream( + &self, + chain: &C, + deployment: DeploymentLocator, + start_blocks: Vec, + subgraph_current_block: Option, + filter: Arc<&TriggerFilterWrapper>, + unified_api_version: UnifiedMappingApiVersion, + ) -> Result>>; } #[derive(Debug, Clone)] diff --git a/graph/src/blockchain/mod.rs b/graph/src/blockchain/mod.rs index 62b93e61413..cbaff052706 100644 --- a/graph/src/blockchain/mod.rs +++ b/graph/src/blockchain/mod.rs @@ -249,14 +249,14 @@ impl From for IngestorError { pub struct TriggerFilterWrapper { pub filter: Arc, - _subgraph_filter: Option, + pub subgraph_filter: Option, } impl TriggerFilterWrapper { pub fn new(filter: C::TriggerFilter, subgraph_filter: Option) -> Self { Self { filter: Arc::new(filter), - 
_subgraph_filter: subgraph_filter, + subgraph_filter, } } } diff --git a/tests/src/fixture/mod.rs b/tests/src/fixture/mod.rs index ebed1d3a115..2ecd386a087 100644 --- a/tests/src/fixture/mod.rs +++ b/tests/src/fixture/mod.rs @@ -14,7 +14,7 @@ use graph::blockchain::block_stream::{ }; use graph::blockchain::{ Block, BlockHash, BlockPtr, Blockchain, BlockchainMap, ChainIdentifier, RuntimeAdapter, - TriggersAdapter, TriggersAdapterSelector, + TriggerFilterWrapper, TriggersAdapter, TriggersAdapterSelector, }; use graph::cheap_clone::CheapClone; use graph::components::adapter::ChainId; @@ -716,6 +716,18 @@ impl BlockStreamBuilder for MutexBlockStreamBuilder { unimplemented!(); } + async fn build_subgraph_block_stream( + &self, + _chain: &C, + _deployment: DeploymentLocator, + _start_blocks: Vec, + _subgraph_current_block: Option, + _filter: Arc<&TriggerFilterWrapper>, + _unified_api_version: graph::data::subgraph::UnifiedMappingApiVersion, + ) -> anyhow::Result>> { + unimplemented!() + } + async fn build_polling( &self, _chain: &C, @@ -755,6 +767,18 @@ where unimplemented!() } + async fn build_subgraph_block_stream( + &self, + _chain: &C, + _deployment: DeploymentLocator, + _start_blocks: Vec, + _subgraph_current_block: Option, + _filter: Arc<&TriggerFilterWrapper>, + _unified_api_version: graph::data::subgraph::UnifiedMappingApiVersion, + ) -> anyhow::Result>> { + unimplemented!() + } + async fn build_firehose( &self, _chain: &C, From d59c14b57e86cf091c275d3d2cf63241d4f7cb67 Mon Sep 17 00:00:00 2001 From: incrypto32 Date: Mon, 15 Jul 2024 16:45:35 +0530 Subject: [PATCH 04/21] graph,core,chain: use TriggerFilterWrapper in PollingBlockStream --- chain/arweave/src/chain.rs | 2 +- chain/cosmos/src/chain.rs | 2 +- chain/ethereum/src/chain.rs | 77 +++++++++++++++++--- chain/near/src/chain.rs | 6 +- chain/starknet/src/chain.rs | 6 +- chain/substreams/src/block_stream.rs | 4 +- chain/substreams/src/chain.rs | 2 +- core/src/subgraph/runner.rs | 4 +- core/src/subgraph/stream.rs 
| 4 +- graph/src/blockchain/block_stream.rs | 4 +- graph/src/blockchain/mock.rs | 2 +- graph/src/blockchain/mod.rs | 18 ++++- graph/src/blockchain/polling_block_stream.rs | 10 +-- tests/src/fixture/mod.rs | 8 +- 14 files changed, 110 insertions(+), 39 deletions(-) diff --git a/chain/arweave/src/chain.rs b/chain/arweave/src/chain.rs index 70c03d832dd..9e6167b5678 100644 --- a/chain/arweave/src/chain.rs +++ b/chain/arweave/src/chain.rs @@ -119,7 +119,7 @@ impl Blockchain for Chain { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, start_blocks: Vec, - filter: Arc<&TriggerFilterWrapper>, + filter: Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { let adapter = self diff --git a/chain/cosmos/src/chain.rs b/chain/cosmos/src/chain.rs index b21acb3a8e6..6c88e710491 100644 --- a/chain/cosmos/src/chain.rs +++ b/chain/cosmos/src/chain.rs @@ -113,7 +113,7 @@ impl Blockchain for Chain { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, start_blocks: Vec, - filter: Arc<&TriggerFilterWrapper>, + filter: Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { let adapter = self diff --git a/chain/ethereum/src/chain.rs b/chain/ethereum/src/chain.rs index fb2c5fafa59..2f91a05e817 100644 --- a/chain/ethereum/src/chain.rs +++ b/chain/ethereum/src/chain.rs @@ -124,25 +124,80 @@ impl BlockStreamBuilder for EthereumStreamBuilder { async fn build_subgraph_block_stream( &self, - _chain: &Chain, - _deployment: DeploymentLocator, - _start_blocks: Vec, - _subgraph_current_block: Option, - _filter: Arc<&TriggerFilterWrapper>, - _unified_api_version: UnifiedMappingApiVersion, + chain: &Chain, + deployment: DeploymentLocator, + start_blocks: Vec, + subgraph_current_block: Option, + filter: Arc>, + unified_api_version: UnifiedMappingApiVersion, ) -> Result>> { - unimplemented!() + let requirements = filter.filter.node_capabilities(); + let adapter = chain + .triggers_adapter(&deployment, &requirements, 
unified_api_version.clone()) + .unwrap_or_else(|_| { + panic!( + "no adapter for network {} with capabilities {}", + chain.name, requirements + ) + }); + + let logger = chain + .logger_factory + .subgraph_logger(&deployment) + .new(o!("component" => "BlockStream")); + let chain_store = chain.chain_store(); + let chain_head_update_stream = chain + .chain_head_update_listener + .subscribe(chain.name.to_string(), logger.clone()); + + // Special case: Detect Celo and set the threshold to 0, so that eth_getLogs is always used. + // This is ok because Celo blocks are always final. And we _need_ to do this because + // some events appear only in eth_getLogs but not in transaction receipts. + // See also ca0edc58-0ec5-4c89-a7dd-2241797f5e50. + let chain_id = match chain.chain_client().as_ref() { + ChainClient::Rpc(adapter) => { + adapter + .cheapest() + .await + .ok_or(anyhow!("unable to get eth adapter for chan_id call"))? + .chain_id() + .await? + } + _ => panic!("expected rpc when using polling blockstream"), + }; + let reorg_threshold = match CELO_CHAIN_IDS.contains(&chain_id) { + false => chain.reorg_threshold, + true => 0, + }; + + Ok(Box::new(PollingBlockStream::new( + chain_store, + chain_head_update_stream, + adapter, + chain.node_id.clone(), + deployment.hash, + filter, + start_blocks, + reorg_threshold, + logger, + ENV_VARS.max_block_range_size, + ENV_VARS.target_triggers_per_block_range, + unified_api_version, + subgraph_current_block, + ))) } + async fn build_polling( &self, chain: &Chain, deployment: DeploymentLocator, start_blocks: Vec, subgraph_current_block: Option, - filter: Arc<::TriggerFilter>, + filter: Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>> { - let requirements = filter.node_capabilities(); + + let requirements = filter.filter.node_capabilities(); let adapter = chain .triggers_adapter(&deployment, &requirements, unified_api_version.clone()) .unwrap_or_else(|_| { @@ -421,7 +476,7 @@ impl Blockchain for Chain { deployment: 
DeploymentLocator, store: impl DeploymentCursorTracker, start_blocks: Vec, - filter: Arc<&TriggerFilterWrapper>, + filter: Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { let current_ptr = store.block_ptr(); @@ -448,7 +503,7 @@ impl Blockchain for Chain { deployment, start_blocks, current_ptr, - filter.filter.clone(), + filter, unified_api_version, ) .await diff --git a/chain/near/src/chain.rs b/chain/near/src/chain.rs index a5be2d260c3..86a77c2fc61 100644 --- a/chain/near/src/chain.rs +++ b/chain/near/src/chain.rs @@ -115,7 +115,7 @@ impl BlockStreamBuilder for NearStreamBuilder { _deployment: DeploymentLocator, _start_blocks: Vec, _subgraph_current_block: Option, - _filter: Arc<&TriggerFilterWrapper>, + _filter: Arc>, _unified_api_version: UnifiedMappingApiVersion, ) -> Result>> { unimplemented!() @@ -164,7 +164,7 @@ impl BlockStreamBuilder for NearStreamBuilder { _deployment: DeploymentLocator, _start_blocks: Vec, _subgraph_current_block: Option, - _filter: Arc<::TriggerFilter>, + _filter: Arc>, _unified_api_version: UnifiedMappingApiVersion, ) -> Result>> { todo!() @@ -242,7 +242,7 @@ impl Blockchain for Chain { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, start_blocks: Vec, - filter: Arc<&TriggerFilterWrapper>, + filter: Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { if self.prefer_substreams { diff --git a/chain/starknet/src/chain.rs b/chain/starknet/src/chain.rs index 2419538435f..d926b31877b 100644 --- a/chain/starknet/src/chain.rs +++ b/chain/starknet/src/chain.rs @@ -115,7 +115,7 @@ impl Blockchain for Chain { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, start_blocks: Vec, - filter: Arc<&TriggerFilterWrapper>, + filter: Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { self.block_stream_builder @@ -202,7 +202,7 @@ impl BlockStreamBuilder for StarknetStreamBuilder { _deployment: DeploymentLocator, _start_blocks: Vec, 
_subgraph_current_block: Option, - _filter: Arc<&TriggerFilterWrapper>, + _filter: Arc>, _unified_api_version: UnifiedMappingApiVersion, ) -> Result>> { unimplemented!() @@ -251,7 +251,7 @@ impl BlockStreamBuilder for StarknetStreamBuilder { _deployment: DeploymentLocator, _start_blocks: Vec, _subgraph_current_block: Option, - _filter: Arc, + _filter: Arc>, _unified_api_version: UnifiedMappingApiVersion, ) -> Result>> { panic!("StarkNet does not support polling block stream") diff --git a/chain/substreams/src/block_stream.rs b/chain/substreams/src/block_stream.rs index a2a233961d2..2d6eb902409 100644 --- a/chain/substreams/src/block_stream.rs +++ b/chain/substreams/src/block_stream.rs @@ -105,7 +105,7 @@ impl BlockStreamBuilderTrait for BlockStreamBuilder { _deployment: DeploymentLocator, _start_blocks: Vec, _subgraph_current_block: Option, - _filter: Arc<&TriggerFilterWrapper>, + _filter: Arc>, _unified_api_version: UnifiedMappingApiVersion, ) -> Result>> { unimplemented!() @@ -117,7 +117,7 @@ impl BlockStreamBuilderTrait for BlockStreamBuilder { _deployment: DeploymentLocator, _start_blocks: Vec, _subgraph_current_block: Option, - _filter: Arc, + _filter: Arc>, _unified_api_version: UnifiedMappingApiVersion, ) -> Result>> { unimplemented!("polling block stream is not support for substreams") diff --git a/chain/substreams/src/chain.rs b/chain/substreams/src/chain.rs index b027edd3351..044617603d8 100644 --- a/chain/substreams/src/chain.rs +++ b/chain/substreams/src/chain.rs @@ -140,7 +140,7 @@ impl Blockchain for Chain { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, _start_blocks: Vec, - filter: Arc<&TriggerFilterWrapper>, + filter: Arc>, _unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { self.block_stream_builder diff --git a/core/src/subgraph/runner.rs b/core/src/subgraph/runner.rs index 28b82410276..64f60e2c378 100644 --- a/core/src/subgraph/runner.rs +++ b/core/src/subgraph/runner.rs @@ -167,7 +167,7 @@ where } 
#[cfg(debug_assertions)] - pub fn build_filter_for_test(&self) -> TriggerFilterWrapper { + pub fn build_filter_for_test(&self) -> TriggerFilterWrapper { self.build_filter() } @@ -214,7 +214,7 @@ where let mut block_stream = new_block_stream( &self.inputs, - self.ctx.filter.as_ref().unwrap(), // Safe to unwrap as we just called `build_filter` in the previous line + self.ctx.filter.cheap_clone().unwrap(), // Safe to unwrap as we just called `build_filter` in the previous line &self.metrics.subgraph, ) .await? diff --git a/core/src/subgraph/stream.rs b/core/src/subgraph/stream.rs index b71d5e908ae..cfd41808e27 100644 --- a/core/src/subgraph/stream.rs +++ b/core/src/subgraph/stream.rs @@ -7,7 +7,7 @@ use std::sync::Arc; pub async fn new_block_stream( inputs: &IndexingInputs, - filter: &TriggerFilterWrapper, + filter: TriggerFilterWrapper, metrics: &SubgraphInstanceMetrics, ) -> Result>, Error> { let is_firehose = inputs.chain.chain_client().is_firehose(); @@ -18,7 +18,7 @@ pub async fn new_block_stream( inputs.deployment.clone(), inputs.store.cheap_clone(), inputs.start_blocks.clone(), - Arc::new(filter), + Arc::new(filter.clone()), inputs.unified_api_version.clone(), ) .await diff --git a/graph/src/blockchain/block_stream.rs b/graph/src/blockchain/block_stream.rs index 3f792b3688e..2b4deb0e15b 100644 --- a/graph/src/blockchain/block_stream.rs +++ b/graph/src/blockchain/block_stream.rs @@ -145,7 +145,7 @@ pub trait BlockStreamBuilder: Send + Sync { deployment: DeploymentLocator, start_blocks: Vec, subgraph_current_block: Option, - filter: Arc, + filter: Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>>; @@ -155,7 +155,7 @@ pub trait BlockStreamBuilder: Send + Sync { deployment: DeploymentLocator, start_blocks: Vec, subgraph_current_block: Option, - filter: Arc<&TriggerFilterWrapper>, + filter: Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>>; } diff --git a/graph/src/blockchain/mock.rs b/graph/src/blockchain/mock.rs index 
6b1eaba4ce7..5321547f5dd 100644 --- a/graph/src/blockchain/mock.rs +++ b/graph/src/blockchain/mock.rs @@ -370,7 +370,7 @@ impl Blockchain for MockBlockchain { _deployment: DeploymentLocator, _store: impl DeploymentCursorTracker, _start_blocks: Vec, - _filter: Arc<&TriggerFilterWrapper>, + _filter: Arc>, _unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { todo!() diff --git a/graph/src/blockchain/mod.rs b/graph/src/blockchain/mod.rs index cbaff052706..39a42f3ff48 100644 --- a/graph/src/blockchain/mod.rs +++ b/graph/src/blockchain/mod.rs @@ -189,7 +189,7 @@ pub trait Blockchain: Debug + Sized + Send + Sync + Unpin + 'static { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, start_blocks: Vec, - filter: Arc<&TriggerFilterWrapper>, + filter: Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error>; @@ -247,6 +247,7 @@ impl From for IngestorError { } } +#[derive(Debug)] pub struct TriggerFilterWrapper { pub filter: Arc, pub subgraph_filter: Option, @@ -261,6 +262,21 @@ impl TriggerFilterWrapper { } } +impl Clone for TriggerFilterWrapper { + fn clone(&self) -> Self { + Self { + filter: self.filter.cheap_clone(), + subgraph_filter: self.subgraph_filter.cheap_clone(), + } + } +} + +impl CheapClone for TriggerFilterWrapper { + fn cheap_clone(&self) -> Self { + self.clone() + } +} + pub trait TriggerFilter: Default + Clone + Send + Sync { fn from_data_sources<'a>( data_sources: impl Iterator + Clone, diff --git a/graph/src/blockchain/polling_block_stream.rs b/graph/src/blockchain/polling_block_stream.rs index ce3fdf2a4ef..ed4416fda70 100644 --- a/graph/src/blockchain/polling_block_stream.rs +++ b/graph/src/blockchain/polling_block_stream.rs @@ -11,7 +11,7 @@ use super::block_stream::{ BlockStream, BlockStreamError, BlockStreamEvent, BlockWithTriggers, ChainHeadUpdateStream, FirehoseCursor, TriggersAdapter, BUFFERED_BLOCK_STREAM_SIZE, }; -use super::{Block, BlockPtr, Blockchain}; +use super::{Block, BlockPtr, Blockchain, 
TriggerFilterWrapper}; use crate::components::store::BlockNumber; use crate::data::subgraph::UnifiedMappingApiVersion; @@ -85,7 +85,7 @@ where // This is not really a block number, but the (unsigned) difference // between two block numbers reorg_threshold: BlockNumber, - filter: Arc, + filter: Arc>, start_blocks: Vec, logger: Logger, previous_triggers_per_block: f64, @@ -149,7 +149,7 @@ where adapter: Arc>, node_id: NodeId, subgraph_id: DeploymentHash, - filter: Arc, + filter: Arc>, start_blocks: Vec, reorg_threshold: BlockNumber, logger: Logger, @@ -379,7 +379,7 @@ where ); // Update with actually scanned range, to account for any skipped null blocks. - let (blocks, to) = self.adapter.scan_triggers(from, to, &self.filter).await?; + let (blocks, to) = self.adapter.scan_triggers(from, to, &self.filter.filter.clone()).await?; let range_size = to - from + 1; // If the target block (`to`) is within the reorg threshold, indicating no non-null finalized blocks are @@ -469,7 +469,7 @@ where // Note that head_ancestor is a child of subgraph_ptr. 
let block = self .adapter - .triggers_in_block(&self.logger, head_ancestor, &self.filter) + .triggers_in_block(&self.logger, head_ancestor, &self.filter.filter.clone()) .await?; Ok(ReconciliationStep::ProcessDescendantBlocks(vec![block], 1)) } else { diff --git a/tests/src/fixture/mod.rs b/tests/src/fixture/mod.rs index 2ecd386a087..a6dbd650a3e 100644 --- a/tests/src/fixture/mod.rs +++ b/tests/src/fixture/mod.rs @@ -722,7 +722,7 @@ impl BlockStreamBuilder for MutexBlockStreamBuilder { _deployment: DeploymentLocator, _start_blocks: Vec, _subgraph_current_block: Option, - _filter: Arc<&TriggerFilterWrapper>, + _filter: Arc>, _unified_api_version: graph::data::subgraph::UnifiedMappingApiVersion, ) -> anyhow::Result>> { unimplemented!() @@ -734,7 +734,7 @@ impl BlockStreamBuilder for MutexBlockStreamBuilder { _deployment: DeploymentLocator, _start_blocks: Vec, _subgraph_current_block: Option, - _filter: Arc<::TriggerFilter>, + _filter: Arc>, _unified_api_version: graph::data::subgraph::UnifiedMappingApiVersion, ) -> anyhow::Result>> { unimplemented!("only firehose mode should be used for tests") @@ -773,7 +773,7 @@ where _deployment: DeploymentLocator, _start_blocks: Vec, _subgraph_current_block: Option, - _filter: Arc<&TriggerFilterWrapper>, + _filter: Arc>, _unified_api_version: graph::data::subgraph::UnifiedMappingApiVersion, ) -> anyhow::Result>> { unimplemented!() @@ -808,7 +808,7 @@ where _deployment: DeploymentLocator, _start_blocks: Vec, _subgraph_current_block: Option, - _filter: Arc, + _filter: Arc>, _unified_api_version: graph::data::subgraph::UnifiedMappingApiVersion, ) -> anyhow::Result>> { unimplemented!("only firehose mode should be used for tests") From edc806cc49657cf360555e858c7aabaca06c4f8b Mon Sep 17 00:00:00 2001 From: incrypto32 Date: Mon, 15 Jul 2024 17:44:21 +0530 Subject: [PATCH 05/21] graph: created TriggersAdapterWrapper --- chain/ethereum/src/chain.rs | 4 ++- graph/src/blockchain/block_stream.rs | 48 ++++++++++++++++++++++++++++ 2 files 
changed, 51 insertions(+), 1 deletion(-) diff --git a/chain/ethereum/src/chain.rs b/chain/ethereum/src/chain.rs index 2f91a05e817..46a9bc3184e 100644 --- a/chain/ethereum/src/chain.rs +++ b/chain/ethereum/src/chain.rs @@ -62,6 +62,7 @@ use crate::{BufferedCallCache, NodeCapabilities}; use crate::{EthereumAdapter, RuntimeAdapter}; use graph::blockchain::block_stream::{ BlockStream, BlockStreamBuilder, BlockStreamError, BlockStreamMapper, FirehoseCursor, + TriggersAdaterWrapper, }; /// Celo Mainnet: 42220, Testnet Alfajores: 44787, Testnet Baklava: 62320 @@ -141,6 +142,8 @@ impl BlockStreamBuilder for EthereumStreamBuilder { ) }); + let adapter = Arc::new(TriggersAdaterWrapper::new(adapter)); + let logger = chain .logger_factory .subgraph_logger(&deployment) @@ -196,7 +199,6 @@ impl BlockStreamBuilder for EthereumStreamBuilder { filter: Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>> { - let requirements = filter.filter.node_capabilities(); let adapter = chain .triggers_adapter(&deployment, &requirements, unified_api_version.clone()) diff --git a/graph/src/blockchain/block_stream.rs b/graph/src/blockchain/block_stream.rs index 2b4deb0e15b..758209c343e 100644 --- a/graph/src/blockchain/block_stream.rs +++ b/graph/src/blockchain/block_stream.rs @@ -268,6 +268,54 @@ impl BlockWithTriggers { } } +pub struct TriggersAdaterWrapper { + pub adapter: Arc>, +} + +impl TriggersAdaterWrapper { + pub fn new(adapter: Arc>) -> Self { + Self { adapter } + } +} + +#[async_trait] +impl TriggersAdapter for TriggersAdaterWrapper { + async fn ancestor_block( + &self, + ptr: BlockPtr, + offset: BlockNumber, + root: Option, + ) -> Result, Error> { + self.adapter.ancestor_block(ptr, offset, root).await + } + + async fn scan_triggers( + &self, + from: BlockNumber, + to: BlockNumber, + filter: &C::TriggerFilter, + ) -> Result<(Vec>, BlockNumber), Error> { + self.adapter.scan_triggers(from, to, filter).await + } + + async fn triggers_in_block( + &self, + logger: &Logger, + 
block: C::Block, + filter: &C::TriggerFilter, + ) -> Result, Error> { + self.adapter.triggers_in_block(logger, block, filter).await + } + + async fn is_on_main_chain(&self, ptr: BlockPtr) -> Result { + self.adapter.is_on_main_chain(ptr).await + } + + async fn parent_ptr(&self, block: &BlockPtr) -> Result, Error> { + self.adapter.parent_ptr(block).await + } +} + #[async_trait] pub trait TriggersAdapter: Send + Sync { // Return the block that is `offset` blocks before the block pointed to by `ptr` from the local From eb41c6bb120a68290dc8bcdbaca93fa524463932 Mon Sep 17 00:00:00 2001 From: incrypto32 Date: Tue, 16 Jul 2024 14:14:37 +0530 Subject: [PATCH 06/21] graph,core,chain: Add a wrapper enum for Triggers to handle subgraph datasource triggers --- chain/cosmos/src/chain.rs | 14 ++-- chain/ethereum/src/ethereum_adapter.rs | 6 +- chain/ethereum/src/tests.rs | 22 +++--- chain/near/src/chain.rs | 18 ++--- core/src/subgraph/context/instance/mod.rs | 1 + core/src/subgraph/runner.rs | 13 +++- graph/src/blockchain/block_stream.rs | 19 +++++- graph/src/blockchain/mock.rs | 7 +- graph/src/blockchain/mod.rs | 72 +++++++++++++++++++- graph/src/blockchain/polling_block_stream.rs | 13 +++- graph/src/data_source/mod.rs | 6 +- graph/src/env/mod.rs | 2 +- tests/src/fixture/ethereum.rs | 30 +++++--- 13 files changed, 176 insertions(+), 47 deletions(-) diff --git a/chain/cosmos/src/chain.rs b/chain/cosmos/src/chain.rs index 6c88e710491..868617968df 100644 --- a/chain/cosmos/src/chain.rs +++ b/chain/cosmos/src/chain.rs @@ -470,9 +470,12 @@ impl FirehoseMapperTrait for FirehoseMapper { #[cfg(test)] mod test { - use graph::prelude::{ - slog::{o, Discard, Logger}, - tokio, + use graph::{ + blockchain::Trigger, + prelude::{ + slog::{o, Discard, Logger}, + tokio, + }, }; use super::*; @@ -603,7 +606,10 @@ mod test { // they may not be in the same order for trigger in expected_triggers { assert!( - triggers.trigger_data.contains(&trigger), + triggers.trigger_data.iter().any(|t| match t { + 
Trigger::Chain(t) => t == &trigger, + _ => false, + }), "Expected trigger list to contain {:?}, but it only contains: {:?}", trigger, triggers.trigger_data diff --git a/chain/ethereum/src/ethereum_adapter.rs b/chain/ethereum/src/ethereum_adapter.rs index c4ea6323c7d..123f79bb4a8 100644 --- a/chain/ethereum/src/ethereum_adapter.rs +++ b/chain/ethereum/src/ethereum_adapter.rs @@ -2077,8 +2077,8 @@ async fn filter_call_triggers_from_unsuccessful_transactions( let transaction_hashes: BTreeSet = block .trigger_data .iter() - .filter_map(|trigger| match trigger { - EthereumTrigger::Call(call_trigger) => Some(call_trigger.transaction_hash), + .filter_map(|trigger| match trigger.as_chain() { + Some(EthereumTrigger::Call(call_trigger)) => Some(call_trigger.transaction_hash), _ => None, }) .collect::>>() @@ -2169,7 +2169,7 @@ async fn filter_call_triggers_from_unsuccessful_transactions( // Filter call triggers from unsuccessful transactions block.trigger_data.retain(|trigger| { - if let EthereumTrigger::Call(call_trigger) = trigger { + if let Some(EthereumTrigger::Call(call_trigger)) = trigger.as_chain() { // Unwrap: We already checked that those values exist transaction_success[&call_trigger.transaction_hash.unwrap()] } else { diff --git a/chain/ethereum/src/tests.rs b/chain/ethereum/src/tests.rs index 455a7c07432..00873f8ea87 100644 --- a/chain/ethereum/src/tests.rs +++ b/chain/ethereum/src/tests.rs @@ -1,7 +1,7 @@ use std::sync::Arc; use graph::{ - blockchain::{block_stream::BlockWithTriggers, BlockPtr}, + blockchain::{block_stream::BlockWithTriggers, BlockPtr, Trigger}, prelude::{ web3::types::{Address, Bytes, Log, H160, H256, U64}, EthereumCall, LightEthereumBlock, @@ -107,10 +107,12 @@ fn test_trigger_ordering() { &logger, ); - assert_eq!( - block_with_triggers.trigger_data, - vec![log1, log2, call1, log3, call2, call4, call3, block2, block1] - ); + let expected = vec![log1, log2, call1, log3, call2, call4, call3, block2, block1] + .into_iter() + .map(|t| 
Trigger::Chain(t)) + .collect::>(); + + assert_eq!(block_with_triggers.trigger_data, expected); } #[test] @@ -203,8 +205,10 @@ fn test_trigger_dedup() { &logger, ); - assert_eq!( - block_with_triggers.trigger_data, - vec![log1, log2, call1, log3, call2, call3, block2, block1] - ); + let expected = vec![log1, log2, call1, log3, call2, call3, block2, block1] + .into_iter() + .map(|t| Trigger::Chain(t)) + .collect::>(); + + assert_eq!(block_with_triggers.trigger_data, expected); } diff --git a/chain/near/src/chain.rs b/chain/near/src/chain.rs index 86a77c2fc61..3211870f069 100644 --- a/chain/near/src/chain.rs +++ b/chain/near/src/chain.rs @@ -4,7 +4,7 @@ use graph::blockchain::firehose_block_ingestor::FirehoseBlockIngestor; use graph::blockchain::substreams_block_stream::SubstreamsBlockStream; use graph::blockchain::{ BasicBlockchainBuilder, BlockIngestor, BlockchainBuilder, BlockchainKind, NoopDecoderHook, - NoopRuntimeAdapter, TriggerFilterWrapper, + NoopRuntimeAdapter, Trigger, TriggerFilterWrapper, }; use graph::cheap_clone::CheapClone; use graph::components::adapter::ChainId; @@ -474,11 +474,13 @@ impl BlockStreamMapper for FirehoseMapper { .into_iter() .zip(receipt.into_iter()) .map(|(outcome, receipt)| { - NearTrigger::Receipt(Arc::new(trigger::ReceiptWithOutcome { - outcome, - receipt, - block: arc_block.clone(), - })) + Trigger::Chain(NearTrigger::Receipt(Arc::new( + trigger::ReceiptWithOutcome { + outcome, + receipt, + block: arc_block.clone(), + }, + ))) }) .collect(); @@ -985,8 +987,8 @@ mod test { .trigger_data .clone() .into_iter() - .filter_map(|x| match x { - crate::trigger::NearTrigger::Block(b) => b.header.clone().map(|x| x.height), + .filter_map(|x| match x.as_chain() { + Some(crate::trigger::NearTrigger::Block(b)) => b.header.clone().map(|x| x.height), _ => None, }) .collect() diff --git a/core/src/subgraph/context/instance/mod.rs b/core/src/subgraph/context/instance/mod.rs index 5a805f34095..fa723d3f18e 100644 --- 
a/core/src/subgraph/context/instance/mod.rs +++ b/core/src/subgraph/context/instance/mod.rs @@ -229,6 +229,7 @@ where TriggerData::Offchain(trigger) => self .offchain_hosts .matches_by_address(trigger.source.address().as_ref().map(|a| a.as_slice())), + TriggerData::Subgraph(_) => todo!(), // TODO(krishna) } } diff --git a/core/src/subgraph/runner.rs b/core/src/subgraph/runner.rs index 64f60e2c378..1060fb2f93c 100644 --- a/core/src/subgraph/runner.rs +++ b/core/src/subgraph/runner.rs @@ -8,7 +8,8 @@ use graph::blockchain::block_stream::{ BlockStreamError, BlockStreamEvent, BlockWithTriggers, FirehoseCursor, }; use graph::blockchain::{ - Block, BlockTime, Blockchain, DataSource as _, TriggerFilter as _, TriggerFilterWrapper, + Block, BlockTime, Blockchain, DataSource as _, Trigger, TriggerFilter as _, + TriggerFilterWrapper, }; use graph::components::store::{EmptyStore, GetScope, ReadStore, StoredDynamicDataSource}; use graph::components::subgraph::InstanceDSTemplate; @@ -328,7 +329,10 @@ where .match_and_decode_many( &logger, &block, - triggers.into_iter().map(TriggerData::Onchain), + triggers.into_iter().map(|t| match t { + Trigger::Chain(t) => TriggerData::Onchain(t), + Trigger::Subgraph(_) => todo!(), //TODO(krishna), + }), hosts_filter, &self.metrics.subgraph, ) @@ -487,7 +491,10 @@ where .match_and_decode_many( &logger, &block, - triggers.into_iter().map(TriggerData::Onchain), + triggers.into_iter().map(|t| match t { + Trigger::Chain(t) => TriggerData::Onchain(t), + Trigger::Subgraph(_) => todo!(), //TODO(krishna), + }), |_| Box::new(runtime_hosts.iter().map(Arc::as_ref)), &self.metrics.subgraph, ) diff --git a/graph/src/blockchain/block_stream.rs b/graph/src/blockchain/block_stream.rs index 758209c343e..93693bb0384 100644 --- a/graph/src/blockchain/block_stream.rs +++ b/graph/src/blockchain/block_stream.rs @@ -12,7 +12,7 @@ use thiserror::Error; use tokio::sync::mpsc::{self, Receiver, Sender}; use super::substreams_block_stream::SubstreamsLogData; -use 
super::{Block, BlockPtr, BlockTime, Blockchain, TriggerFilterWrapper}; +use super::{Block, BlockPtr, BlockTime, Blockchain, Trigger, TriggerFilterWrapper}; use crate::anyhow::Result; use crate::components::store::{BlockNumber, DeploymentLocator}; use crate::data::subgraph::UnifiedMappingApiVersion; @@ -208,7 +208,7 @@ impl AsRef> for FirehoseCursor { #[derive(Debug)] pub struct BlockWithTriggers { pub block: C::Block, - pub trigger_data: Vec, + pub trigger_data: Vec>, } impl Clone for BlockWithTriggers @@ -226,7 +226,15 @@ where impl BlockWithTriggers { /// Creates a BlockWithTriggers structure, which holds /// the trigger data ordered and without any duplicates. - pub fn new(block: C::Block, mut trigger_data: Vec, logger: &Logger) -> Self { + pub fn new(block: C::Block, trigger_data: Vec, logger: &Logger) -> Self { + let mut trigger_data = trigger_data + .into_iter() + .map(|trigger_data| { + let trigger = Trigger::Chain(trigger_data); + trigger + }) + .collect::>(); + // This is where triggers get sorted. 
trigger_data.sort(); @@ -266,6 +274,11 @@ impl BlockWithTriggers { pub fn parent_ptr(&self) -> Option { self.block.parent_ptr() } + + pub fn extend_triggers(&mut self, triggers: Vec>) { + self.trigger_data.extend(triggers); + self.trigger_data.sort(); + } } pub struct TriggersAdaterWrapper { diff --git a/graph/src/blockchain/mock.rs b/graph/src/blockchain/mock.rs index 5321547f5dd..e13a21878e4 100644 --- a/graph/src/blockchain/mock.rs +++ b/graph/src/blockchain/mock.rs @@ -14,7 +14,10 @@ use serde::Deserialize; use std::{collections::HashSet, convert::TryFrom, sync::Arc}; use super::{ - block_stream::{self, BlockStream, FirehoseCursor}, client::ChainClient, BlockIngestor, BlockTime, EmptyNodeCapabilities, HostFn, IngestorError, MappingTriggerTrait, NoopDecoderHook, TriggerFilterWrapper, TriggerWithHandler + block_stream::{self, BlockStream, FirehoseCursor}, + client::ChainClient, + BlockIngestor, BlockTime, EmptyNodeCapabilities, HostFn, IngestorError, MappingTriggerTrait, + NoopDecoderHook, Trigger, TriggerFilterWrapper, TriggerWithHandler, }; use super::{ @@ -272,7 +275,7 @@ async fn blocks_with_triggers( Ok(( vec![BlockWithTriggers { block: MockBlock { number: 0 }, - trigger_data: vec![MockTriggerData], + trigger_data: vec![Trigger::Chain(MockTriggerData)], }], to, )) diff --git a/graph/src/blockchain/mod.rs b/graph/src/blockchain/mod.rs index 39a42f3ff48..eccb0336b8e 100644 --- a/graph/src/blockchain/mod.rs +++ b/graph/src/blockchain/mod.rs @@ -25,7 +25,7 @@ use crate::{ trigger_processor::RunnableTriggers, }, data::subgraph::{UnifiedMappingApiVersion, MIN_SPEC_VERSION}, - data_source::{self, DataSourceTemplateInfo}, + data_source::{self, subgraph, DataSourceTemplateInfo}, prelude::{DataSourceContext, DeploymentHash}, runtime::{gas::GasCounter, AscHeap, HostExportError}, }; @@ -400,6 +400,76 @@ pub trait UnresolvedDataSource: ) -> Result; } +#[derive(Debug)] +pub enum Trigger { + Chain(C::TriggerData), + Subgraph(subgraph::TriggerData), +} + +impl Trigger { + 
pub fn as_chain(&self) -> Option<&C::TriggerData> { + match self { + Trigger::Chain(data) => Some(data), + _ => None, + } + } + + pub fn as_subgraph(&self) -> Option<&subgraph::TriggerData> { + match self { + Trigger::Subgraph(data) => Some(data), + _ => None, + } + } +} + +impl Eq for Trigger where C::TriggerData: Eq {} + +impl PartialEq for Trigger +where + C::TriggerData: PartialEq, +{ + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Trigger::Chain(data1), Trigger::Chain(data2)) => data1 == data2, + (Trigger::Subgraph(a), Trigger::Subgraph(b)) => a == b, + _ => false, + } + } +} + +impl Clone for Trigger +where + C::TriggerData: Clone, +{ + fn clone(&self) -> Self { + match self { + Trigger::Chain(data) => Trigger::Chain(data.clone()), + Trigger::Subgraph(data) => Trigger::Subgraph(data.clone()), + } + } +} + +// TODO(krishna): Proper ordering for triggers +impl Ord for Trigger +where + C::TriggerData: Ord, +{ + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + match (self, other) { + (Trigger::Chain(data1), Trigger::Chain(data2)) => data1.cmp(data2), + (Trigger::Subgraph(_), Trigger::Chain(_)) => std::cmp::Ordering::Greater, + (Trigger::Chain(_), Trigger::Subgraph(_)) => std::cmp::Ordering::Less, + (Trigger::Subgraph(_), Trigger::Subgraph(_)) => std::cmp::Ordering::Equal, + } + } +} + +impl PartialOrd for Trigger { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + pub trait TriggerData { /// If there is an error when processing this trigger, this will called to add relevant context. /// For example an useful return is: `"block # (), transaction ". 
diff --git a/graph/src/blockchain/polling_block_stream.rs b/graph/src/blockchain/polling_block_stream.rs index ed4416fda70..64a84ff3b94 100644 --- a/graph/src/blockchain/polling_block_stream.rs +++ b/graph/src/blockchain/polling_block_stream.rs @@ -149,7 +149,7 @@ where adapter: Arc>, node_id: NodeId, subgraph_id: DeploymentHash, - filter: Arc>, + filter: Arc>, start_blocks: Vec, reorg_threshold: BlockNumber, logger: Logger, @@ -379,7 +379,10 @@ where ); // Update with actually scanned range, to account for any skipped null blocks. - let (blocks, to) = self.adapter.scan_triggers(from, to, &self.filter.filter.clone()).await?; + let (blocks, to) = self + .adapter + .scan_triggers(from, to, &self.filter.filter.clone()) + .await?; let range_size = to - from + 1; // If the target block (`to`) is within the reorg threshold, indicating no non-null finalized blocks are @@ -469,7 +472,11 @@ where // Note that head_ancestor is a child of subgraph_ptr. let block = self .adapter - .triggers_in_block(&self.logger, head_ancestor, &self.filter.filter.clone()) + .triggers_in_block( + &self.logger, + head_ancestor, + &self.filter.filter.clone(), + ) .await?; Ok(ReconciliationStep::ProcessDescendantBlocks(vec![block], 1)) } else { diff --git a/graph/src/data_source/mod.rs b/graph/src/data_source/mod.rs index 38a166710e0..e68b2b5c85d 100644 --- a/graph/src/data_source/mod.rs +++ b/graph/src/data_source/mod.rs @@ -258,7 +258,9 @@ impl DataSource { Ok(ds.match_and_decode(trigger)) } (Self::Onchain(_), TriggerData::Offchain(_)) - | (Self::Offchain(_), TriggerData::Onchain(_)) => Ok(None), + | (Self::Offchain(_), TriggerData::Onchain(_)) + | (Self::Onchain(_), TriggerData::Subgraph(_)) + | (Self::Offchain(_), TriggerData::Subgraph(_)) => Ok(None), (Self::Subgraph(_), _) => todo!(), // TODO(krishna) } } @@ -550,6 +552,7 @@ impl TriggerWithHandler { pub enum TriggerData { Onchain(C::TriggerData), Offchain(offchain::TriggerData), + Subgraph(subgraph::TriggerData), } impl TriggerData { @@ 
-557,6 +560,7 @@ impl TriggerData { match self { Self::Onchain(trigger) => trigger.error_context(), Self::Offchain(trigger) => format!("{:?}", trigger.source), + Self::Subgraph(trigger) => format!("{:?}", trigger.source), } } } diff --git a/graph/src/env/mod.rs b/graph/src/env/mod.rs index af53562528a..3b1ca5a2862 100644 --- a/graph/src/env/mod.rs +++ b/graph/src/env/mod.rs @@ -357,7 +357,7 @@ struct Inner { default = "false" )] allow_non_deterministic_fulltext_search: EnvVarBoolean, - #[envconfig(from = "GRAPH_MAX_SPEC_VERSION", default = "1.2.0")] + #[envconfig(from = "GRAPH_MAX_SPEC_VERSION", default = "1.3.0")] max_spec_version: Version, #[envconfig(from = "GRAPH_LOAD_WINDOW_SIZE", default = "300")] load_window_size_in_secs: u64, diff --git a/tests/src/fixture/ethereum.rs b/tests/src/fixture/ethereum.rs index b20672ce563..1004e4d8900 100644 --- a/tests/src/fixture/ethereum.rs +++ b/tests/src/fixture/ethereum.rs @@ -7,7 +7,7 @@ use super::{ NoopRuntimeAdapterBuilder, StaticBlockRefetcher, StaticStreamBuilder, Stores, TestChain, }; use graph::blockchain::client::ChainClient; -use graph::blockchain::{BlockPtr, TriggersAdapterSelector}; +use graph::blockchain::{BlockPtr, Trigger, TriggersAdapterSelector}; use graph::cheap_clone::CheapClone; use graph::prelude::ethabi::ethereum_types::H256; use graph::prelude::web3::types::{Address, Log, Transaction, H160}; @@ -81,7 +81,10 @@ pub fn genesis() -> BlockWithTriggers { number: Some(U64::from(ptr.number)), ..Default::default() })), - trigger_data: vec![EthereumTrigger::Block(ptr, EthereumBlockTriggerType::End)], + trigger_data: vec![Trigger::Chain(EthereumTrigger::Block( + ptr, + EthereumBlockTriggerType::End, + ))], } } @@ -128,7 +131,10 @@ pub fn empty_block(parent_ptr: BlockPtr, ptr: BlockPtr) -> BlockWithTriggers, payload: impl Into) { - block.trigger_data.push(EthereumTrigger::Block( - block.ptr(), - EthereumBlockTriggerType::End, - )) + block + .trigger_data + .push(Trigger::Chain(EthereumTrigger::Block( + 
block.ptr(), + EthereumBlockTriggerType::End, + ))) } From 3710cf8537bb1cc43dd6a40774c234601fc8a73c Mon Sep 17 00:00:00 2001 From: incrypto32 Date: Tue, 16 Jul 2024 15:21:31 +0530 Subject: [PATCH 07/21] graph, chain: Build subgraph trigger filters in build_filter --- chain/ethereum/src/chain.rs | 2 +- core/src/subgraph/runner.rs | 16 +++++++++++++--- graph/src/blockchain/block_stream.rs | 11 ++++++++++- graph/src/blockchain/mod.rs | 15 ++++++--------- 4 files changed, 30 insertions(+), 14 deletions(-) diff --git a/chain/ethereum/src/chain.rs b/chain/ethereum/src/chain.rs index 46a9bc3184e..591bc9fdf6c 100644 --- a/chain/ethereum/src/chain.rs +++ b/chain/ethereum/src/chain.rs @@ -483,7 +483,7 @@ impl Blockchain for Chain { ) -> Result>, Error> { let current_ptr = store.block_ptr(); - if filter.subgraph_filter.is_some() { + if !filter.subgraph_filter.is_empty() { return self .block_stream_builder .build_subgraph_block_stream( diff --git a/core/src/subgraph/runner.rs b/core/src/subgraph/runner.rs index 1060fb2f93c..7c4e014b3e7 100644 --- a/core/src/subgraph/runner.rs +++ b/core/src/subgraph/runner.rs @@ -131,13 +131,23 @@ where None => true, }; + + let data_sources = self.ctx.static_data_sources(); + + let subgraph_filter = data_sources + .iter() + .filter_map(|ds| ds.as_subgraph()) + .map(|ds| (ds.source.address(), ds.source.start_block)) + .collect::>(); + + // if static_filters is not enabled we just stick to the filter based on all the data sources. 
if !static_filters { return TriggerFilterWrapper::new( C::TriggerFilter::from_data_sources( self.ctx.onchain_data_sources().filter(end_block_filter), ), - None, + subgraph_filter, ); } @@ -164,7 +174,7 @@ where filter.extend_with_template(templates.iter().filter_map(|ds| ds.as_onchain()).cloned()); - TriggerFilterWrapper::new(filter, None) + TriggerFilterWrapper::new(filter, subgraph_filter) } #[cfg(debug_assertions)] @@ -215,7 +225,7 @@ where let mut block_stream = new_block_stream( &self.inputs, - self.ctx.filter.cheap_clone().unwrap(), // Safe to unwrap as we just called `build_filter` in the previous line + self.ctx.filter.clone().unwrap(), // Safe to unwrap as we just called `build_filter` in the previous line &self.metrics.subgraph, ) .await? diff --git a/graph/src/blockchain/block_stream.rs b/graph/src/blockchain/block_stream.rs index 93693bb0384..8743fdd6c6b 100644 --- a/graph/src/blockchain/block_stream.rs +++ b/graph/src/blockchain/block_stream.rs @@ -308,7 +308,16 @@ impl TriggersAdapter for TriggersAdaterWrapper { to: BlockNumber, filter: &C::TriggerFilter, ) -> Result<(Vec>, BlockNumber), Error> { - self.adapter.scan_triggers(from, to, filter).await + // TODO(krishna): Do a proper implementation + self.adapter + .scan_triggers(from, to, filter) + .await + .map(|(mut blocks, next_block)| { + for _ in &mut blocks { + todo!() + } + (blocks, next_block) + }) } async fn triggers_in_block( diff --git a/graph/src/blockchain/mod.rs b/graph/src/blockchain/mod.rs index eccb0336b8e..746debb2f26 100644 --- a/graph/src/blockchain/mod.rs +++ b/graph/src/blockchain/mod.rs @@ -250,11 +250,14 @@ impl From for IngestorError { #[derive(Debug)] pub struct TriggerFilterWrapper { pub filter: Arc, - pub subgraph_filter: Option, + pub subgraph_filter: Vec<(DeploymentHash, BlockNumber)>, } impl TriggerFilterWrapper { - pub fn new(filter: C::TriggerFilter, subgraph_filter: Option) -> Self { + pub fn new( + filter: C::TriggerFilter, + subgraph_filter: Vec<(DeploymentHash, 
BlockNumber)>, + ) -> Self { Self { filter: Arc::new(filter), subgraph_filter, @@ -266,17 +269,11 @@ impl Clone for TriggerFilterWrapper { fn clone(&self) -> Self { Self { filter: self.filter.cheap_clone(), - subgraph_filter: self.subgraph_filter.cheap_clone(), + subgraph_filter: self.subgraph_filter.clone(), } } } -impl CheapClone for TriggerFilterWrapper { - fn cheap_clone(&self) -> Self { - self.clone() - } -} - pub trait TriggerFilter: Default + Clone + Send + Sync { fn from_data_sources<'a>( data_sources: impl Iterator + Clone, From 579d8867ac9511ac3c78c3c522e60e465c1dda16 Mon Sep 17 00:00:00 2001 From: incrypto32 Date: Wed, 17 Jul 2024 14:57:37 +0530 Subject: [PATCH 08/21] graph,core: Add subgraph_hosts to RuntimeHostBuilder --- core/src/subgraph/context/instance/hosts.rs | 2 +- core/src/subgraph/context/instance/mod.rs | 17 +++++++- core/src/subgraph/runner.rs | 6 +-- graph/src/blockchain/mod.rs | 2 +- graph/src/data_source/mod.rs | 10 +++-- runtime/wasm/src/module/mod.rs | 13 ++++++- .../subgraph-data-sources/src/mapping.ts | 17 ++------ .../subgraph-data-sources/subgraph.yaml | 2 +- tests/src/fixture/ethereum.rs | 18 ++++++++- tests/src/fixture/mod.rs | 39 ++++++++++++++----- tests/tests/runner_tests.rs | 34 ++++++++++++++-- 11 files changed, 120 insertions(+), 40 deletions(-) diff --git a/core/src/subgraph/context/instance/hosts.rs b/core/src/subgraph/context/instance/hosts.rs index 73701fadb29..f9a774246c8 100644 --- a/core/src/subgraph/context/instance/hosts.rs +++ b/core/src/subgraph/context/instance/hosts.rs @@ -57,7 +57,7 @@ impl> OnchainHosts { } pub fn push(&mut self, host: Arc) { - assert!(host.data_source().as_onchain().is_some()); + assert!(host.data_source().is_chain_based()); self.hosts.push(host.cheap_clone()); let idx = self.hosts.len() - 1; diff --git a/core/src/subgraph/context/instance/mod.rs b/core/src/subgraph/context/instance/mod.rs index fa723d3f18e..6436ea1a56e 100644 --- a/core/src/subgraph/context/instance/mod.rs +++ 
b/core/src/subgraph/context/instance/mod.rs @@ -29,6 +29,9 @@ pub(crate) struct SubgraphInstance> { /// will return the onchain hosts in the same order as they were inserted. onchain_hosts: OnchainHosts, + // TODO(krishna): Describe subgraph_hosts + subgraph_hosts: OnchainHosts, + offchain_hosts: OffchainHosts, /// Maps the hash of a module to a channel to the thread in which the module is instantiated. @@ -79,6 +82,7 @@ where network, static_data_sources: Arc::new(manifest.data_sources), onchain_hosts: OnchainHosts::new(), + subgraph_hosts: OnchainHosts::new(), offchain_hosts: OffchainHosts::new(), module_cache: HashMap::new(), templates, @@ -169,7 +173,14 @@ where Ok(Some(host)) } } - DataSource::Subgraph(_) => Ok(None), + DataSource::Subgraph(_) => { + if self.subgraph_hosts.contains(&host) { + Ok(None) + } else { + self.subgraph_hosts.push(host.cheap_clone()); + Ok(Some(host)) + } + } } } @@ -229,7 +240,9 @@ where TriggerData::Offchain(trigger) => self .offchain_hosts .matches_by_address(trigger.source.address().as_ref().map(|a| a.as_slice())), - TriggerData::Subgraph(_) => todo!(), // TODO(krishna) + TriggerData::Subgraph(trigger) => self + .subgraph_hosts + .matches_by_address(Some(trigger.source.to_bytes().as_slice())), } } diff --git a/core/src/subgraph/runner.rs b/core/src/subgraph/runner.rs index 7c4e014b3e7..a2d7eea4382 100644 --- a/core/src/subgraph/runner.rs +++ b/core/src/subgraph/runner.rs @@ -131,7 +131,6 @@ where None => true, }; - let data_sources = self.ctx.static_data_sources(); let subgraph_filter = data_sources @@ -140,7 +139,6 @@ where .map(|ds| (ds.source.address(), ds.source.start_block)) .collect::>(); - // if static_filters is not enabled we just stick to the filter based on all the data sources. 
if !static_filters { return TriggerFilterWrapper::new( @@ -341,7 +339,7 @@ where &block, triggers.into_iter().map(|t| match t { Trigger::Chain(t) => TriggerData::Onchain(t), - Trigger::Subgraph(_) => todo!(), //TODO(krishna), + Trigger::Subgraph(t) => TriggerData::Subgraph(t), }), hosts_filter, &self.metrics.subgraph, @@ -503,7 +501,7 @@ where &block, triggers.into_iter().map(|t| match t { Trigger::Chain(t) => TriggerData::Onchain(t), - Trigger::Subgraph(_) => todo!(), //TODO(krishna), + Trigger::Subgraph(_) => unreachable!(), // TODO(krishna): Re-evaulate this }), |_| Box::new(runtime_hosts.iter().map(Arc::as_ref)), &self.metrics.subgraph, diff --git a/graph/src/blockchain/mod.rs b/graph/src/blockchain/mod.rs index 746debb2f26..ebf958827e7 100644 --- a/graph/src/blockchain/mod.rs +++ b/graph/src/blockchain/mod.rs @@ -250,7 +250,7 @@ impl From for IngestorError { #[derive(Debug)] pub struct TriggerFilterWrapper { pub filter: Arc, - pub subgraph_filter: Vec<(DeploymentHash, BlockNumber)>, + pub subgraph_filter: Vec<(DeploymentHash, BlockNumber)>, // TODO(krishna): Make this a struct } impl TriggerFilterWrapper { diff --git a/graph/src/data_source/mod.rs b/graph/src/data_source/mod.rs index e68b2b5c85d..1d255b1563c 100644 --- a/graph/src/data_source/mod.rs +++ b/graph/src/data_source/mod.rs @@ -257,11 +257,15 @@ impl DataSource { (Self::Offchain(ds), TriggerData::Offchain(trigger)) => { Ok(ds.match_and_decode(trigger)) } + (Self::Subgraph(ds), TriggerData::Subgraph(trigger)) => { + Ok(ds.match_and_decode(block, trigger)) + } (Self::Onchain(_), TriggerData::Offchain(_)) | (Self::Offchain(_), TriggerData::Onchain(_)) | (Self::Onchain(_), TriggerData::Subgraph(_)) - | (Self::Offchain(_), TriggerData::Subgraph(_)) => Ok(None), - (Self::Subgraph(_), _) => todo!(), // TODO(krishna) + | (Self::Offchain(_), TriggerData::Subgraph(_)) + | (Self::Subgraph(_), TriggerData::Onchain(_)) + | (Self::Subgraph(_), TriggerData::Offchain(_)) => Ok(None), } } @@ -585,7 +589,7 @@ impl 
MappingTrigger { match self { Self::Onchain(trigger) => Some(trigger), Self::Offchain(_) => None, - Self::Subgraph(_) => todo!(), // TODO(krishna) + Self::Subgraph(_) => None, // TODO(krishna) } } } diff --git a/runtime/wasm/src/module/mod.rs b/runtime/wasm/src/module/mod.rs index ee19cd173aa..532f75d2660 100644 --- a/runtime/wasm/src/module/mod.rs +++ b/runtime/wasm/src/module/mod.rs @@ -4,6 +4,7 @@ use std::mem::MaybeUninit; use anyhow::anyhow; use anyhow::Error; use graph::blockchain::Blockchain; +use graph::data_source::subgraph; use graph::util::mem::init_slice; use semver::Version; use wasmtime::AsContext; @@ -69,6 +70,16 @@ impl ToAscPtr for offchain::TriggerData { } } +impl ToAscPtr for subgraph::TriggerData { + fn to_asc_ptr( + self, + heap: &mut H, + gas: &GasCounter, + ) -> Result, HostExportError> { + asc_new(heap, &self.entity.sorted_ref(), gas).map(|ptr| ptr.erase()) + } +} + impl ToAscPtr for MappingTrigger where C::MappingTrigger: ToAscPtr, @@ -81,7 +92,7 @@ where match self { MappingTrigger::Onchain(trigger) => trigger.to_asc_ptr(heap, gas), MappingTrigger::Offchain(trigger) => trigger.to_asc_ptr(heap, gas), - MappingTrigger::Subgraph(_) => todo!(), // TODO(krishna) + MappingTrigger::Subgraph(trigger) => trigger.to_asc_ptr(heap, gas), } } } diff --git a/tests/runner-tests/subgraph-data-sources/src/mapping.ts b/tests/runner-tests/subgraph-data-sources/src/mapping.ts index 3446d1f83c4..2e1a5382af3 100644 --- a/tests/runner-tests/subgraph-data-sources/src/mapping.ts +++ b/tests/runner-tests/subgraph-data-sources/src/mapping.ts @@ -1,15 +1,6 @@ -import { BigInt, dataSource, ethereum, log } from "@graphprotocol/graph-ts"; -import { Data } from "../generated/schema"; +import { Entity, log } from '@graphprotocol/graph-ts'; -export function handleBlock(block: ethereum.Block): void { - let foo = dataSource.context().getString("foo"); - let bar = dataSource.context().getI32("bar"); - let isTest = dataSource.context().getBoolean("isTest"); - if (block.number 
== BigInt.fromI32(0)) { - let data = new Data("0"); - data.foo = foo; - data.bar = bar; - data.isTest = isTest; - data.save(); - } +export function handleBlock(content: Entity): void { + let stringContent = content.getString('val'); + log.info('Content: {}', [stringContent]); } diff --git a/tests/runner-tests/subgraph-data-sources/subgraph.yaml b/tests/runner-tests/subgraph-data-sources/subgraph.yaml index b1a3fcbb486..050964f1c16 100644 --- a/tests/runner-tests/subgraph-data-sources/subgraph.yaml +++ b/tests/runner-tests/subgraph-data-sources/subgraph.yaml @@ -6,7 +6,7 @@ dataSources: name: Contract network: test source: - address: 'QmHash' + address: 'QmRFXhvyvbm4z5Lo7z2mN9Ckmo623uuB2jJYbRmAXgYKXJ' startBlock: 6082461 mapping: apiVersion: 0.0.7 diff --git a/tests/src/fixture/ethereum.rs b/tests/src/fixture/ethereum.rs index 1004e4d8900..5381a530148 100644 --- a/tests/src/fixture/ethereum.rs +++ b/tests/src/fixture/ethereum.rs @@ -9,9 +9,10 @@ use super::{ use graph::blockchain::client::ChainClient; use graph::blockchain::{BlockPtr, Trigger, TriggersAdapterSelector}; use graph::cheap_clone::CheapClone; +use graph::data_source::subgraph; use graph::prelude::ethabi::ethereum_types::H256; use graph::prelude::web3::types::{Address, Log, Transaction, H160}; -use graph::prelude::{ethabi, tiny_keccak, LightEthereumBlock, ENV_VARS}; +use graph::prelude::{ethabi, tiny_keccak, DeploymentHash, Entity, LightEthereumBlock, ENV_VARS}; use graph::{blockchain::block_stream::BlockWithTriggers, prelude::ethabi::ethereum_types::U64}; use graph_chain_ethereum::network::EthereumNetworkAdapters; use graph_chain_ethereum::trigger::LogRef; @@ -159,6 +160,21 @@ pub fn push_test_log(block: &mut BlockWithTriggers, payload: impl Into, + source: DeploymentHash, + entity: Entity, + entity_type: &str, +) { + block + .trigger_data + .push(Trigger::Subgraph(subgraph::TriggerData { + source, + entity: entity, + entity_type: entity_type.to_string(), + })); +} + pub fn push_test_command( block: &mut 
BlockWithTriggers, test_command: impl Into, diff --git a/tests/src/fixture/mod.rs b/tests/src/fixture/mod.rs index a6dbd650a3e..da37e8449f1 100644 --- a/tests/src/fixture/mod.rs +++ b/tests/src/fixture/mod.rs @@ -718,14 +718,25 @@ impl BlockStreamBuilder for MutexBlockStreamBuilder { async fn build_subgraph_block_stream( &self, - _chain: &C, - _deployment: DeploymentLocator, - _start_blocks: Vec, - _subgraph_current_block: Option, - _filter: Arc>, - _unified_api_version: graph::data::subgraph::UnifiedMappingApiVersion, + chain: &C, + deployment: DeploymentLocator, + start_blocks: Vec, + subgraph_current_block: Option, + filter: Arc>, + unified_api_version: graph::data::subgraph::UnifiedMappingApiVersion, ) -> anyhow::Result>> { - unimplemented!() + let builder = self.0.lock().unwrap().clone(); + + builder + .build_subgraph_block_stream( + chain, + deployment, + start_blocks, + subgraph_current_block, + filter, + unified_api_version, + ) + .await } async fn build_polling( @@ -772,11 +783,21 @@ where _chain: &C, _deployment: DeploymentLocator, _start_blocks: Vec, - _subgraph_current_block: Option, + subgraph_current_block: Option, _filter: Arc>, _unified_api_version: graph::data::subgraph::UnifiedMappingApiVersion, ) -> anyhow::Result>> { - unimplemented!() + let current_idx = subgraph_current_block.map(|current_block| { + self.chain + .iter() + .enumerate() + .find(|(_, b)| b.ptr() == current_block) + .unwrap() + .0 + }); + Ok(Box::new(StaticStream { + stream: Box::pin(stream_events(self.chain.clone(), current_idx)), + })) } async fn build_firehose( diff --git a/tests/tests/runner_tests.rs b/tests/tests/runner_tests.rs index 0ff9f40d537..eb409f6417c 100644 --- a/tests/tests/runner_tests.rs +++ b/tests/tests/runner_tests.rs @@ -18,11 +18,12 @@ use graph::object; use graph::prelude::ethabi::ethereum_types::H256; use graph::prelude::web3::types::Address; use graph::prelude::{ - hex, CheapClone, DeploymentHash, SubgraphAssignmentProvider, SubgraphName, SubgraphStore, + 
hex, CheapClone, DeploymentHash, SubgraphAssignmentProvider, SubgraphName, SubgraphStore, Value, }; +use graph::schema::InputSchema; use graph_tests::fixture::ethereum::{ chain, empty_block, generate_empty_blocks_for_range, genesis, push_test_command, push_test_log, - push_test_polling_trigger, + push_test_polling_trigger, push_test_subgraph_trigger, }; use graph_tests::fixture::substreams::chain as substreams_chain; @@ -1082,9 +1083,29 @@ async fn subgraph_data_sources() { let RunnerTestRecipe { stores, test_info } = RunnerTestRecipe::new("subgraph-data-sources", "subgraph-data-sources").await; + let schema = InputSchema::parse_latest( + "type User @entity { id: String!, val: String! }", + DeploymentHash::new("test").unwrap(), + ) + .unwrap(); + + let entity = schema + .make_entity(vec![ + ("id".into(), Value::String("id".to_owned())), + ("val".into(), Value::String("DATA".to_owned())), + ]) + .unwrap(); + let blocks = { let block_0 = genesis(); - let block_1 = empty_block(block_0.ptr(), test_ptr(1)); + let mut block_1 = empty_block(block_0.ptr(), test_ptr(1)); + push_test_subgraph_trigger( + &mut block_1, + DeploymentHash::new("QmRFXhvyvbm4z5Lo7z2mN9Ckmo623uuB2jJYbRmAXgYKXJ").unwrap(), + entity, + "User", + ); + let block_2 = empty_block(block_1.ptr(), test_ptr(2)); vec![block_0, block_1, block_2] }; @@ -1092,7 +1113,12 @@ async fn subgraph_data_sources() { let chain = chain(&test_info.test_name, blocks, &stores, None).await; let ctx = fixture::setup(&test_info, &stores, &chain, None, None).await; - ctx.start_and_sync_to(stop_block).await; + let _ = ctx + .runner(stop_block) + .await + .run_for_test(true) + .await + .unwrap(); } #[tokio::test] From cb4d08205b29abd324d03d3d325f707e2cbfed7e Mon Sep 17 00:00:00 2001 From: incrypto32 Date: Thu, 18 Jul 2024 13:46:46 +0530 Subject: [PATCH 09/21] chain, core, graph: Add source_subgraph_stores to indexing inputs --- chain/arweave/src/chain.rs | 5 +- chain/cosmos/src/chain.rs | 5 +- chain/ethereum/src/chain.rs | 7 ++- 
chain/near/src/chain.rs | 6 +- chain/starknet/src/chain.rs | 7 ++- chain/substreams/src/block_stream.rs | 5 +- chain/substreams/src/chain.rs | 5 +- core/src/subgraph/inputs.rs | 5 +- core/src/subgraph/instance_manager.rs | 89 +++++++++++++++++++++++++++ core/src/subgraph/stream.rs | 1 + graph/src/blockchain/block_stream.rs | 3 +- graph/src/blockchain/mock.rs | 5 +- graph/src/blockchain/mod.rs | 3 +- tests/src/fixture/mod.rs | 11 +++- 14 files changed, 134 insertions(+), 23 deletions(-) diff --git a/chain/arweave/src/chain.rs b/chain/arweave/src/chain.rs index 9e6167b5678..4092eae3c5a 100644 --- a/chain/arweave/src/chain.rs +++ b/chain/arweave/src/chain.rs @@ -7,11 +7,11 @@ use graph::blockchain::{ }; use graph::cheap_clone::CheapClone; use graph::components::adapter::ChainId; -use graph::components::store::DeploymentCursorTracker; +use graph::components::store::{DeploymentCursorTracker, WritableStore}; use graph::data::subgraph::UnifiedMappingApiVersion; use graph::env::EnvVars; use graph::firehose::FirehoseEndpoint; -use graph::prelude::MetricsRegistry; +use graph::prelude::{DeploymentHash, MetricsRegistry}; use graph::substreams::Clock; use graph::{ blockchain::{ @@ -119,6 +119,7 @@ impl Blockchain for Chain { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, start_blocks: Vec, + _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, filter: Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { diff --git a/chain/cosmos/src/chain.rs b/chain/cosmos/src/chain.rs index 868617968df..8eff2a9339c 100644 --- a/chain/cosmos/src/chain.rs +++ b/chain/cosmos/src/chain.rs @@ -2,7 +2,7 @@ use graph::blockchain::firehose_block_ingestor::FirehoseBlockIngestor; use graph::blockchain::{BlockIngestor, NoopDecoderHook, TriggerFilterWrapper}; use graph::components::adapter::ChainId; use graph::env::EnvVars; -use graph::prelude::MetricsRegistry; +use graph::prelude::{DeploymentHash, MetricsRegistry}; use graph::substreams::Clock; use 
std::convert::TryFrom; use std::sync::Arc; @@ -11,7 +11,7 @@ use graph::blockchain::block_stream::{BlockStreamError, BlockStreamMapper, Fireh use graph::blockchain::client::ChainClient; use graph::blockchain::{BasicBlockchainBuilder, BlockchainBuilder, NoopRuntimeAdapter}; use graph::cheap_clone::CheapClone; -use graph::components::store::DeploymentCursorTracker; +use graph::components::store::{DeploymentCursorTracker, WritableStore}; use graph::data::subgraph::UnifiedMappingApiVersion; use graph::{ blockchain::{ @@ -113,6 +113,7 @@ impl Blockchain for Chain { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, start_blocks: Vec, + _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, filter: Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { diff --git a/chain/ethereum/src/chain.rs b/chain/ethereum/src/chain.rs index 591bc9fdf6c..f891acdacb6 100644 --- a/chain/ethereum/src/chain.rs +++ b/chain/ethereum/src/chain.rs @@ -7,12 +7,12 @@ use graph::blockchain::{ TriggersAdapterSelector, }; use graph::components::adapter::ChainId; -use graph::components::store::DeploymentCursorTracker; +use graph::components::store::{DeploymentCursorTracker, WritableStore}; use graph::data::subgraph::UnifiedMappingApiVersion; use graph::firehose::{FirehoseEndpoint, ForkStep}; use graph::futures03::compat::Future01CompatExt; use graph::prelude::{ - BlockHash, ComponentLoggerConfig, ElasticComponentLoggerConfig, EthereumBlock, + BlockHash, ComponentLoggerConfig, DeploymentHash, ElasticComponentLoggerConfig, EthereumBlock, EthereumCallCache, LightEthereumBlock, LightEthereumBlockExt, MetricsRegistry, }; use graph::schema::InputSchema; @@ -128,6 +128,7 @@ impl BlockStreamBuilder for EthereumStreamBuilder { chain: &Chain, deployment: DeploymentLocator, start_blocks: Vec, + _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, subgraph_current_block: Option, filter: Arc>, unified_api_version: UnifiedMappingApiVersion, @@ -478,6 +479,7 @@ impl Blockchain 
for Chain { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, start_blocks: Vec, + source_subgraph_stores: Vec<(DeploymentHash, Arc)>, filter: Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { @@ -490,6 +492,7 @@ impl Blockchain for Chain { self, deployment, start_blocks, + source_subgraph_stores, current_ptr, filter, unified_api_version, diff --git a/chain/near/src/chain.rs b/chain/near/src/chain.rs index 3211870f069..b3203f40e33 100644 --- a/chain/near/src/chain.rs +++ b/chain/near/src/chain.rs @@ -8,12 +8,12 @@ use graph::blockchain::{ }; use graph::cheap_clone::CheapClone; use graph::components::adapter::ChainId; -use graph::components::store::DeploymentCursorTracker; +use graph::components::store::{DeploymentCursorTracker, WritableStore}; use graph::data::subgraph::UnifiedMappingApiVersion; use graph::env::EnvVars; use graph::firehose::FirehoseEndpoint; use graph::futures03::TryFutureExt; -use graph::prelude::MetricsRegistry; +use graph::prelude::{DeploymentHash, MetricsRegistry}; use graph::schema::InputSchema; use graph::substreams::{Clock, Package}; use graph::{ @@ -114,6 +114,7 @@ impl BlockStreamBuilder for NearStreamBuilder { _chain: &Chain, _deployment: DeploymentLocator, _start_blocks: Vec, + _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, _subgraph_current_block: Option, _filter: Arc>, _unified_api_version: UnifiedMappingApiVersion, @@ -242,6 +243,7 @@ impl Blockchain for Chain { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, start_blocks: Vec, + _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, filter: Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { diff --git a/chain/starknet/src/chain.rs b/chain/starknet/src/chain.rs index d926b31877b..5d7fdaf663d 100644 --- a/chain/starknet/src/chain.rs +++ b/chain/starknet/src/chain.rs @@ -16,15 +16,14 @@ use graph::{ cheap_clone::CheapClone, components::{ adapter::ChainId, - store::{DeploymentCursorTracker, 
DeploymentLocator}, + store::{DeploymentCursorTracker, DeploymentLocator, WritableStore}, }, data::subgraph::UnifiedMappingApiVersion, env::EnvVars, firehose::{self, FirehoseEndpoint, ForkStep}, futures03::future::TryFutureExt, prelude::{ - async_trait, BlockHash, BlockNumber, ChainStore, Error, Logger, LoggerFactory, - MetricsRegistry, + async_trait, BlockHash, BlockNumber, ChainStore, DeploymentHash, Error, Logger, LoggerFactory, MetricsRegistry }, schema::InputSchema, slog::o, @@ -115,6 +114,7 @@ impl Blockchain for Chain { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, start_blocks: Vec, + _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, filter: Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { @@ -201,6 +201,7 @@ impl BlockStreamBuilder for StarknetStreamBuilder { _chain: &Chain, _deployment: DeploymentLocator, _start_blocks: Vec, + _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, _subgraph_current_block: Option, _filter: Arc>, _unified_api_version: UnifiedMappingApiVersion, diff --git a/chain/substreams/src/block_stream.rs b/chain/substreams/src/block_stream.rs index 2d6eb902409..24cedade570 100644 --- a/chain/substreams/src/block_stream.rs +++ b/chain/substreams/src/block_stream.rs @@ -9,9 +9,9 @@ use graph::{ substreams_block_stream::SubstreamsBlockStream, Blockchain, TriggerFilterWrapper, }, - components::store::DeploymentLocator, + components::store::{DeploymentLocator, WritableStore}, data::subgraph::UnifiedMappingApiVersion, - prelude::{async_trait, BlockNumber, BlockPtr}, + prelude::{async_trait, BlockNumber, BlockPtr, DeploymentHash}, schema::InputSchema, slog::o, }; @@ -104,6 +104,7 @@ impl BlockStreamBuilderTrait for BlockStreamBuilder { _chain: &Chain, _deployment: DeploymentLocator, _start_blocks: Vec, + _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, _subgraph_current_block: Option, _filter: Arc>, _unified_api_version: UnifiedMappingApiVersion, diff --git a/chain/substreams/src/chain.rs 
b/chain/substreams/src/chain.rs index 044617603d8..abb092c7499 100644 --- a/chain/substreams/src/chain.rs +++ b/chain/substreams/src/chain.rs @@ -7,9 +7,9 @@ use graph::blockchain::{ NoopRuntimeAdapter, TriggerFilterWrapper, }; use graph::components::adapter::ChainId; -use graph::components::store::DeploymentCursorTracker; +use graph::components::store::{DeploymentCursorTracker, WritableStore}; use graph::env::EnvVars; -use graph::prelude::{BlockHash, CheapClone, Entity, LoggerFactory, MetricsRegistry}; +use graph::prelude::{BlockHash, CheapClone, DeploymentHash, Entity, LoggerFactory, MetricsRegistry}; use graph::schema::EntityKey; use graph::{ blockchain::{ @@ -140,6 +140,7 @@ impl Blockchain for Chain { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, _start_blocks: Vec, + _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, filter: Arc>, _unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { diff --git a/core/src/subgraph/inputs.rs b/core/src/subgraph/inputs.rs index b2e95c753f5..3a12e33415f 100644 --- a/core/src/subgraph/inputs.rs +++ b/core/src/subgraph/inputs.rs @@ -6,7 +6,7 @@ use graph::{ }, data::subgraph::{SubgraphFeature, UnifiedMappingApiVersion}, data_source::DataSourceTemplate, - prelude::BlockNumber, + prelude::{BlockNumber, DeploymentHash}, }; use std::collections::BTreeSet; use std::sync::Arc; @@ -16,6 +16,7 @@ pub struct IndexingInputs { pub features: BTreeSet, pub start_blocks: Vec, pub end_blocks: BTreeSet, + pub source_subgraph_stores: Vec<(DeploymentHash, Arc)>, pub stop_block: Option, pub store: Arc, pub debug_fork: Option>, @@ -39,6 +40,7 @@ impl IndexingInputs { features, start_blocks, end_blocks, + source_subgraph_stores, stop_block, store: _, debug_fork, @@ -56,6 +58,7 @@ impl IndexingInputs { features: features.clone(), start_blocks: start_blocks.clone(), end_blocks: end_blocks.clone(), + source_subgraph_stores: source_subgraph_stores.clone(), stop_block: stop_block.clone(), store, debug_fork: 
debug_fork.clone(), diff --git a/core/src/subgraph/instance_manager.rs b/core/src/subgraph/instance_manager.rs index 75b0b86f81f..11ce5f065c7 100644 --- a/core/src/subgraph/instance_manager.rs +++ b/core/src/subgraph/instance_manager.rs @@ -9,12 +9,14 @@ use crate::subgraph::runner::SubgraphRunner; use graph::blockchain::block_stream::BlockStreamMetrics; use graph::blockchain::{Blockchain, BlockchainKind, DataSource, NodeCapabilities}; use graph::components::metrics::gas::GasMetrics; +use graph::components::store::WritableStore; use graph::components::subgraph::ProofOfIndexingVersion; use graph::data::subgraph::{UnresolvedSubgraphManifest, SPEC_VERSION_0_0_6}; use graph::data::value::Word; use graph::data_source::causality_region::CausalityRegionSeq; use graph::env::EnvVars; use graph::prelude::{SubgraphInstanceManager as SubgraphInstanceManagerTrait, *}; +use graph::semver::Version; use graph::{blockchain::BlockchainMap, components::store::DeploymentLocator}; use graph_runtime_wasm::module::ToAscPtr; use graph_runtime_wasm::RuntimeHostBuilder; @@ -202,6 +204,52 @@ impl SubgraphInstanceManager { } } + pub async fn hashes_to_writable_store( + &self, + logger: &Logger, + link_resolver: &Arc, + hashes: Vec, + max_spec_version: Version, + is_runner_test: bool, + ) -> anyhow::Result)>> { + let mut writable_stores = Vec::new(); + let subgraph_store = self.subgraph_store.clone(); + + if is_runner_test { + return Ok(writable_stores); + } + + for hash in hashes { + let file_bytes = link_resolver + .cat(logger, &hash.to_ipfs_link()) + .await + .map_err(SubgraphAssignmentProviderError::ResolveError)?; + let raw: serde_yaml::Mapping = serde_yaml::from_slice(&file_bytes) + .map_err(|e| SubgraphAssignmentProviderError::ResolveError(e.into()))?; + let manifest = UnresolvedSubgraphManifest::::parse(hash.cheap_clone(), raw)?; + let manifest = manifest + .resolve(&link_resolver, &logger, max_spec_version.clone()) + .await?; + + let loc = subgraph_store + .active_locator(&hash)? 
+ .ok_or_else(|| anyhow!("no active deployment for hash {}", hash))?; + + let writable_store = subgraph_store + .clone() // Clone the Arc again for each iteration + .writable( + logger.clone(), + loc.id.clone(), + Arc::new(manifest.template_idx_and_name().collect()), + ) + .await?; + + writable_stores.push((loc.hash, writable_store)); + } + + Ok(writable_stores) + } + pub async fn build_subgraph_runner( &self, logger: Logger, @@ -211,6 +259,26 @@ impl SubgraphInstanceManager { stop_block: Option, tp: Box>>, ) -> anyhow::Result>> + where + C: Blockchain, + ::MappingTrigger: ToAscPtr, + { + self.build_subgraph_runner_inner( + logger, env_vars, deployment, manifest, stop_block, tp, false, + ) + .await + } + + pub async fn build_subgraph_runner_inner( + &self, + logger: Logger, + env_vars: Arc, + deployment: DeploymentLocator, + manifest: serde_yaml::Mapping, + stop_block: Option, + tp: Box>>, + is_runner_test: bool, + ) -> anyhow::Result>> where C: Blockchain, ::MappingTrigger: ToAscPtr, @@ -307,6 +375,16 @@ impl SubgraphInstanceManager { .filter_map(|d| d.as_onchain().cloned()) .collect::>(); + let subgraph_data_sources = data_sources + .iter() + .filter_map(|d| d.as_subgraph()) + .collect::>(); + + let subgraph_ds_source_deployments = subgraph_data_sources + .iter() + .map(|d| d.source.address()) + .collect::>(); + let required_capabilities = C::NodeCapabilities::from_data_sources(&onchain_data_sources); let network: Word = manifest.network_name().into(); @@ -413,11 +491,22 @@ impl SubgraphInstanceManager { let decoder = Box::new(Decoder::new(decoder_hook)); + let subgraph_data_source_writables = self + .hashes_to_writable_store::( + &logger, + &link_resolver, + subgraph_ds_source_deployments, + manifest.spec_version.clone(), + is_runner_test, + ) + .await?; + let inputs = IndexingInputs { deployment: deployment.clone(), features, start_blocks, end_blocks, + source_subgraph_stores: subgraph_data_source_writables, stop_block, store, debug_fork, diff --git 
a/core/src/subgraph/stream.rs b/core/src/subgraph/stream.rs index cfd41808e27..5547543f13d 100644 --- a/core/src/subgraph/stream.rs +++ b/core/src/subgraph/stream.rs @@ -18,6 +18,7 @@ pub async fn new_block_stream( inputs.deployment.clone(), inputs.store.cheap_clone(), inputs.start_blocks.clone(), + inputs.source_subgraph_stores.clone(), Arc::new(filter.clone()), inputs.unified_api_version.clone(), ) diff --git a/graph/src/blockchain/block_stream.rs b/graph/src/blockchain/block_stream.rs index 8743fdd6c6b..382378a9b24 100644 --- a/graph/src/blockchain/block_stream.rs +++ b/graph/src/blockchain/block_stream.rs @@ -14,7 +14,7 @@ use tokio::sync::mpsc::{self, Receiver, Sender}; use super::substreams_block_stream::SubstreamsLogData; use super::{Block, BlockPtr, BlockTime, Blockchain, Trigger, TriggerFilterWrapper}; use crate::anyhow::Result; -use crate::components::store::{BlockNumber, DeploymentLocator}; +use crate::components::store::{BlockNumber, DeploymentLocator, WritableStore}; use crate::data::subgraph::UnifiedMappingApiVersion; use crate::firehose::{self, FirehoseEndpoint}; use crate::futures03::stream::StreamExt as _; @@ -154,6 +154,7 @@ pub trait BlockStreamBuilder: Send + Sync { chain: &C, deployment: DeploymentLocator, start_blocks: Vec, + source_subgraph_stores: Vec<(DeploymentHash, Arc)>, subgraph_current_block: Option, filter: Arc>, unified_api_version: UnifiedMappingApiVersion, diff --git a/graph/src/blockchain/mock.rs b/graph/src/blockchain/mock.rs index e13a21878e4..4be185d1b39 100644 --- a/graph/src/blockchain/mock.rs +++ b/graph/src/blockchain/mock.rs @@ -2,11 +2,11 @@ use crate::{ bail, components::{ link_resolver::LinkResolver, - store::{BlockNumber, DeploymentCursorTracker, DeploymentLocator}, + store::{BlockNumber, DeploymentCursorTracker, DeploymentLocator, WritableStore}, subgraph::InstanceDSTemplateInfo, }, data::subgraph::UnifiedMappingApiVersion, - prelude::{BlockHash, DataSourceTemplateInfo}, + prelude::{BlockHash, DataSourceTemplateInfo, 
DeploymentHash}, }; use anyhow::Error; use async_trait::async_trait; @@ -373,6 +373,7 @@ impl Blockchain for MockBlockchain { _deployment: DeploymentLocator, _store: impl DeploymentCursorTracker, _start_blocks: Vec, + _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, _filter: Arc>, _unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { diff --git a/graph/src/blockchain/mod.rs b/graph/src/blockchain/mod.rs index ebf958827e7..efb9d0659c4 100644 --- a/graph/src/blockchain/mod.rs +++ b/graph/src/blockchain/mod.rs @@ -20,7 +20,7 @@ use crate::{ components::{ adapter::ChainId, metrics::subgraph::SubgraphInstanceMetrics, - store::{DeploymentCursorTracker, DeploymentLocator, StoredDynamicDataSource}, + store::{DeploymentCursorTracker, DeploymentLocator, StoredDynamicDataSource, WritableStore}, subgraph::{HostMetrics, InstanceDSTemplateInfo, MappingError}, trigger_processor::RunnableTriggers, }, @@ -189,6 +189,7 @@ pub trait Blockchain: Debug + Sized + Send + Sync + Unpin + 'static { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, start_blocks: Vec, + source_subgraph_stores: Vec<(DeploymentHash, Arc)>, filter: Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error>; diff --git a/tests/src/fixture/mod.rs b/tests/src/fixture/mod.rs index da37e8449f1..095ca0446c7 100644 --- a/tests/src/fixture/mod.rs +++ b/tests/src/fixture/mod.rs @@ -20,7 +20,7 @@ use graph::cheap_clone::CheapClone; use graph::components::adapter::ChainId; use graph::components::link_resolver::{ArweaveClient, ArweaveResolver, FileSizeLimit}; use graph::components::metrics::MetricsRegistry; -use graph::components::store::{BlockStore, DeploymentLocator, EthereumCallCache}; +use graph::components::store::{BlockStore, DeploymentLocator, EthereumCallCache, WritableStore}; use graph::components::subgraph::Settings; use graph::data::graphql::load_manager::LoadManager; use graph::data::query::{Query, QueryTarget}; @@ -211,13 +211,14 @@ impl TestContext { let 
tp: Box> = Box::new(SubgraphTriggerProcessor {}); self.instance_manager - .build_subgraph_runner( + .build_subgraph_runner_inner( logger, self.env_vars.cheap_clone(), deployment, raw, Some(stop_block.block_number()), tp, + true, ) .await .unwrap() @@ -236,13 +237,14 @@ impl TestContext { ); self.instance_manager - .build_subgraph_runner( + .build_subgraph_runner_inner( logger, self.env_vars.cheap_clone(), deployment, raw, Some(stop_block.block_number()), tp, + true, ) .await .unwrap() @@ -721,6 +723,7 @@ impl BlockStreamBuilder for MutexBlockStreamBuilder { chain: &C, deployment: DeploymentLocator, start_blocks: Vec, + source_subgraph_stores: Vec<(DeploymentHash, Arc)>, subgraph_current_block: Option, filter: Arc>, unified_api_version: graph::data::subgraph::UnifiedMappingApiVersion, @@ -732,6 +735,7 @@ impl BlockStreamBuilder for MutexBlockStreamBuilder { chain, deployment, start_blocks, + source_subgraph_stores, subgraph_current_block, filter, unified_api_version, @@ -783,6 +787,7 @@ where _chain: &C, _deployment: DeploymentLocator, _start_blocks: Vec, + _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, subgraph_current_block: Option, _filter: Arc>, _unified_api_version: graph::data::subgraph::UnifiedMappingApiVersion, From ff572cfc21809c78c07d87e2cd0e2ed109e4b699 Mon Sep 17 00:00:00 2001 From: incrypto32 Date: Thu, 18 Jul 2024 15:28:41 +0530 Subject: [PATCH 10/21] graph, chain: minor refactoring and formatting --- chain/starknet/src/chain.rs | 3 ++- chain/substreams/src/chain.rs | 4 ++- graph/src/blockchain/block_stream.rs | 26 ++++++++++++++++++- graph/src/blockchain/mod.rs | 4 ++- .../subgraph-data-sources/subgraph.yaml | 2 +- 5 files changed, 34 insertions(+), 5 deletions(-) diff --git a/chain/starknet/src/chain.rs b/chain/starknet/src/chain.rs index 5d7fdaf663d..865ad90af10 100644 --- a/chain/starknet/src/chain.rs +++ b/chain/starknet/src/chain.rs @@ -23,7 +23,8 @@ use graph::{ firehose::{self, FirehoseEndpoint, ForkStep}, futures03::future::TryFutureExt, 
prelude::{ - async_trait, BlockHash, BlockNumber, ChainStore, DeploymentHash, Error, Logger, LoggerFactory, MetricsRegistry + async_trait, BlockHash, BlockNumber, ChainStore, DeploymentHash, Error, Logger, + LoggerFactory, MetricsRegistry, }, schema::InputSchema, slog::o, diff --git a/chain/substreams/src/chain.rs b/chain/substreams/src/chain.rs index abb092c7499..b3cf8cca8a6 100644 --- a/chain/substreams/src/chain.rs +++ b/chain/substreams/src/chain.rs @@ -9,7 +9,9 @@ use graph::blockchain::{ use graph::components::adapter::ChainId; use graph::components::store::{DeploymentCursorTracker, WritableStore}; use graph::env::EnvVars; -use graph::prelude::{BlockHash, CheapClone, DeploymentHash, Entity, LoggerFactory, MetricsRegistry}; +use graph::prelude::{ + BlockHash, CheapClone, DeploymentHash, Entity, LoggerFactory, MetricsRegistry, +}; use graph::schema::EntityKey; use graph::{ blockchain::{ diff --git a/graph/src/blockchain/block_stream.rs b/graph/src/blockchain/block_stream.rs index 382378a9b24..5784307a0b1 100644 --- a/graph/src/blockchain/block_stream.rs +++ b/graph/src/blockchain/block_stream.rs @@ -303,13 +303,13 @@ impl TriggersAdapter for TriggersAdaterWrapper { self.adapter.ancestor_block(ptr, offset, root).await } + // TODO: Do a proper implementation, this is a complete mock implementation async fn scan_triggers( &self, from: BlockNumber, to: BlockNumber, filter: &C::TriggerFilter, ) -> Result<(Vec>, BlockNumber), Error> { - // TODO(krishna): Do a proper implementation self.adapter .scan_triggers(from, to, filter) .await @@ -339,6 +339,30 @@ impl TriggersAdapter for TriggersAdaterWrapper { } } +// fn create_mock_trigger() -> Trigger { +// let entity = create_mock_entity(); +// Trigger::Subgraph(subgraph::TriggerData { +// source: DeploymentHash::new("test").unwrap(), +// entity, +// entity_type: "User".to_string(), +// }) +// } + +// fn create_mock_entity() -> Entity { +// let schema = InputSchema::parse_latest( +// "type User @entity { id: String!, val: 
String! }", +// DeploymentHash::new("test").unwrap(), +// ) +// .unwrap(); + +// schema +// .make_entity(vec![ +// ("id".into(), Value::String("id".to_owned())), +// ("val".into(), Value::String("content".to_owned())), +// ]) +// .unwrap() +// } + #[async_trait] pub trait TriggersAdapter: Send + Sync { // Return the block that is `offset` blocks before the block pointed to by `ptr` from the local diff --git a/graph/src/blockchain/mod.rs b/graph/src/blockchain/mod.rs index efb9d0659c4..14dceaf45e1 100644 --- a/graph/src/blockchain/mod.rs +++ b/graph/src/blockchain/mod.rs @@ -20,7 +20,9 @@ use crate::{ components::{ adapter::ChainId, metrics::subgraph::SubgraphInstanceMetrics, - store::{DeploymentCursorTracker, DeploymentLocator, StoredDynamicDataSource, WritableStore}, + store::{ + DeploymentCursorTracker, DeploymentLocator, StoredDynamicDataSource, WritableStore, + }, subgraph::{HostMetrics, InstanceDSTemplateInfo, MappingError}, trigger_processor::RunnableTriggers, }, diff --git a/tests/runner-tests/subgraph-data-sources/subgraph.yaml b/tests/runner-tests/subgraph-data-sources/subgraph.yaml index 050964f1c16..1c666e3417e 100644 --- a/tests/runner-tests/subgraph-data-sources/subgraph.yaml +++ b/tests/runner-tests/subgraph-data-sources/subgraph.yaml @@ -15,5 +15,5 @@ dataSources: - Gravatar handlers: - handler: handleBlock - entity: Gravatar + entity: User file: ./src/mapping.ts From 35b2a549523860b1029e93ab8663f1a7affb7b1e Mon Sep 17 00:00:00 2001 From: incrypto32 Date: Thu, 18 Jul 2024 16:37:59 +0530 Subject: [PATCH 11/21] graph, chain: fix typo in TriggersAdapterWrapper --- chain/ethereum/src/chain.rs | 4 ++-- graph/src/blockchain/block_stream.rs | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/chain/ethereum/src/chain.rs b/chain/ethereum/src/chain.rs index f891acdacb6..18ea14b82b7 100644 --- a/chain/ethereum/src/chain.rs +++ b/chain/ethereum/src/chain.rs @@ -62,7 +62,7 @@ use crate::{BufferedCallCache, NodeCapabilities}; use 
crate::{EthereumAdapter, RuntimeAdapter}; use graph::blockchain::block_stream::{ BlockStream, BlockStreamBuilder, BlockStreamError, BlockStreamMapper, FirehoseCursor, - TriggersAdaterWrapper, + TriggersAdapterWrapper, }; /// Celo Mainnet: 42220, Testnet Alfajores: 44787, Testnet Baklava: 62320 @@ -143,7 +143,7 @@ impl BlockStreamBuilder for EthereumStreamBuilder { ) }); - let adapter = Arc::new(TriggersAdaterWrapper::new(adapter)); + let adapter = Arc::new(TriggersAdapterWrapper::new(adapter)); let logger = chain .logger_factory diff --git a/graph/src/blockchain/block_stream.rs b/graph/src/blockchain/block_stream.rs index 5784307a0b1..190445e85bd 100644 --- a/graph/src/blockchain/block_stream.rs +++ b/graph/src/blockchain/block_stream.rs @@ -282,18 +282,18 @@ impl BlockWithTriggers { } } -pub struct TriggersAdaterWrapper { +pub struct TriggersAdapterWrapper { pub adapter: Arc>, } -impl TriggersAdaterWrapper { +impl TriggersAdapterWrapper { pub fn new(adapter: Arc>) -> Self { Self { adapter } } } #[async_trait] -impl TriggersAdapter for TriggersAdaterWrapper { +impl TriggersAdapter for TriggersAdapterWrapper { async fn ancestor_block( &self, ptr: BlockPtr, From fe5d5ba80d022821bc8415f9a8e655ae9ee407dd Mon Sep 17 00:00:00 2001 From: incrypto32 Date: Thu, 18 Jul 2024 16:55:03 +0530 Subject: [PATCH 12/21] chain, graph: use TriggerFilterWrapper in scan_triggers --- chain/arweave/src/chain.rs | 2 +- chain/cosmos/src/chain.rs | 2 +- chain/ethereum/src/chain.rs | 8 ++++---- chain/near/src/chain.rs | 2 +- chain/starknet/src/chain.rs | 2 +- chain/substreams/src/trigger.rs | 5 +++-- graph/src/blockchain/block_stream.rs | 15 +++++++++++---- graph/src/blockchain/mock.rs | 4 ++-- graph/src/blockchain/polling_block_stream.rs | 5 +---- tests/src/fixture/mod.rs | 2 +- 10 files changed, 26 insertions(+), 21 deletions(-) diff --git a/chain/arweave/src/chain.rs b/chain/arweave/src/chain.rs index 4092eae3c5a..163aed06204 100644 --- a/chain/arweave/src/chain.rs +++ 
b/chain/arweave/src/chain.rs @@ -198,7 +198,7 @@ impl TriggersAdapterTrait for TriggersAdapter { &self, _from: BlockNumber, _to: BlockNumber, - _filter: &TriggerFilter, + _filter: &Arc>, ) -> Result<(Vec>, BlockNumber), Error> { panic!("Should never be called since not used by FirehoseBlockStream") } diff --git a/chain/cosmos/src/chain.rs b/chain/cosmos/src/chain.rs index 8eff2a9339c..7a20adc127f 100644 --- a/chain/cosmos/src/chain.rs +++ b/chain/cosmos/src/chain.rs @@ -201,7 +201,7 @@ impl TriggersAdapterTrait for TriggersAdapter { &self, _from: BlockNumber, _to: BlockNumber, - _filter: &TriggerFilter, + _filter: &Arc>, ) -> Result<(Vec>, BlockNumber), Error> { panic!("Should never be called since not used by FirehoseBlockStream") } diff --git a/chain/ethereum/src/chain.rs b/chain/ethereum/src/chain.rs index 18ea14b82b7..9dffc477f5c 100644 --- a/chain/ethereum/src/chain.rs +++ b/chain/ethereum/src/chain.rs @@ -128,7 +128,7 @@ impl BlockStreamBuilder for EthereumStreamBuilder { chain: &Chain, deployment: DeploymentLocator, start_blocks: Vec, - _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, + source_subgraph_stores: Vec<(DeploymentHash, Arc)>, subgraph_current_block: Option, filter: Arc>, unified_api_version: UnifiedMappingApiVersion, @@ -143,7 +143,7 @@ impl BlockStreamBuilder for EthereumStreamBuilder { ) }); - let adapter = Arc::new(TriggersAdapterWrapper::new(adapter)); + let adapter = Arc::new(TriggersAdapterWrapper::new(adapter, source_subgraph_stores)); let logger = chain .logger_factory @@ -758,7 +758,7 @@ impl TriggersAdapterTrait for TriggersAdapter { &self, from: BlockNumber, to: BlockNumber, - filter: &TriggerFilter, + filter: &Arc>, ) -> Result<(Vec>, BlockNumber), Error> { blocks_with_triggers( self.chain_client @@ -770,7 +770,7 @@ impl TriggersAdapterTrait for TriggersAdapter { self.ethrpc_metrics.clone(), from, to, - filter, + &filter.filter.clone(), self.unified_api_version.clone(), ) .await diff --git a/chain/near/src/chain.rs 
b/chain/near/src/chain.rs index b3203f40e33..73a17a6640a 100644 --- a/chain/near/src/chain.rs +++ b/chain/near/src/chain.rs @@ -331,7 +331,7 @@ impl TriggersAdapterTrait for TriggersAdapter { &self, _from: BlockNumber, _to: BlockNumber, - _filter: &TriggerFilter, + _filter: &Arc>, ) -> Result<(Vec>, BlockNumber), Error> { panic!("Should never be called since not used by FirehoseBlockStream") } diff --git a/chain/starknet/src/chain.rs b/chain/starknet/src/chain.rs index 865ad90af10..620c8412760 100644 --- a/chain/starknet/src/chain.rs +++ b/chain/starknet/src/chain.rs @@ -393,7 +393,7 @@ impl TriggersAdapterTrait for TriggersAdapter { &self, _from: BlockNumber, _to: BlockNumber, - _filter: &crate::adapter::TriggerFilter, + _filter: &Arc>, ) -> Result<(Vec>, BlockNumber), Error> { panic!("Should never be called since not used by FirehoseBlockStream") } diff --git a/chain/substreams/src/trigger.rs b/chain/substreams/src/trigger.rs index 2b47e4e57b8..5f919a12a90 100644 --- a/chain/substreams/src/trigger.rs +++ b/chain/substreams/src/trigger.rs @@ -1,7 +1,8 @@ use anyhow::Error; use graph::{ blockchain::{ - self, block_stream::BlockWithTriggers, BlockPtr, EmptyNodeCapabilities, MappingTriggerTrait, + self, block_stream::BlockWithTriggers, BlockPtr, EmptyNodeCapabilities, + MappingTriggerTrait, TriggerFilterWrapper, }, components::{ store::{DeploymentLocator, SubgraphFork}, @@ -140,7 +141,7 @@ impl blockchain::TriggersAdapter for TriggersAdapter { &self, _from: BlockNumber, _to: BlockNumber, - _filter: &TriggerFilter, + _filter: &Arc>, ) -> Result<(Vec>, BlockNumber), Error> { unimplemented!() } diff --git a/graph/src/blockchain/block_stream.rs b/graph/src/blockchain/block_stream.rs index 190445e85bd..3979c28ce92 100644 --- a/graph/src/blockchain/block_stream.rs +++ b/graph/src/blockchain/block_stream.rs @@ -284,11 +284,18 @@ impl BlockWithTriggers { pub struct TriggersAdapterWrapper { pub adapter: Arc>, + pub source_subgraph_stores: Vec<(DeploymentHash, Arc)>, } impl 
TriggersAdapterWrapper { - pub fn new(adapter: Arc>) -> Self { - Self { adapter } + pub fn new( + adapter: Arc>, + source_subgraph_stores: Vec<(DeploymentHash, Arc)>, + ) -> Self { + Self { + adapter, + source_subgraph_stores, + } } } @@ -308,7 +315,7 @@ impl TriggersAdapter for TriggersAdapterWrapper { &self, from: BlockNumber, to: BlockNumber, - filter: &C::TriggerFilter, + filter: &Arc>, ) -> Result<(Vec>, BlockNumber), Error> { self.adapter .scan_triggers(from, to, filter) @@ -386,7 +393,7 @@ pub trait TriggersAdapter: Send + Sync { &self, from: BlockNumber, to: BlockNumber, - filter: &C::TriggerFilter, + filter: &Arc>, ) -> Result<(Vec>, BlockNumber), Error>; // Used for reprocessing blocks when creating a data source. diff --git a/graph/src/blockchain/mock.rs b/graph/src/blockchain/mock.rs index 4be185d1b39..c08f688a4b4 100644 --- a/graph/src/blockchain/mock.rs +++ b/graph/src/blockchain/mock.rs @@ -232,7 +232,7 @@ impl TriggersAdapter for MockTriggersAdapter { &self, from: crate::components::store::BlockNumber, to: crate::components::store::BlockNumber, - filter: &MockTriggerFilter, + filter: &Arc>, ) -> Result< ( Vec>, @@ -264,7 +264,7 @@ impl TriggersAdapter for MockTriggersAdapter { async fn blocks_with_triggers( _from: crate::components::store::BlockNumber, to: crate::components::store::BlockNumber, - _filter: &MockTriggerFilter, + _filter: &Arc>, ) -> Result< ( Vec>, diff --git a/graph/src/blockchain/polling_block_stream.rs b/graph/src/blockchain/polling_block_stream.rs index 64a84ff3b94..8cf466a6486 100644 --- a/graph/src/blockchain/polling_block_stream.rs +++ b/graph/src/blockchain/polling_block_stream.rs @@ -379,10 +379,7 @@ where ); // Update with actually scanned range, to account for any skipped null blocks. 
- let (blocks, to) = self - .adapter - .scan_triggers(from, to, &self.filter.filter.clone()) - .await?; + let (blocks, to) = self.adapter.scan_triggers(from, to, &self.filter).await?; let range_size = to - from + 1; // If the target block (`to`) is within the reorg threshold, indicating no non-null finalized blocks are diff --git a/tests/src/fixture/mod.rs b/tests/src/fixture/mod.rs index 095ca0446c7..358c5a8e425 100644 --- a/tests/src/fixture/mod.rs +++ b/tests/src/fixture/mod.rs @@ -1006,7 +1006,7 @@ impl TriggersAdapter for MockTriggersAdapter { &self, _from: BlockNumber, _to: BlockNumber, - _filter: &::TriggerFilter, + _filter: &Arc>, ) -> Result<(Vec>, BlockNumber), Error> { todo!() } From 4c05119aa14fe6148adedfcaae283eea0d798f10 Mon Sep 17 00:00:00 2001 From: incrypto32 Date: Wed, 24 Jul 2024 09:56:51 +0530 Subject: [PATCH 13/21] chain/ethereum: implement load_blocks_by_numbers for EthereumAdapter --- chain/ethereum/src/adapter.rs | 7 +++++++ chain/ethereum/src/ethereum_adapter.rs | 22 ++++++++++++++++++++++ graph/src/blockchain/types.rs | 4 ++++ 3 files changed, 33 insertions(+) diff --git a/chain/ethereum/src/adapter.rs b/chain/ethereum/src/adapter.rs index f78ff1b0bec..31083724809 100644 --- a/chain/ethereum/src/adapter.rs +++ b/chain/ethereum/src/adapter.rs @@ -1109,6 +1109,13 @@ pub trait EthereumAdapter: Send + Sync + 'static { block_hash: H256, ) -> Box + Send>; + async fn load_blocks_by_numbers( + &self, + logger: Logger, + chain_store: Arc, + block_numbers: HashSet, + ) -> Box, Error = Error> + Send>; + /// Load Ethereum blocks in bulk, returning results as they come back as a Stream. /// May use the `chain_store` as a cache. 
async fn load_blocks( diff --git a/chain/ethereum/src/ethereum_adapter.rs b/chain/ethereum/src/ethereum_adapter.rs index 123f79bb4a8..7c08c61950e 100644 --- a/chain/ethereum/src/ethereum_adapter.rs +++ b/chain/ethereum/src/ethereum_adapter.rs @@ -1648,6 +1648,28 @@ impl EthereumAdapterTrait for EthereumAdapter { Ok(decoded) } + // This is an ugly temporary implementation to get the block ptrs for a range of blocks + async fn load_blocks_by_numbers( + &self, + logger: Logger, + chain_store: Arc, + block_numbers: HashSet, + ) -> Box, Error = Error> + Send> { + let block_hashes = block_numbers + .into_iter() + .map(|number| { + chain_store + .block_hashes_by_block_number(number) + .unwrap() + .first() + .unwrap() + .as_h256() + }) + .collect::>(); + + self.load_blocks(logger, chain_store, block_hashes).await + } + /// Load Ethereum blocks in bulk, returning results as they come back as a Stream. async fn load_blocks( &self, diff --git a/graph/src/blockchain/types.rs b/graph/src/blockchain/types.rs index 931e52e2dd5..7c670d4cdd6 100644 --- a/graph/src/blockchain/types.rs +++ b/graph/src/blockchain/types.rs @@ -31,6 +31,10 @@ impl BlockHash { &self.0 } + pub fn as_h256(&self) -> H256 { + H256::from_slice(self.as_slice()) + } + /// Encodes the block hash into a hexadecimal string **without** a "0x" /// prefix.
Hashes are stored in the database in this format when the /// schema uses `text` columns, which is a legacy and such columns From d20bafd934200bcb17304a2071b41141c3a5bb8a Mon Sep 17 00:00:00 2001 From: incrypto32 Date: Wed, 24 Jul 2024 12:53:31 +0530 Subject: [PATCH 14/21] graph: refactor BlockWithTriggers impl --- graph/src/blockchain/block_stream.rs | 41 +++++++++++++++++----------- 1 file changed, 25 insertions(+), 16 deletions(-) diff --git a/graph/src/blockchain/block_stream.rs b/graph/src/blockchain/block_stream.rs index 3979c28ce92..a528bc97c15 100644 --- a/graph/src/blockchain/block_stream.rs +++ b/graph/src/blockchain/block_stream.rs @@ -1,3 +1,4 @@ +use crate::data_source::subgraph; use crate::substreams::Clock; use crate::substreams_rpc::response::Message as SubstreamsMessage; use crate::substreams_rpc::BlockScopedData; @@ -228,14 +229,30 @@ impl BlockWithTriggers { /// Creates a BlockWithTriggers structure, which holds /// the trigger data ordered and without any duplicates. pub fn new(block: C::Block, trigger_data: Vec, logger: &Logger) -> Self { - let mut trigger_data = trigger_data - .into_iter() - .map(|trigger_data| { - let trigger = Trigger::Chain(trigger_data); - trigger - }) - .collect::>(); + Self::new_with_triggers( + block, + trigger_data.into_iter().map(Trigger::Chain).collect(), + logger, + ) + } + + pub fn new_with_subgraph_triggers( + block: C::Block, + trigger_data: Vec, + logger: &Logger, + ) -> Self { + Self::new_with_triggers( + block, + trigger_data.into_iter().map(Trigger::Subgraph).collect(), + logger, + ) + } + fn new_with_triggers( + block: C::Block, + mut trigger_data: Vec>, + logger: &Logger, + ) -> Self { // This is where triggers get sorted. 
trigger_data.sort(); @@ -317,15 +334,7 @@ impl TriggersAdapter for TriggersAdapterWrapper { to: BlockNumber, filter: &Arc>, ) -> Result<(Vec>, BlockNumber), Error> { - self.adapter - .scan_triggers(from, to, filter) - .await - .map(|(mut blocks, next_block)| { - for _ in &mut blocks { - todo!() - } - (blocks, next_block) - }) + self.adapter.scan_triggers(from, to, filter).await } async fn triggers_in_block( From 3ab4941ad540cd8c98195c3720c46eb2d9dc0147 Mon Sep 17 00:00:00 2001 From: incrypto32 Date: Wed, 24 Jul 2024 12:55:05 +0530 Subject: [PATCH 15/21] graph, core: Add a new SubgraphFilter struct --- core/src/subgraph/runner.rs | 13 +++++++++++-- graph/src/blockchain/mod.rs | 12 ++++++++++-- 2 files changed, 21 insertions(+), 4 deletions(-) diff --git a/core/src/subgraph/runner.rs b/core/src/subgraph/runner.rs index a2d7eea4382..9712a9095fe 100644 --- a/core/src/subgraph/runner.rs +++ b/core/src/subgraph/runner.rs @@ -8,7 +8,7 @@ use graph::blockchain::block_stream::{ BlockStreamError, BlockStreamEvent, BlockWithTriggers, FirehoseCursor, }; use graph::blockchain::{ - Block, BlockTime, Blockchain, DataSource as _, Trigger, TriggerFilter as _, + Block, BlockTime, Blockchain, DataSource as _, SubgraphFilter, Trigger, TriggerFilter as _, TriggerFilterWrapper, }; use graph::components::store::{EmptyStore, GetScope, ReadStore, StoredDynamicDataSource}; @@ -136,7 +136,16 @@ where let subgraph_filter = data_sources .iter() .filter_map(|ds| ds.as_subgraph()) - .map(|ds| (ds.source.address(), ds.source.start_block)) + .map(|ds| SubgraphFilter { + subgraph: ds.source.address(), + start_block: ds.source.start_block, + entities: ds + .mapping + .handlers + .iter() + .map(|handler| handler.entity.clone()) + .collect(), + }) .collect::>(); // if static_filters is not enabled we just stick to the filter based on all the data sources. 
diff --git a/graph/src/blockchain/mod.rs b/graph/src/blockchain/mod.rs index 14dceaf45e1..e6c410acc3a 100644 --- a/graph/src/blockchain/mod.rs +++ b/graph/src/blockchain/mod.rs @@ -253,13 +253,21 @@ impl From for IngestorError { #[derive(Debug)] pub struct TriggerFilterWrapper { pub filter: Arc, - pub subgraph_filter: Vec<(DeploymentHash, BlockNumber)>, // TODO(krishna): Make this a struct + pub subgraph_filter: Vec, +} + + +#[derive(Clone, Debug)] +pub struct SubgraphFilter { + pub subgraph: DeploymentHash, + pub start_block: BlockNumber, + pub entities: Vec, } impl TriggerFilterWrapper { pub fn new( filter: C::TriggerFilter, - subgraph_filter: Vec<(DeploymentHash, BlockNumber)>, + subgraph_filter: Vec, ) -> Self { Self { filter: Arc::new(filter), From aec82dcf58ccd33f5e420a34d4565656fc62a805 Mon Sep 17 00:00:00 2001 From: incrypto32 Date: Wed, 24 Jul 2024 12:57:09 +0530 Subject: [PATCH 16/21] chain/ethereum: Mock implementation of subgraph_triggers for ethereum --- chain/ethereum/src/chain.rs | 4 +- chain/ethereum/src/ethereum_adapter.rs | 106 ++++++++++++++++++++++++- graph/src/blockchain/mod.rs | 6 +- 3 files changed, 107 insertions(+), 9 deletions(-) diff --git a/chain/ethereum/src/chain.rs b/chain/ethereum/src/chain.rs index 9dffc477f5c..b5a507565a7 100644 --- a/chain/ethereum/src/chain.rs +++ b/chain/ethereum/src/chain.rs @@ -770,7 +770,7 @@ impl TriggersAdapterTrait for TriggersAdapter { self.ethrpc_metrics.clone(), from, to, - &filter.filter.clone(), + filter, self.unified_api_version.clone(), ) .await @@ -807,7 +807,7 @@ impl TriggersAdapterTrait for TriggersAdapter { self.ethrpc_metrics.clone(), block_number, block_number, - filter, + &Arc::new(TriggerFilterWrapper::::new(filter.clone(), vec![])), // TODO(krishna): This is temporary until we take TriggerFilterWrapper as param in triggers_in_block self.unified_api_version.clone(), ) .await?; diff --git a/chain/ethereum/src/ethereum_adapter.rs b/chain/ethereum/src/ethereum_adapter.rs index 
7c08c61950e..7e9786a6883 100644 --- a/chain/ethereum/src/ethereum_adapter.rs +++ b/chain/ethereum/src/ethereum_adapter.rs @@ -2,11 +2,14 @@ use futures03::{future::BoxFuture, stream::FuturesUnordered}; use graph::blockchain::client::ChainClient; use graph::blockchain::BlockHash; use graph::blockchain::ChainIdentifier; +use graph::blockchain::SubgraphFilter; +use graph::blockchain::TriggerFilterWrapper; use graph::components::transaction_receipt::LightTransactionReceipt; use graph::data::store::ethereum::call; use graph::data::store::scalar; use graph::data::subgraph::UnifiedMappingApiVersion; use graph::data::subgraph::API_VERSION_0_0_7; +use graph::data_source::subgraph; use graph::futures01::stream; use graph::futures01::Future; use graph::futures01::Stream; @@ -18,6 +21,10 @@ use graph::prelude::ethabi::ParamType; use graph::prelude::ethabi::Token; use graph::prelude::tokio::try_join; use graph::prelude::web3::types::U256; +use graph::prelude::DeploymentHash; +use graph::prelude::Entity; +use graph::prelude::Value; +use graph::schema::InputSchema; use graph::slog::o; use graph::tokio::sync::RwLock; use graph::tokio::time::timeout; @@ -66,7 +73,7 @@ use crate::{ }, transport::Transport, trigger::{EthereumBlockTriggerType, EthereumTrigger}, - TriggerFilter, ENV_VARS, + ENV_VARS, }; #[derive(Debug, Clone)] @@ -1722,6 +1729,81 @@ impl EthereumAdapterTrait for EthereumAdapter { } } +// TODO(krishna): Currently this is a mock implementation of subgraph triggers. +// This will be replaced with the actual implementation which will use the filters to +// query the database of the source subgraph and return the entity triggers. 
+async fn subgraph_triggers( + adapter: Arc, + logger: Logger, + chain_store: Arc, + _subgraph_metrics: Arc, + from: BlockNumber, + to: BlockNumber, + filter: &Arc>, + _unified_api_version: UnifiedMappingApiVersion, +) -> Result<(Vec>, BlockNumber), Error> { + let logger2 = logger.cheap_clone(); + let eth = adapter.clone(); + let to_ptr = eth.next_existing_ptr_to_number(&logger, to).await?; + let to = to_ptr.block_number(); + + let first_filter = filter.subgraph_filter.first().unwrap(); + + let blocks = adapter + .load_blocks_by_numbers( + logger.cheap_clone(), + chain_store.clone(), + HashSet::from_iter(from..=to), + ) + .await + .and_then(move |block| { + Ok(BlockWithTriggers::::new_with_subgraph_triggers( + BlockFinality::Final(block.clone()), + vec![create_mock_subgraph_trigger(first_filter, &block)], + &logger2, + )) + }) + .collect() + .compat() + .await?; + + Ok((blocks, to)) +} + +fn create_mock_subgraph_trigger( + filter: &SubgraphFilter, + block: &LightEthereumBlock, +) -> subgraph::TriggerData { + let mock_entity = create_mock_entity(block); + subgraph::TriggerData { + source: filter.subgraph.clone(), + entity: mock_entity, + entity_type: filter.entities.first().unwrap().clone(), + } +} + +fn create_mock_entity(block: &LightEthereumBlock) -> Entity { + let id = DeploymentHash::new("test").unwrap(); + let data_schema = InputSchema::parse_latest( + "type Block @entity { id: Bytes!, number: BigInt!, hash: Bytes! }", + id.clone(), + ) + .unwrap(); + let hash = Value::Bytes(scalar::Bytes::from(block.hash.unwrap().as_bytes().to_vec())); + let data = data_schema + .make_entity(vec![ + ("id".into(), hash.clone()), + ( + "number".into(), + Value::BigInt(scalar::BigInt::from(block.number())), + ), + ("hash".into(), hash), + ]) + .unwrap(); + + data +} + /// Returns blocks with triggers, corresponding to the specified range and filters; and the resolved /// `to` block, which is the nearest non-null block greater than or equal to the passed `to` block. 
/// If a block contains no triggers, there may be no corresponding item in the stream. @@ -1743,13 +1825,33 @@ pub(crate) async fn blocks_with_triggers( subgraph_metrics: Arc, from: BlockNumber, to: BlockNumber, - filter: &TriggerFilter, + filter: &Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result<(Vec>, BlockNumber), Error> { // Each trigger filter needs to be queried for the same block range // and the blocks yielded need to be deduped. If any error occurs // while searching for a trigger type, the entire operation fails. let eth = adapter.clone(); + let subgraph_filter = filter.subgraph_filter.clone(); + + // TODO(krishna): In the initial implementation we do not allow any other data source types. + // When using subgraph data sources, if subgraph_filter is not empty, we can return + // by just processing the subgraph triggers. + if !subgraph_filter.is_empty() { + return subgraph_triggers( + adapter.clone(), + logger.clone(), + chain_store.clone(), + subgraph_metrics.clone(), + from, + to, + filter, + unified_api_version, + ) + .await; + } + + let filter = filter.filter.clone(); let call_filter = EthereumCallFilter::from(&filter.block); // Scan the block range to find relevant triggers diff --git a/graph/src/blockchain/mod.rs b/graph/src/blockchain/mod.rs index e6c410acc3a..ec281f48ff6 100644 --- a/graph/src/blockchain/mod.rs +++ b/graph/src/blockchain/mod.rs @@ -256,7 +256,6 @@ pub struct TriggerFilterWrapper { pub subgraph_filter: Vec, } - #[derive(Clone, Debug)] pub struct SubgraphFilter { pub subgraph: DeploymentHash, @@ -265,10 +264,7 @@ pub struct SubgraphFilter { } impl TriggerFilterWrapper { - pub fn new( - filter: C::TriggerFilter, - subgraph_filter: Vec, - ) -> Self { + pub fn new(filter: C::TriggerFilter, subgraph_filter: Vec) -> Self { Self { filter: Arc::new(filter), subgraph_filter, From ae7559d315512beac8364777df4f084a3d88d387 Mon Sep 17 00:00:00 2001 From: incrypto32 Date: Wed, 24 Jul 2024 14:09:19 +0530 Subject: [PATCH 17/21]
chain,graph : use `chain_head_ptr` method from adapter --- chain/arweave/src/chain.rs | 4 +++ chain/cosmos/src/chain.rs | 4 +++ chain/ethereum/src/chain.rs | 5 ++++ chain/near/src/chain.rs | 4 +++ chain/starknet/src/chain.rs | 4 +++ chain/substreams/src/trigger.rs | 4 +++ graph/src/blockchain/block_stream.rs | 31 +++++--------------- graph/src/blockchain/mock.rs | 5 ++++ graph/src/blockchain/polling_block_stream.rs | 2 +- tests/src/fixture/mod.rs | 4 +++ 10 files changed, 42 insertions(+), 25 deletions(-) diff --git a/chain/arweave/src/chain.rs b/chain/arweave/src/chain.rs index 163aed06204..d890279af43 100644 --- a/chain/arweave/src/chain.rs +++ b/chain/arweave/src/chain.rs @@ -203,6 +203,10 @@ impl TriggersAdapterTrait for TriggersAdapter { panic!("Should never be called since not used by FirehoseBlockStream") } + async fn chain_head_ptr(&self) -> Result, Error> { + unimplemented!() + } + async fn triggers_in_block( &self, logger: &Logger, diff --git a/chain/cosmos/src/chain.rs b/chain/cosmos/src/chain.rs index 7a20adc127f..b7f7d9d2fb5 100644 --- a/chain/cosmos/src/chain.rs +++ b/chain/cosmos/src/chain.rs @@ -197,6 +197,10 @@ impl TriggersAdapterTrait for TriggersAdapter { panic!("Should never be called since not used by FirehoseBlockStream") } + async fn chain_head_ptr(&self) -> Result, Error> { + unimplemented!() + } + async fn scan_triggers( &self, _from: BlockNumber, diff --git a/chain/ethereum/src/chain.rs b/chain/ethereum/src/chain.rs index b5a507565a7..2fff79ff7d2 100644 --- a/chain/ethereum/src/chain.rs +++ b/chain/ethereum/src/chain.rs @@ -776,6 +776,11 @@ impl TriggersAdapterTrait for TriggersAdapter { .await } + async fn chain_head_ptr(&self) -> Result, Error> { + let chain_store = self.chain_store.clone(); + chain_store.chain_head_ptr().await + } + async fn triggers_in_block( &self, logger: &Logger, diff --git a/chain/near/src/chain.rs b/chain/near/src/chain.rs index 73a17a6640a..18450159cf4 100644 --- a/chain/near/src/chain.rs +++ 
b/chain/near/src/chain.rs @@ -336,6 +336,10 @@ impl TriggersAdapterTrait for TriggersAdapter { panic!("Should never be called since not used by FirehoseBlockStream") } + async fn chain_head_ptr(&self) -> Result, Error> { + unimplemented!() + } + async fn triggers_in_block( &self, logger: &Logger, diff --git a/chain/starknet/src/chain.rs b/chain/starknet/src/chain.rs index 620c8412760..2d69e2a16c6 100644 --- a/chain/starknet/src/chain.rs +++ b/chain/starknet/src/chain.rs @@ -383,6 +383,10 @@ impl TriggersAdapterTrait for TriggersAdapter { panic!("Should never be called since FirehoseBlockStream cannot resolve it") } + async fn chain_head_ptr(&self) -> Result, Error> { + unimplemented!() + } + // Returns a sequence of blocks in increasing order of block number. // Each block will include all of its triggers that match the given `filter`. // The sequence may omit blocks that contain no triggers, diff --git a/chain/substreams/src/trigger.rs b/chain/substreams/src/trigger.rs index 5f919a12a90..becfecac505 100644 --- a/chain/substreams/src/trigger.rs +++ b/chain/substreams/src/trigger.rs @@ -137,6 +137,10 @@ impl blockchain::TriggersAdapter for TriggersAdapter { unimplemented!() } + async fn chain_head_ptr(&self) -> Result, Error> { + unimplemented!() + } + async fn scan_triggers( &self, _from: BlockNumber, diff --git a/graph/src/blockchain/block_stream.rs b/graph/src/blockchain/block_stream.rs index a528bc97c15..523a1e0a469 100644 --- a/graph/src/blockchain/block_stream.rs +++ b/graph/src/blockchain/block_stream.rs @@ -353,31 +353,11 @@ impl TriggersAdapter for TriggersAdapterWrapper { async fn parent_ptr(&self, block: &BlockPtr) -> Result, Error> { self.adapter.parent_ptr(block).await } -} -// fn create_mock_trigger() -> Trigger { -// let entity = create_mock_entity(); -// Trigger::Subgraph(subgraph::TriggerData { -// source: DeploymentHash::new("test").unwrap(), -// entity, -// entity_type: "User".to_string(), -// }) -// } - -// fn create_mock_entity() -> Entity { -// 
let schema = InputSchema::parse_latest( -// "type User @entity { id: String!, val: String! }", -// DeploymentHash::new("test").unwrap(), -// ) -// .unwrap(); - -// schema -// .make_entity(vec![ -// ("id".into(), Value::String("id".to_owned())), -// ("val".into(), Value::String("content".to_owned())), -// ]) -// .unwrap() -// } + async fn chain_head_ptr(&self) -> Result, Error> { + self.adapter.chain_head_ptr().await + } +} #[async_trait] pub trait TriggersAdapter: Send + Sync { @@ -419,6 +399,9 @@ pub trait TriggersAdapter: Send + Sync { /// Get pointer to parent of `block`. This is called when reverting `block`. async fn parent_ptr(&self, block: &BlockPtr) -> Result, Error>; + + /// Get the pointer to the current chain head block. + async fn chain_head_ptr(&self) -> Result, Error>; } #[async_trait] diff --git a/graph/src/blockchain/mock.rs b/graph/src/blockchain/mock.rs index c08f688a4b4..b1bef71fda9 100644 --- a/graph/src/blockchain/mock.rs +++ b/graph/src/blockchain/mock.rs @@ -228,6 +228,11 @@ impl TriggersAdapter for MockTriggersAdapter { todo!() } + async fn chain_head_ptr(&self) -> Result, Error> { + unimplemented!() + } + + async fn scan_triggers( &self, from: crate::components::store::BlockNumber, diff --git a/graph/src/blockchain/polling_block_stream.rs b/graph/src/blockchain/polling_block_stream.rs index 8cf466a6486..307f71cf283 100644 --- a/graph/src/blockchain/polling_block_stream.rs +++ b/graph/src/blockchain/polling_block_stream.rs @@ -218,7 +218,7 @@ where let max_block_range_size = self.max_block_range_size; // Get pointers from database for comparison - let head_ptr_opt = ctx.chain_store.chain_head_ptr().await?; + let head_ptr_opt = ctx.adapter.chain_head_ptr().await?; let subgraph_ptr = self.current_block.clone(); // If chain head ptr is not set yet diff --git a/tests/src/fixture/mod.rs b/tests/src/fixture/mod.rs index 358c5a8e425..3d329eb6163 100644 --- a/tests/src/fixture/mod.rs +++ b/tests/src/fixture/mod.rs @@ -1002,6
+1002,10 @@ impl TriggersAdapter for MockTriggersAdapter { todo!() } + async fn chain_head_ptr(&self) -> Result, Error> { + todo!() + } + async fn scan_triggers( &self, _from: BlockNumber, From 686e7e19ae5db8c55d438a0cf05d9378a896e630 Mon Sep 17 00:00:00 2001 From: incrypto32 Date: Thu, 25 Jul 2024 10:11:26 +0530 Subject: [PATCH 18/21] chain, core, graph : use TriggersAdapterWrapper at top level --- chain/ethereum/src/chain.rs | 85 +++++--------------- chain/near/src/chain.rs | 15 +--- chain/starknet/src/chain.rs | 14 +--- chain/substreams/src/block_stream.rs | 14 +--- core/src/subgraph/inputs.rs | 4 +- core/src/subgraph/instance_manager.rs | 7 +- graph/src/blockchain/block_stream.rs | 29 ++++--- graph/src/blockchain/polling_block_stream.rs | 6 +- tests/src/fixture/mod.rs | 50 +++--------- 9 files changed, 69 insertions(+), 155 deletions(-) diff --git a/chain/ethereum/src/chain.rs b/chain/ethereum/src/chain.rs index 2fff79ff7d2..73489ae003f 100644 --- a/chain/ethereum/src/chain.rs +++ b/chain/ethereum/src/chain.rs @@ -133,62 +133,16 @@ impl BlockStreamBuilder for EthereumStreamBuilder { filter: Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>> { - let requirements = filter.filter.node_capabilities(); - let adapter = chain - .triggers_adapter(&deployment, &requirements, unified_api_version.clone()) - .unwrap_or_else(|_| { - panic!( - "no adapter for network {} with capabilities {}", - chain.name, requirements - ) - }); - - let adapter = Arc::new(TriggersAdapterWrapper::new(adapter, source_subgraph_stores)); - - let logger = chain - .logger_factory - .subgraph_logger(&deployment) - .new(o!("component" => "BlockStream")); - let chain_store = chain.chain_store(); - let chain_head_update_stream = chain - .chain_head_update_listener - .subscribe(chain.name.to_string(), logger.clone()); - - // Special case: Detect Celo and set the threshold to 0, so that eth_getLogs is always used. - // This is ok because Celo blocks are always final. 
And we _need_ to do this because - // some events appear only in eth_getLogs but not in transaction receipts. - // See also ca0edc58-0ec5-4c89-a7dd-2241797f5e50. - let chain_id = match chain.chain_client().as_ref() { - ChainClient::Rpc(adapter) => { - adapter - .cheapest() - .await - .ok_or(anyhow!("unable to get eth adapter for chan_id call"))? - .chain_id() - .await? - } - _ => panic!("expected rpc when using polling blockstream"), - }; - let reorg_threshold = match CELO_CHAIN_IDS.contains(&chain_id) { - false => chain.reorg_threshold, - true => 0, - }; - - Ok(Box::new(PollingBlockStream::new( - chain_store, - chain_head_update_stream, - adapter, - chain.node_id.clone(), - deployment.hash, - filter, + self.build_polling( + chain, + deployment, start_blocks, - reorg_threshold, - logger, - ENV_VARS.max_block_range_size, - ENV_VARS.target_triggers_per_block_range, - unified_api_version, + source_subgraph_stores, subgraph_current_block, - ))) + filter, + unified_api_version, + ) + .await } async fn build_polling( @@ -196,19 +150,23 @@ impl BlockStreamBuilder for EthereumStreamBuilder { chain: &Chain, deployment: DeploymentLocator, start_blocks: Vec, + source_subgraph_stores: Vec<(DeploymentHash, Arc)>, subgraph_current_block: Option, filter: Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>> { let requirements = filter.filter.node_capabilities(); - let adapter = chain - .triggers_adapter(&deployment, &requirements, unified_api_version.clone()) - .unwrap_or_else(|_| { - panic!( - "no adapter for network {} with capabilities {}", - chain.name, requirements - ) - }); + let adapter = TriggersAdapterWrapper::new( + chain + .triggers_adapter(&deployment, &requirements, unified_api_version.clone()) + .unwrap_or_else(|_| { + panic!( + "no adapter for network {} with capabilities {}", + chain.name, requirements + ) + }), + source_subgraph_stores, + ); let logger = chain .logger_factory @@ -242,7 +200,7 @@ impl BlockStreamBuilder for EthereumStreamBuilder { 
Ok(Box::new(PollingBlockStream::new( chain_store, chain_head_update_stream, - adapter, + Arc::new(adapter), chain.node_id.clone(), deployment.hash, filter, @@ -507,6 +465,7 @@ impl Blockchain for Chain { self, deployment, start_blocks, + source_subgraph_stores, current_ptr, filter, unified_api_version, diff --git a/chain/near/src/chain.rs b/chain/near/src/chain.rs index 18450159cf4..4d67e49bd48 100644 --- a/chain/near/src/chain.rs +++ b/chain/near/src/chain.rs @@ -108,20 +108,6 @@ impl BlockStreamBuilder for NearStreamBuilder { chain.metrics_registry.clone(), ))) } - - async fn build_subgraph_block_stream( - &self, - _chain: &Chain, - _deployment: DeploymentLocator, - _start_blocks: Vec, - _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, - _subgraph_current_block: Option, - _filter: Arc>, - _unified_api_version: UnifiedMappingApiVersion, - ) -> Result>> { - unimplemented!() - } - async fn build_firehose( &self, chain: &Chain, @@ -164,6 +150,7 @@ impl BlockStreamBuilder for NearStreamBuilder { _chain: &Chain, _deployment: DeploymentLocator, _start_blocks: Vec, + _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, _subgraph_current_block: Option, _filter: Arc>, _unified_api_version: UnifiedMappingApiVersion, diff --git a/chain/starknet/src/chain.rs b/chain/starknet/src/chain.rs index 2d69e2a16c6..583ea30bd69 100644 --- a/chain/starknet/src/chain.rs +++ b/chain/starknet/src/chain.rs @@ -197,19 +197,6 @@ impl BlockStreamBuilder for StarknetStreamBuilder { unimplemented!() } - async fn build_subgraph_block_stream( - &self, - _chain: &Chain, - _deployment: DeploymentLocator, - _start_blocks: Vec, - _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, - _subgraph_current_block: Option, - _filter: Arc>, - _unified_api_version: UnifiedMappingApiVersion, - ) -> Result>> { - unimplemented!() - } - async fn build_firehose( &self, chain: &Chain, @@ -252,6 +239,7 @@ impl BlockStreamBuilder for StarknetStreamBuilder { _chain: &Chain, _deployment: DeploymentLocator, 
_start_blocks: Vec, + _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, _subgraph_current_block: Option, _filter: Arc>, _unified_api_version: UnifiedMappingApiVersion, diff --git a/chain/substreams/src/block_stream.rs b/chain/substreams/src/block_stream.rs index 24cedade570..3687bab0b91 100644 --- a/chain/substreams/src/block_stream.rs +++ b/chain/substreams/src/block_stream.rs @@ -99,24 +99,12 @@ impl BlockStreamBuilderTrait for BlockStreamBuilder { unimplemented!() } - async fn build_subgraph_block_stream( - &self, - _chain: &Chain, - _deployment: DeploymentLocator, - _start_blocks: Vec, - _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, - _subgraph_current_block: Option, - _filter: Arc>, - _unified_api_version: UnifiedMappingApiVersion, - ) -> Result>> { - unimplemented!() - } - async fn build_polling( &self, _chain: &Chain, _deployment: DeploymentLocator, _start_blocks: Vec, + _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, _subgraph_current_block: Option, _filter: Arc>, _unified_api_version: UnifiedMappingApiVersion, diff --git a/core/src/subgraph/inputs.rs b/core/src/subgraph/inputs.rs index 3a12e33415f..386b1a5e35b 100644 --- a/core/src/subgraph/inputs.rs +++ b/core/src/subgraph/inputs.rs @@ -1,5 +1,5 @@ use graph::{ - blockchain::{Blockchain, TriggersAdapter}, + blockchain::{block_stream::TriggersAdapterWrapper, Blockchain}, components::{ store::{DeploymentLocator, SubgraphFork, WritableStore}, subgraph::ProofOfIndexingVersion, @@ -20,7 +20,7 @@ pub struct IndexingInputs { pub stop_block: Option, pub store: Arc, pub debug_fork: Option>, - pub triggers_adapter: Arc>, + pub triggers_adapter: Arc>, pub chain: Arc, pub templates: Arc>>, pub unified_api_version: UnifiedMappingApiVersion, diff --git a/core/src/subgraph/instance_manager.rs b/core/src/subgraph/instance_manager.rs index 11ce5f065c7..20ad464339f 100644 --- a/core/src/subgraph/instance_manager.rs +++ b/core/src/subgraph/instance_manager.rs @@ -6,7 +6,7 @@ use crate::subgraph::Decoder; use 
std::collections::BTreeSet; use crate::subgraph::runner::SubgraphRunner; -use graph::blockchain::block_stream::BlockStreamMetrics; +use graph::blockchain::block_stream::{BlockStreamMetrics, TriggersAdapterWrapper}; use graph::blockchain::{Blockchain, BlockchainKind, DataSource, NodeCapabilities}; use graph::components::metrics::gas::GasMetrics; use graph::components::store::WritableStore; @@ -501,6 +501,11 @@ impl SubgraphInstanceManager { ) .await?; + let triggers_adapter = Arc::new(TriggersAdapterWrapper::new( + triggers_adapter, + subgraph_data_source_writables.clone(), + )); + let inputs = IndexingInputs { deployment: deployment.clone(), features, diff --git a/graph/src/blockchain/block_stream.rs b/graph/src/blockchain/block_stream.rs index 523a1e0a469..c043a5d42eb 100644 --- a/graph/src/blockchain/block_stream.rs +++ b/graph/src/blockchain/block_stream.rs @@ -145,6 +145,7 @@ pub trait BlockStreamBuilder: Send + Sync { chain: &C, deployment: DeploymentLocator, start_blocks: Vec, + source_subgraph_stores: Vec<(DeploymentHash, Arc)>, subgraph_current_block: Option, filter: Arc>, unified_api_version: UnifiedMappingApiVersion, @@ -159,7 +160,18 @@ pub trait BlockStreamBuilder: Send + Sync { subgraph_current_block: Option, filter: Arc>, unified_api_version: UnifiedMappingApiVersion, - ) -> Result>>; + ) -> Result>> { + self.build_polling( + chain, + deployment, + start_blocks, + source_subgraph_stores, + subgraph_current_block, + filter, + unified_api_version, + ) + .await + } } #[derive(Debug, Clone)] @@ -316,9 +328,8 @@ impl TriggersAdapterWrapper { } } -#[async_trait] -impl TriggersAdapter for TriggersAdapterWrapper { - async fn ancestor_block( +impl TriggersAdapterWrapper { + pub async fn ancestor_block( &self, ptr: BlockPtr, offset: BlockNumber, @@ -328,7 +339,7 @@ impl TriggersAdapter for TriggersAdapterWrapper { } // TODO: Do a proper implementation, this is a complete mock implementation - async fn scan_triggers( + pub async fn scan_triggers( &self, from: 
BlockNumber, to: BlockNumber, @@ -337,7 +348,7 @@ impl TriggersAdapter for TriggersAdapterWrapper { self.adapter.scan_triggers(from, to, filter).await } - async fn triggers_in_block( + pub async fn triggers_in_block( &self, logger: &Logger, block: C::Block, @@ -346,15 +357,15 @@ impl TriggersAdapter for TriggersAdapterWrapper { self.adapter.triggers_in_block(logger, block, filter).await } - async fn is_on_main_chain(&self, ptr: BlockPtr) -> Result { + pub async fn is_on_main_chain(&self, ptr: BlockPtr) -> Result { self.adapter.is_on_main_chain(ptr).await } - async fn parent_ptr(&self, block: &BlockPtr) -> Result, Error> { + pub async fn parent_ptr(&self, block: &BlockPtr) -> Result, Error> { self.adapter.parent_ptr(block).await } - async fn chain_head_ptr(&self) -> Result, Error> { + pub async fn chain_head_ptr(&self) -> Result, Error> { self.adapter.chain_head_ptr().await } } diff --git a/graph/src/blockchain/polling_block_stream.rs b/graph/src/blockchain/polling_block_stream.rs index 307f71cf283..81fb4804c97 100644 --- a/graph/src/blockchain/polling_block_stream.rs +++ b/graph/src/blockchain/polling_block_stream.rs @@ -9,7 +9,7 @@ use std::time::Duration; use super::block_stream::{ BlockStream, BlockStreamError, BlockStreamEvent, BlockWithTriggers, ChainHeadUpdateStream, - FirehoseCursor, TriggersAdapter, BUFFERED_BLOCK_STREAM_SIZE, + FirehoseCursor, TriggersAdapterWrapper, BUFFERED_BLOCK_STREAM_SIZE, }; use super::{Block, BlockPtr, Blockchain, TriggerFilterWrapper}; @@ -79,7 +79,7 @@ where C: Blockchain, { chain_store: Arc, - adapter: Arc>, + adapter: Arc>, node_id: NodeId, subgraph_id: DeploymentHash, // This is not really a block number, but the (unsigned) difference @@ -146,7 +146,7 @@ where pub fn new( chain_store: Arc, chain_head_update_stream: ChainHeadUpdateStream, - adapter: Arc>, + adapter: Arc>, node_id: NodeId, subgraph_id: DeploymentHash, filter: Arc>, diff --git a/tests/src/fixture/mod.rs b/tests/src/fixture/mod.rs index 3d329eb6163..b58e436eba2 
100644 --- a/tests/src/fixture/mod.rs +++ b/tests/src/fixture/mod.rs @@ -718,7 +718,7 @@ impl BlockStreamBuilder for MutexBlockStreamBuilder { unimplemented!(); } - async fn build_subgraph_block_stream( + async fn build_polling( &self, chain: &C, deployment: DeploymentLocator, @@ -731,7 +731,7 @@ impl BlockStreamBuilder for MutexBlockStreamBuilder { let builder = self.0.lock().unwrap().clone(); builder - .build_subgraph_block_stream( + .build_polling( chain, deployment, start_blocks, @@ -742,18 +742,6 @@ impl BlockStreamBuilder for MutexBlockStreamBuilder { ) .await } - - async fn build_polling( - &self, - _chain: &C, - _deployment: DeploymentLocator, - _start_blocks: Vec, - _subgraph_current_block: Option, - _filter: Arc>, - _unified_api_version: graph::data::subgraph::UnifiedMappingApiVersion, - ) -> anyhow::Result>> { - unimplemented!("only firehose mode should be used for tests") - } } /// `chain` is the sequence of chain heads to be processed. If the next block to be processed in the @@ -782,17 +770,17 @@ where unimplemented!() } - async fn build_subgraph_block_stream( + async fn build_firehose( &self, _chain: &C, _deployment: DeploymentLocator, - _start_blocks: Vec, - _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, - subgraph_current_block: Option, - _filter: Arc>, + _block_cursor: FirehoseCursor, + _start_blocks: Vec, + current_block: Option, + _filter: Arc, _unified_api_version: graph::data::subgraph::UnifiedMappingApiVersion, ) -> anyhow::Result>> { - let current_idx = subgraph_current_block.map(|current_block| { + let current_idx = current_block.map(|current_block| { self.chain .iter() .enumerate() @@ -805,17 +793,17 @@ where })) } - async fn build_firehose( + async fn build_polling( &self, _chain: &C, _deployment: DeploymentLocator, - _block_cursor: FirehoseCursor, _start_blocks: Vec, - current_block: Option, - _filter: Arc, + _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, + subgraph_current_block: Option, + _filter: Arc>, 
_unified_api_version: graph::data::subgraph::UnifiedMappingApiVersion, ) -> anyhow::Result>> { - let current_idx = current_block.map(|current_block| { + let current_idx = subgraph_current_block.map(|current_block| { self.chain .iter() .enumerate() @@ -827,18 +815,6 @@ where stream: Box::pin(stream_events(self.chain.clone(), current_idx)), })) } - - async fn build_polling( - &self, - _chain: &C, - _deployment: DeploymentLocator, - _start_blocks: Vec, - _subgraph_current_block: Option, - _filter: Arc>, - _unified_api_version: graph::data::subgraph::UnifiedMappingApiVersion, - ) -> anyhow::Result>> { - unimplemented!("only firehose mode should be used for tests") - } } struct StaticStream { From 3aba98f17f3daed7edc51b2fe39ae8894c6b9a76 Mon Sep 17 00:00:00 2001 From: incrypto32 Date: Thu, 25 Jul 2024 13:35:51 +0530 Subject: [PATCH 19/21] chain, graph : move subgraph trigger scanning back to TriggersAdapterWrapper --- chain/arweave/src/chain.rs | 14 ++- chain/cosmos/src/chain.rs | 15 ++- chain/ethereum/src/adapter.rs | 6 +- chain/ethereum/src/chain.rs | 32 +++++- chain/ethereum/src/ethereum_adapter.rs | 106 +----------------- chain/near/src/chain.rs | 16 ++- chain/starknet/src/chain.rs | 15 ++- chain/substreams/src/chain.rs | 2 +- chain/substreams/src/trigger.rs | 15 ++- graph/src/blockchain/block_stream.rs | 90 ++++++++++++++- graph/src/blockchain/mock.rs | 16 ++- graph/src/blockchain/mod.rs | 6 +- graph/src/blockchain/polling_block_stream.rs | 2 +- .../subgraph/proof_of_indexing/online.rs | 4 +- tests/src/fixture/mod.rs | 12 +- tests/tests/runner_tests.rs | 23 +++- 16 files changed, 228 insertions(+), 146 deletions(-) diff --git a/chain/arweave/src/chain.rs b/chain/arweave/src/chain.rs index d890279af43..e897e10d8d8 100644 --- a/chain/arweave/src/chain.rs +++ b/chain/arweave/src/chain.rs @@ -27,11 +27,13 @@ use graph::{ prelude::{async_trait, o, BlockNumber, ChainStore, Error, Logger, LoggerFactory}, }; use prost::Message; +use std::collections::HashSet; use 
std::sync::Arc; use crate::adapter::TriggerFilter; use crate::data_source::{DataSourceTemplate, UnresolvedDataSourceTemplate}; use crate::trigger::{self, ArweaveTrigger}; +use crate::Block as ArweaveBlock; use crate::{ codec, data_source::{DataSource, UnresolvedDataSource}, @@ -138,7 +140,7 @@ impl Blockchain for Chain { let firehose_mapper = Arc::new(FirehoseMapper { adapter, - filter: filter.filter.clone(), + filter: filter.chain_filter.clone(), }); Ok(Box::new(FirehoseBlockStream::new( @@ -198,7 +200,7 @@ impl TriggersAdapterTrait for TriggersAdapter { &self, _from: BlockNumber, _to: BlockNumber, - _filter: &Arc>, + _filter: &TriggerFilter, ) -> Result<(Vec>, BlockNumber), Error> { panic!("Should never be called since not used by FirehoseBlockStream") } @@ -266,6 +268,14 @@ impl TriggersAdapterTrait for TriggersAdapter { number: block.number.saturating_sub(1), })) } + + async fn load_blocks_by_numbers( + &self, + _logger: Logger, + _block_numbers: HashSet, + ) -> Result, Error> { + todo!() + } } pub struct FirehoseMapper { diff --git a/chain/cosmos/src/chain.rs b/chain/cosmos/src/chain.rs index b7f7d9d2fb5..83a299b6163 100644 --- a/chain/cosmos/src/chain.rs +++ b/chain/cosmos/src/chain.rs @@ -4,6 +4,7 @@ use graph::components::adapter::ChainId; use graph::env::EnvVars; use graph::prelude::{DeploymentHash, MetricsRegistry}; use graph::substreams::Clock; +use std::collections::HashSet; use std::convert::TryFrom; use std::sync::Arc; @@ -33,7 +34,7 @@ use crate::data_source::{ DataSource, DataSourceTemplate, EventOrigin, UnresolvedDataSource, UnresolvedDataSourceTemplate, }; use crate::trigger::CosmosTrigger; -use crate::{codec, TriggerFilter}; +use crate::{codec, Block, TriggerFilter}; pub struct Chain { logger_factory: LoggerFactory, @@ -132,7 +133,7 @@ impl Blockchain for Chain { let firehose_mapper = Arc::new(FirehoseMapper { adapter, - filter: filter.filter.clone(), + filter: filter.chain_filter.clone(), }); Ok(Box::new(FirehoseBlockStream::new( @@ -197,6 
+198,14 @@ impl TriggersAdapterTrait for TriggersAdapter { panic!("Should never be called since not used by FirehoseBlockStream") } + async fn load_blocks_by_numbers( + &self, + _logger: Logger, + _block_numbers: HashSet, + ) -> Result, Error> { + unimplemented!() + } + async fn chain_head_ptr(&self) -> Result, Error> { unimplemented!() } @@ -205,7 +214,7 @@ impl TriggersAdapterTrait for TriggersAdapter { &self, _from: BlockNumber, _to: BlockNumber, - _filter: &Arc>, + _filter: &TriggerFilter, ) -> Result<(Vec>, BlockNumber), Error> { panic!("Should never be called since not used by FirehoseBlockStream") } diff --git a/chain/ethereum/src/adapter.rs b/chain/ethereum/src/adapter.rs index 31083724809..3d4dc00c030 100644 --- a/chain/ethereum/src/adapter.rs +++ b/chain/ethereum/src/adapter.rs @@ -1111,9 +1111,9 @@ pub trait EthereumAdapter: Send + Sync + 'static { async fn load_blocks_by_numbers( &self, - logger: Logger, - chain_store: Arc, - block_numbers: HashSet, + _logger: Logger, + _chain_store: Arc, + _block_numbers: HashSet, ) -> Box, Error = Error> + Send>; /// Load Ethereum blocks in bulk, returning results as they come back as a Stream. 
diff --git a/chain/ethereum/src/chain.rs b/chain/ethereum/src/chain.rs index 73489ae003f..9b8e71b0fe2 100644 --- a/chain/ethereum/src/chain.rs +++ b/chain/ethereum/src/chain.rs @@ -155,7 +155,7 @@ impl BlockStreamBuilder for EthereumStreamBuilder { filter: Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>> { - let requirements = filter.filter.node_capabilities(); + let requirements = filter.chain_filter.node_capabilities(); let adapter = TriggersAdapterWrapper::new( chain .triggers_adapter(&deployment, &requirements, unified_api_version.clone()) @@ -480,7 +480,7 @@ impl Blockchain for Chain { store.firehose_cursor(), start_blocks, current_ptr, - filter.filter.clone(), + filter.chain_filter.clone(), unified_api_version, ) .await @@ -717,7 +717,7 @@ impl TriggersAdapterTrait for TriggersAdapter { &self, from: BlockNumber, to: BlockNumber, - filter: &Arc>, + filter: &TriggerFilter, ) -> Result<(Vec>, BlockNumber), Error> { blocks_with_triggers( self.chain_client @@ -735,6 +735,30 @@ impl TriggersAdapterTrait for TriggersAdapter { .await } + async fn load_blocks_by_numbers( + &self, + logger: Logger, + block_numbers: HashSet, + ) -> Result> { + use graph::futures01::stream::Stream; + + let adapter = self + .chain_client + .rpc()? 
+ .cheapest_with(&self.capabilities) + .await?; + + let blocks = adapter + .load_blocks_by_numbers(logger, self.chain_store.clone(), block_numbers) + .await + .map(|block| BlockFinality::Final(block)) + .collect() + .compat() + .await?; + + Ok(blocks) + } + async fn chain_head_ptr(&self) -> Result, Error> { let chain_store = self.chain_store.clone(); chain_store.chain_head_ptr().await @@ -771,7 +795,7 @@ impl TriggersAdapterTrait for TriggersAdapter { self.ethrpc_metrics.clone(), block_number, block_number, - &Arc::new(TriggerFilterWrapper::::new(filter.clone(), vec![])), // TODO(krishna): This is temporary until we take TriggerFilterWrapper as param in triggers_in_block + filter, self.unified_api_version.clone(), ) .await?; diff --git a/chain/ethereum/src/ethereum_adapter.rs b/chain/ethereum/src/ethereum_adapter.rs index 7e9786a6883..9fe0b8262b2 100644 --- a/chain/ethereum/src/ethereum_adapter.rs +++ b/chain/ethereum/src/ethereum_adapter.rs @@ -2,14 +2,12 @@ use futures03::{future::BoxFuture, stream::FuturesUnordered}; use graph::blockchain::client::ChainClient; use graph::blockchain::BlockHash; use graph::blockchain::ChainIdentifier; -use graph::blockchain::SubgraphFilter; -use graph::blockchain::TriggerFilterWrapper; + use graph::components::transaction_receipt::LightTransactionReceipt; use graph::data::store::ethereum::call; use graph::data::store::scalar; use graph::data::subgraph::UnifiedMappingApiVersion; use graph::data::subgraph::API_VERSION_0_0_7; -use graph::data_source::subgraph; use graph::futures01::stream; use graph::futures01::Future; use graph::futures01::Stream; @@ -21,10 +19,6 @@ use graph::prelude::ethabi::ParamType; use graph::prelude::ethabi::Token; use graph::prelude::tokio::try_join; use graph::prelude::web3::types::U256; -use graph::prelude::DeploymentHash; -use graph::prelude::Entity; -use graph::prelude::Value; -use graph::schema::InputSchema; use graph::slog::o; use graph::tokio::sync::RwLock; use graph::tokio::time::timeout; @@ -65,6 
+59,7 @@ use crate::chain::BlockFinality; use crate::trigger::LogRef; use crate::Chain; use crate::NodeCapabilities; +use crate::TriggerFilter; use crate::{ adapter::{ ContractCall, ContractCallError, EthGetLogsFilter, EthereumAdapter as EthereumAdapterTrait, @@ -1729,81 +1724,6 @@ impl EthereumAdapterTrait for EthereumAdapter { } } -// TODO(krishna): Currently this is a mock implementation of subgraph triggers. -// This will be replaced with the actual implementation which will use the filters to -// query the database of the source subgraph and return the entity triggers. -async fn subgraph_triggers( - adapter: Arc, - logger: Logger, - chain_store: Arc, - _subgraph_metrics: Arc, - from: BlockNumber, - to: BlockNumber, - filter: &Arc>, - _unified_api_version: UnifiedMappingApiVersion, -) -> Result<(Vec>, BlockNumber), Error> { - let logger2 = logger.cheap_clone(); - let eth = adapter.clone(); - let to_ptr = eth.next_existing_ptr_to_number(&logger, to).await?; - let to = to_ptr.block_number(); - - let first_filter = filter.subgraph_filter.first().unwrap(); - - let blocks = adapter - .load_blocks_by_numbers( - logger.cheap_clone(), - chain_store.clone(), - HashSet::from_iter(from..=to), - ) - .await - .and_then(move |block| { - Ok(BlockWithTriggers::::new_with_subgraph_triggers( - BlockFinality::Final(block.clone()), - vec![create_mock_subgraph_trigger(first_filter, &block)], - &logger2, - )) - }) - .collect() - .compat() - .await?; - - Ok((blocks, to)) -} - -fn create_mock_subgraph_trigger( - filter: &SubgraphFilter, - block: &LightEthereumBlock, -) -> subgraph::TriggerData { - let mock_entity = create_mock_entity(block); - subgraph::TriggerData { - source: filter.subgraph.clone(), - entity: mock_entity, - entity_type: filter.entities.first().unwrap().clone(), - } -} - -fn create_mock_entity(block: &LightEthereumBlock) -> Entity { - let id = DeploymentHash::new("test").unwrap(); - let data_schema = InputSchema::parse_latest( - "type Block @entity { id: Bytes!, 
number: BigInt!, hash: Bytes! }", - id.clone(), - ) - .unwrap(); - let hash = Value::Bytes(scalar::Bytes::from(block.hash.unwrap().as_bytes().to_vec())); - let data = data_schema - .make_entity(vec![ - ("id".into(), hash.clone()), - ( - "number".into(), - Value::BigInt(scalar::BigInt::from(block.number())), - ), - ("hash".into(), hash), - ]) - .unwrap(); - - data -} - /// Returns blocks with triggers, corresponding to the specified range and filters; and the resolved /// `to` block, which is the nearest non-null block greater than or equal to the passed `to` block. /// If a block contains no triggers, there may be no corresponding item in the stream. @@ -1825,33 +1745,13 @@ pub(crate) async fn blocks_with_triggers( subgraph_metrics: Arc, from: BlockNumber, to: BlockNumber, - filter: &Arc>, + filter: &TriggerFilter, unified_api_version: UnifiedMappingApiVersion, ) -> Result<(Vec>, BlockNumber), Error> { // Each trigger filter needs to be queried for the same block range // and the blocks yielded need to be deduped. If any error occurs // while searching for a trigger type, the entire operation fails. let eth = adapter.clone(); - let subgraph_filter = filter.subgraph_filter.clone(); - - // TODO(krishna): In the initial implementation we do not allow any other datasource type - // When using subgraph data sources, there if subgraph_filter is not empty, we can return - // by just processing the subgraph triggers. 
- if !subgraph_filter.is_empty() { - return subgraph_triggers( - adapter.clone(), - logger.clone(), - chain_store.clone(), - subgraph_metrics.clone(), - from, - to, - filter, - unified_api_version, - ) - .await; - } - - let filter = filter.filter.clone(); let call_filter = EthereumCallFilter::from(&filter.block); // Scan the block range to find relevant triggers diff --git a/chain/near/src/chain.rs b/chain/near/src/chain.rs index 4d67e49bd48..44c0449adce 100644 --- a/chain/near/src/chain.rs +++ b/chain/near/src/chain.rs @@ -32,10 +32,12 @@ use graph::{ prelude::{async_trait, o, BlockNumber, ChainStore, Error, Logger, LoggerFactory}, }; use prost::Message; +use std::collections::HashSet; use std::sync::Arc; use crate::adapter::TriggerFilter; use crate::codec::substreams_triggers::BlockAndReceipts; +use crate::codec::Block; use crate::data_source::{DataSourceTemplate, UnresolvedDataSourceTemplate}; use crate::trigger::{self, NearTrigger}; use crate::{ @@ -243,7 +245,7 @@ impl Blockchain for Chain { deployment, store.firehose_cursor(), store.block_ptr(), - filter.filter.clone(), + filter.chain_filter.clone(), ) .await; } @@ -255,7 +257,7 @@ impl Blockchain for Chain { store.firehose_cursor(), start_blocks, store.block_ptr(), - filter.filter.clone(), + filter.chain_filter.clone(), unified_api_version, ) .await @@ -318,11 +320,19 @@ impl TriggersAdapterTrait for TriggersAdapter { &self, _from: BlockNumber, _to: BlockNumber, - _filter: &Arc>, + _filter: &TriggerFilter, ) -> Result<(Vec>, BlockNumber), Error> { panic!("Should never be called since not used by FirehoseBlockStream") } + async fn load_blocks_by_numbers( + &self, + _logger: Logger, + _block_numbers: HashSet, + ) -> Result> { + unimplemented!() + } + async fn chain_head_ptr(&self) -> Result, Error> { unimplemented!() } diff --git a/chain/starknet/src/chain.rs b/chain/starknet/src/chain.rs index 583ea30bd69..b6ff51a9bcd 100644 --- a/chain/starknet/src/chain.rs +++ b/chain/starknet/src/chain.rs @@ -30,7 +30,7 @@ 
use graph::{ slog::o, }; use prost::Message; -use std::sync::Arc; +use std::{collections::HashSet, sync::Arc}; use crate::{ adapter::TriggerFilter, @@ -39,6 +39,7 @@ use crate::{ DataSource, DataSourceTemplate, UnresolvedDataSource, UnresolvedDataSourceTemplate, }, trigger::{StarknetBlockTrigger, StarknetEventTrigger, StarknetTrigger}, + Block as StarknetBlock, }; pub struct Chain { @@ -126,7 +127,7 @@ impl Blockchain for Chain { store.firehose_cursor(), start_blocks, store.block_ptr(), - filter.filter.clone(), + filter.chain_filter.clone(), unified_api_version, ) .await @@ -371,6 +372,14 @@ impl TriggersAdapterTrait for TriggersAdapter { panic!("Should never be called since FirehoseBlockStream cannot resolve it") } + async fn load_blocks_by_numbers( + &self, + _logger: Logger, + _block_numbers: HashSet, + ) -> Result, Error> { + unimplemented!() + } + async fn chain_head_ptr(&self) -> Result, Error> { unimplemented!() } @@ -385,7 +394,7 @@ impl TriggersAdapterTrait for TriggersAdapter { &self, _from: BlockNumber, _to: BlockNumber, - _filter: &Arc>, + _filter: &TriggerFilter, ) -> Result<(Vec>, BlockNumber), Error> { panic!("Should never be called since not used by FirehoseBlockStream") } diff --git a/chain/substreams/src/chain.rs b/chain/substreams/src/chain.rs index b3cf8cca8a6..38ef49bdb5d 100644 --- a/chain/substreams/src/chain.rs +++ b/chain/substreams/src/chain.rs @@ -153,7 +153,7 @@ impl Blockchain for Chain { deployment, store.firehose_cursor(), store.block_ptr(), - filter.filter.clone(), + filter.chain_filter.clone(), ) .await } diff --git a/chain/substreams/src/trigger.rs b/chain/substreams/src/trigger.rs index becfecac505..db4034cd55c 100644 --- a/chain/substreams/src/trigger.rs +++ b/chain/substreams/src/trigger.rs @@ -1,8 +1,7 @@ use anyhow::Error; use graph::{ blockchain::{ - self, block_stream::BlockWithTriggers, BlockPtr, EmptyNodeCapabilities, - MappingTriggerTrait, TriggerFilterWrapper, + self, block_stream::BlockWithTriggers, BlockPtr, 
EmptyNodeCapabilities, MappingTriggerTrait, }, components::{ store::{DeploymentLocator, SubgraphFork}, @@ -17,7 +16,7 @@ use graph::{ }; use graph_runtime_wasm::module::ToAscPtr; use lazy_static::__Deref; -use std::sync::Arc; +use std::{collections::HashSet, sync::Arc}; use crate::{Block, Chain, NoopDataSourceTemplate, ParsedChanges}; @@ -137,6 +136,14 @@ impl blockchain::TriggersAdapter for TriggersAdapter { unimplemented!() } + async fn load_blocks_by_numbers( + &self, + _logger: Logger, + _block_numbers: HashSet, + ) -> Result, Error> { + unimplemented!() + } + async fn chain_head_ptr(&self) -> Result, Error> { unimplemented!() } @@ -145,7 +152,7 @@ impl blockchain::TriggersAdapter for TriggersAdapter { &self, _from: BlockNumber, _to: BlockNumber, - _filter: &Arc>, + _filter: &TriggerFilter, ) -> Result<(Vec>, BlockNumber), Error> { unimplemented!() } diff --git a/graph/src/blockchain/block_stream.rs b/graph/src/blockchain/block_stream.rs index c043a5d42eb..3c5f08851f2 100644 --- a/graph/src/blockchain/block_stream.rs +++ b/graph/src/blockchain/block_stream.rs @@ -1,3 +1,4 @@ +use crate::data::store::scalar; use crate::data_source::subgraph; use crate::substreams::Clock; use crate::substreams_rpc::response::Message as SubstreamsMessage; @@ -6,6 +7,7 @@ use anyhow::Error; use async_stream::stream; use futures03::Stream; use prost_types::Any; +use std::collections::HashSet; use std::fmt; use std::sync::Arc; use std::time::Instant; @@ -13,7 +15,9 @@ use thiserror::Error; use tokio::sync::mpsc::{self, Receiver, Sender}; use super::substreams_block_stream::SubstreamsLogData; -use super::{Block, BlockPtr, BlockTime, Blockchain, Trigger, TriggerFilterWrapper}; +use super::{ + Block, BlockPtr, BlockTime, Blockchain, SubgraphFilter, Trigger, TriggerFilterWrapper, +}; use crate::anyhow::Result; use crate::components::store::{BlockNumber, DeploymentLocator, WritableStore}; use crate::data::subgraph::UnifiedMappingApiVersion; @@ -345,7 +349,15 @@ impl TriggersAdapterWrapper 
{ to: BlockNumber, filter: &Arc>, ) -> Result<(Vec>, BlockNumber), Error> { - self.adapter.scan_triggers(from, to, filter).await + if !filter.subgraph_filter.is_empty() { + return self + .subgraph_triggers(Logger::root(slog::Discard, o!()), from, to, filter) + .await; + } + + self.adapter + .scan_triggers(from, to, &filter.chain_filter) + .await } pub async fn triggers_in_block( @@ -368,6 +380,72 @@ impl TriggersAdapterWrapper { pub async fn chain_head_ptr(&self) -> Result, Error> { self.adapter.chain_head_ptr().await } + + // TODO(krishna): Currently this is a mock implementation of subgraph triggers. + // This will be replaced with the actual implementation which will use the filters to + // query the database of the source subgraph and return the entity triggers. + async fn subgraph_triggers( + &self, + logger: Logger, + from: BlockNumber, + to: BlockNumber, + filter: &Arc>, + ) -> Result<(Vec>, BlockNumber), Error> { + let logger2 = logger.cheap_clone(); + let adapter = self.adapter.clone(); + // let to_ptr = eth.next_existing_ptr_to_number(&logger, to).await?; + // let to = to_ptr.block_number(); + + let first_filter = filter.subgraph_filter.first().unwrap(); + + let blocks = adapter + .load_blocks_by_numbers(logger, HashSet::from_iter(from..=to)) + .await? 
+ .into_iter() + .map(|block| { + let trigger_data = vec![Self::create_mock_subgraph_trigger(first_filter, &block)]; + BlockWithTriggers::new_with_subgraph_triggers(block, trigger_data, &logger2) + }) + .collect(); + + Ok((blocks, to)) + } + + fn create_mock_subgraph_trigger( + filter: &SubgraphFilter, + block: &C::Block, + ) -> subgraph::TriggerData { + let mock_entity = Self::create_mock_entity(block); + subgraph::TriggerData { + source: filter.subgraph.clone(), + entity: mock_entity, + entity_type: filter.entities.first().unwrap().clone(), + } + } + + fn create_mock_entity(block: &C::Block) -> Entity { + let id = DeploymentHash::new("test").unwrap(); + let data_schema = InputSchema::parse_latest( + "type Block @entity { id: Bytes!, number: BigInt!, hash: Bytes! }", + id.clone(), + ) + .unwrap(); + + let block = block.ptr(); + let hash = Value::Bytes(scalar::Bytes::from(block.hash_slice().to_vec())); + let data = data_schema + .make_entity(vec![ + ("id".into(), hash.clone()), + ( + "number".into(), + Value::BigInt(scalar::BigInt::from(block.block_number())), + ), + ("hash".into(), hash), + ]) + .unwrap(); + + data + } } #[async_trait] @@ -393,7 +471,7 @@ pub trait TriggersAdapter: Send + Sync { &self, from: BlockNumber, to: BlockNumber, - filter: &Arc>, + filter: &C::TriggerFilter, ) -> Result<(Vec>, BlockNumber), Error>; // Used for reprocessing blocks when creating a data source. @@ -413,6 +491,12 @@ pub trait TriggersAdapter: Send + Sync { /// Get pointer to parent of `block`. This is called when reverting `block`. 
async fn chain_head_ptr(&self) -> Result, Error>; + + async fn load_blocks_by_numbers( + &self, + logger: Logger, + block_numbers: HashSet, + ) -> Result>; } #[async_trait] diff --git a/graph/src/blockchain/mock.rs b/graph/src/blockchain/mock.rs index b1bef71fda9..21042921cb6 100644 --- a/graph/src/blockchain/mock.rs +++ b/graph/src/blockchain/mock.rs @@ -8,9 +8,10 @@ use crate::{ data::subgraph::UnifiedMappingApiVersion, prelude::{BlockHash, DataSourceTemplateInfo, DeploymentHash}, }; -use anyhow::Error; +use anyhow::{Error, Result}; use async_trait::async_trait; use serde::Deserialize; +use slog::Logger; use std::{collections::HashSet, convert::TryFrom, sync::Arc}; use super::{ @@ -228,16 +229,23 @@ impl TriggersAdapter for MockTriggersAdapter { todo!() } + async fn load_blocks_by_numbers( + &self, + _logger: Logger, + _block_numbers: HashSet, + ) -> Result> { + unimplemented!() + } + async fn chain_head_ptr(&self) -> Result, Error> { unimplemented!() } - async fn scan_triggers( &self, from: crate::components::store::BlockNumber, to: crate::components::store::BlockNumber, - filter: &Arc>, + filter: &MockTriggerFilter, ) -> Result< ( Vec>, @@ -269,7 +277,7 @@ impl TriggersAdapter for MockTriggersAdapter { async fn blocks_with_triggers( _from: crate::components::store::BlockNumber, to: crate::components::store::BlockNumber, - _filter: &Arc>, + _filter: &MockTriggerFilter, ) -> Result< ( Vec>, diff --git a/graph/src/blockchain/mod.rs b/graph/src/blockchain/mod.rs index ec281f48ff6..d1e07d2d36e 100644 --- a/graph/src/blockchain/mod.rs +++ b/graph/src/blockchain/mod.rs @@ -252,7 +252,7 @@ impl From for IngestorError { #[derive(Debug)] pub struct TriggerFilterWrapper { - pub filter: Arc, + pub chain_filter: Arc, pub subgraph_filter: Vec, } @@ -266,7 +266,7 @@ pub struct SubgraphFilter { impl TriggerFilterWrapper { pub fn new(filter: C::TriggerFilter, subgraph_filter: Vec) -> Self { Self { - filter: Arc::new(filter), + chain_filter: Arc::new(filter), subgraph_filter, } 
} @@ -275,7 +275,7 @@ impl TriggerFilterWrapper { fn clone(&self) -> Self { Self { - filter: self.filter.cheap_clone(), + chain_filter: self.chain_filter.cheap_clone(), subgraph_filter: self.subgraph_filter.clone(), } } diff --git a/graph/src/blockchain/polling_block_stream.rs b/graph/src/blockchain/polling_block_stream.rs index 81fb4804c97..5b37cd303b4 100644 --- a/graph/src/blockchain/polling_block_stream.rs +++ b/graph/src/blockchain/polling_block_stream.rs @@ -472,7 +472,7 @@ where .triggers_in_block( &self.logger, head_ancestor, - &self.filter.filter.clone(), + &self.filter.chain_filter.clone(), ) .await?; Ok(ReconciliationStep::ProcessDescendantBlocks(vec![block], 1)) diff --git a/graph/src/components/subgraph/proof_of_indexing/online.rs b/graph/src/components/subgraph/proof_of_indexing/online.rs index caaa76f0a76..f90fac969cf 100644 --- a/graph/src/components/subgraph/proof_of_indexing/online.rs +++ b/graph/src/components/subgraph/proof_of_indexing/online.rs @@ -146,8 +146,8 @@ impl BlockEventStream { fn write(&mut self, event: &ProofOfIndexingEvent<'_>) { let children = &[ 1, // kvp -> v - 0, // PoICausalityRegion.blocks: Vec - self.block_index, // Vec -> [i] + 0, // PoICausalityRegion.blocks: Vec + self.block_index, // Vec -> [i] 0, // Block.events -> Vec self.vec_length, ]; diff --git a/tests/src/fixture/mod.rs b/tests/src/fixture/mod.rs index b58e436eba2..b01eb4c7670 100644 --- a/tests/src/fixture/mod.rs +++ b/tests/src/fixture/mod.rs @@ -1,7 +1,7 @@ pub mod ethereum; pub mod substreams; -use std::collections::HashMap; +use std::collections::{HashMap, HashSet}; use std::marker::PhantomData; use std::sync::Mutex; use std::time::{Duration, Instant}; @@ -978,6 +978,14 @@ impl TriggersAdapter for MockTriggersAdapter { todo!() } + async fn load_blocks_by_numbers( + &self, + _logger: Logger, + _block_numbers: HashSet, + ) -> Result, Error> { + unimplemented!() + } + async fn chain_head_ptr(&self) -> Result, Error>
{ todo!() } @@ -986,7 +994,7 @@ impl TriggersAdapter for MockTriggersAdapter { &self, _from: BlockNumber, _to: BlockNumber, - _filter: &Arc>, + _filter: &C::TriggerFilter, ) -> Result<(Vec>, BlockNumber), Error> { todo!() } diff --git a/tests/tests/runner_tests.rs b/tests/tests/runner_tests.rs index eb409f6417c..8f01e4a98f2 100644 --- a/tests/tests/runner_tests.rs +++ b/tests/tests/runner_tests.rs @@ -501,10 +501,19 @@ async fn substreams_trigger_filter_construction() -> anyhow::Result<()> { let runner = ctx.runner_substreams(test_ptr(0)).await; let filter = runner.build_filter_for_test(); - assert_eq!(filter.filter.module_name(), "graph_out"); - assert_eq!(filter.filter.modules().as_ref().unwrap().modules.len(), 2); - assert_eq!(filter.filter.start_block().unwrap(), 0); - assert_eq!(filter.filter.data_sources_len(), 1); + assert_eq!(filter.chain_filter.module_name(), "graph_out"); + assert_eq!( + filter + .chain_filter + .modules() + .as_ref() + .unwrap() + .modules + .len(), + 2 + ); + assert_eq!(filter.chain_filter.start_block().unwrap(), 0); + assert_eq!(filter.chain_filter.data_sources_len(), 1); Ok(()) } @@ -526,7 +535,11 @@ async fn end_block() -> anyhow::Result<()> { let runner = ctx.runner(block_ptr.clone()).await; let runner = runner.run_for_test(false).await.unwrap(); let filter = runner.context().filter.as_ref().unwrap(); - let addresses = filter.filter.log().contract_addresses().collect::>(); + let addresses = filter + .chain_filter + .log() + .contract_addresses() + .collect::>(); if should_contain_addr { assert!(addresses.contains(&addr)); From 9069486cedbf0dbd70dbb1287c2b0e31a7297c01 Mon Sep 17 00:00:00 2001 From: incrypto32 Date: Tue, 12 Nov 2024 14:16:24 +0400 Subject: [PATCH 20/21] graph: use ReadStore instead of WriteStore for source subgraph stores --- chain/arweave/src/chain.rs | 4 +- chain/cosmos/src/chain.rs | 4 +- chain/ethereum/src/chain.rs | 8 +-- chain/near/src/chain.rs | 6 +- chain/starknet/src/chain.rs | 6 +- 
chain/substreams/src/block_stream.rs | 4 +- chain/substreams/src/chain.rs | 4 +- core/src/subgraph/inputs.rs | 4 +- core/src/subgraph/instance_manager.rs | 22 +++---- graph/src/blockchain/block_stream.rs | 10 +-- graph/src/blockchain/mock.rs | 4 +- graph/src/blockchain/mod.rs | 6 +- graph/src/components/store/traits.rs | 7 ++ store/postgres/src/subgraph_store.rs | 92 +++++++++++++++++---------- tests/src/fixture/mod.rs | 6 +- 15 files changed, 107 insertions(+), 80 deletions(-) diff --git a/chain/arweave/src/chain.rs b/chain/arweave/src/chain.rs index e897e10d8d8..b3390e4fa5f 100644 --- a/chain/arweave/src/chain.rs +++ b/chain/arweave/src/chain.rs @@ -7,7 +7,7 @@ use graph::blockchain::{ }; use graph::cheap_clone::CheapClone; use graph::components::adapter::ChainId; -use graph::components::store::{DeploymentCursorTracker, WritableStore}; +use graph::components::store::{DeploymentCursorTracker, ReadStore}; use graph::data::subgraph::UnifiedMappingApiVersion; use graph::env::EnvVars; use graph::firehose::FirehoseEndpoint; @@ -121,7 +121,7 @@ impl Blockchain for Chain { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, start_blocks: Vec, - _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, + _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, filter: Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { diff --git a/chain/cosmos/src/chain.rs b/chain/cosmos/src/chain.rs index 83a299b6163..96f2cd7735b 100644 --- a/chain/cosmos/src/chain.rs +++ b/chain/cosmos/src/chain.rs @@ -12,7 +12,7 @@ use graph::blockchain::block_stream::{BlockStreamError, BlockStreamMapper, Fireh use graph::blockchain::client::ChainClient; use graph::blockchain::{BasicBlockchainBuilder, BlockchainBuilder, NoopRuntimeAdapter}; use graph::cheap_clone::CheapClone; -use graph::components::store::{DeploymentCursorTracker, WritableStore}; +use graph::components::store::{DeploymentCursorTracker, ReadStore}; use graph::data::subgraph::UnifiedMappingApiVersion; use 
graph::{ blockchain::{ @@ -114,7 +114,7 @@ impl Blockchain for Chain { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, start_blocks: Vec, - _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, + _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, filter: Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { diff --git a/chain/ethereum/src/chain.rs b/chain/ethereum/src/chain.rs index 9b8e71b0fe2..a61f500d27d 100644 --- a/chain/ethereum/src/chain.rs +++ b/chain/ethereum/src/chain.rs @@ -7,7 +7,7 @@ use graph::blockchain::{ TriggersAdapterSelector, }; use graph::components::adapter::ChainId; -use graph::components::store::{DeploymentCursorTracker, WritableStore}; +use graph::components::store::{DeploymentCursorTracker, ReadStore}; use graph::data::subgraph::UnifiedMappingApiVersion; use graph::firehose::{FirehoseEndpoint, ForkStep}; use graph::futures03::compat::Future01CompatExt; @@ -128,7 +128,7 @@ impl BlockStreamBuilder for EthereumStreamBuilder { chain: &Chain, deployment: DeploymentLocator, start_blocks: Vec, - source_subgraph_stores: Vec<(DeploymentHash, Arc)>, + source_subgraph_stores: Vec<(DeploymentHash, Arc)>, subgraph_current_block: Option, filter: Arc>, unified_api_version: UnifiedMappingApiVersion, @@ -150,7 +150,7 @@ impl BlockStreamBuilder for EthereumStreamBuilder { chain: &Chain, deployment: DeploymentLocator, start_blocks: Vec, - source_subgraph_stores: Vec<(DeploymentHash, Arc)>, + source_subgraph_stores: Vec<(DeploymentHash, Arc)>, subgraph_current_block: Option, filter: Arc>, unified_api_version: UnifiedMappingApiVersion, @@ -437,7 +437,7 @@ impl Blockchain for Chain { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, start_blocks: Vec, - source_subgraph_stores: Vec<(DeploymentHash, Arc)>, + source_subgraph_stores: Vec<(DeploymentHash, Arc)>, filter: Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { diff --git a/chain/near/src/chain.rs b/chain/near/src/chain.rs 
index 44c0449adce..fe53f946f29 100644 --- a/chain/near/src/chain.rs +++ b/chain/near/src/chain.rs @@ -8,7 +8,7 @@ use graph::blockchain::{ }; use graph::cheap_clone::CheapClone; use graph::components::adapter::ChainId; -use graph::components::store::{DeploymentCursorTracker, WritableStore}; +use graph::components::store::{DeploymentCursorTracker, ReadStore}; use graph::data::subgraph::UnifiedMappingApiVersion; use graph::env::EnvVars; use graph::firehose::FirehoseEndpoint; @@ -152,7 +152,7 @@ impl BlockStreamBuilder for NearStreamBuilder { _chain: &Chain, _deployment: DeploymentLocator, _start_blocks: Vec, - _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, + _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, _subgraph_current_block: Option, _filter: Arc>, _unified_api_version: UnifiedMappingApiVersion, @@ -232,7 +232,7 @@ impl Blockchain for Chain { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, start_blocks: Vec, - _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, + _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, filter: Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { diff --git a/chain/starknet/src/chain.rs b/chain/starknet/src/chain.rs index b6ff51a9bcd..4a4986baa5a 100644 --- a/chain/starknet/src/chain.rs +++ b/chain/starknet/src/chain.rs @@ -16,7 +16,7 @@ use graph::{ cheap_clone::CheapClone, components::{ adapter::ChainId, - store::{DeploymentCursorTracker, DeploymentLocator, WritableStore}, + store::{DeploymentCursorTracker, DeploymentLocator, ReadStore}, }, data::subgraph::UnifiedMappingApiVersion, env::EnvVars, @@ -116,7 +116,7 @@ impl Blockchain for Chain { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, start_blocks: Vec, - _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, + _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, filter: Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { @@ -240,7 +240,7 @@ impl BlockStreamBuilder for 
StarknetStreamBuilder { _chain: &Chain, _deployment: DeploymentLocator, _start_blocks: Vec, - _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, + _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, _subgraph_current_block: Option, _filter: Arc>, _unified_api_version: UnifiedMappingApiVersion, diff --git a/chain/substreams/src/block_stream.rs b/chain/substreams/src/block_stream.rs index 3687bab0b91..59f99e06c53 100644 --- a/chain/substreams/src/block_stream.rs +++ b/chain/substreams/src/block_stream.rs @@ -9,7 +9,7 @@ use graph::{ substreams_block_stream::SubstreamsBlockStream, Blockchain, TriggerFilterWrapper, }, - components::store::{DeploymentLocator, WritableStore}, + components::store::{DeploymentLocator, ReadStore}, data::subgraph::UnifiedMappingApiVersion, prelude::{async_trait, BlockNumber, BlockPtr, DeploymentHash}, schema::InputSchema, @@ -104,7 +104,7 @@ impl BlockStreamBuilderTrait for BlockStreamBuilder { _chain: &Chain, _deployment: DeploymentLocator, _start_blocks: Vec, - _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, + _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, _subgraph_current_block: Option, _filter: Arc>, _unified_api_version: UnifiedMappingApiVersion, diff --git a/chain/substreams/src/chain.rs b/chain/substreams/src/chain.rs index 38ef49bdb5d..57282ce7062 100644 --- a/chain/substreams/src/chain.rs +++ b/chain/substreams/src/chain.rs @@ -7,7 +7,7 @@ use graph::blockchain::{ NoopRuntimeAdapter, TriggerFilterWrapper, }; use graph::components::adapter::ChainId; -use graph::components::store::{DeploymentCursorTracker, WritableStore}; +use graph::components::store::{DeploymentCursorTracker, ReadStore}; use graph::env::EnvVars; use graph::prelude::{ BlockHash, CheapClone, DeploymentHash, Entity, LoggerFactory, MetricsRegistry, @@ -142,7 +142,7 @@ impl Blockchain for Chain { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, _start_blocks: Vec, - _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, + 
_source_subgraph_stores: Vec<(DeploymentHash, Arc)>, filter: Arc>, _unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { diff --git a/core/src/subgraph/inputs.rs b/core/src/subgraph/inputs.rs index 386b1a5e35b..01264b615bf 100644 --- a/core/src/subgraph/inputs.rs +++ b/core/src/subgraph/inputs.rs @@ -1,7 +1,7 @@ use graph::{ blockchain::{block_stream::TriggersAdapterWrapper, Blockchain}, components::{ - store::{DeploymentLocator, SubgraphFork, WritableStore}, + store::{DeploymentLocator, ReadStore, SubgraphFork, WritableStore}, subgraph::ProofOfIndexingVersion, }, data::subgraph::{SubgraphFeature, UnifiedMappingApiVersion}, @@ -16,7 +16,7 @@ pub struct IndexingInputs { pub features: BTreeSet, pub start_blocks: Vec, pub end_blocks: BTreeSet, - pub source_subgraph_stores: Vec<(DeploymentHash, Arc)>, + pub source_subgraph_stores: Vec<(DeploymentHash, Arc)>, pub stop_block: Option, pub store: Arc, pub debug_fork: Option>, diff --git a/core/src/subgraph/instance_manager.rs b/core/src/subgraph/instance_manager.rs index 20ad464339f..d76342b3a2e 100644 --- a/core/src/subgraph/instance_manager.rs +++ b/core/src/subgraph/instance_manager.rs @@ -9,7 +9,7 @@ use crate::subgraph::runner::SubgraphRunner; use graph::blockchain::block_stream::{BlockStreamMetrics, TriggersAdapterWrapper}; use graph::blockchain::{Blockchain, BlockchainKind, DataSource, NodeCapabilities}; use graph::components::metrics::gas::GasMetrics; -use graph::components::store::WritableStore; +use graph::components::store::ReadStore; use graph::components::subgraph::ProofOfIndexingVersion; use graph::data::subgraph::{UnresolvedSubgraphManifest, SPEC_VERSION_0_0_6}; use graph::data::value::Word; @@ -204,14 +204,14 @@ impl SubgraphInstanceManager { } } - pub async fn hashes_to_writable_store( + pub async fn hashes_to_read_store( &self, logger: &Logger, link_resolver: &Arc, hashes: Vec, max_spec_version: Version, is_runner_test: bool, - ) -> anyhow::Result)>> { + ) -> anyhow::Result)>> { let mut 
writable_stores = Vec::new(); let subgraph_store = self.subgraph_store.clone(); @@ -235,16 +235,16 @@ impl SubgraphInstanceManager { .active_locator(&hash)? .ok_or_else(|| anyhow!("no active deployment for hash {}", hash))?; - let writable_store = subgraph_store - .clone() // Clone the Arc again for each iteration - .writable( + let readable_store = subgraph_store + .clone() + .readable( logger.clone(), loc.id.clone(), Arc::new(manifest.template_idx_and_name().collect()), ) .await?; - writable_stores.push((loc.hash, writable_store)); + writable_stores.push((loc.hash, readable_store)); } Ok(writable_stores) @@ -491,8 +491,8 @@ impl SubgraphInstanceManager { let decoder = Box::new(Decoder::new(decoder_hook)); - let subgraph_data_source_writables = self - .hashes_to_writable_store::( + let subgraph_data_source_read_stores = self + .hashes_to_read_store::( &logger, &link_resolver, subgraph_ds_source_deployments, @@ -503,7 +503,7 @@ impl SubgraphInstanceManager { let triggers_adapter = Arc::new(TriggersAdapterWrapper::new( triggers_adapter, - subgraph_data_source_writables.clone(), + subgraph_data_source_read_stores.clone(), )); let inputs = IndexingInputs { @@ -511,7 +511,7 @@ impl SubgraphInstanceManager { features, start_blocks, end_blocks, - source_subgraph_stores: subgraph_data_source_writables, + source_subgraph_stores: subgraph_data_source_read_stores, stop_block, store, debug_fork, diff --git a/graph/src/blockchain/block_stream.rs b/graph/src/blockchain/block_stream.rs index 3c5f08851f2..9c9e9850698 100644 --- a/graph/src/blockchain/block_stream.rs +++ b/graph/src/blockchain/block_stream.rs @@ -19,7 +19,7 @@ use super::{ Block, BlockPtr, BlockTime, Blockchain, SubgraphFilter, Trigger, TriggerFilterWrapper, }; use crate::anyhow::Result; -use crate::components::store::{BlockNumber, DeploymentLocator, WritableStore}; +use crate::components::store::{BlockNumber, DeploymentLocator, ReadStore}; use crate::data::subgraph::UnifiedMappingApiVersion; use 
crate::firehose::{self, FirehoseEndpoint}; use crate::futures03::stream::StreamExt as _; @@ -149,7 +149,7 @@ pub trait BlockStreamBuilder: Send + Sync { chain: &C, deployment: DeploymentLocator, start_blocks: Vec, - source_subgraph_stores: Vec<(DeploymentHash, Arc)>, + source_subgraph_stores: Vec<(DeploymentHash, Arc)>, subgraph_current_block: Option, filter: Arc>, unified_api_version: UnifiedMappingApiVersion, @@ -160,7 +160,7 @@ pub trait BlockStreamBuilder: Send + Sync { chain: &C, deployment: DeploymentLocator, start_blocks: Vec, - source_subgraph_stores: Vec<(DeploymentHash, Arc)>, + source_subgraph_stores: Vec<(DeploymentHash, Arc)>, subgraph_current_block: Option, filter: Arc>, unified_api_version: UnifiedMappingApiVersion, @@ -317,13 +317,13 @@ impl BlockWithTriggers { pub struct TriggersAdapterWrapper { pub adapter: Arc>, - pub source_subgraph_stores: Vec<(DeploymentHash, Arc)>, + pub source_subgraph_stores: Vec<(DeploymentHash, Arc)>, } impl TriggersAdapterWrapper { pub fn new( adapter: Arc>, - source_subgraph_stores: Vec<(DeploymentHash, Arc)>, + source_subgraph_stores: Vec<(DeploymentHash, Arc)>, ) -> Self { Self { adapter, diff --git a/graph/src/blockchain/mock.rs b/graph/src/blockchain/mock.rs index 21042921cb6..287d7b054f9 100644 --- a/graph/src/blockchain/mock.rs +++ b/graph/src/blockchain/mock.rs @@ -2,7 +2,7 @@ use crate::{ bail, components::{ link_resolver::LinkResolver, - store::{BlockNumber, DeploymentCursorTracker, DeploymentLocator, WritableStore}, + store::{BlockNumber, DeploymentCursorTracker, DeploymentLocator, ReadStore}, subgraph::InstanceDSTemplateInfo, }, data::subgraph::UnifiedMappingApiVersion, @@ -386,7 +386,7 @@ impl Blockchain for MockBlockchain { _deployment: DeploymentLocator, _store: impl DeploymentCursorTracker, _start_blocks: Vec, - _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, + _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, _filter: Arc>, _unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error> { 
diff --git a/graph/src/blockchain/mod.rs b/graph/src/blockchain/mod.rs index d1e07d2d36e..41561e7670b 100644 --- a/graph/src/blockchain/mod.rs +++ b/graph/src/blockchain/mod.rs @@ -20,9 +20,7 @@ use crate::{ components::{ adapter::ChainId, metrics::subgraph::SubgraphInstanceMetrics, - store::{ - DeploymentCursorTracker, DeploymentLocator, StoredDynamicDataSource, WritableStore, - }, + store::{DeploymentCursorTracker, DeploymentLocator, ReadStore, StoredDynamicDataSource}, subgraph::{HostMetrics, InstanceDSTemplateInfo, MappingError}, trigger_processor::RunnableTriggers, }, @@ -191,7 +189,7 @@ pub trait Blockchain: Debug + Sized + Send + Sync + Unpin + 'static { deployment: DeploymentLocator, store: impl DeploymentCursorTracker, start_blocks: Vec, - source_subgraph_stores: Vec<(DeploymentHash, Arc)>, + source_subgraph_stores: Vec<(DeploymentHash, Arc)>, filter: Arc>, unified_api_version: UnifiedMappingApiVersion, ) -> Result>, Error>; diff --git a/graph/src/components/store/traits.rs b/graph/src/components/store/traits.rs index 69ed67c16b2..428321884eb 100644 --- a/graph/src/components/store/traits.rs +++ b/graph/src/components/store/traits.rs @@ -185,6 +185,13 @@ pub trait SubgraphStore: Send + Sync + 'static { manifest_idx_and_name: Arc>, ) -> Result, StoreError>; + async fn readable( + self: Arc, + logger: Logger, + deployment: DeploymentId, + manifest_idx_and_name: Arc>, + ) -> Result, StoreError>; + /// Initiate a graceful shutdown of the writable that a previous call to /// `writable` might have started async fn stop_subgraph(&self, deployment: &DeploymentLocator) -> Result<(), StoreError>; diff --git a/store/postgres/src/subgraph_store.rs b/store/postgres/src/subgraph_store.rs index 41cbef15982..bf5f3d38a20 100644 --- a/store/postgres/src/subgraph_store.rs +++ b/store/postgres/src/subgraph_store.rs @@ -268,6 +268,50 @@ impl SubgraphStore { pub fn for_site(&self, site: &Site) -> Result<&Arc, StoreError> { self.inner.for_site(site) } + + async fn 
get_or_create_writable_store( + self: Arc, + logger: Logger, + deployment: graph::components::store::DeploymentId, + manifest_idx_and_name: Arc>, + ) -> Result, StoreError> { + let deployment = deployment.into(); + // We cache writables to make sure calls to this method are + // idempotent and there is ever only one `WritableStore` for any + // deployment + if let Some(writable) = self.writables.lock().unwrap().get(&deployment) { + // A poisoned writable will not write anything anymore; we + // discard it and create a new one that is properly initialized + // according to the state in the database. + if !writable.poisoned() { + return Ok(writable.cheap_clone()); + } + } + + // Ideally the lower level functions would be asyncified. + let this = self.clone(); + let site = graph::spawn_blocking_allow_panic(move || -> Result<_, StoreError> { + this.find_site(deployment) + }) + .await + .unwrap()?; // Propagate panics, there shouldn't be any. + + let writable = Arc::new( + WritableStore::new( + self.as_ref().clone(), + logger, + site, + manifest_idx_and_name, + self.registry.clone(), + ) + .await?, + ); + self.writables + .lock() + .unwrap() + .insert(deployment, writable.cheap_clone()); + Ok(writable) + } } impl std::ops::Deref for SubgraphStore { @@ -1488,42 +1532,20 @@ impl SubgraphStoreTrait for SubgraphStore { deployment: graph::components::store::DeploymentId, manifest_idx_and_name: Arc>, ) -> Result, StoreError> { - let deployment = deployment.into(); - // We cache writables to make sure calls to this method are - // idempotent and there is ever only one `WritableStore` for any - // deployment - if let Some(writable) = self.writables.lock().unwrap().get(&deployment) { - // A poisoned writable will not write anything anymore; we - // discard it and create a new one that is properly initialized - // according to the state in the database. 
- if !writable.poisoned() { - return Ok(writable.cheap_clone()); - } - } - - // Ideally the lower level functions would be asyncified. - let this = self.clone(); - let site = graph::spawn_blocking_allow_panic(move || -> Result<_, StoreError> { - this.find_site(deployment) - }) - .await - .unwrap()?; // Propagate panics, there shouldn't be any. + self.get_or_create_writable_store(logger, deployment, manifest_idx_and_name) + .await + .map(|store| store as Arc) + } - let writable = Arc::new( - WritableStore::new( - self.as_ref().clone(), - logger, - site, - manifest_idx_and_name, - self.registry.clone(), - ) - .await?, - ); - self.writables - .lock() - .unwrap() - .insert(deployment, writable.cheap_clone()); - Ok(writable) + async fn readable( + self: Arc, + logger: Logger, + deployment: graph::components::store::DeploymentId, + manifest_idx_and_name: Arc>, + ) -> Result, StoreError> { + self.get_or_create_writable_store(logger, deployment, manifest_idx_and_name) + .await + .map(|store| store as Arc) } async fn stop_subgraph(&self, loc: &DeploymentLocator) -> Result<(), StoreError> { diff --git a/tests/src/fixture/mod.rs b/tests/src/fixture/mod.rs index b01eb4c7670..47e8ea1d771 100644 --- a/tests/src/fixture/mod.rs +++ b/tests/src/fixture/mod.rs @@ -20,7 +20,7 @@ use graph::cheap_clone::CheapClone; use graph::components::adapter::ChainId; use graph::components::link_resolver::{ArweaveClient, ArweaveResolver, FileSizeLimit}; use graph::components::metrics::MetricsRegistry; -use graph::components::store::{BlockStore, DeploymentLocator, EthereumCallCache, WritableStore}; +use graph::components::store::{BlockStore, DeploymentLocator, EthereumCallCache, ReadStore}; use graph::components::subgraph::Settings; use graph::data::graphql::load_manager::LoadManager; use graph::data::query::{Query, QueryTarget}; @@ -723,7 +723,7 @@ impl BlockStreamBuilder for MutexBlockStreamBuilder { chain: &C, deployment: DeploymentLocator, start_blocks: Vec, - source_subgraph_stores: 
Vec<(DeploymentHash, Arc)>, + source_subgraph_stores: Vec<(DeploymentHash, Arc)>, subgraph_current_block: Option, filter: Arc>, unified_api_version: graph::data::subgraph::UnifiedMappingApiVersion, @@ -798,7 +798,7 @@ where _chain: &C, _deployment: DeploymentLocator, _start_blocks: Vec, - _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, + _source_subgraph_stores: Vec<(DeploymentHash, Arc)>, subgraph_current_block: Option, _filter: Arc>, _unified_api_version: graph::data::subgraph::UnifiedMappingApiVersion, From c7ca25a4d7a7f269f9850e0648d1d7c64fb7912d Mon Sep 17 00:00:00 2001 From: incrypto32 Date: Tue, 12 Nov 2024 14:38:51 +0400 Subject: [PATCH 21/21] graph: Better comments --- core/src/subgraph/context/instance/mod.rs | 5 +++-- graph/src/blockchain/block_stream.rs | 3 +++ graph/src/blockchain/mod.rs | 5 +++++ 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/core/src/subgraph/context/instance/mod.rs b/core/src/subgraph/context/instance/mod.rs index 6436ea1a56e..86b64195493 100644 --- a/core/src/subgraph/context/instance/mod.rs +++ b/core/src/subgraph/context/instance/mod.rs @@ -22,14 +22,15 @@ pub(crate) struct SubgraphInstance> { pub(super) static_data_sources: Arc>>, host_metrics: Arc, - /// The hosts represent the data sources in the subgraph. There is one host per data source. + /// The hosts represent the onchain data sources in the subgraph. There is one host per data source. /// Data sources with no mappings (e.g. direct substreams) have no host. /// /// Onchain hosts must be created in increasing order of block number. `fn hosts_for_trigger` /// will return the onchain hosts in the same order as they were inserted. onchain_hosts: OnchainHosts, - // TODO(krishna): Describe subgraph_hosts + /// `subgraph_hosts` represent subgraph data sources declared in the manifest. These are a special + /// kind of data source that depends on the data from another source subgraph. 
subgraph_hosts: OnchainHosts, offchain_hosts: OffchainHosts, diff --git a/graph/src/blockchain/block_stream.rs b/graph/src/blockchain/block_stream.rs index 9c9e9850698..3c80ff61c59 100644 --- a/graph/src/blockchain/block_stream.rs +++ b/graph/src/blockchain/block_stream.rs @@ -315,6 +315,9 @@ impl BlockWithTriggers { } } +/// The `TriggersAdapterWrapper` wraps the chain-specific `TriggersAdapter`, enabling chain-agnostic +/// handling of subgraph datasource triggers. Without this wrapper, we would have to duplicate the same +/// logic for each chain, increasing code repetition. pub struct TriggersAdapterWrapper { pub adapter: Arc>, pub source_subgraph_stores: Vec<(DeploymentHash, Arc)>, diff --git a/graph/src/blockchain/mod.rs b/graph/src/blockchain/mod.rs index 41561e7670b..9de8d9cd8a3 100644 --- a/graph/src/blockchain/mod.rs +++ b/graph/src/blockchain/mod.rs @@ -248,6 +248,11 @@ impl From for IngestorError { } } +/// The `TriggerFilterWrapper` is a higher-level wrapper around the chain-specific `TriggerFilter`, +/// enabling subgraph-based trigger filtering for subgraph datasources. This abstraction is necessary +/// because subgraph filtering operates at a higher level than chain-based filtering. By using this wrapper, +/// we reduce code duplication, allowing subgraph-based filtering to be implemented once, instead of +/// duplicating it across different chains. #[derive(Debug)] pub struct TriggerFilterWrapper { pub chain_filter: Arc,