From f599953475a4047581487b700fe74351276402ab Mon Sep 17 00:00:00 2001 From: Yuki Kishimoto Date: Tue, 12 Mar 2024 13:28:13 +0100 Subject: [PATCH] ffi(sdk): allow to use custom database * Add `CustomNostrDatabase` trait * Add `NostrDatabase::custom` * Add `custom-database.py` example Closes https://github.com/rust-nostr/nostr/issues/325 --- .../examples/custom-database.py | 83 +++++++ bindings/nostr-sdk-ffi/src/database/custom.rs | 217 ++++++++++++++++++ .../src/{database.rs => database/mod.rs} | 12 + 3 files changed, 312 insertions(+) create mode 100644 bindings/nostr-sdk-ffi/bindings-python/examples/custom-database.py create mode 100644 bindings/nostr-sdk-ffi/src/database/custom.rs rename bindings/nostr-sdk-ffi/src/{database.rs => database/mod.rs} (90%) diff --git a/bindings/nostr-sdk-ffi/bindings-python/examples/custom-database.py b/bindings/nostr-sdk-ffi/bindings-python/examples/custom-database.py new file mode 100644 index 000000000..4254d3e34 --- /dev/null +++ b/bindings/nostr-sdk-ffi/bindings-python/examples/custom-database.py @@ -0,0 +1,83 @@ +from nostr_sdk import Keys, Client, EventBuilder, Filter, ClientBuilder, CustomNostrDatabase, NostrDatabase, NegentropyOptions, Event, EventId, init_logger, LogLevel +from datetime import timedelta +import time +from typing import List, Optional, Set, Dict, Tuple + +init_logger(LogLevel.INFO) + +# Example of custom in-memory database +class MyDatabase(CustomNostrDatabase): + def __init__(self): + self.seen_event_ids = {} + self.events = {} + + def backend(self) -> str: + return "my-in-memory-backend" + + def save_event(self, event: Event) -> bool: + self.events[event.id()] = event + return True + + def has_event_already_been_saved(self, event_id) -> bool: + return event_id in self.events + + def has_event_already_been_seen(self, event_id) -> bool: + return event_id in self.seen_event_ids + + def has_event_id_been_deleted(self, event_id) -> bool: + return False + + def has_coordinate_been_deleted(self, coordinate, 
timestamp) -> bool: + return False + + def event_id_seen(self, event_id, relay_url: str): + if event_id in self.seen_event_ids: + self.seen_event_ids[event_id].add(relay_url) + else: + new_set = {relay_url} + self.seen_event_ids[event_id] = new_set + + def event_seen_on_relays(self, event_id) -> Optional[Set[str]]: + return self.seen_event_ids.get(event_id) + + def event_by_id(self, event_id) -> Event: + return self.events.get(event_id, None) + + def count(self, filters) -> int: + return 0 + + def query(self, filters) -> List[Event]: + # Fake algorithm + return list(self.events.values())[:10] + + def delete(self, filter): + return + + def wipe(self): + self.seen_event_ids.clear() + self.events.clear() + +my_db = MyDatabase() +database = NostrDatabase.custom(my_db) +client = ClientBuilder().database(database).build() + +client.add_relay("wss://relay.damus.io") +client.add_relay("wss://atl.purplerelay.com") +client.connect() + +keys = Keys.parse("nsec1ufnus6pju578ste3v90xd5m2decpuzpql2295m3sknqcjzyys9ls0qlc85") +print(keys.public_key().to_bech32()) + +# Negentropy reconciliation +filter = Filter().author(keys.public_key()) +opts = NegentropyOptions() +client.reconcile(filter, opts) + +# Query events from database +filter = Filter().author(keys.public_key()).limit(10) +events = client.database().query([filter]) +if len(events) == 0: + print("Query not found any event") +else: + for event in events: + print(event.as_json()) diff --git a/bindings/nostr-sdk-ffi/src/database/custom.rs b/bindings/nostr-sdk-ffi/src/database/custom.rs new file mode 100644 index 000000000..35e3ed952 --- /dev/null +++ b/bindings/nostr-sdk-ffi/src/database/custom.rs @@ -0,0 +1,217 @@ +// Copyright (c) 2022-2023 Yuki Kishimoto +// Copyright (c) 2023-2024 Rust Nostr Developers +// Distributed under the MIT software license + +use std::fmt::Debug; +use std::sync::Arc; + +use nostr_ffi::nips::nip01::Coordinate; +use nostr_ffi::{Event, EventId, Filter, Timestamp}; + +use crate::error::Result; + 
+#[uniffi::export(callback_interface)] +pub trait CustomNostrDatabase: Send + Sync + Debug { + /// Name of backend + fn backend(&self) -> String; + + /// Save [`Event`] into store + /// + /// Return `true` if event was successfully saved into database. + /// + /// **This method assumes that [`Event`] was already verified** + fn save_event(&self, event: Arc) -> Result; + + /// Check if [`Event`] has already been saved + fn has_event_already_been_saved(&self, event_id: Arc) -> Result; + + /// Check if [`EventId`] has already been seen + fn has_event_already_been_seen(&self, event_id: Arc) -> Result; + + /// Check if [`EventId`] has been deleted + fn has_event_id_been_deleted(&self, event_id: Arc) -> Result; + + /// Check if event with [`Coordinate`] has been deleted before [`Timestamp`] + fn has_coordinate_been_deleted( + &self, + coordinate: Arc, + timestamp: Arc, + ) -> Result; + + /// Set [`EventId`] as seen by relay + /// + /// Useful for NIP65 (aka gossip) + fn event_id_seen(&self, event_id: Arc, relay_url: String) -> Result<()>; + + /// Get list of relays that have seen the [`EventId`] + fn event_seen_on_relays(&self, event_id: Arc) -> Result>>; + + /// Get [`Event`] by [`EventId`] + fn event_by_id(&self, event_id: Arc) -> Result>; + + /// Count number of [`Event`] found by filters + /// + /// Use `Filter::new()` or `Filter::default()` to count all events. 
+ fn count(&self, filters: Vec>) -> Result; + + /// Query store with filters + fn query(&self, filters: Vec>) -> Result>>; + + /// Delete all events that match the `Filter` + fn delete(&self, filter: Arc) -> Result<()>; + + /// Wipe all data + fn wipe(&self) -> Result<()>; +} + +#[derive(Debug)] +pub(super) struct IntermediateCustomNostrDatabase { + pub(super) inner: Box, +} + +mod inner { + use std::collections::{BTreeSet, HashSet}; + use std::ops::Deref; + use std::sync::Arc; + + use nostr_sdk::database::{DatabaseError, NostrDatabase, Order}; + use nostr_sdk::prelude::*; + + use super::IntermediateCustomNostrDatabase; + + #[async_trait] + impl NostrDatabase for IntermediateCustomNostrDatabase { + type Err = DatabaseError; + + fn backend(&self) -> Backend { + Backend::Custom(self.inner.backend()) + } + + async fn save_event(&self, event: &Event) -> Result { + self.inner + .save_event(Arc::new(event.to_owned().into())) + .map_err(DatabaseError::backend) + } + + async fn bulk_import(&self, _events: BTreeSet) -> Result<(), Self::Err> { + Ok(()) + } + + async fn has_event_already_been_saved( + &self, + event_id: &EventId, + ) -> Result { + self.inner + .has_event_already_been_saved(Arc::new((*event_id).into())) + .map_err(DatabaseError::backend) + } + + async fn has_event_already_been_seen(&self, event_id: &EventId) -> Result { + self.inner + .has_event_already_been_seen(Arc::new((*event_id).into())) + .map_err(DatabaseError::backend) + } + + async fn has_event_id_been_deleted(&self, event_id: &EventId) -> Result { + self.inner + .has_event_id_been_deleted(Arc::new((*event_id).into())) + .map_err(DatabaseError::backend) + } + + async fn has_coordinate_been_deleted( + &self, + coordinate: &Coordinate, + timestamp: Timestamp, + ) -> Result { + self.inner + .has_coordinate_been_deleted( + Arc::new(coordinate.to_owned().into()), + Arc::new(timestamp.into()), + ) + .map_err(DatabaseError::backend) + } + + async fn event_id_seen(&self, event_id: EventId, relay_url: Url) -> 
Result<(), Self::Err> { + self.inner + .event_id_seen(Arc::new(event_id.into()), relay_url.to_string()) + .map_err(DatabaseError::backend) + } + + async fn event_seen_on_relays( + &self, + event_id: EventId, + ) -> Result>, Self::Err> { + let res = self + .inner + .event_seen_on_relays(Arc::new(event_id.into())) + .map_err(DatabaseError::backend)?; + Ok(res.map(|list| { + list.into_iter() + .filter_map(|u| Url::parse(&u).ok()) + .collect() + })) + } + + async fn event_by_id(&self, event_id: EventId) -> Result { + let res = self + .inner + .event_by_id(Arc::new(event_id.into())) + .map_err(DatabaseError::backend)?; + Ok(res.as_ref().deref().clone()) + } + + async fn count(&self, filters: Vec) -> Result { + let filters = filters.into_iter().map(|f| Arc::new(f.into())).collect(); + let res = self.inner.count(filters).map_err(DatabaseError::backend)?; + Ok(res as usize) + } + + async fn query( + &self, + filters: Vec, + _order: Order, + ) -> Result, Self::Err> { + let filters = filters.into_iter().map(|f| Arc::new(f.into())).collect(); + let res = self.inner.query(filters).map_err(DatabaseError::backend)?; + Ok(res + .into_iter() + .map(|e| e.as_ref().deref().clone()) + .collect()) + } + + async fn event_ids_by_filters( + &self, + filters: Vec, + _order: Order, + ) -> Result, Self::Err> { + let filters = filters.into_iter().map(|f| Arc::new(f.into())).collect(); + let res = self.inner.query(filters).map_err(DatabaseError::backend)?; + Ok(res.into_iter().map(|e| *e.id()).collect()) + } + + async fn negentropy_items( + &self, + filter: Filter, + ) -> Result, Self::Err> { + let filter = Arc::new(filter.into()); + let res = self + .inner + .query(vec![filter]) + .map_err(DatabaseError::backend)?; + Ok(res + .into_iter() + .map(|e| (*e.id(), *e.created_at())) + .collect()) + } + + async fn delete(&self, filter: Filter) -> Result<(), Self::Err> { + self.inner + .delete(Arc::new(filter.into())) + .map_err(DatabaseError::backend) + } + + async fn wipe(&self) -> Result<(), 
Self::Err> { + self.inner.wipe().map_err(DatabaseError::backend) + } + } +} diff --git a/bindings/nostr-sdk-ffi/src/database.rs b/bindings/nostr-sdk-ffi/src/database/mod.rs similarity index 90% rename from bindings/nostr-sdk-ffi/src/database.rs rename to bindings/nostr-sdk-ffi/src/database/mod.rs index cf2efd0a7..a534b5696 100644 --- a/bindings/nostr-sdk-ffi/src/database.rs +++ b/bindings/nostr-sdk-ffi/src/database/mod.rs @@ -10,6 +10,9 @@ use nostr_sdk::database::{DynNostrDatabase, IntoNostrDatabase, NostrDatabaseExt, use nostr_sdk::{block_on, SQLiteDatabase}; use uniffi::Object; +pub mod custom; + +use self::custom::{CustomNostrDatabase, IntermediateCustomNostrDatabase}; use crate::error::Result; use crate::profile::Profile; @@ -42,6 +45,15 @@ impl NostrDatabase { }) } + #[uniffi::constructor] + pub fn custom(database: Box) -> Self { + let intermediate = IntermediateCustomNostrDatabase { inner: database }; + + Self { + inner: intermediate.into_nostr_database(), + } + } + /// Save [`Event`] into store /// /// Return `true` if event was successfully saved into database.