ffi(sdk): allow to use custom database
* Add `CustomNostrDatabase` trait
* Add `NostrDatabase::custom`
* Add `custom-database.py` example

Closes #325
yukibtc committed Mar 12, 2024
1 parent 89253a9 commit f599953
Showing 3 changed files with 312 additions and 0 deletions.
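At a glance, the new API lets a Python class act as the SDK's storage backend: subclass `CustomNostrDatabase`, wrap an instance with `NostrDatabase.custom`, and pass it to the client builder. A minimal sketch (the full runnable version is the custom-database.py example below):

from nostr_sdk import ClientBuilder, CustomNostrDatabase, NostrDatabase

class MyDatabase(CustomNostrDatabase):
    # Implement the storage callbacks here (save_event, query, wipe, ...);
    # the complete example below fills in every method.
    ...

database = NostrDatabase.custom(MyDatabase())        # wrap the Python object
client = ClientBuilder().database(database).build()  # client now persists through it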
83 changes: 83 additions & 0 deletions bindings/nostr-sdk-ffi/bindings-python/examples/custom-database.py
@@ -0,0 +1,83 @@
from nostr_sdk import Keys, Client, EventBuilder, Filter, ClientBuilder, CustomNostrDatabase, NostrDatabase, NegentropyOptions, Event, EventId, init_logger, LogLevel
from datetime import timedelta
import time
from typing import List, Optional, Set, Dict, Tuple

init_logger(LogLevel.INFO)

# Example of custom in-memory database
class MyDatabase(CustomNostrDatabase):
    def __init__(self):
        self.seen_event_ids = {}
        self.events = {}

    def backend(self) -> str:
        return "my-in-memory-backend"

    def save_event(self, event: Event) -> bool:
        self.events[event.id()] = event
        return True

    def has_event_already_been_saved(self, event_id) -> bool:
        return event_id in self.events

    def has_event_already_been_seen(self, event_id) -> bool:
        return event_id in self.seen_event_ids

    def has_event_id_been_deleted(self, event_id) -> bool:
        return False

    def has_coordinate_been_deleted(self, coordinate, timestamp) -> bool:
        return False

    def event_id_seen(self, event_id, relay_url: str):
        if event_id in self.seen_event_ids:
            self.seen_event_ids[event_id].add(relay_url)
        else:
            new_set = {relay_url}
            self.seen_event_ids[event_id] = new_set

    def event_seen_on_relays(self, event_id) -> Optional[Set[str]]:
        return self.seen_event_ids.get(event_id)

    def event_by_id(self, event_id) -> Event:
        return self.events.get(event_id, None)

    def count(self, filters) -> int:
        return 0

    def query(self, filters) -> List[Event]:
        # Fake algorithm
        return list(self.events.values())[:10]

    def delete(self, filter):
        return

    def wipe(self):
        self.seen_event_ids.clear()
        self.events.clear()

my_db = MyDatabase()
database = NostrDatabase.custom(my_db)
client = ClientBuilder().database(database).build()

client.add_relay("wss://relay.damus.io")
client.add_relay("wss://atl.purplerelay.com")
client.connect()

keys = Keys.parse("nsec1ufnus6pju578ste3v90xd5m2decpuzpql2295m3sknqcjzyys9ls0qlc85")
print(keys.public_key().to_bech32())

# Negentropy reconciliation
filter = Filter().author(keys.public_key())
opts = NegentropyOptions()
client.reconcile(filter, opts)

# Query events from database
filter = Filter().author(keys.public_key()).limit(10)
events = client.database().query([filter])
if len(events) == 0:
    print("Query found no events")
else:
    for event in events:
        print(event.as_json())
217 changes: 217 additions & 0 deletions bindings/nostr-sdk-ffi/src/database/custom.rs
@@ -0,0 +1,217 @@
// Copyright (c) 2022-2023 Yuki Kishimoto
// Copyright (c) 2023-2024 Rust Nostr Developers
// Distributed under the MIT software license

use std::fmt::Debug;
use std::sync::Arc;

use nostr_ffi::nips::nip01::Coordinate;
use nostr_ffi::{Event, EventId, Filter, Timestamp};

use crate::error::Result;

#[uniffi::export(callback_interface)]
pub trait CustomNostrDatabase: Send + Sync + Debug {
    /// Name of backend
    fn backend(&self) -> String;

    /// Save [`Event`] into store
    ///
    /// Return `true` if event was successfully saved into database.
    ///
    /// **This method assumes that [`Event`] was already verified**
    fn save_event(&self, event: Arc<Event>) -> Result<bool>;

    /// Check if [`Event`] has already been saved
    fn has_event_already_been_saved(&self, event_id: Arc<EventId>) -> Result<bool>;

    /// Check if [`EventId`] has already been seen
    fn has_event_already_been_seen(&self, event_id: Arc<EventId>) -> Result<bool>;

    /// Check if [`EventId`] has been deleted
    fn has_event_id_been_deleted(&self, event_id: Arc<EventId>) -> Result<bool>;

    /// Check if event with [`Coordinate`] has been deleted before [`Timestamp`]
    fn has_coordinate_been_deleted(
        &self,
        coordinate: Arc<Coordinate>,
        timestamp: Arc<Timestamp>,
    ) -> Result<bool>;

    /// Set [`EventId`] as seen by relay
    ///
    /// Useful for NIP65 (aka gossip)
    fn event_id_seen(&self, event_id: Arc<EventId>, relay_url: String) -> Result<()>;

    /// Get list of relays that have seen the [`EventId`]
    fn event_seen_on_relays(&self, event_id: Arc<EventId>) -> Result<Option<Vec<String>>>;

    /// Get [`Event`] by [`EventId`]
    fn event_by_id(&self, event_id: Arc<EventId>) -> Result<Arc<Event>>;

    /// Count number of [`Event`] found by filters
    ///
    /// Use `Filter::new()` or `Filter::default()` to count all events.
    fn count(&self, filters: Vec<Arc<Filter>>) -> Result<u64>;

    /// Query store with filters
    fn query(&self, filters: Vec<Arc<Filter>>) -> Result<Vec<Arc<Event>>>;

    /// Delete all events that match the `Filter`
    fn delete(&self, filter: Arc<Filter>) -> Result<()>;

    /// Wipe all data
    fn wipe(&self) -> Result<()>;
}

#[derive(Debug)]
pub(super) struct IntermediateCustomNostrDatabase {
    pub(super) inner: Box<dyn CustomNostrDatabase>,
}

mod inner {
    use std::collections::{BTreeSet, HashSet};
    use std::ops::Deref;
    use std::sync::Arc;

    use nostr_sdk::database::{DatabaseError, NostrDatabase, Order};
    use nostr_sdk::prelude::*;

    use super::IntermediateCustomNostrDatabase;

    #[async_trait]
    impl NostrDatabase for IntermediateCustomNostrDatabase {
        type Err = DatabaseError;

        fn backend(&self) -> Backend {
            Backend::Custom(self.inner.backend())
        }

        async fn save_event(&self, event: &Event) -> Result<bool, Self::Err> {
            self.inner
                .save_event(Arc::new(event.to_owned().into()))
                .map_err(DatabaseError::backend)
        }

        async fn bulk_import(&self, _events: BTreeSet<Event>) -> Result<(), Self::Err> {
            Ok(())
        }

        async fn has_event_already_been_saved(
            &self,
            event_id: &EventId,
        ) -> Result<bool, Self::Err> {
            self.inner
                .has_event_already_been_saved(Arc::new((*event_id).into()))
                .map_err(DatabaseError::backend)
        }

        async fn has_event_already_been_seen(&self, event_id: &EventId) -> Result<bool, Self::Err> {
            self.inner
                .has_event_already_been_seen(Arc::new((*event_id).into()))
                .map_err(DatabaseError::backend)
        }

        async fn has_event_id_been_deleted(&self, event_id: &EventId) -> Result<bool, Self::Err> {
            self.inner
                .has_event_id_been_deleted(Arc::new((*event_id).into()))
                .map_err(DatabaseError::backend)
        }

        async fn has_coordinate_been_deleted(
            &self,
            coordinate: &Coordinate,
            timestamp: Timestamp,
        ) -> Result<bool, Self::Err> {
            self.inner
                .has_coordinate_been_deleted(
                    Arc::new(coordinate.to_owned().into()),
                    Arc::new(timestamp.into()),
                )
                .map_err(DatabaseError::backend)
        }

        async fn event_id_seen(&self, event_id: EventId, relay_url: Url) -> Result<(), Self::Err> {
            self.inner
                .event_id_seen(Arc::new(event_id.into()), relay_url.to_string())
                .map_err(DatabaseError::backend)
        }

        async fn event_seen_on_relays(
            &self,
            event_id: EventId,
        ) -> Result<Option<HashSet<Url>>, Self::Err> {
            let res = self
                .inner
                .event_seen_on_relays(Arc::new(event_id.into()))
                .map_err(DatabaseError::backend)?;
            Ok(res.map(|list| {
                list.into_iter()
                    .filter_map(|u| Url::parse(&u).ok())
                    .collect()
            }))
        }

        async fn event_by_id(&self, event_id: EventId) -> Result<Event, Self::Err> {
            let res = self
                .inner
                .event_by_id(Arc::new(event_id.into()))
                .map_err(DatabaseError::backend)?;
            Ok(res.as_ref().deref().clone())
        }

        async fn count(&self, filters: Vec<Filter>) -> Result<usize, Self::Err> {
            let filters = filters.into_iter().map(|f| Arc::new(f.into())).collect();
            let res = self.inner.count(filters).map_err(DatabaseError::backend)?;
            Ok(res as usize)
        }

        async fn query(
            &self,
            filters: Vec<Filter>,
            _order: Order,
        ) -> Result<Vec<Event>, Self::Err> {
            let filters = filters.into_iter().map(|f| Arc::new(f.into())).collect();
            let res = self.inner.query(filters).map_err(DatabaseError::backend)?;
            Ok(res
                .into_iter()
                .map(|e| e.as_ref().deref().clone())
                .collect())
        }

        async fn event_ids_by_filters(
            &self,
            filters: Vec<Filter>,
            _order: Order,
        ) -> Result<Vec<EventId>, Self::Err> {
            let filters = filters.into_iter().map(|f| Arc::new(f.into())).collect();
            let res = self.inner.query(filters).map_err(DatabaseError::backend)?;
            Ok(res.into_iter().map(|e| *e.id()).collect())
        }

        async fn negentropy_items(
            &self,
            filter: Filter,
        ) -> Result<Vec<(EventId, Timestamp)>, Self::Err> {
            let filter = Arc::new(filter.into());
            let res = self
                .inner
                .query(vec![filter])
                .map_err(DatabaseError::backend)?;
            Ok(res
                .into_iter()
                .map(|e| (*e.id(), *e.created_at()))
                .collect())
        }

        async fn delete(&self, filter: Filter) -> Result<(), Self::Err> {
            self.inner
                .delete(Arc::new(filter.into()))
                .map_err(DatabaseError::backend)
        }

        async fn wipe(&self) -> Result<(), Self::Err> {
            self.inner.wipe().map_err(DatabaseError::backend)
        }
    }
}
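A note on the design: uniffi callback interfaces are synchronous (as of this commit), so the foreign-language object cannot implement the async `NostrDatabase` trait directly. `IntermediateCustomNostrDatabase` bridges the two by delegating each async method to the blocking callback; `bulk_import` is a no-op, and `event_ids_by_filters`/`negentropy_items` are derived from `query`, which keeps the foreign interface small. A stripped-down sketch of the same bridging pattern (the trait and type names here are illustrative, not from the codebase):

// Hypothetical names for illustration; the real code uses CustomNostrDatabase,
// IntermediateCustomNostrDatabase, and nostr_sdk's NostrDatabase trait.
trait SyncStore: Send + Sync {
    // Synchronous and object-safe: implementable from Python/Kotlin/Swift via uniffi
    fn get(&self, key: String) -> Option<String>;
}

// Adapter owning the boxed callback object handed over from the foreign language
struct Adapter {
    inner: Box<dyn SyncStore>,
}

#[async_trait::async_trait]
trait AsyncStore {
    async fn get(&self, key: &str) -> Option<String>;
}

#[async_trait::async_trait]
impl AsyncStore for Adapter {
    async fn get(&self, key: &str) -> Option<String> {
        // Each async method simply delegates to the blocking callback object
        self.inner.get(key.to_string())
    }
}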
12 changes: 12 additions & 0 deletions bindings/nostr-sdk-ffi/src/database/mod.rs
@@ -10,6 +10,9 @@ use nostr_sdk::database::{DynNostrDatabase, IntoNostrDatabase, NostrDatabaseExt,
use nostr_sdk::{block_on, SQLiteDatabase};
use uniffi::Object;

pub mod custom;

use self::custom::{CustomNostrDatabase, IntermediateCustomNostrDatabase};
use crate::error::Result;
use crate::profile::Profile;

@@ -42,6 +45,15 @@ impl NostrDatabase {
        })
    }

    #[uniffi::constructor]
    pub fn custom(database: Box<dyn CustomNostrDatabase>) -> Self {
        let intermediate = IntermediateCustomNostrDatabase { inner: database };

        Self {
            inner: intermediate.into_nostr_database(),
        }
    }

    /// Save [`Event`] into store
    ///
    /// Return `true` if event was successfully saved into database.
