diff --git a/Cargo.toml b/Cargo.toml
index 5174b7a..6bc238c 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "valence_core"
-version = "0.1.6"
+version = "0.1.7"
 edition = "2021"
 license = "MIT"
 keywords = ["blockchain", "L2", "peer-to-peer", "P2P"]
diff --git a/src/db/handler.rs b/src/db/handler.rs
deleted file mode 100644
index 41703dc..0000000
--- a/src/db/handler.rs
+++ /dev/null
@@ -1,85 +0,0 @@
-use async_trait::async_trait;
-use serde::{de::DeserializeOwned, Serialize};
-use std::collections::HashMap;
-
-/// Trait for a key-value data store connection
-#[async_trait]
-pub trait KvStoreConnection {
-    /// Initialize a connection to the cache
-    ///
-    /// ### Arguments
-    ///
-    /// * `url` - A string slice that holds the URL to connect to
-    async fn init(url: &str) -> Result<Self, Box<dyn std::error::Error + Send + Sync>>
-    where
-        Self: Sized;
-
-    /// Sets a data entry in the cache
-    ///
-    /// ### Arguments
-    ///
-    /// * `key` - Key of the data entry to set
-    /// * `value_id` - ID of the value to set
-    /// * `value` - Value of the data entry to set
-    async fn set_data<T: Serialize + DeserializeOwned + Send>(
-        &mut self,
-        key: &str,
-        value_id: &str,
-        value: T,
-    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>>;
-
-    /// Sets a data entry in the cache with an expiration time
-    ///
-    /// ### Arguments
-    ///
-    /// * `key` - Key of the data entry to set
-    /// * `value_id` - ID of the value to set
-    /// * `value` - Value of the data entry to set
-    /// * `seconds` - Number of seconds to expire the data entry in
-    async fn set_data_with_expiry<T: Serialize + DeserializeOwned + Send>(
-        &mut self,
-        key: &str,
-        value_id: &str,
-        value: T,
-        seconds: usize,
-    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>>;
-
-    /// Deletes a data entry from the cache
-    ///
-    /// ### Arguments
-    ///
-    /// * `key` - Key of the data entry to delete
-    /// * `value_id` - ID of the value to delete. If not provided, all values for the key are deleted
-    async fn del_data(
-        &mut self,
-        key: &str,
-        value_id: Option<&str>,
-    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>>;
-
-    /// Gets a data entry from the cache
-    ///
-    /// ### Arguments
-    ///
-    /// * `key` - Key of the data entry to get
-    /// * `value_id` - ID of the value to get. If not provided, all values for the key are retrieved
-    async fn get_data<T: Serialize + DeserializeOwned + Clone + Send>(
-        &mut self,
-        key: &str,
-        value_id: Option<&str>,
-    ) -> Result<Option<HashMap<String, T>>, Box<dyn std::error::Error + Send + Sync>>;
-}
-
-#[async_trait]
-pub trait CacheHandler {
-    /// Sets an expiration time for a data entry
-    ///
-    /// ### Arguments
-    ///
-    /// * `key` - Key of the data entry to expire
-    /// * `seconds` - Number of seconds to expire the data entry in
-    async fn expire_entry(
-        &mut self,
-        key: &str,
-        seconds: usize,
-    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>>;
-}
diff --git a/src/db/mod.rs b/src/db/mod.rs
deleted file mode 100644
index 689c642..0000000
--- a/src/db/mod.rs
+++ /dev/null
@@ -1,3 +0,0 @@
-pub mod handler;
-pub mod mongo_db;
-pub mod redis_cache;
diff --git a/src/db/mongo_db.rs b/src/db/mongo_db.rs
deleted file mode 100644
index 6de9ca6..0000000
--- a/src/db/mongo_db.rs
+++ /dev/null
@@ -1,301 +0,0 @@
-use async_trait::async_trait;
-use mongodb::bson::{doc, DateTime, Document};
-use mongodb::{options::ClientOptions, Client};
-use serde::{de::DeserializeOwned, Serialize};
-use std::collections::HashMap;
-use tracing::{event, span, trace, warn, Level};
-
-use super::handler::KvStoreConnection;
-
-#[derive(Debug, Clone)]
-pub struct MongoDbIndex {
-    pub db_name: String,
-    pub coll_name: String,
-}
-
-#[derive(Debug, Clone)]
-pub struct MongoDbConn {
-    pub client: Client,
-    pub index: MongoDbIndex,
-}
-
-impl MongoDbConn {
-    /// Creates a TTL index on the expiry field.
-    ///
-    /// NOTE: This function will need to be called in the main function when initialising a MongoDB connection.
-    pub async fn create_ttl_index(&self) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
-        let collection = self
-            .client
-            .database(&self.index.db_name)
-            .collection::<Document>(&self.index.coll_name);
-
-        // Create TTL index on the 'expiry' field
-        let index_model = mongodb::IndexModel::builder()
-            .keys(doc! { "expiry": 1 })
-            .options(Some(
-                mongodb::options::IndexOptions::builder()
-                    .expire_after(Some(std::time::Duration::from_secs(0)))
-                    .build(),
-            ))
-            .build();
-
-        collection.create_index(index_model, None).await?;
-        Ok(())
-    }
-}
-
-#[async_trait]
-impl KvStoreConnection for MongoDbConn {
-    async fn init(url: &str) -> Result<Self, Box<dyn std::error::Error + Send + Sync>> {
-        // Tracing
-        let span = span!(Level::TRACE, "MongoDbConn::init");
-        let _enter = span.enter();
-
-        let client_options = match ClientOptions::parse(url).await {
-            Ok(client_options) => client_options,
-            Err(e) => panic!("Failed to connect to MongoDB instance with error: {e}"),
-        };
-
-        trace!("Connected to MongoDB instance at {url}");
-
-        let client = match Client::with_options(client_options) {
-            Ok(client) => client,
-            Err(e) => panic!("Failed to connect to MongoDB instance with error: {e}"),
-        };
-
-        trace!("MongoDB client created successfully");
-
-        let index = MongoDbIndex {
-            db_name: String::from("default"),
-            coll_name: String::from("default"),
-        };
-
-        Ok(MongoDbConn { client, index })
-    }
-
-    async fn set_data<T: Serialize + DeserializeOwned + Send>(
-        &mut self,
-        key: &str,
-        value_id: &str,
-        value: T,
-    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
-        // Tracing
-        let span = span!(Level::TRACE, "MongoDbConn::set_data");
-        let _enter = span.enter();
-
-        let collection = self
-            .client
-            .database(&self.index.db_name)
-            .collection::<Document>(&self.index.coll_name);
-
-        // Check if the document with the given key exists
-        let filter = doc! { "_id": key };
-        let existing_doc = collection.find_one(filter.clone(), None).await?;
-
-        let mut mapping: HashMap<String, T> = if let Some(doc) = existing_doc {
-            if doc.contains_key("data") {
-                // Deserialize the existing data
-                mongodb::bson::from_bson(doc.get("data").unwrap().clone())?
-            } else {
-                HashMap::new()
-            }
-        } else {
-            HashMap::new()
-        };
-
-        // Append the new data to the vec
-        mapping.insert(value_id.to_string(), value);
-
-        // Serialize the vec back to a BSON array
-        let serialized_vec = mongodb::bson::to_bson(&mapping)?;
-
-        // Create or update the document
-        let update = doc! {
-            "$set": { "data": serialized_vec }
-        };
-        match collection
-            .update_one(
-                filter,
-                update,
-                mongodb::options::UpdateOptions::builder()
-                    .upsert(true)
-                    .build(),
-            )
-            .await
-        {
-            Ok(_) => (),
-            Err(e) => {
-                event!(Level::ERROR, "Failed to set data with error: {e}");
-            }
-        };
-
-        trace!("Data set successfully with expiry");
-
-        Ok(())
-    }
-
-    async fn set_data_with_expiry<T: Serialize + DeserializeOwned + Send>(
-        &mut self,
-        key: &str,
-        value_id: &str,
-        value: T,
-        seconds: usize,
-    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
-        // Tracing
-        let span = span!(Level::TRACE, "MongoDbConn::set_data_with_expiry");
-        let _enter = span.enter();
-
-        let collection = self
-            .client
-            .database(&self.index.db_name)
-            .collection::<Document>(&self.index.coll_name);
-
-        // Check if the document with the given key exists
-        let filter = doc! { "_id": key };
-        let existing_doc = collection.find_one(filter.clone(), None).await?;
-
-        let mut mapping: HashMap<String, T> = if let Some(doc) = existing_doc {
-            // Deserialize the existing data
-            mongodb::bson::from_bson(doc.get("data").unwrap().clone())?
-        } else {
-            HashMap::new()
-        };
-
-        // Append the new data to the vec
-        mapping.insert(value_id.to_string(), value);
-
-        // Serialize the vec back to a BSON array
-        let serialized_vec = mongodb::bson::to_bson(&mapping)?;
-
-        // Calculate the expiry time
-        let expiry_time = (seconds * 1000) as i64;
-        let expiry_bson_datetime = DateTime::from_millis(expiry_time);
-
-        // Create or update the document with the new expiry time
-        let update = doc! {
-            "$set": {
-                "data": serialized_vec,
-                "expiry": expiry_bson_datetime,
-            }
-        };
-        collection
-            .update_one(
-                filter,
-                update,
-                mongodb::options::UpdateOptions::builder()
-                    .upsert(true)
-                    .build(),
-            )
-            .await?;
-
-        trace!("Data set successfully with expiry");
-
-        Ok(())
-    }
-
-    async fn del_data(
-        &mut self,
-        key: &str,
-        value_id: Option<&str>,
-    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
-        // Tracing
-        let span = span!(Level::TRACE, "MongoDbConn::del_data");
-        let _enter = span.enter();
-
-        let collection = self
-            .client
-            .database(&self.index.db_name)
-            .collection::<Document>(&self.index.coll_name);
-
-        // Build the filter based on the key
-        let filter = doc! { "_id": key };
-
-        // If value_id is provided, we need to fetch the document and update it
-        if let Some(value_id) = value_id {
-            let update = doc! {
-                "$unset": {
-                    &format!("data.{}", value_id): ""
-                }
-            };
-
-            match collection.find_one_and_update(filter, update, None).await {
-                Ok(result) => {
-                    if let Some(_) = result {
-                        // Document was found and updated, log success or handle as needed
-                        trace!("Data updated successfully");
-                    } else {
-                        // Document not found
-                        event!(Level::ERROR, "Document not found for key: {}", key);
-                    }
-                }
-                Err(e) => {
-                    // Handle error from MongoDB
-                    event!(Level::ERROR, "Failed to update data with error: {:?}", e);
-                    return Err(Box::new(e));
-                }
-            }
-        } else {
-            // value_id is None, so delete the entire document
-            match collection.delete_one(filter.clone(), None).await {
-                Ok(_) => {
-                    trace!("Data deleted successfully");
-                }
-                Err(e) => {
-                    event!(Level::ERROR, "Failed to delete data with error: {:?}", e);
-                    return Err(Box::new(e));
-                }
-            };
-        }
-
-        Ok(())
-    }
-
-    async fn get_data<T: Serialize + DeserializeOwned + Clone + Send>(
-        &mut self,
-        key: &str,
-        value_id: Option<&str>,
-    ) -> Result<Option<HashMap<String, T>>, Box<dyn std::error::Error + Send + Sync>> {
-        // Tracing
-        let span = span!(Level::TRACE, "MongoDbConn::get_data");
-        let _enter = span.enter();
-
-        let collection = self
-            .client
-            .database(&self.index.db_name)
-            .collection::<Document>(&self.index.coll_name);
-
-        // Check if the document with the given key exists
-        let filter = doc! { "_id": key };
-        let doc_find = match collection.find_one(filter.clone(), None).await {
-            Ok(doc) => doc,
-            Err(e) => {
-                event!(Level::ERROR, "Failed to get data with error: {e}");
-                return Ok(None);
-            }
-        };
-
-        if let Some(doc) = doc_find {
-            // Deserialize the existing data
-            let mapping: HashMap<String, T> =
-                mongodb::bson::from_bson(doc.get("data").unwrap().clone())?;
-
-            if let Some(id) = value_id {
-                // If value_id is provided, return only the value with the given ID
-                if let Some(value) = mapping.get(id) {
-                    let mut result: HashMap<String, T> = HashMap::new();
-                    result.insert(id.to_string(), value.clone());
-                    return Ok(Some(result));
-                } else {
-                    // Value with the given ID not found
-                    event!(Level::ERROR, "Value with ID {id} not found for key {key}");
-                    return Ok(None);
-                }
-            }
-            return Ok(Some(mapping));
-        }
-
-        warn!("Data unsuccessfully deserialized");
-
-        Ok(None)
-    }
-}
diff --git a/src/db/redis_cache.rs b/src/db/redis_cache.rs
deleted file mode 100644
index 817a48e..0000000
--- a/src/db/redis_cache.rs
+++ /dev/null
@@ -1,153 +0,0 @@
-use std::collections::HashMap;
-
-use crate::db::handler::{CacheHandler, KvStoreConnection};
-use async_trait::async_trait;
-use redis::{aio::ConnectionManager, AsyncCommands};
-use serde::{de::DeserializeOwned, Serialize};
-use tracing::{event, span, Level};
-
-#[derive(Clone)]
-pub struct RedisCacheConn {
-    pub connection: ConnectionManager,
-}
-
-#[async_trait]
-impl CacheHandler for RedisCacheConn {
-    async fn expire_entry(
-        &mut self,
-        key: &str,
-        seconds: usize,
-    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
-        self.connection.expire(key, seconds).await?;
-        Ok(())
-    }
-}
-
-#[async_trait]
-impl KvStoreConnection for RedisCacheConn {
-    async fn init(url: &str) -> Result<Self, Box<dyn std::error::Error + Send + Sync>> {
-        let redis_client = redis::Client::open(url)?;
-        let redis_connection_manager = ConnectionManager::new(redis_client).await?;
-
-        Ok(RedisCacheConn {
-            connection: redis_connection_manager,
-        })
-    }
-
-    async fn set_data<T: Serialize + DeserializeOwned + Send>(
-        &mut self,
-        key: &str,
-        value_id: &str,
-        value: T,
-    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
-        let exists: bool = self.connection.exists(key).await?;
-
-        let mut mapping: HashMap<String, T> = if exists {
-            // Get the existing data
-            let data: String = self.connection.get(key).await?;
-            serde_json::from_str(&data)?
-        } else {
-            HashMap::new()
-        };
-
-        // Append the new data to the vec
-        mapping.insert(value_id.to_string(), value);
-
-        let serialized = serde_json::to_string(&mapping)?;
-        self.connection.set(key, serialized).await?;
-
-        Ok(())
-    }
-
-    async fn set_data_with_expiry<T: Serialize + DeserializeOwned + Send>(
-        &mut self,
-        key: &str,
-        value_id: &str,
-        value: T,
-        seconds: usize,
-    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
-        // Check if the key exists
-        let exists: bool = self.connection.exists(key).await?;
-
-        let mut mapping: HashMap<String, T> = if exists {
-            // Get the existing data
-            let data: String = self.connection.get(key).await?;
-            serde_json::from_str(&data)?
-        } else {
-            HashMap::new()
-        };
-
-        // Append the new data to the hashmap
-        mapping.insert(value_id.to_string(), value);
-
-        // Serialize the vec back to a string
-        let serialized = serde_json::to_string(&mapping)?;
-
-        // Set the data back to Redis
-        self.connection.set(key, serialized).await?;
-
-        // Set the expiry time for the key
-        self.connection.expire(key, seconds).await?;
-
-        Ok(())
-    }
-
-    async fn del_data(
-        &mut self,
-        key: &str,
-        value_id: Option<&str>,
-    ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
-        if let Some(value_id) = value_id {
-            let exists: bool = self.connection.exists(key).await?;
-
-            if exists {
-                let mut mapping: HashMap<String, serde_json::Value> = self.get_data(key, None).await?.unwrap();
-                mapping.remove(value_id);
-                let serialized = serde_json::to_string(&mapping)?;
-                self.connection.set(key, serialized).await?;
-            }
-            return Ok(());
-        }
-
-        let _: () = self.connection.del(key).await?;
-        Ok(())
-    }
-
-    async fn get_data<T: Serialize + DeserializeOwned + Clone + Send>(
-        &mut self,
-        key: &str,
-        value_id: Option<&str>,
-    ) -> Result<Option<HashMap<String, T>>, Box<dyn std::error::Error + Send + Sync>> {
-        let span = span!(Level::TRACE, "MongoDbConn::get_data");
-        let _enter = span.enter();
-
-        // Check if the key exists
-        let exists: bool = self.connection.exists(key).await?;
-
-        if exists {
-            // Get the existing data
-            let data: String = self.connection.get(key).await?;
-            let mapping: HashMap<String, T> = serde_json::from_str(&data)?;
-
-            if let Some(value_id) = value_id {
-                let value = mapping.get(value_id);
-                if let Some(value) = value {
-                    let mut new_mapping: HashMap<String, T> = HashMap::new();
-                    new_mapping.insert(value_id.to_string(), value.clone());
-                    return Ok(Some(new_mapping));
-                } else {
-                    // Value with the given ID not found
-                    event!(
-                        Level::ERROR,
-                        "Value with ID {value_id} not found for key {key}"
-                    );
-                    return Ok(None);
-                }
-            }
-
-            return Ok(Some(mapping));
-        }
-
-        Ok(None)
-    }
-}
diff --git a/src/lib.rs b/src/lib.rs
index 97b6d70..939c75c 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,4 +1,3 @@
 pub mod api;
 pub mod crypto;
-pub mod db;
 pub mod utils;