Compare commits

..

No commits in common. "main" and "feature/syncable_derive" have entirely different histories.

153 changed files with 1523 additions and 2459 deletions

View file

@ -1,32 +1,5 @@
[package]
name = "caretta-sync"
edition.workspace = true
version.workspace = true
description.workspace = true
license.workspace = true
repository.workspace = true
[features]
default = ["macros"]
bevy = ["dep:caretta-sync-bevy"]
mobile = ["dep:caretta-sync-mobile"]
cli = ["dep:caretta-sync-cli"]
desktop = ["cli", "bevy"]
macros = ["dep:caretta-sync-macros"]
test = ["caretta-sync-core/test"]
[dependencies]
caretta-sync-bevy = { path = "bevy", optional = true }
caretta-sync-core.workspace = true
caretta-sync-cli = { path="cli", optional = true }
caretta-sync-mobile = { path = "mobile", optional = true }
caretta-sync-macros = { path="macros", optional = true}
[dev-dependencies]
caretta-sync-core = {workspace = true, features = ["test"]}
[workspace]
members = [ ".", "core", "macros", "cli", "mobile", "examples/*" , "bevy"]
members = [ "lazy-supplements-*", "examples/*" ]
resolver = "3"
[workspace.package]
@ -37,27 +10,15 @@ license = "MIT OR Apache-2.0"
repository = "https://forgejo.fireturlte.net/lazy-supplements"
[workspace.dependencies]
bevy = { git = "https://github.com/bevyengine/bevy.git", rev="16ffdaea0daec11e4347d965f56c9c8e1122a488" }
chrono = "0.4.41"
ciborium = "0.2.2"
clap = { version = "4.5.38", features = ["derive"] }
caretta-sync-core.path = "core"
dioxus = { version = "0.6.0", features = [] }
futures = { version = "0.3.31", features = ["executor"] }
lazy-supplements-core.path = "lazy-supplements-core"
libp2p = { version = "0.55.0", features = ["macros", "mdns", "noise", "ping", "tcp", "tokio", "yamux" ] }
sea-orm = { version = "1.1.11", features = ["sqlx-sqlite", "runtime-tokio-native-tls", "macros", "with-chrono", "with-uuid"] }
sea-orm-migration = { version = "1.1.0", features = ["runtime-tokio-rustls", "sqlx-postgres"] }
serde = { version = "1.0.219", features = ["derive"] }
thiserror = "2.0.12"
tokio = { version = "1.45.0", features = ["macros", "rt", "rt-multi-thread"] }
tonic = "0.14.0"
uuid = { version = "1.17.0", features = ["v7"] }
[profile.dev]
opt-level = 1
[profile.dev.package."*"]
opt-level = 3
[profile.release]
codegen-units = 1
lto = "thin"

View file

@ -1,8 +1,3 @@
# Caretta Framework
# Lazy Supplements Framework
A local-first application framework.
A local-first application framework for the lazy person.
## Features
- Local first
- Decentralized data synchronization with libp2p
- Device management

View file

@ -1,15 +0,0 @@
[package]
name = "caretta-sync-bevy"
edition.workspace = true
version.workspace = true
description.workspace = true
license.workspace = true
repository.workspace = true
[dependencies]
bevy.workspace = true
caretta-sync-core.workspace = true
futures.workspace = true
sea-orm.workspace = true
tokio.workspace = true
tonic.workspace = true

View file

@ -1,4 +0,0 @@
use bevy::{asset::uuid::Uuid, ecs::component::Component};
#[derive(Component)]
struct Id(Uuid);

View file

@ -1,2 +0,0 @@
pub mod global;
pub mod peer;

View file

@ -1,47 +0,0 @@
use bevy::{app::{App, Plugin, Startup, Update}, ecs::{component::Component, query::With, system::{Commands, Query}}, tasks::TaskPool};
use caretta_sync_core::{cache::entity::{CachedPeerEntity, CachedPeerModel}, global::{CONFIG, DATABASE_CONNECTIONS}};
use caretta_sync_core::{
proto::*,
};
use sea_orm::EntityTrait;
#[derive(Component)]
pub struct Peer;
#[derive(Component)]
pub struct PeerId(String);
#[derive(Component)]
pub struct PeerAddress(String);
#[tokio::main]
async fn add_cached_peers(mut commands: Commands) {
let config = CONFIG.get_unchecked();
let path = String::from("unix://") + config.rpc.socket_path.as_os_str().to_str().expect("Invalid string");
let mut client = caretta_sync_core::proto::cached_peer_service_client::CachedPeerServiceClient::connect(path).await.expect("Unix socket should be accessible");
let request = tonic::Request::new(CachedPeerListRequest {});
let response = client.list(request).await.expect("Failed to send request or receive response");
let peers = response.into_inner().peers;
for model in peers.into_iter() {
commands.spawn((Peer, PeerId(model.peer_id.to_string())));
}
}
fn print_peer(query: Query<&PeerId, With<Peer>>) {
for peer_id in &query {
println!("Hello {}!", peer_id.0);
}
}
fn hello_world() {
println!("hello world!");
}
pub struct PeerPlugin;
impl Plugin for PeerPlugin {
fn build(&self, app: &mut App) {
app.add_systems(Startup, add_cached_peers);
app.add_systems(Update, (hello_world, print_peer));
}
}

View file

@ -1,69 +0,0 @@
use std::{net::IpAddr, path::PathBuf, sync::LazyLock};
use clap::Args;
use caretta_sync_core::{
config::{Config, ConfigError, PartialConfig, PartialP2pConfig, PartialStorageConfig},
utils::{emptiable::Emptiable, mergeable::Mergeable}
};
use libp2p::identity::Keypair;
use serde::{Deserialize, Serialize};
use tokio::sync::OnceCell;
#[derive(Args, Clone, Debug)]
pub struct ConfigArgs {
#[arg(short = 'c', long = "config")]
pub file_path: Option<PathBuf>,
#[arg(skip)]
pub file_content: OnceCell<PartialConfig>,
#[command(flatten)]
pub args: PartialConfig,
}
impl ConfigArgs {
fn get_file_path_or_default(&self, app_name: &'static str) -> PathBuf {
self.file_path.clone().unwrap_or(
dirs::config_local_dir()
.expect("Config user directory should be set")
.join(app_name)
.join("config.toml")
)
}
async fn get_or_read_file_content(&self, app_name: &'static str) -> PartialConfig {
self.file_content.get_or_init(|| async {
PartialConfig::read_from(self.get_file_path_or_default(app_name)).await.expect("Config file should be valid!")
}).await.clone()
}
pub async fn to_partial_config_with_default(&self, app_name: &'static str) -> PartialConfig {
let mut default = PartialConfig::default_desktop(app_name);
default.merge(self.to_partial_config_without_default(app_name).await);
default
}
pub async fn to_partial_config_without_default(&self, app_name: &'static str) -> PartialConfig {
let mut file_content = self.get_or_read_file_content(app_name).await;
let args = self.args.clone();
file_content.merge(args);
file_content
}
async fn has_p2p_private_key(&self, app_name: &'static str) -> bool {
let merged = self.to_partial_config_with_default(app_name).await;
match merged.p2p {
Some(p2p) => p2p.private_key.is_some(),
None => false
}
}
pub async fn into_config(mut self, app_name: &'static str) -> Config {
if !self.has_p2p_private_key(app_name).await {
let path = self.get_file_path_or_default(app_name);
let mut content = self.file_content.get_mut().unwrap();
if let Some(p2p) = content.p2p.as_mut() {
p2p.init_private_key();
} else {
content.p2p.insert(PartialP2pConfig::empty().with_new_private_key());
}
content.write_to(path).await.expect("Config file should be writable on first run to initialize the secret key");
}
self.to_partial_config_with_default(app_name).await.try_into().expect("Some configurations are missing!")
}
}

View file

@ -1,17 +0,0 @@
use clap::Args;
use caretta_sync_core::utils::runnable::Runnable;
use crate::cli::ConfigArgs;
#[derive(Debug, Args)]
pub struct ConfigCheckCommandArgs{
#[command(flatten)]
config: ConfigArgs
}
impl Runnable for ConfigCheckCommandArgs {
#[tokio::main]
async fn run(self, app_name: &'static str) {
let _ = self.config.into_config(app_name).await;
println!("Ok");
}
}

View file

@ -1,24 +0,0 @@
use clap::Args;
use caretta_sync_core::{config::PartialConfig, utils::runnable::Runnable};
use crate::cli::ConfigArgs;
#[derive(Debug, Args)]
pub struct ConfigListCommandArgs{
#[command(flatten)]
config: ConfigArgs,
#[arg(short,long)]
all: bool
}
impl Runnable for ConfigListCommandArgs {
#[tokio::main]
async fn run(self, app_name: &'static str) {
let config: PartialConfig = if self.all {
self.config.into_config(app_name).await.into()
} else {
self.config.to_partial_config_without_default(app_name).await
};
println!("{}", config.into_toml().unwrap())
}
}

View file

@ -1,38 +0,0 @@
mod check;
mod list;
pub use check::*;
pub use list::*;
use caretta_sync_core::utils::runnable::Runnable;
use clap::{Args, Subcommand};
#[derive(Debug, Args)]
pub struct ConfigCommandArgs {
#[command(subcommand)]
pub command: ConfigSubcommand
}
impl Runnable for ConfigCommandArgs {
fn run(self, app_name: &'static str) {
self.command.run(app_name)
}
}
#[derive(Debug, Subcommand)]
pub enum ConfigSubcommand {
Check(ConfigCheckCommandArgs),
List(ConfigListCommandArgs),
}
impl Runnable for ConfigSubcommand {
fn run(self, app_name: &'static str) {
match self {
Self::Check(x) => x.run(app_name),
Self::List(x) => x.run(app_name),
}
}
}

View file

@ -1,13 +0,0 @@
use std::path::PathBuf;
mod args;
mod config;
mod device;
mod peer;
mod serve;
pub use args::*;
pub use config::*;
pub use device::*;
pub use peer::*;
pub use serve::*;

View file

@ -1,17 +0,0 @@
use clap::Args;
use caretta_sync_core::utils::runnable::Runnable;
use crate::cli::{ConfigArgs, PeerArgs};
#[derive(Debug, Args)]
pub struct PeerInfoCommandArgs{
#[command(flatten)]
config: ConfigArgs,
#[command(flatten)]
peer: PeerArgs,
}
impl Runnable for PeerInfoCommandArgs {
fn run(self, app_name: &'static str) {
todo!()
}
}

View file

@ -1,24 +0,0 @@
use clap::Args;
use caretta_sync_core::{
utils::runnable::Runnable,
proto::*,
};
use crate::cli::ConfigArgs;
#[derive(Debug, Args)]
pub struct PeerListCommandArgs{
#[command(flatten)]
config: ConfigArgs
}
impl Runnable for PeerListCommandArgs {
#[tokio::main]
async fn run(self, app_name: &'static str) {
let config = self.config.into_config(app_name).await;
let path = String::from("unix://") + config.rpc.socket_path.as_os_str().to_str().expect("Invalid string");
let mut client = caretta_sync_core::proto::cached_peer_service_client::CachedPeerServiceClient::connect(path).await.expect("Unix socket should be accessible");
let request = tonic::Request::new(CachedPeerListRequest {});
let response = client.list(request).await.expect("Failed to send request or receive response");
println!("{:?}", response);
}
}

View file

@ -1,42 +0,0 @@
mod info;
mod list;
mod ping;
pub use info::*;
pub use list::*;
pub use ping::*;
use caretta_sync_core::utils::runnable::Runnable;
use clap::{Args, Subcommand};
#[derive(Debug, Args)]
pub struct PeerCommandArgs {
#[command(subcommand)]
pub command: PeerSubcommand
}
impl Runnable for PeerCommandArgs {
fn run(self, app_name: &'static str) {
self.command.run(app_name)
}
}
#[derive(Debug, Subcommand)]
pub enum PeerSubcommand {
Info(PeerInfoCommandArgs),
List(PeerListCommandArgs),
Ping(PeerPingCommandArgs),
}
impl Runnable for PeerSubcommand {
fn run(self, app_name: &'static str) {
match self {
Self::Info(x) => x.run(app_name),
Self::List(x) => x.run(app_name),
Self::Ping(x) => x.run(app_name),
}
}
}

View file

@ -1,18 +0,0 @@
use clap::Args;
use caretta_sync_core::utils::runnable::Runnable;
use crate::cli::{ConfigArgs, PeerArgs};
#[derive(Debug, Args)]
pub struct PeerPingCommandArgs{
#[command(flatten)]
config: ConfigArgs,
#[command(flatten)]
peer: PeerArgs,
}
impl Runnable for PeerPingCommandArgs {
#[tokio::main]
async fn run(self, app_name: &'static str) {
todo!()
}
}

View file

@ -1,29 +0,0 @@
use std::marker::PhantomData;
use clap::Args;
use caretta_sync_core::{config::Config, data::migration::DataMigrator, global::{CONFIG, DATABASE_CONNECTIONS}, server::ServerTrait, utils::runnable::Runnable};
use libp2p::{noise, ping, swarm::{NetworkBehaviour, SwarmEvent}, tcp, yamux, Swarm};
use super::ConfigArgs;
#[derive(Args, Debug)]
pub struct ServeCommandArgs<T>
where
T: ServerTrait
{
#[arg(skip)]
server: PhantomData<T>,
#[command(flatten)]
config: ConfigArgs,
}
impl<T> Runnable for ServeCommandArgs<T>
where
T: ServerTrait
{
#[tokio::main]
async fn run(self, app_name: &'static str) {
let config = CONFIG.get_or_init::<Config>(self.config.into_config(app_name).await).await;
let _ = DATABASE_CONNECTIONS.get_or_init_unchecked(&config, DataMigrator).await;
T::serve_all(config).await.unwrap();
}
}

View file

@ -1 +0,0 @@
pub mod cli;

View file

@ -1,4 +0,0 @@
fn main() -> Result<(), Box<dyn std::error::Error>> {
tonic_prost_build::compile_protos("proto/caretta_sync.proto")?;
Ok(())
}

View file

@ -1,33 +0,0 @@
syntax = "proto3";
package caretta_sync;
import "google/protobuf/timestamp.proto";
enum PeerListOrderBy {
CREATED_AT = 0;
UPDATED_AT = 1;
PEER_ID = 2;
}
service CachedPeerService {
rpc List(CachedPeerListRequest) returns (CachedPeerListResponse);
}
message CachedPeerListRequest {}
message CachedPeerMessage {
uint32 number = 1;
string peer_id = 2;
google.protobuf.Timestamp created_at = 3;
repeated CachedAddressMessage addresses = 4;
}
message CachedAddressMessage {
uint32 number = 1;
google.protobuf.Timestamp created_at = 2;
google.protobuf.Timestamp updated_at = 3;
string multiaddress = 4;
}
message CachedPeerListResponse {
repeated CachedPeerMessage peers = 1;
}

View file

@ -1,59 +0,0 @@
use std::str::FromStr;
use chrono::{Days, Local};
use libp2p::{multiaddr, Multiaddr, PeerId};
use prost_types::Timestamp;
use sea_orm::{entity::{
prelude::*, *
}, sea_query};
use serde::{Deserialize, Serialize};
use crate::{cache, data::value::{MultiaddrValue, PeerIdValue}, utils::utc_to_timestamp};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Deserialize, Serialize)]
#[sea_orm(table_name = "cached_address")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: u32,
#[sea_orm(indexed)]
pub created_at: DateTimeUtc,
#[sea_orm(indexed)]
pub updated_at: DateTimeUtc,
#[sea_orm(indexed)]
pub cached_peer_id: u32,
#[sea_orm(indexed)]
pub multiaddress: MultiaddrValue,
}
#[derive(Copy, Clone, Debug, DeriveRelation, EnumIter)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::CachedPeerEntity",
from = "Column::CachedPeerId",
to = "super::CachedPeerColumn::Id"
)]
CachedPeer,
}
impl Related<super::CachedPeerEntity> for Entity {
fn to() -> RelationDef {
Relation::CachedPeer.def()
}
}
impl ActiveModelBehavior for ActiveModel {}
impl ActiveModel {
pub fn new(cached_peer_id: u32, multiaddr: Multiaddr) -> Self {
let timestamp: DateTimeUtc = Local::now().to_utc();
Self{
cached_peer_id: Set(cached_peer_id),
multiaddress: Set(MultiaddrValue::from(multiaddr)),
created_at: Set(timestamp),
updated_at: Set(timestamp),
..Default::default()
}
}
}

View file

@ -1,57 +0,0 @@
use std::str::FromStr;
use chrono::{Days, Local};
use libp2p::{multiaddr, Multiaddr, PeerId};
use sea_orm::{entity::{
prelude::*, *
}, sea_query};
use serde::{Deserialize, Serialize};
use crate::data::value::{MultiaddrValue, PeerIdValue};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Deserialize, Serialize)]
#[sea_orm(table_name = "cached_peer")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: u32,
#[sea_orm(indexed)]
pub created_at: DateTimeUtc,
#[sea_orm(indexed)]
pub updated_at: DateTimeUtc,
#[sea_orm(indexed)]
pub peer_id: PeerIdValue,
}
#[derive(Copy, Clone, Debug, DeriveRelation, EnumIter)]
pub enum Relation {
#[sea_orm(has_many = "super::CachedAddressEntity")]
CachedAddress,
}
impl Related<super::CachedAddressEntity> for Entity {
fn to() -> RelationDef {
Relation::CachedAddress.def()
}
}
impl ActiveModelBehavior for ActiveModel {}
impl ActiveModel {
pub fn new(peer_id: PeerId) -> Self {
let timestamp: DateTimeUtc = Local::now().to_utc();
Self{
peer_id: Set(PeerIdValue::from(peer_id)),
created_at: Set(timestamp),
updated_at: Set(timestamp),
..Default::default()
}
}
}
impl Entity {
pub fn find_by_peer_id(peer_id: PeerId) -> Select<Entity> {
Self::find().filter(Column::PeerId.eq(PeerIdValue::from(peer_id)))
}
}
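For reference, a minimal usage sketch for the `find_by_peer_id` helper above, assuming an already-initialized `DatabaseConnection` as in the cache tests later in this diff; `lookup` is a hypothetical function placed in the same module as the entity:

```rust
use libp2p::PeerId;
use sea_orm::{DatabaseConnection, DbErr};

// Hypothetical helper living next to the entity above: returns the cached
// record for a peer, or None if the peer has not been seen yet.
async fn lookup(db: &DatabaseConnection, peer_id: PeerId) -> Result<Option<Model>, DbErr> {
    Entity::find_by_peer_id(peer_id).one(db).await
}
```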

View file

@ -1,48 +0,0 @@
mod cached_peer;
mod cached_address;
pub use cached_peer::{
ActiveModel as CachedPeerActiveModel,
Column as CachedPeerColumn,
Model as CachedPeerModel,
Entity as CachedPeerEntity,
};
pub use cached_address::{
ActiveModel as CachedAddressActiveModel,
Column as CachedAddressColumn,
Model as CachedAddressModel,
Entity as CachedAddressEntity,
};
#[cfg(test)]
mod tests {
use std::net::Ipv4Addr;
use crate::{cache::entity::cached_peer, data::migration::DataMigrator, global::{DATABASE_CONNECTIONS}, tests::TEST_CONFIG};
use super::*;
use libp2p::{identity::{self, Keypair}, multiaddr, swarm::handler::multi, Multiaddr, PeerId};
use sea_orm::ActiveModelTrait;
#[tokio::test]
async fn insert() {
let db = DATABASE_CONNECTIONS.get_or_init_unchecked(&*TEST_CONFIG, DataMigrator).await.cache;
let peer_id = Keypair::generate_ed25519().public().to_peer_id();
let multiaddr = Multiaddr::empty()
.with(Ipv4Addr::new(127,0,0,1).into())
.with(multiaddr::Protocol::Tcp(0));
let inserted_cached_peer: CachedPeerModel = CachedPeerActiveModel::new(peer_id.clone())
.insert(db).await.unwrap();
let inserted_cached_address: CachedAddressModel = CachedAddressActiveModel::new(inserted_cached_peer.id, multiaddr.clone())
.insert(db).await.unwrap();
assert_eq!(PeerId::from(inserted_cached_peer.peer_id), peer_id);
assert_eq!(Multiaddr::from(inserted_cached_address.multiaddress), multiaddr);
}
}

View file

@ -1,148 +0,0 @@
use sea_orm_migration::{prelude::*, schema::*};
use crate::migration::TableMigration;
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait::async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
CachedPeer::up(manager).await?;
CachedAddress::up(manager).await?;
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
CachedAddress::down(manager).await?;
CachedPeer::down(manager).await?;
Ok(())
}
}
#[derive(DeriveIden, DeriveMigrationName)]
enum CachedPeer {
Table,
Id,
PeerId,
CreatedAt,
UpdatedAt,
}
static IDX_CACHED_PEER_PEER_ID: &str = "idx_cached_peer_peer_id";
static IDX_CACHED_PEER_CREATED_AT: &str = "idx_cached_peer_created_at";
static IDX_CACHED_PEER_UPDATED_AT: &str = "idx_cached_peer_updated_at";
#[async_trait::async_trait]
impl TableMigration for CachedPeer {
async fn up<'a>(manager: &'a SchemaManager<'a>) -> Result<(), DbErr> {
manager.create_table(
Table::create()
.table(Self::Table)
.if_not_exists()
.col(pk_auto(Self::Id))
.col(string_len(Self::PeerId, 255))
.col(timestamp(Self::CreatedAt))
.col(timestamp(Self::UpdatedAt))
.to_owned()
).await?;
manager.create_index(
Index::create()
.name(IDX_CACHED_PEER_PEER_ID)
.table(Self::Table)
.col(Self::PeerId)
.to_owned()
).await?;
manager.create_index(
Index::create()
.name(IDX_CACHED_PEER_CREATED_AT)
.table(Self::Table)
.col(Self::CreatedAt)
.to_owned()
).await?;
manager.create_index(
Index::create()
.name(IDX_CACHED_PEER_UPDATED_AT)
.table(Self::Table)
.col(Self::UpdatedAt)
.to_owned()
).await?;
Ok(())
}
async fn down<'a>(manager: &'a SchemaManager<'a>) -> Result<(), DbErr>{
manager.drop_table(Table::drop().table(Self::Table).to_owned()).await
}
}
#[derive(DeriveIden, DeriveMigrationName)]
enum CachedAddress {
Table,
Id,
CachedPeerId,
CreatedAt,
UpdatedAt,
Multiaddress,
}
static IDX_CACHED_ADDRESS_MULTIADDRESS: &str = "idx_cached_address_multiaddress";
static IDX_CACHED_ADDRESS_CACHED_PEER_ID: &str = "idx_cached_address_cached_peer_id";
static IDX_CACHED_ADDRESS_CREATED_AT: &str = "idx_cached_address_created_at";
static IDX_CACHED_ADDRESS_UPDATED_AT: &str = "idx_cached_address_updated_at";
static FK_CACHED_ADDRESS_CACHED_PEER: &str = "fk_cached_address_cached_peer";
#[async_trait::async_trait]
impl TableMigration for CachedAddress {
async fn up<'a>(manager: &'a SchemaManager<'a>) -> Result<(), DbErr> {
manager.create_table(
Table::create()
.table(Self::Table)
.if_not_exists()
.col(pk_auto(Self::Id))
.col(integer(Self::CachedPeerId))
.foreign_key(ForeignKey::create()
.name(FK_CACHED_ADDRESS_CACHED_PEER)
.from(Self::Table,Self::CachedPeerId)
.to(CachedPeer::Table, CachedPeer::Id)
.on_delete(ForeignKeyAction::Cascade)
.on_update(ForeignKeyAction::Cascade)
)
.col(timestamp(Self::CreatedAt))
.col(timestamp(Self::UpdatedAt))
.col(text_uniq(Self::Multiaddress))
.to_owned()
).await?;
manager.create_index(
Index::create()
.name(IDX_CACHED_ADDRESS_CACHED_PEER_ID)
.table(Self::Table)
.col(Self::CachedPeerId)
.to_owned()
).await?;
manager.create_index(
Index::create()
.name(IDX_CACHED_ADDRESS_MULTIADDRESS)
.table(Self::Table)
.col(Self::Multiaddress)
.to_owned()
).await?;
manager.create_index(
Index::create()
.name(IDX_CACHED_ADDRESS_CREATED_AT)
.table(Self::Table)
.col(Self::CreatedAt)
.to_owned()
).await?;
manager.create_index(
Index::create()
.name(IDX_CACHED_ADDRESS_UPDATED_AT)
.table(Self::Table)
.col(Self::UpdatedAt)
.to_owned()
).await?;
Ok(())
}
async fn down<'a>(manager: &'a SchemaManager<'a>) -> Result<(), DbErr>{
manager.drop_table(Table::drop().table(Self::Table).to_owned()).await
}
}

View file

@ -1,159 +0,0 @@
pub mod error;
mod storage;
mod p2p;
mod rpc;
use std::{path::Path, default::Default};
use crate::{utils::{emptiable::Emptiable, mergeable::Mergeable}};
pub use error::ConfigError;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use tokio::{fs::File, io::{AsyncReadExt, AsyncWriteExt}};
pub use storage::{StorageConfig, PartialStorageConfig};
pub use p2p::{P2pConfig, PartialP2pConfig};
pub use rpc::*;
#[cfg(feature="cli")]
use clap::Args;
#[derive(Clone, Debug)]
pub struct Config {
pub p2p: P2pConfig,
pub storage: StorageConfig,
pub rpc: RpcConfig,
}
impl AsRef<StorageConfig> for Config {
fn as_ref(&self) -> &StorageConfig {
&self.storage
}
}
impl AsRef<P2pConfig> for Config {
fn as_ref(&self) -> &P2pConfig {
&self.p2p
}
}
impl AsRef<RpcConfig> for Config {
fn as_ref(&self) -> &RpcConfig {
&self.rpc
}
}
impl TryFrom<PartialConfig> for Config {
type Error = crate::error::Error;
fn try_from(value: PartialConfig) -> Result<Self, Self::Error> {
Ok(Self{
rpc: value.rpc.ok_or(crate::error::Error::MissingConfig("rpc"))?.try_into()?,
p2p: value.p2p.ok_or(crate::error::Error::MissingConfig("p2p"))?.try_into()?,
storage: value.storage.ok_or(crate::error::Error::MissingConfig("storage"))?.try_into()?
})
}
}
#[cfg_attr(feature="cli", derive(Args))]
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
pub struct PartialConfig {
#[cfg_attr(feature="cli", command(flatten))]
pub p2p: Option<PartialP2pConfig>,
#[cfg_attr(feature="cli", command(flatten))]
pub storage: Option<PartialStorageConfig>,
#[cfg_attr(feature="cli", command(flatten))]
pub rpc: Option<PartialRpcConfig>,
}
impl PartialConfig {
pub fn new() -> Self {
Self {
p2p : Some(PartialP2pConfig::empty().with_new_private_key()),
storage: Some(PartialStorageConfig::empty()),
rpc: Some(PartialRpcConfig::empty()),
}
}
pub fn from_toml(s: &str) -> Result<Self, toml::de::Error> {
toml::from_str(s)
}
pub fn into_toml(&self) -> Result<String, toml::ser::Error> {
toml::to_string(self)
}
pub async fn read_or_create<T>(path: T) -> Result<Self, ConfigError>
where
T: AsRef<Path>
{
if !path.as_ref().exists() {
Self::new().write_to(&path).await?;
}
Self::read_from(&path).await
}
pub async fn read_from<T>(path:T) -> Result<Self, ConfigError>
where
T: AsRef<Path>
{
if !path.as_ref().exists() {
if let Some(x) = path.as_ref().parent() {
std::fs::create_dir_all(x)?;
};
let _ = File::create(&path).await?;
}
let mut file = File::open(path.as_ref()).await?;
let mut content = String::new();
file.read_to_string(&mut content).await?;
let config: Self = toml::from_str(&content)?;
Ok(config)
}
pub async fn write_to<T>(&self, path:T) -> Result<(), ConfigError>
where
T: AsRef<Path>
{
if !path.as_ref().exists() {
if let Some(x) = path.as_ref().parent() {
std::fs::create_dir_all(x)?;
};
let _ = File::create(&path).await?;
}
let mut file = File::create(&path).await?;
file.write_all(toml::to_string(self)?.as_bytes()).await?;
Ok(())
}
#[cfg(not(any(target_os="android", target_os="ios")))]
pub fn default_desktop(app_name: &'static str) -> Self {
Self {
p2p: Some(PartialP2pConfig::default()),
rpc: Some(PartialRpcConfig::default(app_name)),
storage: Some(PartialStorageConfig::default(app_name)),
}
}
}
impl From<Config> for PartialConfig {
fn from(value: Config) -> Self {
Self {
p2p: Some(value.p2p.into()),
storage: Some(value.storage.into()),
rpc: Some(value.rpc.into())
}
}
}
impl Emptiable for PartialConfig {
fn empty() -> Self {
Self {
p2p: None,
storage: None,
rpc: None,
}
}
fn is_empty(&self) -> bool {
self.p2p.is_empty() && self.rpc.is_empty() && self.storage.is_empty()
}
}
impl Mergeable for PartialConfig {
fn merge(&mut self, other: Self) {
self.p2p.merge(other.p2p);
self.rpc.merge(other.rpc);
self.storage.merge(other.storage);
}
}

View file

@ -1,82 +0,0 @@
use std::{net::{IpAddr, Ipv4Addr, SocketAddr, TcpListener}, path::PathBuf, str::FromStr};
#[cfg(feature="cli")]
use clap::Args;
use crate::{config::PartialConfig, utils::{emptiable::Emptiable, mergeable::Mergeable}};
use libp2p::mdns::Config;
use serde::{Deserialize, Serialize};
use crate::config::error::ConfigError;
#[cfg(unix)]
static DEFAULT_SOCKET_PATH: &str = "caretta.sock";
#[derive(Clone, Debug)]
pub struct RpcConfig {
pub socket_path: PathBuf,
}
impl TryFrom<PartialRpcConfig> for RpcConfig {
type Error = ConfigError;
fn try_from(config: PartialRpcConfig) -> Result<Self, Self::Error> {
Ok(Self{
socket_path: config.socket_path.ok_or(ConfigError::MissingConfig("socket_path".to_string()))?,
})
}
}
#[cfg_attr(feature="cli", derive(Args))]
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
pub struct PartialRpcConfig {
pub socket_path: Option<PathBuf>,
}
impl PartialRpcConfig {
pub fn default(app_name: &'static str) -> Self {
let username = whoami::username();
Self{
socket_path: Some(std::env::temp_dir().join(username).join(String::from(app_name) + ".sock")),
}
}
}
impl Emptiable for PartialRpcConfig {
fn empty() -> Self {
Self {
socket_path: None,
}
}
fn is_empty(&self) -> bool {
self.socket_path.is_none()
}
}
impl From<RpcConfig> for PartialRpcConfig {
fn from(source: RpcConfig) -> Self {
Self {
socket_path: Some(source.socket_path),
}
}
}
impl Mergeable for PartialRpcConfig {
fn merge(&mut self, other: Self) {
if let Some(x) = other.socket_path {
self.socket_path = Some(x);
}
}
}
impl Mergeable for Option<PartialRpcConfig> {
fn merge(&mut self, mut other: Self) {
match other.take() {
Some(x) => {
if let Some(y) = self.as_mut() {
y.merge(x);
} else {
let _ = self.insert(x);
}
},
None => {}
};
}
}

View file

@ -1,126 +0,0 @@
use std::path::PathBuf;
#[cfg(feature="cli")]
use clap::Args;
#[cfg(any(test, feature="test"))]
use tempfile::tempdir;
use crate::{config::{ConfigError, PartialConfig}, utils::{emptiable::Emptiable, get_binary_name, mergeable::Mergeable}};
use libp2p::mdns::Config;
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug)]
pub struct StorageConfig {
pub data_directory: PathBuf,
pub cache_directory: PathBuf,
}
impl TryFrom<PartialStorageConfig> for StorageConfig {
type Error = ConfigError;
fn try_from(value: PartialStorageConfig) -> Result<Self, Self::Error> {
Ok(Self {
data_directory: value.data_directory.ok_or(ConfigError::MissingConfig("data_directory".to_string()))?,
cache_directory: value.cache_directory.ok_or(ConfigError::MissingConfig("cache_directory".to_string()))?,
})
}
}
#[cfg_attr(feature="cli", derive(Args))]
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
pub struct PartialStorageConfig {
#[cfg_attr(feature="cli", arg(long))]
pub data_directory: Option<PathBuf>,
#[cfg_attr(feature="cli", arg(long))]
pub cache_directory: Option<PathBuf>,
}
impl PartialStorageConfig {
#[cfg(not(any(target_os="android", target_os="ios")))]
pub fn default(app_name: &'static str) -> Self {
let mut data_dir = dirs::data_local_dir().unwrap();
data_dir.push(app_name);
let mut cache_dir = dirs::cache_dir().unwrap();
cache_dir.push(app_name);
Self {
data_directory: Some(data_dir),
cache_directory: Some(cache_dir)
}
}
#[cfg(target_os="android")]
fn default_android() -> Self{
let ctx = ndk_context::android_context();
let vm = unsafe { jni::JavaVM::from_raw(ctx.vm().cast()) }?;
let mut env = vm.attach_current_thread()?;
let ctx = unsafe { jni::objects::JObject::from_raw(ctx.context().cast()) };
let cache_dir = env
.call_method(ctx, "getFilesDir", "()Ljava/io/File;", &[])?
.l()?;
let cache_dir: jni::objects::JString = env
.call_method(&cache_dir, "toString", "()Ljava/lang/String;", &[])?
.l()?
.try_into()?;
let cache_dir = env.get_string(&cache_dir)?;
let cache_dir = cache_dir.to_str()?;
Ok(cache_dir.to_string())
}
#[cfg(false)]
fn default_ios(){
unsafe {
let file_manager: *mut Object = msg_send![Class::get("NSFileManager").unwrap(), defaultManager];
let paths: Id<Object> = msg_send![file_manager, URLsForDirectory:1 inDomains:1];
let first_path: *mut Object = msg_send![paths, firstObject];
let path: Id<NSString> = Id::from_ptr(msg_send![first_path, path]);
Some(path.as_str().to_string())
}
}
}
impl From<StorageConfig> for PartialStorageConfig {
fn from(config: StorageConfig) -> PartialStorageConfig {
Self {
data_directory: Some(config.data_directory),
cache_directory: Some(config.cache_directory),
}
}
}
impl Emptiable for PartialStorageConfig {
fn empty() -> Self {
Self {
data_directory: None,
cache_directory: None
}
}
fn is_empty(&self) -> bool {
self.data_directory.is_none() && self.cache_directory.is_none()
}
}
impl Mergeable for PartialStorageConfig {
fn merge(&mut self, mut other: Self) {
if let Some(x) = other.data_directory.take() {
let _ = self.data_directory.insert(x);
};
if let Some(x) = other.cache_directory.take() {
let _ = self.cache_directory.insert(x);
};
}
}
impl Mergeable for Option<PartialStorageConfig> {
fn merge(&mut self, mut other: Self) {
match other.take() {
Some(x) => {
if let Some(y) = self.as_mut() {
y.merge(x);
} else {
let _ = self.insert(x);
}
},
None => {}
};
}
}

View file

@ -1,35 +0,0 @@
mod trusted_node;
mod record_deletion;
pub use trusted_node::{
ActiveModel as TrustedNodeActiveModel,
Column as TrustedNodeColumn,
Entity as TrustedNodeEntity,
Model as TrustedNodeModel,
};
pub use record_deletion::{
ActiveModel as RecordDeletionActiveModel,
Column as RecordDeletionColumn,
Entity as RecordDeletionEntity,
Model as RecordDeletionModel,
};
#[cfg(test)]
mod tests {
use crate::{data::{migration::DataMigrator, value::PeerIdValue}, global::{generate_uuid, DATABASE_CONNECTIONS}, tests::TEST_CONFIG};
use super::*;
use libp2p::{identity, PeerId};
use sea_orm::ActiveModelTrait;
#[tokio::test]
async fn check_insert() {
let db = DATABASE_CONNECTIONS.get_or_init_unchecked(&*TEST_CONFIG, DataMigrator).await.cache;
let node = TrustedNodeActiveModel::new(PeerId::random(), "test note".to_owned()).insert(db).await.unwrap();
let _ = RecordDeletionActiveModel::new(node.id, "test_table".to_string(), generate_uuid()).insert(db).await.unwrap();
}
}

View file

@ -1,38 +0,0 @@
#[cfg(any(test,feature="test"))]
use tempfile::TempDir;
use tokio::sync::OnceCell;
use crate::{config::{Config, ConfigError, PartialP2pConfig, PartialRpcConfig, PartialStorageConfig, StorageConfig}, error::Error};
pub static CONFIG: GlobalConfig = GlobalConfig::const_new();
pub struct GlobalConfig {
inner: OnceCell<Config>
}
impl GlobalConfig {
pub const fn const_new() -> Self {
Self{
inner: OnceCell::const_new()
}
}
pub async fn get_or_init<T>(&'static self, config: Config) -> &'static Config where
T: Into<Config>{
self.inner.get_or_init(|| async {
config.into()
}).await
}
pub async fn get_or_try_init<T, E>(&'static self, config: T) -> Result<&'static Config, <T as TryInto<Config>>::Error> where
T: TryInto<Config>,
{
self.inner.get_or_try_init(|| async {
config.try_into()
}).await
}
pub fn get(&'static self) -> Option<&'static Config> {
self.inner.get()
}
pub fn get_unchecked(&'static self) -> &'static Config {
self.get().expect("Config must be initialized before use!")
}
}

View file

@ -1,121 +0,0 @@
use std::path::{Path, PathBuf};
use dirs::cache_dir;
use sea_orm::{ConnectOptions, Database, DbErr, DatabaseConnection};
use sea_orm_migration::MigratorTrait;
use crate::{cache::migration::CacheMigrator, config::StorageConfig, error::Error};
use tokio::sync::OnceCell;
pub static DATABASE_CONNECTIONS: GlobalDatabaseConnections = GlobalDatabaseConnections::const_new();
pub struct DatabaseConnections<'a> {
pub data: &'a DatabaseConnection,
pub cache: &'a DatabaseConnection
}
pub struct GlobalDatabaseConnections {
data: OnceCell<DatabaseConnection>,
cache: OnceCell<DatabaseConnection>,
}
impl GlobalDatabaseConnections {
pub const fn const_new() -> Self {
Self {
data: OnceCell::const_new(),
cache: OnceCell::const_new()
}
}
pub fn get_data(&'static self) -> Option<&'static DatabaseConnection> {
self.data.get()
}
pub fn get_data_unchecked(&'static self) -> &'static DatabaseConnection {
self.get_data().expect("Global data database connection should be initialized before access!")
}
pub fn get_cache(&'static self) -> Option<&'static DatabaseConnection> {
self.cache.get()
}
pub fn get_cache_unchecked(&'static self) -> &'static DatabaseConnection {
self.get_cache().expect("Global cache database connection should be initialized before access!")
}
fn get_data_file_path<T>(config: &T) -> PathBuf
where
T: AsRef<StorageConfig>
{
config.as_ref().data_directory.join("data.sqlite")
}
fn get_cache_file_path<T>(config: &T) -> PathBuf
where
T: AsRef<StorageConfig>
{
config.as_ref().cache_directory.join("cache.sqlite")
}
fn get_url_unchecked<T>(path: T) -> String
where
T: AsRef<Path>
{
"sqlite://".to_string() + path.as_ref().as_os_str().to_str().expect("Failed to convert path to string!") + "?mode=rwc"
}
async fn get_or_init_database_connection_unchecked<T, U>(cell: &OnceCell<DatabaseConnection>, options: T, _: U ) -> &DatabaseConnection
where
T: Into<ConnectOptions>,
U: MigratorTrait
{
cell.get_or_init(|| async {
let db = Database::connect(options.into()).await.unwrap();
U::up(&db, None).await.unwrap();
db
}).await
}
pub async fn get_or_init_unchecked<T, U>(&'static self, config: T, _migrator: U) -> DatabaseConnections
where
T: AsRef<StorageConfig>,
U: MigratorTrait,
{
let data_path = Self::get_data_file_path(&config);
if let Some(x) = data_path.parent() {
std::fs::create_dir_all(x).expect("Failed to create directory for data database");
}
let cache_path = Self::get_cache_file_path(&config);
if let Some(x) = cache_path.parent() {
std::fs::create_dir_all(x).expect("Failed to create directory for cache database");
}
DatabaseConnections{
data: Self::get_or_init_database_connection_unchecked(
&self.data,
Self::get_url_unchecked(data_path),
_migrator
).await,
cache: Self::get_or_init_database_connection_unchecked(
&self.cache,
Self::get_url_unchecked(cache_path),
CacheMigrator
).await,
}
}
}
#[cfg(test)]
pub use tests::*;
#[cfg(test)]
mod tests {
use super::*;
use crate::{cache::migration::CacheMigrator, data::migration::DataMigrator, global::CONFIG, tests::{TEST_CONFIG}};
#[tokio::test]
pub async fn get_or_init_database() {
DATABASE_CONNECTIONS.get_or_init_unchecked(&*TEST_CONFIG, DataMigrator).await;
}
}

View file

@ -1,24 +0,0 @@
use std::{any::type_name, collections::HashMap, net::{IpAddr, Ipv4Addr}, path::{Path, PathBuf}, sync::LazyLock};
use crate::{config::{P2pConfig, PartialP2pConfig, StorageConfig}, error::Error };
use libp2p::{swarm::SwarmEvent, Multiaddr, PeerId};
use sea_orm::{prelude::*, Database};
use sea_orm_migration::MigratorTrait;
use tokio::sync::{OnceCell, RwLock, RwLockReadGuard, RwLockWriteGuard};
mod config;
pub use config::*;
mod database_connection;
pub use database_connection::*;
use uuid::{ContextV7, Timestamp, Uuid};
pub fn generate_uuid() -> Uuid {
Uuid::new_v7(Timestamp::now(ContextV7::new()))
}
pub static DEFAULT_LISTEN_IPS: &[IpAddr] = &[IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0))];
fn uninitialized_message<T>(_var: T) -> String {
    format!("{} is uninitialized!", type_name::<T>())
}

View file

@ -1,104 +0,0 @@
pub mod error;
use chrono::Local;
use libp2p::{ identity::Keypair, mdns, ping, swarm, Multiaddr, PeerId};
use sea_orm::{prelude::DateTimeUtc, ActiveModelTrait, ActiveValue::Set, ColumnTrait, EntityTrait, ModelTrait, QueryFilter};
use tracing::{event, Level};
use crate::{cache::entity::{CachedPeerActiveModel, CachedAddressActiveModel, CachedAddressColumn, CachedAddressEntity, CachedAddressModel, CachedPeerColumn, CachedPeerEntity, CachedPeerModel}, data::value::{MultiaddrValue, PeerIdValue}, error::Error, global::DATABASE_CONNECTIONS};
#[derive(swarm::NetworkBehaviour)]
#[behaviour(to_swarm = "Event")]
pub struct Behaviour {
pub mdns: mdns::tokio::Behaviour,
pub ping: ping::Behaviour,
}
impl TryFrom<&Keypair> for Behaviour {
type Error = Error;
fn try_from(keypair: &Keypair) -> Result<Self, Error> {
Ok(Self {
mdns: mdns::tokio::Behaviour::new(
mdns::Config::default(),
keypair.public().into(),
)?,
ping: libp2p::ping::Behaviour::new(ping::Config::new()),
})
}
}
#[derive(Debug)]
pub enum Event {
Mdns(mdns::Event),
Ping(ping::Event),
}
impl Event {
pub async fn run(&self)
{
match self {
Self::Mdns(x) => {
match x {
mdns::Event::Discovered(e) => {
for peer in e.iter() {
event!(Level::TRACE, "Peer discovered via mdns: {}, {}", &peer.0, &peer.1);
match try_get_or_insert_cached_peer(&peer.0, &peer.1).await {
Ok(_) => {},
Err(e) => {
event!(Level::WARN, "{:?}", e);
}
};
}
},
_ => {},
}
},
_ => {}
}
}
}
impl From<mdns::Event> for Event {
fn from(event: mdns::Event) -> Self {
Self::Mdns(event)
}
}
impl From<ping::Event> for Event {
fn from(event: ping::Event) -> Self {
Self::Ping(event)
}
}
async fn try_get_or_insert_cached_peer(peer_id: &PeerId, peer_addr: &Multiaddr) -> Result<(CachedPeerModel, CachedAddressModel), Error> {
match (
CachedPeerEntity::find().filter(CachedPeerColumn::PeerId.eq(PeerIdValue::from(peer_id.clone()))).one(DATABASE_CONNECTIONS.get_cache_unchecked()).await?,
CachedAddressEntity::find().filter(CachedAddressColumn::Multiaddress.eq(MultiaddrValue::from(peer_addr.clone()))).one(DATABASE_CONNECTIONS.get_cache_unchecked()).await?,
) {
(Some(x), Some(y) ) => {
if x.id == y.cached_peer_id {
event!(Level::TRACE, "Known peer: {}, {}", peer_id, peer_addr);
let mut addr: CachedAddressActiveModel = y.into();
addr.updated_at = Set(Local::now().to_utc());
let updated = addr.update(DATABASE_CONNECTIONS.get_cache_unchecked()).await?;
Ok((x, updated))
} else {
y.delete(DATABASE_CONNECTIONS.get_cache().expect("Cache database should be initialized beforehand!")).await?;
Ok((x.clone(), CachedAddressActiveModel::new(x.id, peer_addr.clone()).insert(DATABASE_CONNECTIONS.get_cache_unchecked()).await?))
}
}
(Some(x), None) => {
event!(Level::INFO, "New address {} for {}", peer_addr, peer_id);
Ok((x.clone(),CachedAddressActiveModel::new(x.id, peer_addr.clone()).insert(DATABASE_CONNECTIONS.get_cache_unchecked()).await?))
},
(None, x) => {
event!(Level::INFO, "Add new peer: {}", peer_id);
let inserted = CachedPeerActiveModel::new(peer_id.clone()).insert(DATABASE_CONNECTIONS.get_cache_unchecked()).await?;
if let Some(y) = x {
event!(Level::INFO, "Remove {} from {}", peer_addr, peer_id);
y.delete(DATABASE_CONNECTIONS.get_cache_unchecked()).await?;
};
event!(Level::INFO, "Add address {} to {}", peer_addr, peer_id);
Ok((inserted.clone(), CachedAddressActiveModel::new(inserted.id, peer_addr.clone()).insert(DATABASE_CONNECTIONS.get_cache_unchecked()).await?))
},
}
}

View file

@ -1,16 +0,0 @@
use libp2p::Multiaddr;
use crate::cache::entity::CachedAddressModel;
use crate::utils::utc_to_timestamp;
use crate::proto::CachedAddressMessage;
impl From<&CachedAddressModel> for CachedAddressMessage {
fn from(a: &CachedAddressModel) -> Self {
Self {
number: a.id,
created_at: Some(utc_to_timestamp(&a.created_at)),
updated_at: Some(utc_to_timestamp(&a.updated_at)),
multiaddress: Multiaddr::from(a.multiaddress.clone()).to_string(),
}
}
}

View file

@ -1,14 +0,0 @@
use crate::{cache::entity::{CachedAddressModel, CachedPeerModel}, proto::{CachedAddressMessage, CachedPeerMessage}, utils::utc_to_timestamp};
impl From<(&CachedPeerModel, &Vec<CachedAddressModel>)> for CachedPeerMessage {
fn from(source: (&CachedPeerModel, &Vec<CachedAddressModel>)) -> Self {
let (peer, addresses) = source;
Self {
number: peer.id,
peer_id: peer.peer_id.to_string(),
created_at: Some(utc_to_timestamp(&peer.created_at)),
addresses: addresses.iter().map(|x| CachedAddressMessage::from(x)).collect(),
}
}
}

View file

@ -1,5 +0,0 @@
mod cached_address;
mod cached_peer;
tonic::include_proto!("caretta_sync");

View file

@ -1,2 +0,0 @@
pub mod service;

View file

@ -1,30 +0,0 @@
use crate::{cache::entity::{CachedAddressColumn, CachedAddressEntity, CachedPeerEntity, CachedPeerModel}, global::DATABASE_CONNECTIONS, proto::CachedAddressMessage};
use futures::future::join_all;
use tonic::{Request, Response, Status};
use crate::proto::{cached_peer_service_server::{CachedPeerServiceServer}, CachedPeerListRequest, CachedPeerListResponse, CachedPeerMessage};
use sea_orm::prelude::*;
#[derive(Debug, Default)]
pub struct CachedPeerService {}
#[tonic::async_trait]
impl crate::proto::cached_peer_service_server::CachedPeerService for CachedPeerService {
async fn list(&self, request: Request<CachedPeerListRequest>) -> Result<Response<CachedPeerListResponse>, Status> {
println!("Got a request: {:?}", request);
let reply = CachedPeerListResponse {
peers: join_all( CachedPeerEntity::find().all(DATABASE_CONNECTIONS.get_cache_unchecked()).await.or_else(|e| Err(Status::from_error(Box::new(e))))?.iter().map(|x| async move {
// Only return the addresses that belong to this peer.
let addresses = CachedAddressEntity::find()
.filter(CachedAddressColumn::CachedPeerId.eq(x.id))
.all(DATABASE_CONNECTIONS.get_cache_unchecked())
.await
.or_else(|e| Err(Status::from_error(Box::new(e))))?;
Ok::<CachedPeerMessage, Status>(CachedPeerMessage::from((x, &addresses)))
})).await.into_iter().collect::<Result<Vec<_>,_>>()?,
};
Ok(Response::new(reply))
}
}

View file

@ -1 +0,0 @@
pub mod cached_peer;

View file

@ -1,17 +0,0 @@
use crate::{config::{Config, P2pConfig, RpcConfig}, error::Error};
pub trait ServerTrait {
async fn serve_p2p<T>(config: &T) -> Result<(), Error>
where T: AsRef<P2pConfig>;
async fn serve_rpc<T>(config: &T) -> Result<(), Error>
where T: AsRef<RpcConfig>;
async fn serve_all<T>(config: &T) -> Result<(), Error>
where
T: AsRef<P2pConfig> + AsRef<RpcConfig> {
tokio::try_join!(
Self::serve_p2p(config),
Self::serve_rpc(config)
)?;
Ok(())
}
}

View file

@ -1,26 +0,0 @@
use std::{path::PathBuf, sync::LazyLock};
use sea_orm::{sea_query::{FromValueTuple, IntoValueTuple, ValueType}, ActiveModelBehavior, ActiveModelTrait, ColumnTrait, Condition, DatabaseConnection, EntityTrait, IntoActiveModel, ModelTrait, PrimaryKeyToColumn, PrimaryKeyTrait, Value};
use sea_orm::QueryFilter;
use tempfile::TempDir;
use crate::{ config::{Config, PartialConfig, PartialP2pConfig, PartialRpcConfig, RpcConfig, StorageConfig}, message::Message};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
pub static TEST_CONFIG: LazyLock<Config> = LazyLock::new(|| {
let test_dir = TempDir::new().unwrap().keep();
let data_dir = test_dir.join("data");
let cache_dir = test_dir.join("cache");
Config {
p2p: PartialP2pConfig::default().with_new_private_key().try_into().unwrap(),
storage: StorageConfig {
data_directory: data_dir,
cache_directory: cache_dir,
},
rpc: RpcConfig{
socket_path: test_dir.join("socket.sock"),
},
}
});

View file

@ -1,5 +0,0 @@
#[cfg(feature="macros")]
pub use caretta_sync_macros::Mergeable;
pub trait Mergeable: Sized {
fn merge(&mut self, other: Self);
}
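As a worked illustration of the `Mergeable` contract: fields set in `other` overwrite the current value, which is how `PartialConfig` layers defaults, the config file, and CLI arguments elsewhere in this diff. A minimal sketch, assuming the re-export path `caretta_sync_core::utils::mergeable::Mergeable` and a hypothetical `PartialExample` struct:

```rust
use caretta_sync_core::utils::mergeable::Mergeable; // path assumed from this diff

// Hypothetical two-field partial struct, for illustration only.
#[derive(Debug, Default)]
struct PartialExample {
    name: Option<String>,
    port: Option<u16>,
}

impl Mergeable for PartialExample {
    // Fields set in `other` win; unset fields keep the existing value.
    fn merge(&mut self, other: Self) {
        if let Some(x) = other.name { self.name = Some(x); }
        if let Some(x) = other.port { self.port = Some(x); }
    }
}

fn main() {
    // Layering order: defaults <- config file <- CLI arguments.
    let mut merged = PartialExample { name: Some("default".into()), port: Some(4000) };
    merged.merge(PartialExample { name: None, port: Some(5000) });              // config file
    merged.merge(PartialExample { name: Some("from-cli".into()), port: None }); // CLI arguments
    assert_eq!(merged.name.as_deref(), Some("from-cli"));
    assert_eq!(merged.port, Some(5000));
}
```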

View file

@ -1,47 +0,0 @@
use prost_types::Timestamp;
use chrono::{DateTime, TimeZone, Timelike, Utc};
pub mod async_convert;
pub mod emptiable;
pub mod mergeable;
pub mod runnable;
/// ## Examples
/// ```
/// use chrono::Utc;
/// use std::time::SystemTime;
/// use prost_types::Timestamp;
/// use caretta_sync_core::utils::utc_to_timestamp;
///
/// let now_utc = Utc::now();
/// let now_timestamp = utc_to_timestamp(&now_utc);
/// assert_eq!(SystemTime::try_from(now_utc).unwrap(), SystemTime::try_from(now_timestamp).unwrap());
/// ```
pub fn utc_to_timestamp(utc: &DateTime<Utc>) -> Timestamp {
Timestamp{
seconds: utc.timestamp(),
nanos: i32::try_from(utc.nanosecond()).unwrap(),
}
}
/// ## Examples
/// ```
/// use std::time::SystemTime;
/// use prost_types::Timestamp;
/// use caretta_sync_core::utils::timestamp_to_utc;
///
/// let now_timestamp = Timestamp::from(SystemTime::now());
/// let now_utc = timestamp_to_utc(&now_timestamp);
/// assert_eq!(SystemTime::try_from(now_utc).unwrap(), SystemTime::try_from(now_timestamp).unwrap());
/// ```
pub fn timestamp_to_utc(t: &Timestamp) -> DateTime<Utc> {
Utc.timestamp_opt(t.seconds, u32::try_from(t.nanos).unwrap()).unwrap()
}
pub fn get_binary_name() -> Option<String> {
std::env::current_exe()
.ok()?
.file_name()?
.to_str()?
.to_owned()
.into()
}

View file

@ -1,3 +0,0 @@
pub trait Runnable {
fn run(self, app_name: &'static str);
}

View file

@ -1,5 +1,5 @@
[package]
name = "caretta-sync-example-core"
name = "lazy-supplements-examples-core"
edition.workspace = true
version.workspace = true
description.workspace = true
@ -7,9 +7,4 @@ license.workspace = true
repository.workspace = true
[dependencies]
bevy.workspace = true
dioxus.workspace = true
caretta-sync = { path = "../..", features = ["bevy"] }
libp2p.workspace = true
tokio.workspace = true
tokio-stream = { version = "0.1.17", features = ["net"] }
tonic.workspace = true

BIN examples/core/assets/favicon.ico (Stored with Git LFS, new file; binary file not shown)

File diff suppressed because one or more lines are too long (added image asset, preview size 23 KiB)

View file

@ -0,0 +1,46 @@
/* App-wide styling */
body {
background-color: #0f1116;
color: #ffffff;
font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
margin: 20px;
}
#hero {
margin: 0;
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
}
#links {
width: 400px;
text-align: left;
font-size: x-large;
color: white;
display: flex;
flex-direction: column;
}
#links a {
color: white;
text-decoration: none;
margin-top: 20px;
margin: 10px 0px;
border: white 1px solid;
border-radius: 5px;
padding: 10px;
}
#links a:hover {
background-color: #1f1f1f;
cursor: pointer;
}
#header {
max-width: 1200px;
}

View file

@ -1 +0,0 @@
pub const APP_NAME: &str = "caretta_sync_example";

View file

@ -1,13 +0,0 @@
use caretta_sync::{bevy::peer::PeerPlugin, utils::Runnable};
use bevy::prelude::*;
pub struct Gui {}
impl Runnable for Gui {
fn run(self, app_name: &'static str) {
App::new()
//.add_plugins(DefaultPlugins)
.add_plugins(PeerPlugin)
.run();
}
}

View file

@ -1,4 +1 @@
pub mod global;
pub mod ui;
pub mod gui;
pub mod rpc;
pub mod server;

View file

@ -1 +0,0 @@
pub mod server;

View file

@ -1,63 +0,0 @@
use caretta_sync::{
config::P2pConfig,
proto::cached_peer_service_server::CachedPeerServiceServer,
server::ServerTrait,
rpc::service::cached_peer::CachedPeerService
};
use libp2p::{futures::StreamExt, noise, swarm::SwarmEvent, tcp, yamux};
use tokio::net::UnixListener;
use tokio_stream::wrappers::UnixListenerStream;
#[derive(Debug)]
pub struct Server{}
impl ServerTrait for Server {
async fn serve_p2p<T>(config: &T) -> Result<(), caretta_sync::error::Error>
where
T: AsRef<P2pConfig>
{
let mut swarm = libp2p::SwarmBuilder::with_existing_identity(config.as_ref().private_key.clone())
.with_tokio()
.with_tcp(
tcp::Config::default(),
noise::Config::new,
yamux::Config::default,
)?
.with_behaviour(|keypair| caretta_sync::p2p::Behaviour::try_from(keypair).unwrap())?
.build();
swarm.listen_on("/ip4/0.0.0.0/tcp/0".parse()?)?;
loop{
let swarm_event = swarm.select_next_some().await;
tokio::spawn(async move{
match swarm_event {
SwarmEvent::NewListenAddr { address, .. } => println!("Listening on {address:?}"),
SwarmEvent::Behaviour(event) => {
println!("{event:?}");
event.run().await;
},
_ => {}
}
});
}
}
async fn serve_rpc<T>(config: &T) -> Result<(), caretta_sync::error::Error>
where T: AsRef<caretta_sync::config::RpcConfig> {
let path = config.as_ref().socket_path.clone();
if let Some(x) = path.parent() {
if !x.exists() {
std::fs::create_dir_all(x).expect("Failed to create directory for socket file!");
}
}
if path.exists() {
std::fs::remove_file(&path).expect("Failed to remove existing socket file!")
}
let uds = UnixListener::bind(path).unwrap();
let uds_stream = UnixListenerStream::new(uds);
tonic::transport::Server::builder()
.add_service(CachedPeerServiceServer::new(CachedPeerService::default()))
.serve_with_incoming(uds_stream)
.await.unwrap();
Ok(())
}
}

View file

@ -0,0 +1 @@
pub mod plain;

View file

@ -0,0 +1,33 @@
use dioxus::prelude::*;
const FAVICON: Asset = asset!("/assets/favicon.ico");
const MAIN_CSS: Asset = asset!("/assets/main.css");
const HEADER_SVG: Asset = asset!("/assets/header.svg");
#[component]
pub fn App() -> Element {
rsx! {
document::Link { rel: "icon", href: FAVICON }
document::Link { rel: "stylesheet", href: MAIN_CSS }
Hero {}
}
}
#[component]
pub fn Hero() -> Element {
rsx! {
div {
id: "hero",
img { src: HEADER_SVG, id: "header" }
div { id: "links",
a { href: "https://dioxuslabs.com/learn/0.6/", "📚 Learn Dioxus" }
a { href: "https://dioxuslabs.com/awesome", "🚀 Awesome Dioxus" }
a { href: "https://github.com/dioxus-community/", "📡 Community Libraries" }
a { href: "https://github.com/DioxusLabs/sdk", "⚙️ Dioxus Development Kit" }
a { href: "https://marketplace.visualstudio.com/items?itemName=DioxusLabs.dioxus", "💫 VSCode Extension" }
a { href: "https://discord.gg/XgGxMSkvUM", "👋 Community Discord" }
}
}
}
}

View file

@ -1,14 +1,18 @@
[package]
name = "caretta-sync-example-desktop"
name = "lazy-supplements-examples-desktop"
edition.workspace = true
version = "0.1.0"
version.workspace = true
edition = "2021"
description.workspace = true
license.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
repository.workspace = true
[dependencies]
clap.workspace = true
caretta-sync = { path = "../..", features = ["cli", "bevy", "test"] }
dioxus.workspace = true
caretta-sync-example-core.path = "../core"
lazy-supplements-desktop.path = "../../lazy-supplements-desktop"
libp2p.workspace = true
lazy-supplements-examples-core.path = "../core"
tokio.workspace = true
[features]
default = ["desktop"]
web = ["dioxus/web"]
desktop = ["dioxus/desktop"]
mobile = ["dioxus/mobile"]

View file

@ -0,0 +1,21 @@
[application]
[web.app]
# HTML title tag content
title = "desktop"
# include `assets` in web platform
[web.resource]
# Additional CSS style files
style = []
# Additional JavaScript files
script = []
[web.resource.dev]
# Javascript code file
# serve: [dev-server] only
script = []

View file

@ -0,0 +1,25 @@
# Development
Your new bare-bones project includes minimal organization with a single `main.rs` file and a few assets.
```
project/
├─ assets/ # Any assets that are used by the app should be placed here
├─ src/
│ ├─ main.rs # main.rs is the entry point to your application and currently contains all components for the app
├─ Cargo.toml # The Cargo.toml file defines the dependencies and feature flags for your project
```
### Serving Your App
Run the following command in the root of your project to start developing with the default platform:
```bash
dx serve
```
To run for a different platform, use the `--platform platform` flag. E.g.
```bash
dx serve --platform desktop
```

View file

@ -0,0 +1,8 @@
await-holding-invalid-types = [
"generational_box::GenerationalRef",
{ path = "generational_box::GenerationalRef", reason = "Reads should not be held over an await point. This will cause any writes to fail while the await is pending since the read borrow is still active." },
"generational_box::GenerationalRefMut",
{ path = "generational_box::GenerationalRefMut", reason = "Write should not be held over an await point. This will cause any reads or writes to fail while the await is pending since the write borrow is still active." },
"dioxus_signals::Write",
{ path = "dioxus_signals::Write", reason = "Write should not be held over an await point. This will cause any reads or writes to fail while the await is pending since the write borrow is still active." },
]
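These lint entries point at a real pitfall with Dioxus signals: a `GenerationalRef`/`Write` guard held across an `.await` keeps its borrow alive for as long as the task is pending, so concurrent reads or writes fail at runtime. A minimal sketch of the pattern and its fix, assuming the dioxus 0.6 signal API used elsewhere in this diff (`some_async_work` is a made-up placeholder):

```rust
use dioxus::prelude::*;

// Hypothetical stand-in for real async work (network call, DB query, ...).
async fn some_async_work() {}

#[component]
fn Counter() -> Element {
    let mut count = use_signal(|| 0);

    rsx! {
        button {
            onclick: move |_| {
                spawn(async move {
                    // BAD (what the lint above catches): holding the read guard
                    // across the await keeps the borrow active, so any write to
                    // `count` while this task is pending fails at runtime:
                    //
                    //     let guard = count.read();
                    //     some_async_work().await;
                    //     count.set(*guard + 1);
                    //
                    // OK: copy the value out first, then await, then write.
                    let before = count();
                    some_async_work().await;
                    count.set(before + 1);
                });
            },
            "count: {count}"
        }
    }
}
```

Reading the value into a local before the first `.await` keeps every borrow short-lived, which is what these `await-holding-invalid-types` entries enforce.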

View file

@ -0,0 +1,16 @@
use clap::{Parser, Subcommand};
use lazy_supplements_desktop::cli::*;
#[derive(Debug, Parser)]
pub struct Cli {
#[command(subcommand)]
command: CliCommand
}
#[derive(Debug, Subcommand)]
pub enum CliCommand {
Config(ConfigCommandArgs),
Device(DeviceCommandArgs),
Log(LogCommandArgs),
Server(ServerCommandArgs),
}

View file

@ -1,39 +0,0 @@
use caretta_sync_example_core::{gui::Gui, server::Server};
use clap::{Parser, Subcommand};
use caretta_sync::{cli::*, config::Config, data::migration::DataMigrator, global::{CONFIG, DATABASE_CONNECTIONS}, utils::Runnable};
#[derive(Debug, Parser)]
pub struct Cli {
#[command(subcommand)]
command: Option<CliCommand>,
#[command(flatten)]
config: ConfigArgs,
}
impl Runnable for Cli {
fn run(self, app_name: &'static str) {
if let Some(x) = self.command {
x.run(app_name)
} else {
tokio::runtime::Builder::new_multi_thread()
.enable_all()
.build()
.unwrap()
.block_on(async {
let config: caretta_sync::config::Config = self.config.into_config(app_name).await;
let _ = CONFIG.get_or_init::<Config>(config).await;
});
//let _ = DATABASE_CONNECTIONS.get_or_init_unchecked(&config, DataMigrator).await;
Gui{}.run(app_name)
}
}
}
#[derive(Debug, Subcommand, Runnable)]
pub enum CliCommand {
Config(ConfigCommandArgs),
Device(DeviceCommandArgs),
Peer(PeerCommandArgs),
Serve(ServeCommandArgs<Server>),
}

View file

@ -1,10 +1,5 @@
use caretta_sync::utils::Runnable;
mod cli;
use caretta_sync_example_core::global::APP_NAME;
mod ipc;
use clap::Parser;
use crate::cli::Cli;
fn main() {
let args = Cli::parse();
dioxus::launch(lazy_supplements_examples_core::ui::plain::App);
args.run(APP_NAME);
}

View file

@ -1,7 +0,0 @@
/build
.gradle
.idea
.DS_Store
build
.cxx
local.properties

View file

@ -1,20 +1,17 @@
[package]
name = "caretta-sync-example-mobile"
name = "lazy-supplements-examples-mobile"
edition.workspace = true
version = "0.1.0"
version.workspace = true
authors = ["fluo10 <fluo10.dev@fireturtle.net>"]
description.workspace = true
edition = "2021"
license.workspace = true
repository.workspace = true
[[bin]]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
name = "caretta_sync_example"
path = "src/main.rs"
[lib]
name = "caretta_sync_example"
crate-type = ["lib", "cdylib"]
[dependencies]
bevy.workspace = true
dioxus.workspace = true
caretta-sync-example-core.path = "../core"
lazy-supplements-examples-core.path = "../core"
caretta-sync.path = "../.."
[features]
default = ["mobile"]
web = ["dioxus/web"]
desktop = ["dioxus/desktop"]
mobile = ["dioxus/mobile"]

View file

@ -1,25 +0,0 @@
.PHONY: xcodebuild run install boot-sim generate clean
DEVICE = ${DEVICE_ID}
ifndef DEVICE_ID
DEVICE=$(shell xcrun simctl list devices 'iOS' | grep -v 'unavailable' | grep -v '^--' | grep -v '==' | head -n 1 | grep -E -o -i "([0-9a-f]{8}-([0-9a-f]{4}-){3}[0-9a-f]{12})")
endif
run: install
xcrun simctl launch --console $(DEVICE) net.fireturtle.caretta-sync-example
boot-sim:
xcrun simctl boot $(DEVICE) || true
install: xcodebuild-simulator boot-sim
xcrun simctl install $(DEVICE) build/Build/Products/Debug-iphonesimulator/caretta_sync_example.app
xcodebuild-simulator:
IOS_TARGETS=x86_64-apple-ios xcodebuild -scheme caretta_sync_example -configuration Debug -derivedDataPath build -destination "id=$(DEVICE)"
xcodebuild-iphone:
IOS_TARGETS=aarch64-apple-ios xcodebuild -scheme caretta_sync_example -configuration Debug -derivedDataPath build -arch arm64
clean:
rm -r build
cargo clean

View file

@ -1,68 +0,0 @@
#!/usr/bin/env bash
# based on https://github.com/mozilla/glean/blob/main/build-scripts/xc-universal-binary.sh
set -eux
PATH=$PATH:$HOME/.cargo/bin
PROFILE=debug
RELFLAG=
if [[ "$CONFIGURATION" != "Debug" ]]; then
PROFILE=release
RELFLAG=--release
fi
set -euvx
# add homebrew bin path, as it's the most commonly used package manager on macOS
# this is needed for cmake on apple arm processors as it's not available by default
export PATH="$PATH:/opt/homebrew/bin"
# Make Cargo output cache files in Xcode's directories
export CARGO_TARGET_DIR="$DERIVED_FILE_DIR/cargo"
# Xcode places `/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin`
# at the front of the path, with makes the build fail with `ld: library 'System' not found`, upstream issue:
# <https://github.com/rust-lang/rust/issues/80817>.
#
# Work around it by resetting the path, so that we use the system `cc`.
export PATH="/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:$PATH"
IS_SIMULATOR=0
if [ "${LLVM_TARGET_TRIPLE_SUFFIX-}" = "-simulator" ]; then
IS_SIMULATOR=1
fi
EXECUTABLES=
for arch in $ARCHS; do
case "$arch" in
x86_64)
if [ $IS_SIMULATOR -eq 0 ]; then
echo "Building for x86_64, but not a simulator build. What's going on?" >&2
exit 2
fi
# Intel iOS simulator
export CFLAGS_x86_64_apple_ios="-target x86_64-apple-ios"
TARGET=x86_64-apple-ios
;;
arm64)
if [ $IS_SIMULATOR -eq 0 ]; then
# Hardware iOS targets
TARGET=aarch64-apple-ios
else
# M1 iOS simulator
TARGET=aarch64-apple-ios-sim
fi
esac
cd ..
cargo build $RELFLAG --target $TARGET --bin caretta_sync_example
cd -
# Collect the executables
EXECUTABLES="$EXECUTABLES $DERIVED_FILE_DIR/cargo/$TARGET/$PROFILE/caretta_sync_example"
done
# Combine executables, and place them at the output path excepted by Xcode
lipo -create -output "$TARGET_BUILD_DIR/$EXECUTABLE_PATH" $EXECUTABLES

View file

@ -1,307 +0,0 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 77;
objects = {
/* Begin PBXFileReference section */
B617AE7C2E5D5E5A0013202E /* caretta_sync_example.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = caretta_sync_example.app; sourceTree = BUILT_PRODUCTS_DIR; };
B698F93D2E5E743A00C7EE06 /* caretta_sync_example.app */ = {isa = PBXFileReference; lastKnownFileType = wrapper.application; name = caretta_sync_example.app; path = "build/Build/Products/Debug-iphonesimulator/caretta_sync_example.app"; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXGroup section */
B617AE732E5D5E5A0013202E = {
isa = PBXGroup;
children = (
B617AE7D2E5D5E5A0013202E /* Products */,
B698F93D2E5E743A00C7EE06 /* caretta_sync_example.app */,
);
sourceTree = "<group>";
};
B617AE7D2E5D5E5A0013202E /* Products */ = {
isa = PBXGroup;
children = (
B617AE7C2E5D5E5A0013202E /* caretta_sync_example.app */,
);
name = Products;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
B617AE7B2E5D5E5A0013202E /* caretta_sync_example */ = {
isa = PBXNativeTarget;
buildConfigurationList = B617AE872E5D5E5B0013202E /* Build configuration list for PBXNativeTarget "caretta_sync_example" */;
buildPhases = (
B698F8CE2E5D609900C7EE06 /* ShellScript */,
);
buildRules = (
);
dependencies = (
);
name = caretta_sync_example;
packageProductDependencies = (
);
productName = caretta_sync_example;
productReference = B617AE7C2E5D5E5A0013202E /* caretta_sync_example.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
B617AE742E5D5E5A0013202E /* Project object */ = {
isa = PBXProject;
attributes = {
BuildIndependentTargetsInParallel = NO;
LastSwiftUpdateCheck = 1640;
LastUpgradeCheck = 1640;
TargetAttributes = {
B617AE7B2E5D5E5A0013202E = {
CreatedOnToolsVersion = 16.4;
};
};
};
buildConfigurationList = B617AE772E5D5E5A0013202E /* Build configuration list for PBXProject "caretta_sync_example" */;
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
);
mainGroup = B617AE732E5D5E5A0013202E;
minimizedProjectReferenceProxies = 1;
preferredProjectObjectVersion = 77;
productRefGroup = B617AE7D2E5D5E5A0013202E /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
B617AE7B2E5D5E5A0013202E /* caretta_sync_example */,
);
};
/* End PBXProject section */
/* Begin PBXShellScriptBuildPhase section */
B698F8CE2E5D609900C7EE06 /* ShellScript */ = {
isa = PBXShellScriptBuildPhase;
alwaysOutOfDate = 1;
buildActionMask = 2147483647;
files = (
);
inputFileListPaths = (
);
inputPaths = (
"$(SRCROOT)/build_rust_deps.sh",
);
outputFileListPaths = (
);
outputPaths = (
$TARGET_BUILD_DIR/$EXECUTABLE_PATH,
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "./build_rust_deps.sh\n";
};
/* End PBXShellScriptBuildPhase section */
/* Begin XCBuildConfiguration section */
B617AE852E5D5E5B0013202E /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 18.5;
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
MTL_FAST_MATH = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
};
name = Debug;
};
B617AE862E5D5E5B0013202E /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 18.5;
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MTL_ENABLE_DEBUG_INFO = NO;
MTL_FAST_MATH = YES;
SDKROOT = iphoneos;
SWIFT_COMPILATION_MODE = wholemodule;
VALIDATE_PRODUCT = YES;
};
name = Release;
};
B617AE882E5D5E5B0013202E /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
ENABLE_PREVIEWS = YES;
ENABLE_USER_SCRIPT_SANDBOXING = NO;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
INFOPLIST_KEY_UILaunchScreen_Generation = YES;
INFOPLIST_KEY_UISupportedInterfaceOrientations = "UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown";
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = "net.fireturtle.caretta-sync-example";
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
B617AE892E5D5E5B0013202E /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
ENABLE_PREVIEWS = YES;
ENABLE_USER_SCRIPT_SANDBOXING = NO;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
INFOPLIST_KEY_UILaunchScreen_Generation = YES;
INFOPLIST_KEY_UISupportedInterfaceOrientations = "UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown";
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = "net.fireturtle.caretta-sync-example";
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
B617AE772E5D5E5A0013202E /* Build configuration list for PBXProject "caretta_sync_example" */ = {
isa = XCConfigurationList;
buildConfigurations = (
B617AE852E5D5E5B0013202E /* Debug */,
B617AE862E5D5E5B0013202E /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Debug;
};
B617AE872E5D5E5B0013202E /* Build configuration list for PBXNativeTarget "caretta_sync_example" */ = {
isa = XCConfigurationList;
buildConfigurations = (
B617AE882E5D5E5B0013202E /* Debug */,
B617AE892E5D5E5B0013202E /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Debug;
};
/* End XCConfigurationList section */
};
rootObject = B617AE742E5D5E5A0013202E /* Project object */;
}

View file

@ -1,7 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "self:">
</FileRef>
</Workspace>

View file

@ -1,14 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>caretta_sync_example.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>0</integer>
</dict>
</dict>
</dict>
</plist>

View file

@ -1,70 +0,0 @@
use bevy::{
color::palettes::basic::*,
input::{gestures::RotationGesture, touch::TouchPhase},
log::{Level, LogPlugin},
prelude::*,
window::{AppLifecycle, ScreenEdge, WindowMode},
winit::WinitSettings,
};
#[bevy_main]
pub fn main() {
let mut app = App::new();
app.add_plugins(
DefaultPlugins.set(LogPlugin {
level: Level::DEBUG,
filter: "wgpu=error,bevy_render=info,bevy_ecs_trace".to_string(),
..Default::default()
})
.set(WindowPlugin {
primary_window: Some(Window {
resizable: false,
mode: WindowMode::BorderlessFullscreen(MonitorSelection::Primary),
recognize_rotation_gesture: true,
prefers_home_indicator_hidden: true,
prefers_status_bar_hidden: true,
preferred_screen_edges_deferring_system_gestures: ScreenEdge::Bottom,
..default()
}),
..default()
}),
)
.insert_resource(WinitSettings::mobile())
.add_systems(Startup, setup_scene)
.run();
}
fn setup_scene(
mut commands: Commands,
) {
commands.spawn((
Camera3d::default(),
Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y),
// MSAA makes some Android devices panic, this is under investigation
// https://github.com/bevyengine/bevy/issues/8229
#[cfg(target_os = "android")]
Msaa::Off,
));
commands
.spawn((
Button,
Node {
justify_content: JustifyContent::Center,
align_items: AlignItems::Center,
position_type: PositionType::Absolute,
left: Val::Px(50.0),
right: Val::Px(50.0),
bottom: Val::Px(50.0),
..default()
}
))
.with_child((
Text::new(format!( "{:?}", std::fs::read_dir(std::env::current_dir().unwrap()).unwrap().map(|x| x.unwrap().path()).collect::<Vec<std::path::PathBuf>>())),
TextFont {
font_size: 16.0,
..default()
},
TextColor::BLACK,
TextLayout::new_with_justify(Justify::Center),
));
}

View file

@ -1 +1,3 @@
use caretta_sync_example::main; fn main() {
dioxus::launch(lazy_supplements_examples_core::ui::plain::App);
}

View file

@ -1,5 +1,5 @@
[package] [package]
name = "caretta-sync-core" name = "lazy-supplements-core"
edition.workspace = true edition.workspace = true
version.workspace = true version.workspace = true
description.workspace = true description.workspace = true
@ -8,8 +8,10 @@ repository.workspace = true
[features] [features]
default = [] default = []
cli = ["dep:clap"] desktop = ["dep:clap", "macros"]
test = ["dep:tempfile", ] mobile = ["macros"]
macros = ["dep:lazy-supplements-macros"]
test = ["dep:tempfile", "macros"]
[dependencies] [dependencies]
base64 = "0.22.1" base64 = "0.22.1"
@ -17,12 +19,11 @@ chrono.workspace = true
chrono-tz = "0.10.3" chrono-tz = "0.10.3"
ciborium.workspace = true ciborium.workspace = true
clap = {workspace = true, optional = true} clap = {workspace = true, optional = true}
dirs = "6.0.0" futures = "0.3.31"
futures.workspace = true lazy-supplements-macros = { path = "../lazy-supplements-macros", optional = true }
libp2p.workspace = true libp2p.workspace = true
libp2p-core = { version = "0.43.0", features = ["serde"] } libp2p-core = { version = "0.43.0", features = ["serde"] }
libp2p-identity = { version = "0.2.11", features = ["ed25519", "peerid", "rand", "serde"] } libp2p-identity = { version = "0.2.11", features = ["ed25519", "peerid", "rand", "serde"] }
prost = "0.14.1"
sea-orm.workspace = true sea-orm.workspace = true
sea-orm-migration.workspace = true sea-orm-migration.workspace = true
serde.workspace = true serde.workspace = true
@ -30,26 +31,9 @@ tempfile = { version = "3.20.0", optional = true }
thiserror.workspace = true thiserror.workspace = true
tokio.workspace = true tokio.workspace = true
toml = "0.8.22" toml = "0.8.22"
tonic.workspace = true
tonic-prost = "0.14.0"
tracing = "0.1.41" tracing = "0.1.41"
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] } tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
uuid.workspace = true uuid.workspace = true
prost-types = "0.14.1"
sysinfo = "0.37.0"
whoami = "1.6.1"
[target.'cfg(target_os="android")'.dependencies]
jni = "0.21.1"
ndk = "0.9.0"
[target.'cfg(target_os="ios")'.dependencies]
objc = "0.2.7"
objc-foundation = "0.1.1"
objc_id = "0.1.1"
[dev-dependencies] [dev-dependencies]
tempfile = "3.20.0" tempfile = "3.20.0"
[build-dependencies]
tonic-prost-build = "0.14.0"

View file

@ -0,0 +1,8 @@
mod peer;
pub use peer::{
ActiveModel as ActivePeerModel,
Column as PeerColumn,
Model as PeerModel,
Entity as PeerEntity,
};

View file

@ -0,0 +1,73 @@
use std::str::FromStr;
use chrono::{Days, Local};
use libp2p::{multiaddr, Multiaddr, PeerId};
use sea_orm::{entity::{
prelude::*, *
}, sea_query};
use serde::{Deserialize, Serialize};
use crate::data::value::{MultiaddrValue, PeerIdValue};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Deserialize, Serialize)]
#[sea_orm(table_name = "peer")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: Uuid,
#[sea_orm(indexed)]
pub created_at: DateTimeUtc,
#[sea_orm(indexed)]
pub updated_at: DateTimeUtc,
#[sea_orm(indexed)]
pub expires_at: DateTimeUtc,
#[sea_orm(indexed)]
pub peer_id: PeerIdValue,
#[sea_orm(indexed)]
pub address: MultiaddrValue,
}
#[derive(Copy, Clone, Debug, DeriveRelation, EnumIter)]
pub enum Relation {}
impl ActiveModelBehavior for ActiveModel {}
impl ActiveModel {
pub fn new(peer_id: PeerId, multiaddr: Multiaddr) -> Self {
let timestamp: DateTimeUtc = Local::now().to_utc();
Self{
peer_id: Set(PeerIdValue::from(peer_id)),
address: Set(MultiaddrValue::from(multiaddr)),
created_at: Set(timestamp),
updated_at: Set(timestamp),
expires_at: Set(timestamp.checked_add_days(Days::new(30)).unwrap()),
..Default::default()
}
}
}
#[cfg(test)]
mod tests {
use std::net::Ipv4Addr;
use crate::{cache::entity::peer, global::get_or_init_test_cache_database};
use super::*;
use libp2p::{identity::{self, Keypair}, swarm::handler::multi};
#[tokio::test]
async fn insert() {
let db = get_or_init_test_cache_database().await;
let peer_id = Keypair::generate_ed25519().public().to_peer_id();
let multiaddr = Multiaddr::empty()
.with(Ipv4Addr::new(127,0,0,1).into())
.with(multiaddr::Protocol::Tcp(0));
let inserted: Model = ActiveModel::new(peer_id.clone(), multiaddr.clone())
.insert(db).await.unwrap();
assert_eq!(PeerId::from(inserted.peer_id), peer_id);
assert_eq!(Multiaddr::from(inserted.address), multiaddr);
}
}
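
Since every cached peer row carries an expires_at timestamp, callers will presumably want to filter stale entries out. A minimal sketch against the entity defined above; the helper name and the exact query are assumptions, not part of this diff:

    use chrono::Local;
    use sea_orm::{prelude::*, QueryFilter};
    use crate::cache::entity::{PeerColumn, PeerEntity, PeerModel};

    // Hypothetical helper: list cached peers whose entries have not yet
    // passed their expires_at timestamp.
    async fn unexpired_peers(db: &DatabaseConnection) -> Result<Vec<PeerModel>, DbErr> {
        PeerEntity::find()
            .filter(PeerColumn::ExpiresAt.gt(Local::now().to_utc()))
            .all(db)
            .await
    }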

View file

@ -0,0 +1,93 @@
use sea_orm_migration::{prelude::*, schema::*};
use crate::migration::TableMigration;
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait::async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
Peer::up(manager).await?;
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
Peer::down(manager).await?;
Ok(())
}
}
#[derive(DeriveIden, DeriveMigrationName)]
enum Peer {
Table,
Id,
PeerId,
CreatedAt,
UpdatedAt,
ExpiresAt,
Address,
}
static IDX_PEER_ADDRESS: &str = "idx_peer_address";
static IDX_PEER_PEER_ID: &str = "idx_peer_peer_id";
static IDX_PEER_CREATED_AT: &str = "idx_peer_created_at";
static IDX_PEER_UPDATED_AT: &str = "idx_peer_updated_at";
static IDX_PEER_EXPIRES_AT: &str = "idx_peer_expires_at";
#[async_trait::async_trait]
impl TableMigration for Peer {
async fn up<'a>(manager: &'a SchemaManager<'a>) -> Result<(), DbErr> {
manager.create_table(
Table::create()
.table(Self::Table)
.if_not_exists()
.col(pk_uuid(Self::Id))
.col(string_len(Self::PeerId, 255))
.col(timestamp(Self::CreatedAt))
.col(timestamp(Self::UpdatedAt))
.col(timestamp(Self::ExpiresAt))
.col(text_uniq(Self::Address))
.to_owned()
).await?;
manager.create_index(
Index::create()
.name(IDX_PEER_PEER_ID)
.table(Self::Table)
.col(Self::PeerId)
.to_owned()
).await?;
manager.create_index(
Index::create()
.name(IDX_PEER_ADDRESS)
.table(Self::Table)
.col(Self::Address)
.to_owned()
).await?;
manager.create_index(
Index::create()
.name(IDX_PEER_CREATED_AT)
.table(Self::Table)
.col(Self::CreatedAt)
.to_owned()
).await?;
manager.create_index(
Index::create()
.name(IDX_PEER_UPDATED_AT)
.table(Self::Table)
.col(Self::UpdatedAt)
.to_owned()
).await?;
manager.create_index(
Index::create()
.name(IDX_PEER_EXPIRES_AT)
.table(Self::Table)
.col(Self::ExpiresAt)
.to_owned()
).await?;
Ok(())
}
async fn down<'a>(manager: &'a SchemaManager<'a>) -> Result<(), DbErr>{
manager.drop_table(Table::drop().table(Self::Table).to_owned()).await
}
}
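
For this table migration to actually run it has to be registered with a MigratorTrait implementation. The CacheMigrator referenced later in this diff is not shown here, so the following is only a sketch of the usual sea-orm-migration wiring under that assumption:

    use sea_orm_migration::prelude::*;

    pub struct CacheMigrator;

    #[async_trait::async_trait]
    impl MigratorTrait for CacheMigrator {
        // Register every migration of the cache database, in order.
        fn migrations() -> Vec<Box<dyn MigrationTrait>> {
            vec![Box::new(Migration)] // the Migration struct defined above
        }
    }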

View file

@ -0,0 +1,113 @@
pub mod error;
mod storage;
mod p2p;
use std::path::Path;
use crate::{utils::{emptiable::Emptiable, mergeable::Mergeable}};
pub use error::ConfigError;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use tokio::{fs::File, io::{AsyncReadExt, AsyncWriteExt}};
pub use storage::{StorageConfig, PartialStorageConfig};
pub use p2p::{P2pConfig, PartialP2pConfig};
pub trait Config: TryFrom<Self::PartialConfig>{
type PartialConfig: PartialConfig<Config = Self>;
}
pub trait PartialConfig: Emptiable + From<Self::Config> + Mergeable {
type Config: Config<PartialConfig = Self>;
}
pub trait BaseConfig: DeserializeOwned + Serialize {
fn new() -> Self;
fn from_toml(s: &str) -> Result<Self, toml::de::Error> {
toml::from_str(s)
}
fn into_toml(&self) -> Result<String, toml::ser::Error> {
toml::to_string(self)
}
async fn read_or_create<T>(path: T) -> Result<Self, ConfigError>
where
T: AsRef<Path>
{
if !path.as_ref().exists() {
Self::new().write_to(&path).await?;
}
Self::read_from(&path).await
}
async fn read_from<T>(path:T) -> Result<Self, ConfigError>
where
T: AsRef<Path>
{
let mut file = File::open(path.as_ref()).await?;
let mut content = String::new();
file.read_to_string(&mut content).await?;
let config: Self = toml::from_str(&content)?;
Ok(config)
}
async fn write_to<T>(&self, path:T) -> Result<(), ConfigError>
where
T: AsRef<Path>
{
if !path.as_ref().exists() {
if let Some(x) = path.as_ref().parent() {
std::fs::create_dir_all(x)?;
};
let _ = File::create(&path).await?;
}
let mut file = File::create(&path).await?;
file.write_all(toml::to_string(self)?.as_bytes()).await?;
Ok(())
}
}
#[cfg(test)]
mod tests {
use serde::{Deserialize, Serialize};
use crate::{tests::test_toml_serialize_deserialize, utils::{emptiable::Emptiable, mergeable::Mergeable}};
use super::{p2p::{P2pConfig, PartialP2pConfig}, PartialConfig};
#[derive(Debug, Deserialize, Serialize, PartialEq)]
pub struct TestConfig {
p2p: Option<PartialP2pConfig>
}
impl Default for TestConfig {
fn default() -> Self {
Self {
p2p: Some(PartialP2pConfig::default()),
}
}
}
impl Emptiable for TestConfig {
fn empty() -> Self {
Self {
p2p: None,
}
}
fn is_empty(&self) -> bool {
self.p2p.is_none()
}
}
impl Mergeable for TestConfig {
fn merge(&mut self, other: Self) {
if let Some(p2p) = other.p2p {
self.p2p = Some(p2p);
}
}
}
#[tokio::test]
async fn test_p2p_config_serialize_deserialize() {
test_toml_serialize_deserialize(TestConfig::empty());
test_toml_serialize_deserialize(TestConfig::default());
assert_eq!(TestConfig::empty(), toml::from_str("").unwrap());
assert_eq!("", &toml::to_string(&TestConfig::empty()).unwrap());
}
}
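
A minimal sketch of a BaseConfig consumer, assuming a hypothetical top-level AppConfig that simply nests the partial sections from this module; only new() has to be provided, the TOML read/write helpers come from the trait's default methods:

    use serde::{Deserialize, Serialize};
    use crate::config::{BaseConfig, PartialP2pConfig, PartialStorageConfig};

    // Hypothetical composite config; the framework's real top-level type may differ.
    #[derive(Debug, Default, Deserialize, Serialize)]
    struct AppConfig {
        p2p: Option<PartialP2pConfig>,
        storage: Option<PartialStorageConfig>,
    }

    impl BaseConfig for AppConfig {
        fn new() -> Self {
            Self::default()
        }
    }

    // At startup (illustrative file name):
    // let config = AppConfig::read_or_create("caretta-sync.toml").await?;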

View file

@ -1,7 +1,7 @@
use std::{net::{IpAddr, Ipv4Addr}, ops, path::{Path, PathBuf}}; use std::{net::{IpAddr, Ipv4Addr}, ops, path::{Path, PathBuf}};
use base64::{prelude::BASE64_STANDARD, Engine}; use base64::{prelude::BASE64_STANDARD, Engine};
#[cfg(feature="cli")] #[cfg(feature="desktop")]
use clap::Args; use clap::Args;
use futures::StreamExt; use futures::StreamExt;
use libp2p::{identity::{self, DecodingError, Keypair}, noise, ping, swarm::SwarmEvent, tcp, yamux, Swarm}; use libp2p::{identity::{self, DecodingError, Keypair}, noise, ping, swarm::SwarmEvent, tcp, yamux, Swarm};
@ -31,16 +31,17 @@ fn base64_to_keypair(base64: &str) -> Result<Keypair, Error> {
Ok(Keypair::from_protobuf_encoding(&vec)?) Ok(Keypair::from_protobuf_encoding(&vec)?)
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug, Deserialize, Serialize,)]
pub struct P2pConfig { pub struct P2pConfig {
pub private_key: Keypair, #[serde(with = "keypair_parser")]
pub secret: Keypair,
pub listen_ips: Vec<IpAddr>, pub listen_ips: Vec<IpAddr>,
pub port: u16, pub port: u16,
} }
impl P2pConfig { impl P2pConfig {
async fn try_into_swarm (self) -> Result<Swarm<p2p::Behaviour>, Error> { async fn try_into_swarm (self) -> Result<Swarm<p2p::Behaviour>, Error> {
let mut swarm = libp2p::SwarmBuilder::with_existing_identity(self.private_key) let mut swarm = libp2p::SwarmBuilder::with_existing_identity(self.secret)
.with_tokio() .with_tokio()
.with_tcp( .with_tcp(
tcp::Config::default(), tcp::Config::default(),
@ -71,40 +72,57 @@ impl P2pConfig {
} }
impl TryFrom<PartialP2pConfig> for P2pConfig { impl TryFrom<PartialP2pConfig> for P2pConfig {
type Error = crate::error::Error; type Error = Error;
fn try_from(raw: PartialP2pConfig) -> Result<P2pConfig, Self::Error> { fn try_from(raw: PartialP2pConfig) -> Result<P2pConfig, Self::Error> {
Ok(P2pConfig { Ok(P2pConfig {
private_key: base64_to_keypair(&raw.private_key.ok_or(Error::MissingConfig("secret"))?)?, secret: base64_to_keypair(&raw.secret.ok_or(Error::MissingConfig("secret"))?)?,
listen_ips: raw.listen_ips.ok_or(Error::MissingConfig("listen_ips"))?, listen_ips: raw.listen_ips.ok_or(Error::MissingConfig("listen_ips"))?,
port: raw.port.ok_or(Error::MissingConfig("port"))? port: raw.port.ok_or(Error::MissingConfig("port"))?
}) })
} }
} }
#[cfg_attr(feature="cli",derive(Args))] mod keypair_parser {
use libp2p::identity::Keypair;
use serde::{Deserialize, Deserializer, Serializer};
pub fn serialize<S>(keypair: &Keypair, serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer
{
serializer.serialize_str(&super::keypair_to_base64(keypair))
}
pub fn deserialize<'de, D>(deserializer: D) -> Result<Keypair, D::Error>
where D: Deserializer<'de>
{
super::base64_to_keypair(&String::deserialize(deserializer)?)
    .map_err(|error| serde::de::Error::custom(format!("invalid keypair: {error}")))
}
}
#[cfg_attr(feature="desktop",derive(Args))]
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)] #[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
pub struct PartialP2pConfig { pub struct PartialP2pConfig {
#[cfg_attr(feature="cli",arg(long))] #[cfg_attr(feature="desktop",arg(long))]
pub private_key: Option<String>, pub secret: Option<String>,
#[cfg_attr(feature="cli",arg(long))] #[cfg_attr(feature="desktop",arg(long))]
pub listen_ips: Option<Vec<IpAddr>>, pub listen_ips: Option<Vec<IpAddr>>,
#[cfg_attr(feature="cli",arg(long))] #[cfg_attr(feature="desktop",arg(long))]
pub port: Option<u16>, pub port: Option<u16>,
} }
impl PartialP2pConfig { impl PartialP2pConfig {
pub fn with_new_private_key(mut self) -> Self { pub fn with_new_secret(mut self) -> Self {
self.private_key = Some(keypair_to_base64(&Keypair::generate_ed25519())); self.secret = Some(keypair_to_base64(&Keypair::generate_ed25519()));
self self
} }
pub fn init_private_key(&mut self) {
let _ = self.private_key.insert(keypair_to_base64(&Keypair::generate_ed25519()));
}
} }
impl From<P2pConfig> for PartialP2pConfig { impl From<P2pConfig> for PartialP2pConfig {
fn from(config: P2pConfig) -> Self { fn from(config: P2pConfig) -> Self {
Self { Self {
private_key: Some(keypair_to_base64(&config.private_key)), secret: Some(keypair_to_base64(&config.secret)),
listen_ips: Some(config.listen_ips), listen_ips: Some(config.listen_ips),
port: Some(config.port) port: Some(config.port)
} }
@ -114,7 +132,7 @@ impl From<P2pConfig> for PartialP2pConfig {
impl Default for PartialP2pConfig { impl Default for PartialP2pConfig {
fn default() -> Self { fn default() -> Self {
Self { Self {
private_key: None, secret: None,
listen_ips: Some(Vec::from(DEFAULT_P2P_LISTEN_IPS)), listen_ips: Some(Vec::from(DEFAULT_P2P_LISTEN_IPS)),
port: Some(DEFAULT_P2P_PORT), port: Some(DEFAULT_P2P_PORT),
} }
@ -124,21 +142,21 @@ impl Default for PartialP2pConfig {
impl Emptiable for PartialP2pConfig { impl Emptiable for PartialP2pConfig {
fn empty() -> Self { fn empty() -> Self {
Self{ Self{
private_key: None, secret: None,
listen_ips: None, listen_ips: None,
port: None port: None
} }
} }
fn is_empty(&self) -> bool { fn is_empty(&self) -> bool {
self.private_key.is_none() && self.listen_ips.is_none() && self.port.is_none() self.secret.is_none() && self.listen_ips.is_none() && self.port.is_none()
} }
} }
impl Mergeable for PartialP2pConfig { impl Mergeable for PartialP2pConfig {
fn merge(&mut self, mut other: Self) { fn merge(&mut self, mut other: Self) {
if let Some(x) = other.private_key.take() { if let Some(x) = other.secret.take() {
let _ = self.private_key.insert(x); let _ = self.secret.insert(x);
}; };
if let Some(x) = other.listen_ips.take() { if let Some(x) = other.listen_ips.take() {
let _ = self.listen_ips.insert(x); let _ = self.listen_ips.insert(x);
@ -148,26 +166,13 @@ impl Mergeable for PartialP2pConfig {
}; };
} }
} }
impl Mergeable for Option<PartialP2pConfig> {
fn merge(&mut self, mut other: Self) {
match other.take() {
Some(x) => {
if let Some(y) = self.as_mut() {
y.merge(x);
} else {
let _ = self.insert(x);
}
},
None => {}
};
}
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use libp2p::identity; use libp2p::identity;
use super::*; use super::*;
use crate::{config::PartialConfig, tests::test_toml_serialize_deserialize};
#[tokio::test] #[tokio::test]
@ -177,5 +182,9 @@ mod tests {
assert_eq!(keypair.public(), keypair2.public()); assert_eq!(keypair.public(), keypair2.public());
} }
#[tokio::test]
async fn test_p2p_config_serialize_deserialize() {
test_toml_serialize_deserialize(PartialP2pConfig::empty());
test_toml_serialize_deserialize(PartialP2pConfig::default());
}
} }
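
Putting the pieces of this file together, a hedged sketch of how a full P2pConfig could be produced on first run; the function name is an assumption, and error handling simply bubbles up the MissingConfig variants from TryFrom:

    // Start from the documented defaults (listen_ips + port), generate a fresh
    // ed25519 secret, then promote the partial config into a full P2pConfig.
    fn bootstrap_p2p_config() -> Result<P2pConfig, crate::error::Error> {
        let partial = PartialP2pConfig::default().with_new_secret();
        P2pConfig::try_from(partial)
    }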

View file

@ -0,0 +1,91 @@
use std::path::PathBuf;
#[cfg(feature="desktop")]
use clap::Args;
#[cfg(any(test, feature="test"))]
use tempfile::tempdir;
use crate::{config::{ConfigError, PartialConfig}, utils::{emptiable::Emptiable, mergeable::Mergeable}};
use serde::{Deserialize, Serialize};
static DATA_DATABASE_NAME: &str = "data.sqlite";
static CACHE_DATABASE_NAME: &str = "cache.sqlite";
#[cfg(any(test, feature="test"))]
use crate::tests::{GlobalTestDefault, TestDefault};
#[derive(Debug)]
pub struct StorageConfig {
pub data_directory: PathBuf,
pub cache_directory: PathBuf,
}
impl StorageConfig {
pub fn get_data_database_path(&self) -> PathBuf{
self.data_directory.join(DATA_DATABASE_NAME)
}
pub fn get_cache_database_path(&self) -> PathBuf {
self.cache_directory.join(CACHE_DATABASE_NAME)
}
}
#[cfg(any(test, feature="test"))]
impl TestDefault for StorageConfig {
fn test_default() -> Self {
let temp_dir = tempdir().unwrap().keep();
Self { data_directory: temp_dir.clone(), cache_directory: temp_dir }
}
}
impl TryFrom<PartialStorageConfig> for StorageConfig {
type Error = ConfigError;
fn try_from(value: PartialStorageConfig) -> Result<Self, Self::Error> {
Ok(Self {
data_directory: value.data_directory.ok_or(ConfigError::MissingConfig("data_directory".to_string()))?,
cache_directory: value.cache_directory.ok_or(ConfigError::MissingConfig("cache_directory".to_string()))?,
})
}
}
#[cfg_attr(feature="desktop", derive(Args))]
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct PartialStorageConfig {
#[cfg_attr(feature="desktop", arg(long))]
pub data_directory: Option<PathBuf>,
#[cfg_attr(feature="desktop", arg(long))]
pub cache_directory: Option<PathBuf>,
}
impl From<StorageConfig> for PartialStorageConfig {
fn from(config: StorageConfig) -> PartialStorageConfig {
Self {
data_directory: Some(config.data_directory),
cache_directory: Some(config.cache_directory),
}
}
}
impl Emptiable for PartialStorageConfig {
fn empty() -> Self {
Self {
data_directory: None,
cache_directory: None
}
}
fn is_empty(&self) -> bool {
self.data_directory.is_none() && self.cache_directory.is_none()
}
}
impl Mergeable for PartialStorageConfig {
fn merge(&mut self, mut other: Self) {
if let Some(x) = other.data_directory.take() {
let _ = self.data_directory.insert(x);
};
if let Some(x) = other.cache_directory.take() {
let _ = self.cache_directory.insert(x);
};
}
}
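
The partial/merge split suggests layered configuration: a value given on the command line should override the same value read from a file. A sketch of that resolution order, assuming CLI arguments are merged last; the helper itself is illustrative and not part of this diff:

    use crate::config::{ConfigError, PartialStorageConfig, StorageConfig};
    use crate::utils::mergeable::Mergeable;

    fn resolve_storage(
        from_file: PartialStorageConfig,
        from_cli: PartialStorageConfig,
    ) -> Result<StorageConfig, ConfigError> {
        let mut merged = from_file;
        // merge() overwrites any field the right-hand side actually sets,
        // so command-line values win over file values.
        merged.merge(from_cli);
        StorageConfig::try_from(merged)
    }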

View file

@ -0,0 +1,16 @@
mod trusted_node;
mod record_deletion;
pub use trusted_node::{
ActiveModel as TrustedNodeActiveModel,
Column as TrustedNodeColumn,
Entity as TrustedNodeEntity,
Model as TrustedNodeModel,
};
pub use record_deletion::{
ActiveModel as RecordDeletionActiveModel,
Column as RecordDeletionColumn,
Entity as RecordDeletionEntity,
Model as RecordDeletionModel,
};

View file

@ -1,7 +1,8 @@
use chrono::Local; use chrono::Local;
use sea_orm::{entity::{ use sea_orm::entity::{
prelude::*, * *,
}, sea_query::table}; prelude::*
};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::data::syncable::*; use crate::data::syncable::*;
@ -28,15 +29,33 @@ pub enum Relation{}
impl ActiveModelBehavior for ActiveModel {} impl ActiveModelBehavior for ActiveModel {}
impl ActiveModel { impl ActiveModel {
pub fn new(node_id: Uuid, table_name: String, record_id: Uuid) -> Self { pub fn new() -> Self {
let timestamp: DateTimeUtc = Local::now().to_utc(); let timestamp: DateTimeUtc = Local::now().to_utc();
Self{ Self{
id: Set(crate::global::generate_uuid()), id: Set(crate::global::generate_uuid()),
created_at: Set(timestamp), created_at: Set(timestamp),
created_by: Set(node_id), ..Default::default()
table_name: Set(table_name),
record_id: Set(record_id),
} }
} }
} }
#[cfg(test)]
mod tests {
use crate::global::get_or_init_test_data_database;
use super::*;
use uuid::{Timestamp, Uuid};
#[tokio::test]
async fn check_insert_record_deletion() {
let db = get_or_init_test_data_database().await;
assert!(ActiveModel{
table_name: Set("test_table".to_string()),
record_id: Set(crate::global::generate_uuid()),
..ActiveModel::new()
}.insert(db).await.is_ok());
}
}

View file

@ -1,5 +1,4 @@
use chrono::Local; use chrono::Local;
use libp2p::PeerId;
use sea_orm::entity::{ use sea_orm::entity::{
*, *,
prelude::* prelude::*
@ -33,17 +32,34 @@ pub enum Relation {}
impl ActiveModelBehavior for ActiveModel {} impl ActiveModelBehavior for ActiveModel {}
impl ActiveModel { impl ActiveModel {
pub fn new(peer_id: PeerId, note: String) -> Self { pub fn new() -> Self {
let timestamp: DateTimeUtc = Local::now().to_utc(); let timestamp: DateTimeUtc = Local::now().to_utc();
Self{ Self{
id: Set(crate::global::generate_uuid()), id: Set(crate::global::generate_uuid()),
peer_id: Set(PeerIdValue::from(peer_id)),
created_at: Set(timestamp), created_at: Set(timestamp),
updated_at: Set(timestamp), updated_at: Set(timestamp),
synced_at: Set(None), ..Default::default()
is_prefered: Set(false),
note: Set(note),
} }
} }
} }
#[cfg(test)]
mod tests {
use crate::global::get_or_init_test_data_database;
use super::*;
use libp2p::{identity, PeerId};
#[tokio::test]
async fn check_insert_node() {
let db = get_or_init_test_data_database().await;
ActiveModel{
peer_id: Set(PeerIdValue::from(PeerId::random())),
note: Set("test note".to_owned()),
..ActiveModel::new()
}.insert(db).await.unwrap();
}
}

View file

@ -62,13 +62,10 @@ enum RecordDeletion {
Table, Table,
Id, Id,
CreatedAt, CreatedAt,
CreatedBy,
TableName, TableName,
RecordId, RecordId,
} }
static FK_RECORD_DELETION_TRUSTED_NODE: &str = "fk_record_deletion_trusted_node";
#[async_trait::async_trait] #[async_trait::async_trait]
impl TableMigration for RecordDeletion { impl TableMigration for RecordDeletion {
async fn up<'a>(manager: &'a SchemaManager<'a>) -> Result<(), DbErr> { async fn up<'a>(manager: &'a SchemaManager<'a>) -> Result<(), DbErr> {
@ -78,14 +75,6 @@ impl TableMigration for RecordDeletion {
.if_not_exists() .if_not_exists()
.col(pk_uuid(Self::Id)) .col(pk_uuid(Self::Id))
.col(timestamp_with_time_zone(Self::CreatedAt)) .col(timestamp_with_time_zone(Self::CreatedAt))
.col(uuid(Self::CreatedBy))
.foreign_key(ForeignKey::create()
.name(FK_RECORD_DELETION_TRUSTED_NODE)
.from(Self::Table,Self::CreatedBy)
.to(TrustedNode::Table, TrustedNode::Id)
.on_delete(ForeignKeyAction::Cascade)
.on_update(ForeignKeyAction::Cascade)
)
.col(string(Self::TableName)) .col(string(Self::TableName))
.col(uuid(Self::RecordId)) .col(uuid(Self::RecordId))
.to_owned() .to_owned()

View file

@ -1,6 +1,6 @@
use sea_orm::{prelude::*, query::*, sea_query::SimpleExpr, *}; use sea_orm::{prelude::*, query::*, sea_query::SimpleExpr, *};
#[cfg(feature="macros")] #[cfg(feature="macros")]
pub use caretta_sync_macros::SyncableModel; pub use lazy_supplements_macros::SyncableModel;
pub trait SyncableModel: ModelTrait<Entity = Self::SyncableEntity> { pub trait SyncableModel: ModelTrait<Entity = Self::SyncableEntity> {
type SyncableEntity: SyncableEntity<SyncableModel = Self>; type SyncableEntity: SyncableEntity<SyncableModel = Self>;
fn get_timestamp(&self) -> DateTimeUtc; fn get_timestamp(&self) -> DateTimeUtc;

View file

@ -99,3 +99,22 @@ impl sea_orm::sea_query::Nullable for PeerIdValue {
<String as sea_orm::sea_query::Nullable>::null() <String as sea_orm::sea_query::Nullable>::null()
} }
} }
#[cfg(test)]
mod tests {
use crate::tests::{test_cbor_serialize_deserialize, test_toml_serialize_deserialize};
use super::*;
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
struct PeerIdValueWrapper {
content: PeerIdValue
}
#[test]
fn test_serialize_deserialize() {
let peer_id = PeerIdValueWrapper { content: PeerIdValue::from(PeerId::random()) };
let x = toml::to_string(&peer_id).unwrap();
assert_eq!(peer_id.content, toml::from_str::<PeerIdValueWrapper>(&x).unwrap().content)
}
}

View file

@ -1,5 +1,3 @@
use std::ffi::OsString;
#[derive(thiserror::Error, Debug)] #[derive(thiserror::Error, Debug)]
pub enum Error { pub enum Error {
#[error("Base64 decode error: {0}")] #[error("Base64 decode error: {0}")]
@ -8,8 +6,6 @@ pub enum Error {
CiborDeserialize(#[from] ciborium::de::Error<std::io::Error>), CiborDeserialize(#[from] ciborium::de::Error<std::io::Error>),
#[error(transparent)] #[error(transparent)]
CiborSerialize(#[from] ciborium::ser::Error<std::io::Error>), CiborSerialize(#[from] ciborium::ser::Error<std::io::Error>),
#[error("Config error: {0}")]
Config(#[from] crate::config::error::ConfigError),
#[error("DB Error: {0}")] #[error("DB Error: {0}")]
Db(#[from]sea_orm::DbErr), Db(#[from]sea_orm::DbErr),
#[error("Dial Error: {0}")] #[error("Dial Error: {0}")]
@ -26,9 +22,7 @@ pub enum Error {
Multiaddr(#[from] libp2p::multiaddr::Error), Multiaddr(#[from] libp2p::multiaddr::Error),
#[error("Noise error: {0}")] #[error("Noise error: {0}")]
Noise(#[from] libp2p::noise::Error), Noise(#[from] libp2p::noise::Error),
#[error("Parse OsString error: {0:?}")] #[cfg(feature="desktop")]
OsStringConvert(std::ffi::OsString),
#[cfg(feature="cli")]
#[error("Parse args error: {0}")] #[error("Parse args error: {0}")]
ParseCommand(#[from] clap::Error), ParseCommand(#[from] clap::Error),
#[error("toml deserialization error: {0}")] #[error("toml deserialization error: {0}")]
@ -37,10 +31,4 @@ pub enum Error {
TomlSer(#[from] toml::ser::Error), TomlSer(#[from] toml::ser::Error),
#[error("Transport error: {0}")] #[error("Transport error: {0}")]
Transport(#[from]libp2p::TransportError<std::io::Error>) Transport(#[from]libp2p::TransportError<std::io::Error>)
}
impl From<std::ffi::OsString> for Error {
fn from(s: OsString) -> Error {
Self::OsStringConvert(s)
}
} }

View file

@ -0,0 +1,15 @@
use crate::{config::{P2pConfig, StorageConfig}, error::Error, global::GlobalConstant};
pub static STORAGE_CONFIG: GlobalConstant<StorageConfig> = GlobalConstant::const_new(stringify!(STORAGE_CONFIG));
pub static P2P_CONFIG: GlobalConstant<P2pConfig> = GlobalConstant::const_new(stringify!(P2P_CONFIG));
#[cfg(test)]
mod tests {
use crate::global::{config::P2P_CONFIG, STORAGE_CONFIG};
#[test]
fn test_global_constant_names() {
assert_eq!(STORAGE_CONFIG.name, stringify!(STORAGE_CONFIG));
assert_eq!(P2P_CONFIG.name, stringify!(P2P_CONFIG));
}
}
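
A sketch of the start-up wiring these constants imply, assuming the config has already been loaded and split into its storage and p2p parts elsewhere; later reads would go through get_and_unwrap():

    use crate::config::{P2pConfig, StorageConfig};
    use crate::global::{P2P_CONFIG, STORAGE_CONFIG};

    // Illustrative only: each OnceCell is set exactly once, so repeated calls
    // just return the value stored first.
    async fn init_global_config(storage: StorageConfig, p2p: P2pConfig) {
        let _ = STORAGE_CONFIG.get_or_init(storage).await;
        let _ = P2P_CONFIG.get_or_init(p2p).await;
    }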

View file

@ -0,0 +1,55 @@
use std::path::Path;
use sea_orm::{ConnectOptions, Database, DbErr, DatabaseConnection};
use sea_orm_migration::MigratorTrait;
use crate::error::Error;
use tokio::sync::OnceCell;
pub static DATA_DATABASE_CONNECTION: GlobalDatabaseConnection = GlobalDatabaseConnection::const_new(stringify!(DATA_DATABASE_CONNECTION));
pub static CACHE_DATABASE_CONNECTION: GlobalDatabaseConnection = GlobalDatabaseConnection::const_new(stringify!(CACHE_DATABASE_CONNECTION));
pub struct GlobalDatabaseConnection {
name: &'static str,
inner: OnceCell<DatabaseConnection>
}
impl GlobalDatabaseConnection {
pub const fn const_new(name: &'static str) -> Self {
Self {
name: name,
inner: OnceCell::const_new()
}
}
pub fn get(&'static self) -> &'static DatabaseConnection {
self.inner.get().expect(&format!("{} is uninitialized!", self.name))
}
pub async fn get_or_init<T, U>(&'static self, path: T, _: U) -> &'static DatabaseConnection
where
T: AsRef<Path>,
U: MigratorTrait
{
let url = "sqlite://".to_string() + path.as_ref().to_str().unwrap() + "?mode=rwc";
self.inner.get_or_try_init(|| async {
let db = Database::connect(&url).await?;
U::up(&db, None).await?;
Ok::<DatabaseConnection, DbErr>(db)
}).await.expect(&format!("Fail to initialize {}!", self.name))
}
}
#[cfg(test)]
pub use tests::*;
#[cfg(test)]
mod tests {
use super::*;
use crate::{cache::migration::CacheMigrator, data::migration::DataMigrator, global::STORAGE_CONFIG, tests::GlobalTestDefault};
pub async fn get_or_init_test_data_database() -> &'static DatabaseConnection{
DATA_DATABASE_CONNECTION.get_or_init(STORAGE_CONFIG.get_or_init_test_default().await.get_data_database_path(), DataMigrator).await
}
pub async fn get_or_init_test_cache_database() -> &'static DatabaseConnection{
CACHE_DATABASE_CONNECTION.get_or_init(STORAGE_CONFIG.get_or_init_test_default().await.get_cache_database_path(), CacheMigrator).await
}
}

View file

@ -0,0 +1,103 @@
use std::{any::type_name, collections::HashMap, net::{IpAddr, Ipv4Addr}, path::{Path, PathBuf}, sync::LazyLock};
use crate::{config::{P2pConfig, PartialP2pConfig, StorageConfig}, error::Error };
#[cfg(any(test, feature="test"))]
use crate::tests::{GlobalTestDefault, TestDefault};
use futures::StreamExt;
use libp2p::{swarm::SwarmEvent, Multiaddr, PeerId};
use sea_orm::{prelude::*, Database};
use sea_orm_migration::MigratorTrait;
use tokio::sync::{OnceCell, RwLock, RwLockReadGuard, RwLockWriteGuard};
mod peers;
pub use peers::*;
mod config;
pub use config::*;
mod database_connection;
pub use database_connection::*;
use uuid::{ContextV7, Timestamp, Uuid};
pub fn generate_uuid() -> Uuid {
Uuid::new_v7(Timestamp::now(ContextV7::new()))
}
pub static PRODUCT_NAME: LazyLock<String> = LazyLock::new(|| {
env!("CARGO_PKG_NAME").to_string()
});
pub static DEFAULT_LISTEN_IPS: &[IpAddr] = &[IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0))];
pub static DEFAULT_CONFIG_FILE_NAME: LazyLock<PathBuf> = LazyLock::new(|| {
PathBuf::from(String::new() + env!("CARGO_PKG_NAME") + ".toml")
});
pub static DEFAULT_DATABASE_FILE_NAME: LazyLock<PathBuf> = LazyLock::new(|| {
PathBuf::from(String::new() + env!("CARGO_PKG_NAME") + ".sqlite")
});
fn uninitialized_message(name: &str) -> String {
format!("{} is uninitialized!", name)
}
pub struct GlobalConstant<T> {
pub name: &'static str,
inner: OnceCell<T>
}
impl<T> GlobalConstant<T> {
pub const fn const_new(name: &'static str ) -> Self {
Self{
name: name,
inner: OnceCell::const_new()
}
}
pub async fn get_or_init(&'static self, source: T) -> &'static T {
self.inner.get_or_init(|| async {
source
}).await
}
pub fn get(&'static self) -> Option<&'static T> {
self.inner.get()
}
pub fn get_and_unwrap(&'static self) -> &'static T {
self.get().expect(&format!("{} is uninitialized!", self.name))
}
}
#[cfg(any(test, feature="test"))]
impl<T> GlobalTestDefault<T> for GlobalConstant<T>
where
T: TestDefault + 'static
{
async fn get_or_init_test_default(&'static self) -> &'static T {
self.get_or_init(T::test_default()).await
}
}
pub struct GlobalRwLock<T> {
pub name: &'static str,
inner: OnceCell<RwLock<T>>
}
impl<T> GlobalRwLock<T> {
pub const fn const_new(name: &'static str) -> Self {
Self{
name: name,
inner: OnceCell::const_new()
}
}
pub fn get(&'static self) -> &'static RwLock<T> {
self.inner.get().expect(&format!("{} is uninitialized", self.name))
}
pub async fn write(&'static self) -> RwLockWriteGuard<'_ ,T> {
self.get().write().await
}
pub async fn read(&'static self) -> RwLockReadGuard<'_, T> {
self.get().read().await
}
}
#[cfg(test)]
mod tests {
}

View file

@ -0,0 +1,11 @@
use std::collections::HashSet;
use crate::cache::entity::PeerModel;
use super::GlobalRwLock;
pub static PEERS: GlobalRwLock<HashSet<PeerModel>> = GlobalRwLock::const_new(stringify!(PEERS));

View file

@ -0,0 +1,4 @@
pub trait Message {
fn into_vec_u8(self) -> Vec<u8>;
fn from_vec_u8(bytes: Vec<u8>) -> Self;
}
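
A hedged sketch of one possible implementor, placed alongside the trait above and using the corrected signature that takes the encoded bytes. It encodes a serde type with ciborium, which is already a workspace dependency; the real wire format is not defined in this diff, and the trait's infallible return type forces a panic on malformed input:

    use serde::{Deserialize, Serialize};

    // Hypothetical message type, purely illustrative.
    #[derive(Debug, Deserialize, Serialize)]
    struct Ping {
        nonce: u64,
    }

    impl Message for Ping {
        fn into_vec_u8(self) -> Vec<u8> {
            let mut buf = Vec::new();
            ciborium::ser::into_writer(&self, &mut buf).expect("CBOR encoding should not fail");
            buf
        }

        fn from_vec_u8(bytes: Vec<u8>) -> Self {
            ciborium::de::from_reader(bytes.as_slice()).expect("malformed CBOR message")
        }
    }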

View file

@ -3,12 +3,10 @@ pub mod config;
pub mod data; pub mod data;
pub mod error; pub mod error;
pub mod global; pub mod global;
pub mod ipc;
pub mod message; pub mod message;
pub mod migration; pub mod migration;
pub mod p2p; pub mod p2p;
pub mod proto;
pub mod rpc;
#[cfg(any(test, feature="test"))] #[cfg(any(test, feature="test"))]
pub mod tests; pub mod tests;
pub mod utils; pub mod utils;
pub mod server;

Some files were not shown because too many files have changed in this diff.