Compare commits
No commits in common. "main" and "feature/syncable_derive" have entirely different histories.
main...feature/syncable_derive
230 changed files with 2407 additions and 4779 deletions
5  .vscode/extensions.json (vendored)
@@ -1,5 +0,0 @@
{
  "recommendations": [
    "zxh404.vscode-proto3"
  ]
}
10  .vscode/settings.json (vendored)
@@ -1,10 +0,0 @@
{
  "protoc": {
    "compile_on_save": true,
    "options": [
      "--proto_path=${workspaceRoot}/tripod-id/proto",
      "--proto_path=${workspaceRoot}/core/proto",
      "--java_out=${workspaceRoot}/.tmp"
    ]
  }
}
59  Cargo.toml
@@ -1,32 +1,5 @@
[package]
name = "caretta-sync"
edition.workspace = true
version.workspace = true
description.workspace = true
license.workspace = true
repository.workspace = true

[features]
default = ["macros"]
bevy = ["dep:caretta-sync-bevy"]
mobile = ["dep:caretta-sync-mobile"]
cli = ["dep:caretta-sync-cli"]
desktop = ["cli", "bevy"]
macros = ["dep:caretta-sync-macros"]
test = ["caretta-sync-core/test"]

[dependencies]
caretta-sync-bevy = { path = "bevy", optional = true }
caretta-sync-core.workspace = true
caretta-sync-cli = { path="cli", optional = true }
caretta-sync-mobile = { path = "mobile", optional = true }
caretta-sync-macros = { path="macros", optional = true}

[dev-dependencies]
caretta-sync-core = {workspace = true, features = ["test"]}

[workspace]
members = [ ".", "core", "macros", "cli", "mobile", "examples/*" , "bevy", "tripod-id"]
members = [ "lazy-supplements-*", "examples/*" ]
resolver = "3"

[workspace.package]

@@ -37,35 +10,15 @@ license = "MIT OR Apache-2.0"
repository = "https://forgejo.fireturlte.net/lazy-supplements"

[workspace.dependencies]
bevy = { git = "https://github.com/bevyengine/bevy.git", rev="16ffdaea0daec11e4347d965f56c9c8e1122a488" }
tripod-id = {path="./tripod-id", features=["prost", "rusqlite", "serde"]}
chrono = "0.4.41"
ciborium = "0.2.2"
clap = { version = "4.5.38", features = ["derive"] }
caretta-sync-core.path = "core"
futures = { version = "0.3.31", features = ["executor"] }
rand = "0.8.5"
rusqlite = "0.37.0"
dioxus = { version = "0.6.0", features = [] }
lazy-supplements-core.path = "lazy-supplements-core"
libp2p = { version = "0.55.0", features = ["macros", "mdns", "noise", "ping", "tcp", "tokio", "yamux" ] }
sea-orm = { version = "1.1.11", features = ["sqlx-sqlite", "runtime-tokio-native-tls", "macros", "with-chrono", "with-uuid"] }
sea-orm-migration = { version = "1.1.0", features = ["runtime-tokio-rustls", "sqlx-postgres"] }
serde = { version = "1.0.219", features = ["derive"] }
thiserror = "2.0.12"
tokio = { version = "1.45.0", features = ["macros", "rt", "rt-multi-thread"] }
tokio-stream = "0.1.17"
tonic = "0.14.0"
url = { version = "2.5.7", features = ["serde"] }
uuid = { version = "1.17.0", features = ["v7"] }
iroh = { version = "0.92.0", features = ["discovery-local-network", "discovery-pkarr-dht"] }
prost = "0.14.1"
prost-types = "0.14.1"
tonic-prost-build = "0.14.0"
tonic-prost = "0.14.0"

[profile.dev]
opt-level = 1

[profile.dev.package."*"]
opt-level = 3

[profile.release]
codegen-units = 1
lto = "thin"
@@ -1,8 +1,3 @@
# Caretta Framework
# Lazy Supplements Framework

A local-first application framework.

## Features
- Local first
- Decentralized data synchronization with libp2p
- Device management
A local-first application framework for lazy person
@@ -1,4 +0,0 @@
use bevy::{asset::uuid::Uuid, ecs::component::Component};

#[derive(Component)]
struct Id(Uuid);

@@ -1,2 +0,0 @@
pub mod global;
pub mod peer;

@@ -1,47 +0,0 @@
use bevy::{app::{App, Plugin, Startup, Update}, ecs::{component::Component, query::With, system::{Commands, Query}}, tasks::TaskPool};
use caretta_sync_core::{cache::entity::{CachedPeerEntity, CachedPeerModel}, global::{CONFIG, DATABASE_CONNECTIONS}};
use caretta_sync_core::{
    proto::*,
};
use sea_orm::EntityTrait;

#[derive(Component)]
pub struct Peer;

#[derive(Component)]
pub struct PeerId(String);

#[derive(Component)]
pub struct PeerAddress(String);

#[tokio::main]
async fn add_cached_peers(mut commands: Commands) {
    let config = CONFIG.get_unchecked();
    let url = config.rpc.endpoint_url.to_string();
    let mut client = caretta_sync_core::proto::cached_peer_service_client::CachedPeerServiceClient::connect(url).await.expect("Unix socket should be accessible");
    let request = tonic::Request::new(CachedPeerListRequest {});
    let response = client.list(request).await.expect("Faild to request/response");
    let peers = response.into_inner().peers;
    for model in peers.into_iter() {
        commands.spawn((Peer, PeerId(model.peer_id.to_string())));
    }
}

fn print_peer(query: Query<&PeerId, With<Peer>>) {
    for peer_id in &query {
        println!("Hello {}!", peer_id.0);
    }
}

fn hello_world() {
    println!("hello world!");
}

pub struct PeerPlugin;

impl Plugin for PeerPlugin {
    fn build(&self, app: &mut App) {
        app.add_systems(Startup, add_cached_peers);
        app.add_systems(Update, (hello_world, print_peer));
    }
}
@@ -1,69 +0,0 @@
use std::{net::IpAddr, path::PathBuf, sync::LazyLock};

use clap::Args;
use caretta_sync_core::{
    config::{Config, ConfigError, PartialConfig, PartialP2pConfig, PartialStorageConfig},
    utils::{emptiable::Emptiable, mergeable::Mergeable}
};

use libp2p::identity::Keypair;
use serde::{Deserialize, Serialize};
use tokio::sync::OnceCell;

#[derive(Args, Clone, Debug)]
pub struct ConfigArgs {
    #[arg(short = 'c', long = "config")]
    pub file_path: Option<PathBuf>,
    #[arg(skip)]
    pub file_content: OnceCell<PartialConfig>,
    #[command(flatten)]
    pub args: PartialConfig,
}

impl ConfigArgs {
    fn get_file_path_or_default(&self, app_name: &'static str) -> PathBuf {
        self.file_path.clone().unwrap_or(
            dirs::config_local_dir()
                .expect("Config user directory should be set")
                .join(app_name)
                .join("config.toml")
        )
    }
    async fn get_or_read_file_content(&self, app_name: &'static str) -> PartialConfig {
        self.file_content.get_or_init(|| async {
            PartialConfig::read_from(self.get_file_path_or_default(app_name)).await.expect("Config file should be invalid!")
        }).await.clone()
    }
    pub async fn to_partial_config_with_default(&self, app_name: &'static str) -> PartialConfig {
        let mut default = PartialConfig::default(app_name);
        default.merge(self.to_partial_config_without_default(app_name).await);
        default
    }
    pub async fn to_partial_config_without_default(&self, app_name: &'static str) -> PartialConfig {
        let mut file_content = self.get_or_read_file_content(app_name).await;
        let args = self.args.clone();
        file_content.merge(args);
        file_content
    }
    async fn has_p2p_private_key(&self, app_name: &'static str) -> bool {
        let merged = self.to_partial_config_with_default(app_name).await;
        match merged.p2p {
            Some(p2p) => p2p.private_key.is_some(),
            None => false
        }
    }
    pub async fn into_config(mut self, app_name: &'static str) -> Config {
        if !self.has_p2p_private_key(app_name).await {
            let path = self.get_file_path_or_default(app_name);
            let mut content = self.file_content.get_mut().unwrap();
            if let Some(p2p) = content.p2p.as_mut() {
                p2p.init_private_key();
            } else {
                content.p2p.insert(PartialP2pConfig::empty().with_new_private_key());
            }
            content.write_to(path).await.expect("Config file should be writable first time to initialize secret");
        }
        self.to_partial_config_with_default(app_name).await.try_into().expect("Some configurations are missing!")
    }
}
@@ -1,17 +0,0 @@
use clap::Args;
use caretta_sync_core::utils::runnable::Runnable;
use crate::cli::ConfigArgs;

#[derive(Debug, Args)]
pub struct ConfigCheckCommandArgs{
    #[command(flatten)]
    config: ConfigArgs
}

impl Runnable for ConfigCheckCommandArgs {
    #[tokio::main]
    async fn run(self, app_name: &'static str) {
        let _ = self.config.into_config(app_name).await;
        println!("Ok");
    }
}

@@ -1,24 +0,0 @@
use clap::Args;
use caretta_sync_core::{config::PartialConfig, utils::runnable::Runnable};
use crate::cli::ConfigArgs;

#[derive(Debug, Args)]
pub struct ConfigListCommandArgs{
    #[command(flatten)]
    config: ConfigArgs,
    #[arg(short,long)]
    all: bool
}

impl Runnable for ConfigListCommandArgs {
    #[tokio::main]
    async fn run(self, app_name: &'static str) {
        let config: PartialConfig = if self.all {
            self.config.into_config(app_name).await.into()
        } else {
            self.config.to_partial_config_without_default(app_name).await
        };
        println!("{}", config.into_toml().unwrap())
    }
}

@@ -1,38 +0,0 @@
mod check;
mod list;

pub use check::*;
pub use list::*;

use caretta_sync_core::utils::runnable::Runnable;
use clap::{Args, Subcommand};

#[derive(Debug, Args)]
pub struct ConfigCommandArgs {
    #[command(subcommand)]
    pub command: ConfigSubcommand
}

impl Runnable for ConfigCommandArgs {
    fn run(self, app_name: &'static str) {
        self.command.run(app_name)
    }
}

#[derive(Debug, Subcommand)]
pub enum ConfigSubcommand {
    Check(ConfigCheckCommandArgs),
    List(ConfigListCommandArgs),
}

impl Runnable for ConfigSubcommand {
    fn run(self, app_name: &'static str) {
        match self {
            Self::Check(x) => x.run(app_name),
            Self::List(x) => x.run(app_name),
        }
    }
}

@@ -1,49 +0,0 @@
mod add;
mod list;
mod ping;
mod remove;
mod scan;

pub use add::DeviceAddCommandArgs;
use caretta_sync_core::utils::runnable::Runnable;
pub use list::DeviceListCommandArgs;
pub use ping::DevicePingCommandArgs;
pub use remove::DeviceRemoveCommandArgs;
pub use scan::DeviceScanCommandArgs;

use clap::{Args, Parser, Subcommand};

#[derive(Debug, Args)]
pub struct DeviceCommandArgs {
    #[command(subcommand)]
    pub command: DeviceSubcommand
}

impl Runnable for DeviceCommandArgs {
    fn run(self, app_name: &'static str) {
        self.command.run(app_name)
    }
}

#[derive(Debug, Subcommand)]
pub enum DeviceSubcommand {
    Add(DeviceAddCommandArgs),
    List(DeviceListCommandArgs),
    Ping(DevicePingCommandArgs),
    Remove(DeviceRemoveCommandArgs),
    Scan(DeviceScanCommandArgs),
}

impl Runnable for DeviceSubcommand {
    fn run(self, app_name: &'static str) {
        match self {
            Self::Add(x) => x.run(app_name),
            Self::List(x) => x.run(app_name),
            Self::Ping(x) => x.run(app_name),
            Self::Remove(x) => x.run(app_name),
            Self::Scan(x) => x.run(app_name),
        }
    }
}
@@ -1,13 +0,0 @@
use std::path::PathBuf;

mod args;
mod config;
mod device;
mod peer;
mod serve;

pub use args::*;
pub use config::*;
pub use device::*;
pub use peer::*;
pub use serve::*;

@@ -1,17 +0,0 @@
use clap::Args;
use caretta_sync_core::utils::runnable::Runnable;
use crate::cli::{ConfigArgs, PeerArgs};

#[derive(Debug, Args)]
pub struct PeerInfoCommandArgs{
    #[command(flatten)]
    config: ConfigArgs,
    #[command(flatten)]
    peer: PeerArgs,
}

impl Runnable for PeerInfoCommandArgs {
    fn run(self, app_name: &'static str) {
        todo!()
    }
}

@@ -1,24 +0,0 @@
use clap::Args;
use caretta_sync_core::{
    utils::runnable::Runnable,
    proto::*,
};
use crate::cli::ConfigArgs;

#[derive(Debug, Args)]
pub struct PeerListCommandArgs{
    #[command(flatten)]
    config: ConfigArgs
}

impl Runnable for PeerListCommandArgs {
    #[tokio::main]
    async fn run(self, app_name: &'static str) {
        let config = self.config.into_config(app_name).await;
        let url = config.rpc.endpoint_url.to_string();
        let mut client = caretta_sync_core::proto::cached_peer_service_client::CachedPeerServiceClient::connect(url).await.expect("Target endpoint should be accessible");
        let request = tonic::Request::new(CachedPeerListRequest {});
        let response = client.list(request).await.expect("Faild to request/response");
        println!("{:?}", response);
    }
}
@@ -1,42 +0,0 @@
mod info;
mod list;
mod ping;

pub use info::*;
pub use list::*;
pub use ping::*;

use caretta_sync_core::utils::runnable::Runnable;
use clap::{Args, Subcommand};

#[derive(Debug, Args)]
pub struct PeerCommandArgs {
    #[command(subcommand)]
    pub command: PeerSubcommand
}

impl Runnable for PeerCommandArgs {
    fn run(self, app_name: &'static str) {
        self.command.run(app_name)
    }
}

#[derive(Debug, Subcommand)]
pub enum PeerSubcommand {
    Info(PeerInfoCommandArgs),
    List(PeerListCommandArgs),
    Ping(PeerPingCommandArgs),
}

impl Runnable for PeerSubcommand {
    fn run(self, app_name: &'static str) {
        match self {
            Self::Info(x) => x.run(app_name),
            Self::List(x) => x.run(app_name),
            Self::Ping(x) => x.run(app_name),
        }
    }
}

@@ -1,18 +0,0 @@
use clap::Args;
use caretta_sync_core::utils::runnable::Runnable;
use crate::cli::{ConfigArgs, PeerArgs};

#[derive(Debug, Args)]
pub struct PeerPingCommandArgs{
    #[command(flatten)]
    config: ConfigArgs,
    #[command(flatten)]
    peer: PeerArgs,
}

impl Runnable for PeerPingCommandArgs {
    #[tokio::main]
    async fn run(self, app_name: &'static str) {
        todo!()
    }
}

@@ -1,28 +0,0 @@
use std::marker::PhantomData;

use clap::Args;
use caretta_sync_core::{config::Config, global::{CONFIG, LOCAL_DATABASE_CONNECTION}, server::ServerTrait, utils::runnable::Runnable};

use super::ConfigArgs;

#[derive(Args, Debug)]
pub struct ServeCommandArgs<T>
where
    T: ServerTrait
{
    #[arg(skip)]
    server: PhantomData<T>,
    #[command(flatten)]
    config: ConfigArgs,
}
impl<T> Runnable for ServeCommandArgs<T>
where
    T: ServerTrait
{
    #[tokio::main]
    async fn run(self, app_name: &'static str) {
        let config = CONFIG.get_or_init::<Config>(self.config.into_config(app_name).await).await;
        let _ = LOCAL_DATABASE_CONNECTION.get_or_init(&config.storage.get_local_database_path() );
        T::serve_all(config).await.unwrap();
    }
}

@@ -1 +0,0 @@
pub mod cli;
@@ -1,57 +0,0 @@
[package]
name = "caretta-sync-core"
edition.workspace = true
version.workspace = true
description.workspace = true
license.workspace = true
repository.workspace = true

[features]
default = []
cli = ["dep:clap"]
test = ["dep:tempfile", ]

[dependencies]
base64 = "0.22.1"
tripod-id.workspace = true
chrono.workspace = true
chrono-tz = "0.10.3"
ciborium.workspace = true
clap = {workspace = true, optional = true}
dirs = "6.0.0"
futures.workspace = true
iroh.workspace = true
prost.workspace = true
prost-types.workspace = true
rusqlite = { workspace = true, features = ["bundled", "chrono", "uuid"] }
serde.workspace = true
sysinfo = "0.37.0"
tempfile = { version = "3.20.0", optional = true }
thiserror.workspace = true
tokio.workspace = true
toml = "0.8.22"
tonic.workspace = true
tonic-prost = "0.14.0"
tracing = "0.1.41"
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
uuid.workspace = true
url.workspace = true
whoami = "1.6.1"
rand.workspace = true
ed25519-dalek = { version = "2.2.0", features = ["signature"] }
tokio-stream.workspace = true

[target.'cfg(target_os="android")'.dependencies]
jni = "0.21.1"
ndk = "0.9.0"

[target.'cfg(target_os="ios")'.dependencies]
objc2 = "0.6.2"
objc2-foundation = "0.3.1"
objc2-app-kit = "0.3.1"

[dev-dependencies]
tempfile = "3.20.0"

[build-dependencies]
tonic-prost-build.workspace = true
@@ -1,14 +0,0 @@
fn main() -> Result<(), Box<dyn std::error::Error>> {
    tonic_prost_build::configure()
        .extern_path(".tripod_id", "::tripod_id::prost")
        .compile_protos(
            &[
                "proto/caretta_sync/authorization_request/authorization_request.proto",
                "proto/caretta_sync/authorized_node/authorized_node.proto",
                "proto/caretta_sync/remote_node/remote_node.proto",
            ],
            &["proto", "../tripod-id/proto"]
        )?;
    Ok(())
}
@@ -1,9 +0,0 @@
syntax = "proto3";
package caretta_sync.authorization_request;

import "caretta_sync/authorization_request/identifier.proto";

message AcceptRequest {
  Identifier authorization_request = 1;
  string passcode = 2;
}

@@ -1,8 +0,0 @@
syntax = "proto3";
package caretta_sync.authorization_request;

import "caretta_sync/authorized_node/info.proto";

message AcceptResponse {
  caretta_sync.authorized_node.Info authorized_node_info = 1;
}

@@ -1,23 +0,0 @@
syntax = "proto3";
package caretta_sync.authorization_request;

import "caretta_sync/authorization_request/accept_request.proto";
import "caretta_sync/authorization_request/accept_response.proto";
import "caretta_sync/authorization_request/info_request.proto";
import "caretta_sync/authorization_request/info_response.proto";
import "caretta_sync/authorization_request/list_request.proto";
import "caretta_sync/authorization_request/list_response.proto";
import "caretta_sync/authorization_request/reject_request.proto";
import "caretta_sync/authorization_request/reject_response.proto";
import "caretta_sync/authorization_request/send_request.proto";
import "caretta_sync/authorization_request/send_response.proto";

service AuthorizationRequest {
  rpc Send(SendRequest) returns (SendResponse);
  rpc Accept(AcceptRequest) returns (AcceptResponse);
  rpc Reject(RejectRequest) returns (RejectResponse);
  rpc Info(InfoRequest) returns (InfoResponse);
  rpc List(stream ListRequest) returns (stream ListResponse);
}

@@ -1,12 +0,0 @@
syntax = "proto3";
package caretta_sync.authorization_request;

import "caretta_sync/common/uuid.proto";
import "tripod_id/double.proto";

message Identifier {
  oneof identifier_value {
    caretta_sync.common.Uuid uuid = 1;
    tripod_id.Double id = 2;
  }
}

@@ -1,16 +0,0 @@
syntax = "proto3";
package caretta_sync.authorization_request;

import "caretta_sync/iroh/public_key.proto";
import "caretta_sync/authorization_request/status.proto";
import "tripod_id/double.proto";
import "google/protobuf/timestamp.proto";

message Info {
  tripod_id.Double id = 1;
  caretta_sync.iroh.PublicKey public_key = 2;
  google.protobuf.Timestamp created_at = 3;
  google.protobuf.Timestamp closed_at = 6;
  Status status = 4;
}

@@ -1,8 +0,0 @@
syntax = "proto3";
package caretta_sync.authorization_request;

import "caretta_sync/authorization_request/identifier.proto";

message InfoRequest {
  Identifier request = 1;
}

@@ -1,9 +0,0 @@
syntax = "proto3";
package caretta_sync.authorization_request;

import "caretta_sync/authorization_request/info.proto";

message InfoResponse{
  Info info = 1;
}

@@ -1,4 +0,0 @@
syntax = "proto3";
package caretta_sync.authorization_request;

message ListRequest {}

@@ -1,8 +0,0 @@
syntax = "proto3";
package caretta_sync.authorization_request;

import "caretta_sync/authorization_request/info.proto";

message ListResponse {
  Info request_info = 1;
}

@@ -1,8 +0,0 @@
syntax = "proto3";
package caretta_sync.authorization_request;

import "caretta_sync/authorization_request/identifier.proto";

message RejectRequest {
  Identifier authorization_request = 1;
}

@@ -1,8 +0,0 @@
syntax = "proto3";
package caretta_sync.authorization_request;

import "caretta_sync/authorization_request/info.proto";

message RejectResponse {
  Info request_info = 1;
}

@@ -1,8 +0,0 @@
syntax = "proto3";
package caretta_sync.authorization_request;

import "caretta_sync/remote_node/identifier.proto";

message SendRequest {
  caretta_sync.remote_node.Identifier remote_node = 1;
}

@@ -1,9 +0,0 @@
syntax = "proto3";
package caretta_sync.authorization_request;

import "caretta_sync/remote_node/info.proto";

message SendResponse {
  caretta_sync.remote_node.Info remote_node_info = 1;
  string passcode = 2;
}

@@ -1,10 +0,0 @@
syntax = "proto3";
package caretta_sync.authorization_request;

enum Status {
  UNSPECIFIED = 0;
  SENT = 1;
  RECEIVED = 2;
  ACCEPTED = 3;
  REJECTED = 4;
}
@@ -1,14 +0,0 @@
syntax = "proto3";

package caretta_sync.authorized_node;

import "caretta_sync/authorized_node/info_request.proto";
import "caretta_sync/authorized_node/info_response.proto";
import "caretta_sync/authorized_node/list_request.proto";
import "caretta_sync/authorized_node/list_response.proto";

service AuthorizedNode {
  rpc Info(InfoRequest) returns (InfoResponse);
  rpc List(stream ListRequest) returns (stream ListResponse);
}

@@ -1,14 +0,0 @@
syntax = "proto3";

package caretta_sync.authorized_node;

import "tripod_id/single.proto";
import "caretta_sync/iroh/public_key.proto";

message Identifier {
  oneof identifier_value {
    tripod_id.Single id = 1;
    caretta_sync.iroh.PublicKey public_key = 2;
  }
}

@@ -1,12 +0,0 @@
syntax = "proto3";

package caretta_sync.authorized_node;

import "tripod_id/single.proto";
import "caretta_sync/iroh/public_key.proto";

message Info {
  tripod_id.Single id = 1;
  caretta_sync.iroh.PublicKey public_key = 2;
  string note = 3;
}

@@ -1,9 +0,0 @@
syntax = "proto3";

package caretta_sync.authorized_node;

import "caretta_sync/authorized_node/identifier.proto";

message InfoRequest {
  Identifier node = 1;
}

@@ -1,8 +0,0 @@
syntax = "proto3";

package caretta_sync.authorized_node;
import "caretta_sync/authorized_node/info.proto";

message InfoResponse {
  Info node_info = 1;
}

@@ -1,5 +0,0 @@
syntax = "proto3";

package caretta_sync.authorized_node;

message ListRequest{}

@@ -1,9 +0,0 @@
syntax = "proto3";

package caretta_sync.authorized_node;

import "caretta_sync/authorized_node/info.proto";

message ListResponse {
  Info node_info = 1;
}

@@ -1,8 +0,0 @@
syntax = "proto3";

package caretta_sync.common;

// protobuf message of url::Url.
message Url {
  string url = 1;
}

@@ -1,9 +0,0 @@
syntax = "proto3";

package caretta_sync.common;

// protobuf message of uuid::Uuid
message Uuid {
  uint64 high_bits = 1;
  uint64 low_bits = 2;
}
@@ -1,26 +0,0 @@
syntax = "proto3";
package caretta_sync.iroh;

import "caretta_sync/common/url.proto";
import "caretta_sync/net/socket_addr.proto";

// A protobuf message of iroh::ConnectionType
message ConnectionType {
  message Direct {
    caretta_sync.net.SocketAddr direct_value = 1;
  }
  message Relay {
    caretta_sync.common.Url relay_value = 1;
  }
  message Mixed {
    caretta_sync.net.SocketAddr socket_addr = 1;
    caretta_sync.common.Url relay_url = 2;
  }
  message None{}
  oneof connection_type_value {
    Direct direct = 1;
    Relay relay = 2;
    Mixed mixed = 3;
    None none = 4;
  }
}

@@ -1,18 +0,0 @@
syntax = "proto3";
package caretta_sync.iroh;

// The message of iroh::endpoint::ControlMsg.
// To ensure compatiility with irof::endpoint::ControlMsg,
// it's implemented as a message rather than an enum to accommodate the posibility
// that values may be added to the enum in rust in the future.
message ControlMsg {
  message Ping {}
  message Pong {}
  message CallMeMayBe {}

  oneof control_msg_vaue {
    Ping ping = 1;
    Pong pong = 2;
    CallMeMayBe call_me_maybe = 3;
  }
}

@@ -1,30 +0,0 @@
syntax = "proto3";
package caretta_sync.iroh;

import "google/protobuf/duration.proto";
import "caretta_sync/iroh/control_msg.proto";
import "caretta_sync/iroh/source.proto";
import "caretta_sync/net/socket_addr.proto";

// A protobuf message of iroh::endpoint::DirectAddrInfo
message DirectAddrInfo {
  // A protobuf message of (Duration, ControlMsg)
  message DurationControlMsg {
    google.protobuf.Duration duration = 1;
    ControlMsg control_msg = 2;
  }

  // A protobuf message of (iroh::Source, Duration)
  message SourceDuration {
    Source source = 1;
    google.protobuf.Duration duration = 2;
  }

  caretta_sync.net.SocketAddr addr = 1;
  google.protobuf.Duration latency = 2;
  DurationControlMsg last_control = 3;
  google.protobuf.Duration last_payload = 4;
  google.protobuf.Duration last_alive = 5;
  repeated SourceDuration sources = 6;
}

@@ -1,8 +0,0 @@
syntax = "proto3";

package caretta_sync.iroh;

// protobuf message of iroh::PublicKey.
message PublicKey {
  bytes key = 1;
}

@@ -1,12 +0,0 @@
syntax = "proto3";
package caretta_sync.iroh;

import "google/protobuf/duration.proto";
import "caretta_sync/common/url.proto";

// A protobuf message of iroh::RelayUrlInfo
message RelayUrlInfo {
  caretta_sync.common.Url relay_url = 1;
  google.protobuf.Duration last_alive = 2;
  google.protobuf.Duration latency = 3;
}

@@ -1,18 +0,0 @@
syntax = "proto3";
package caretta_sync.iroh;

import "caretta_sync/iroh/public_key.proto";
import "caretta_sync/iroh/relay_url_info.proto";
import "caretta_sync/iroh/direct_addr_info.proto";
import "caretta_sync/iroh/connection_type.proto";
import "google/protobuf/duration.proto";

// A messege of iroh::RemoteInfo.
message RemoteInfo {
  PublicKey node_id = 3;
  RelayUrlInfo relay_url = 4;
  repeated DirectAddrInfo addrs = 5;
  ConnectionType conn_type = 6;
  google.protobuf.Duration latency = 7;
  google.protobuf.Duration last_used = 8;
}

@@ -1,23 +0,0 @@
syntax = "proto3";
package caretta_sync.iroh;

message Source {
  message Saved{}
  message Udp{}
  message Relay{}
  message App{}
  message Discovery{
    string value = 1;
  }
  message NamedApp {
    string value = 1;
  }
  oneof source_value {
    Saved saved = 1;
    Udp udp = 2;
    Relay relay = 3;
    App app = 4;
    Discovery discovery = 5;
    NamedApp named_app = 6;
  };
}

@@ -1,7 +0,0 @@
syntax = "proto3";
package caretta_sync.net;

message Ipv4Addr {
  uint32 bits = 1;
}

@@ -1,7 +0,0 @@
syntax = "proto3";
package caretta_sync.net;

message Ipv6Addr {
  uint64 high_bits = 1;
  uint64 low_bits = 2;
}

@@ -1,19 +0,0 @@
syntax = "proto3";

package caretta_sync.net;

import "caretta_sync/net/socket_addr_v4.proto";
import "caretta_sync/net/socket_addr_v6.proto";

// Protobuf message of std::net::SocketAddr.
message SocketAddr {
  oneof socket_addr_value {
    SocketAddrV4 v4 = 1;
    SocketAddrV6 v6 = 2;
  }
}

@@ -1,9 +0,0 @@
syntax = "proto3";
package caretta_sync.net;

import "caretta_sync/net/ipv4_addr.proto";

message SocketAddrV4 {
  Ipv4Addr ip = 1;
  uint32 port = 2;
}

@@ -1,9 +0,0 @@
syntax = "proto3";
package caretta_sync.net;

import "caretta_sync/net/ipv6_addr.proto";

message SocketAddrV6 {
  Ipv6Addr ip = 1;
  uint32 port = 2;
}

@@ -1,13 +0,0 @@
syntax = "proto3";
package caretta_sync.remote_node;

import "caretta_sync/iroh/public_key.proto";
import "tripod_id/double.proto";

message Identifier {
  oneof identifier_value {
    tripod_id.Double id = 1;
    caretta_sync.iroh.PublicKey public_key = 2;
  }
}

@@ -1,19 +0,0 @@
syntax = "proto3";
package caretta_sync.remote_node;

import "caretta_sync/iroh/remote_info.proto";
import "tripod_id/double.proto";

message Info {
  tripod_id.Double public_id = 1;
  caretta_sync.iroh.RemoteInfo remote_info = 2;
}

@@ -1,16 +0,0 @@
syntax = "proto3";
package caretta_sync.remote_node;

import "caretta_sync/remote_node/identifier.proto";

message InfoRequest {
  Identifier remote_node = 1;
}

@@ -1,8 +0,0 @@
syntax = "proto3";
package caretta_sync.remote_node;

import "caretta_sync/remote_node/info.proto";

message InfoResponse {
  Info remote_node_info = 1;
}

@@ -1,4 +0,0 @@
syntax = "proto3";
package caretta_sync.remote_node;

message ListRequest{}

@@ -1,8 +0,0 @@
syntax = "proto3";
package caretta_sync.remote_node;

import "caretta_sync/remote_node/info.proto";

message ListResponse {
  Info remote_node_info = 1;
}

@@ -1,12 +0,0 @@
syntax = "proto3";
package caretta_sync.remote_node;

import "caretta_sync/remote_node/info_request.proto";
import "caretta_sync/remote_node/info_response.proto";
import "caretta_sync/remote_node/list_request.proto";
import "caretta_sync/remote_node/list_response.proto";

service RemoteNode {
  rpc Info(InfoRequest) returns (InfoResponse);
  rpc List(stream ListRequest) returns (stream ListResponse);
}
@@ -1,134 +0,0 @@
use std::{net::{IpAddr, Ipv4Addr}, ops, path::{Path, PathBuf}};

use base64::{prelude::BASE64_STANDARD, Engine};
#[cfg(feature="cli")]
use clap::Args;
use futures::StreamExt;
use iroh::{Endpoint, SecretKey};
use serde::{Deserialize, Serialize};
use tokio::{fs::File, io::{AsyncReadExt, AsyncWriteExt}};
use tracing_subscriber::EnvFilter;

use crate::{
    config::PartialConfig,
    error::Error, utils::{emptiable::Emptiable, mergeable::Mergeable}
};

#[derive(Clone, Debug)]
pub struct IrohConfig {
    pub enable: bool,
    pub secret_key: SecretKey,
    pub use_n0_discovery_service: bool,
}

impl IrohConfig {
    async fn into_endpoint(config: Self) -> Result<Option<Endpoint>, crate::error::Error> {
        if config.enable {
            let mut endpoint = Endpoint::builder()
                .secret_key(config.secret_key)
                .discovery_dht()
                .discovery_local_network();
            if config.use_n0_discovery_service {
                endpoint = endpoint.discovery_n0();
            }
            Ok(Some(endpoint.bind().await?))
        } else {
            Ok(None)
        }
    }
}

impl TryFrom<PartialIrohConfig> for IrohConfig {
    type Error = crate::error::Error;
    fn try_from(raw: PartialIrohConfig) -> Result<IrohConfig, Self::Error> {
        Ok(IrohConfig {
            enable: raw.enable.ok_or(Error::MissingConfig("iroh.enable"))?,
            secret_key: raw.secret_key.ok_or(Error::MissingConfig("iroh.secret_key"))?,
            use_n0_discovery_service: raw.use_n0_discovery_service.ok_or(Error::MissingConfig("iroh.use_n0_discovery_service"))?
        })
    }
}

#[cfg_attr(feature="cli",derive(Args))]
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct PartialIrohConfig {
    #[cfg_attr(feature="cli",arg(long="p2p_enable"))]
    pub enable: Option<bool>,
    #[cfg_attr(feature="cli",arg(long))]
    pub secret_key: Option<SecretKey>,
    #[cfg_attr(feature="cli",arg(long))]
    pub use_n0_discovery_service: Option<bool>,
}

impl PartialIrohConfig {
    pub fn with_new_secret_key(mut self) -> Self {
        let mut rng = rand::rngs::OsRng;
        self.secret_key = Some(SecretKey::generate(&mut rng));
        self
    }
}

impl From<IrohConfig> for PartialIrohConfig {
    fn from(config: IrohConfig) -> Self {
        Self {
            enable: Some(config.enable),
            secret_key: Some(config.secret_key),
            use_n0_discovery_service: Some(config.use_n0_discovery_service)
        }
    }
}

impl Default for PartialIrohConfig {
    fn default() -> Self {
        Self {
            enable: Some(true),
            secret_key: None,
            use_n0_discovery_service: Some(true)
        }
    }
}

impl Emptiable for PartialIrohConfig {
    fn empty() -> Self {
        Self{
            enable: None,
            secret_key: None,
            use_n0_discovery_service: None
        }
    }

    fn is_empty(&self) -> bool {
        self.enable.is_none() && self.secret_key.is_none() && self.use_n0_discovery_service.is_none()
    }
}

impl Mergeable for PartialIrohConfig {
    fn merge(&mut self, mut other: Self) {
        if let Some(x) = other.enable.take() {
            let _ = self.enable.insert(x);
        };
        if let Some(x) = other.secret_key.take() {
            let _ = self.secret_key.insert(x);
        };
        if let Some(x) = other.use_n0_discovery_service.take() {
            let _ = self.use_n0_discovery_service.insert(x);
        };
    }
}
impl Mergeable for Option<PartialIrohConfig> {
    fn merge(&mut self, mut other: Self) {
        match other.take() {
            Some(x) => {
                if let Some(y) = self.as_mut() {
                    y.merge(x);
                } else {
                    let _ = self.insert(x);
                }
            },
            None => {}
        };
    }
}
@@ -1,158 +0,0 @@
pub mod error;
mod storage;
mod iroh;
mod rpc;

use std::{default::Default, fs::File, io::{Read, Write}, path::Path};
use crate::{utils::{emptiable::Emptiable, mergeable::Mergeable}};
pub use error::ConfigError;
use serde::{de::DeserializeOwned, Deserialize, Serialize};

use tokio::{io::{AsyncReadExt, AsyncWriteExt}};
pub use storage::{StorageConfig, PartialStorageConfig};
pub use iroh::{IrohConfig, PartialIrohConfig};
pub use rpc::*;

#[cfg(feature="cli")]
use clap::Args;

#[derive(Clone, Debug)]
pub struct Config {
    pub iroh: IrohConfig,
    pub storage: StorageConfig,
    pub rpc: RpcConfig,
}

impl AsRef<StorageConfig> for Config {
    fn as_ref(&self) -> &StorageConfig {
        &self.storage
    }
}

impl AsRef<IrohConfig> for Config {
    fn as_ref(&self) -> &IrohConfig {
        &self.iroh
    }
}

impl AsRef<RpcConfig> for Config {
    fn as_ref(&self) -> &RpcConfig {
        &self.rpc
    }
}

impl TryFrom<PartialConfig> for Config {
    type Error = crate::error::Error;
    fn try_from(value: PartialConfig) -> Result<Self, Self::Error> {
        Ok(Self{
            rpc: value.rpc.ok_or(crate::error::Error::MissingConfig("rpc"))?.try_into()?,
            iroh: value.iroh.ok_or(crate::error::Error::MissingConfig("p2p"))?.try_into()?,
            storage: value.storage.ok_or(crate::error::Error::MissingConfig("storage"))?.try_into()?
        })
    }
}

#[cfg_attr(feature="cli", derive(Args))]
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct PartialConfig {
    #[cfg_attr(feature="cli", command(flatten))]
    pub iroh: Option<PartialIrohConfig>,
    #[cfg_attr(feature="cli", command(flatten))]
    pub storage: Option<PartialStorageConfig>,
    #[cfg_attr(feature="cli", command(flatten))]
    pub rpc: Option<PartialRpcConfig>,
}

impl PartialConfig {
    pub fn new() -> Self {
        Self {
            iroh : Some(PartialIrohConfig::empty().with_new_secret_key()),
            storage: Some(PartialStorageConfig::empty()),
            rpc: Some(PartialRpcConfig::empty()),
        }
    }
    pub fn from_toml(s: &str) -> Result<Self, toml::de::Error> {
        toml::from_str(s)
    }
    pub fn into_toml(&self) -> Result<String, toml::ser::Error> {
        toml::to_string(self)
    }
    pub fn read_or_create<T>(path: T) -> Result<Self, ConfigError>
    where
        T: AsRef<Path>
    {
        if !path.as_ref().exists() {
            Self::new().write_to(&path)?;
        }
        Self::read_from(&path)
    }
    pub fn read_from<T>(path:T) -> Result<Self, ConfigError>
    where
        T: AsRef<Path>
    {
        if !path.as_ref().exists() {
            if let Some(x) = path.as_ref().parent() {
                std::fs::create_dir_all(x)?;
            };
            let _ = File::create(&path)?;
        }
        let mut file = File::open(path.as_ref())?;
        let mut content = String::new();
        file.read_to_string(&mut content)?;
        let config: Self = toml::from_str(&content)?;
        Ok(config)
    }
    pub fn write_to<T>(&self, path:T) -> Result<(), ConfigError>
    where
        T: AsRef<Path>
    {
        if !path.as_ref().exists() {
            if let Some(x) = path.as_ref().parent() {
                std::fs::create_dir_all(x)?;
            };
            let _ = File::create(&path)?;
        }
        let mut file = File::create(&path)?;
        file.write_all(toml::to_string(self)?.as_bytes())?;
        Ok(())
    }
    pub fn default(app_name: &'static str) -> Self {
        Self {
            iroh: Some(PartialIrohConfig::default()),
            rpc: Some(PartialRpcConfig::default(app_name)),
            storage: Some(PartialStorageConfig::default(app_name)),
        }
    }
}

impl From<Config> for PartialConfig {
    fn from(value: Config) -> Self {
        Self {
            iroh: Some(value.iroh.into()),
            storage: Some(value.storage.into()),
            rpc: Some(value.rpc.into())
        }
    }
}

impl Emptiable for PartialConfig {
    fn empty() -> Self {
        Self {
            iroh: None,
            storage: None,
            rpc: None,
        }
    }

    fn is_empty(&self) -> bool {
        self.iroh.is_empty() && self.rpc.is_empty() && self.storage.is_empty()
    }
}

impl Mergeable for PartialConfig {
    fn merge(&mut self, other: Self) {
        self.iroh.merge(other.iroh);
        self.rpc.merge(other.rpc);
        self.storage.merge(other.storage);
    }
}
@@ -1,91 +0,0 @@
use std::{net::{IpAddr, Ipv4Addr, SocketAddr, TcpListener}, path::PathBuf, str::FromStr};
#[cfg(feature="cli")]
use clap::Args;
use url::Url;
use crate::{config::PartialConfig, utils::{emptiable::Emptiable, mergeable::Mergeable}};
use serde::{Deserialize, Serialize};

use crate::config::error::ConfigError;

#[cfg(unix)]
static DEFAULT_PORT: u16 = 54321;

#[derive(Clone, Debug)]
pub struct RpcConfig {
    pub endpoint_url: Url,
}

impl TryFrom<PartialRpcConfig> for RpcConfig {
    type Error = ConfigError;
    fn try_from(config: PartialRpcConfig) -> Result<Self, Self::Error> {
        Ok(Self{
            endpoint_url: config.endpoint_url.ok_or(ConfigError::MissingConfig("endpoint".to_string()))?,
        })
    }
}

#[cfg_attr(feature="cli", derive(Args))]
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
pub struct PartialRpcConfig {
    pub endpoint_url: Option<Url>,
}

impl PartialRpcConfig {
    #[cfg(not(any(all(target_os="ios", target_abi="sim"), target_os="windows")))]
    pub fn default(app_name: &'static str) -> Self {
        let username = whoami::username();
        Self{
            endpoint_url: Some(Url::parse(&(String::from("unix://") + std::env::temp_dir().join(username).join(String::from(app_name) + ".sock").to_str().unwrap())).unwrap()),
        }
    }
    #[cfg(any(all(target_os="ios", target_abi="sim"), target_os="windows"))]
    pub fn default(app_name: &'static str) -> Self {
        Self{
            endpoint_url: Some(Url::parse("http://127.0.0.1:54321").unwrap()),
        }
    }
}

impl Emptiable for PartialRpcConfig {
    fn empty() -> Self {
        Self {
            endpoint_url: None,
        }
    }
    fn is_empty(&self) -> bool {
        self.endpoint_url.is_none()
    }
}

impl From<RpcConfig> for PartialRpcConfig {
    fn from(source: RpcConfig) -> Self {
        Self {
            endpoint_url: Some(source.endpoint_url),
        }
    }
}

impl Mergeable for PartialRpcConfig {
    fn merge(&mut self, other: Self) {
        if let Some(x) = other.endpoint_url {
            self.endpoint_url = Some(x);
        }
    }
}

impl Mergeable for Option<PartialRpcConfig> {
    fn merge(&mut self, mut other: Self) {
        match other.take() {
            Some(x) => {
                if let Some(y) = self.as_mut() {
                    y.merge(x);
                } else {
                    let _ = self.insert(x);
                }
            },
            None => {}
        };
    }
}
@@ -1,150 +0,0 @@
use std::path::PathBuf;

#[cfg(feature="cli")]
use clap::Args;

use rusqlite::Connection;
#[cfg(any(test, feature="test"))]
use tempfile::tempdir;
use crate::{config::{ConfigError, PartialConfig}, utils::{emptiable::Emptiable, get_binary_name, mergeable::Mergeable}};
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug)]
pub struct StorageConfig {
    pub data_directory: PathBuf,
    pub cache_directory: PathBuf,
}

impl StorageConfig {
    pub fn get_global_data_directory(&self) -> PathBuf {
        self.data_directory.join("global")
    }
    pub fn get_global_root_document_path(&self) -> PathBuf {
        self.data_directory.join("global.bin")
    }
    pub fn get_local_data_directory(&self) -> PathBuf {
        self.data_directory.join("local")
    }
    pub fn get_local_database_path(&self) -> PathBuf {
        self.data_directory.join("local.sqlite")
    }
}

impl TryFrom<PartialStorageConfig> for StorageConfig {
    type Error = ConfigError;

    fn try_from(value: PartialStorageConfig) -> Result<Self, Self::Error> {
        Ok(Self {
            data_directory: value.data_directory.ok_or(ConfigError::MissingConfig("data_directory".to_string()))?,
            cache_directory: value.cache_directory.ok_or(ConfigError::MissingConfig("cache_directory".to_string()))?,
        })
    }
}

#[cfg_attr(feature="cli", derive(Args))]
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
pub struct PartialStorageConfig {
    #[cfg_attr(feature="cli", arg(long))]
    pub data_directory: Option<PathBuf>,
    #[cfg_attr(feature="cli", arg(long))]
    pub cache_directory: Option<PathBuf>,
}

impl PartialStorageConfig {
    #[cfg(not(any(target_os="android", target_os="ios")))]
    pub fn default(app_name: &'static str) -> Self {
        let mut data_dir = dirs::data_local_dir().unwrap();
        data_dir.push(app_name);
        let mut cache_dir = dirs::cache_dir().unwrap();
        cache_dir.push(app_name);

        Self {
            data_directory: Some(data_dir),
            cache_directory: Some(cache_dir)
        }
    }
    #[cfg(target_os="android")]
    pub fn default_android() -> Self{
        let ctx = ndk_context::android_context();
        let vm = unsafe { jni::JavaVM::from_raw(ctx.vm().cast()) }?;
        let mut env = vm.attach_current_thread()?;
        let ctx = unsafe { jni::objects::JObject::from_raw(ctx.context().cast()) };
        let cache_dir = env
            .call_method(ctx, "getFilesDir", "()Ljava/io/File;", &[])?
            .l()?;
        let cache_dir: jni::objects::JString = env
            .call_method(&cache_dir, "toString", "()Ljava/lang/String;", &[])?
            .l()?
            .try_into()?;
        let cache_dir = env.get_string(&cache_dir)?;
        let cache_dir = cache_dir.to_str()?;
        Ok(cache_dir.to_string())
    }
    #[cfg(target_os="ios")]
    pub fn default(_: &'static str) -> Self{
        use objc2::rc::Retained;
        use objc2::msg_send;
        use objc2_foundation::*;

        let home_dir: Retained<NSString> = unsafe {NSHomeDirectory()};

        let path = PathBuf::from(home_dir.to_string());
        Self {
            data_directory: Some(path.join("Library")),
            cache_directory: Some(path.join("Library").join("Cache")),
        }
    }
}

impl From<StorageConfig> for PartialStorageConfig {
    fn from(config: StorageConfig) -> PartialStorageConfig {
        Self {
            data_directory: Some(config.data_directory),
            cache_directory: Some(config.cache_directory),
        }
    }
}

impl Emptiable for PartialStorageConfig {
    fn empty() -> Self {
        Self {
            data_directory: None,
            cache_directory: None
        }
    }

    fn is_empty(&self) -> bool {
        self.data_directory.is_none() && self.cache_directory.is_none()
    }
}
impl Mergeable for PartialStorageConfig {
    fn merge(&mut self, mut other: Self) {
        if let Some(x) = other.data_directory.take() {
            let _ = self.data_directory.insert(x);
        };
        if let Some(x) = other.cache_directory.take() {
            let _ = self.cache_directory.insert(x);
        };
    }
}

impl Mergeable for Option<PartialStorageConfig> {
    fn merge(&mut self, mut other: Self) {
        match other.take() {
            Some(x) => {
                if let Some(y) = self.as_mut() {
                    y.merge(x);
                } else {
                    let _ = self.insert(x);
                }
            },
            None => {}
        };
    }
}
@@ -1,80 +0,0 @@
//! Structs about authorization.

mod sent;
mod received;

use std::os::unix::raw::time_t;

use tripod_id::Double;
use chrono::{DateTime, Local, NaiveDateTime};
use iroh::{NodeId, PublicKey};
pub use sent::*;
pub use received::*;
use rusqlite::{params, types::FromSqlError, Connection};
use uuid::Uuid;

use crate::data::local::LocalRecord;

/// Request of node authentication.
#[derive(Debug, Clone)]
pub struct AuthorizationRequestRecord {
    id: u32,
    uuid: Uuid,
    public_id: Double,
    peer_id: u32,
    created_at: DateTime<Local>,
    closed_at: Option<DateTime<Local>>,
}

impl LocalRecord for AuthorizationRequestRecord {

    const TABLE_NAME: &str = "authorization_request";
    const SELECT_COLUMNS: &[&str] = &[
        "id",
        "uuid",
        "public_id",
        "peer_id",
        "created_at",
        "closed_at"
    ];
    const INSERT_COLUMNS: &[&str] = &[
        "uuid",
        "public_id",
        "peer_id",
        "created_at"
    ];

    type InsertParams<'a> = (&'a Double, &'a [u8;32], &'a NaiveDateTime);
    type SelectValues = (u32, Uuid, Double, PublicKey, NaiveDateTime, Option<NaiveDateTime>);
    fn from_row(row: &rusqlite::Row<'_>) -> Result<Self, rusqlite::Error> {
        let created_at: NaiveDateTime = row.get(4)?;
        let closed_at: Option<NaiveDateTime> = row.get(5)?;
        Ok(Self {
            id: row.get(0)?,
            uuid: row.get(1)?,
            public_id: row.get(2)?,
            peer_id: row.get(3)?,
            created_at: created_at.and_utc().into(),
            closed_at: closed_at.map(|x| x.and_utc().into())
        })
    }
}

impl From<(u32, Uuid, Double, u32, NaiveDateTime, Option<NaiveDateTime>)> for AuthorizationRequestRecord {
    fn from(value: (u32, Uuid, Double, u32, NaiveDateTime, Option<NaiveDateTime>)) -> Self {
        Self {
            id: value.0,
            uuid: value.1,
            public_id: value.2,
            peer_id: value.3,
            created_at: value.4.and_utc().into(),
            closed_at: value.5.map(|x| x.and_utc().into())
        }
    }
}
impl<'a> From<&'a rusqlite::Row<'_>> for AuthorizationRequestRecord {
    fn from(value: &'a rusqlite::Row<'_>) -> Self {
        todo!()
    }
}
@@ -1,41 +0,0 @@
use caretta_id::{DoubleId, SingleId};
use chrono::{DateTime, Local, NaiveDateTime};
use iroh::{NodeId, PublicKey};

use crate::{data::local::LocalRecord, global::LOCAL_DATABASE_CONNECTION};

/// Response of node authentication.
#[derive(Debug, Clone)]
pub struct ReceivedAuthorizationRequestRecord {
    id: u32,
    authorization_request_id: u32,
    peer_note: String,
}

impl LocalRecord for ReceivedAuthorizationRequestRecord {
    const TABLE_NAME: &str = "received_authorization_request";

    const SELECT_COLUMNS: &[&str] = &[
        "id",
        "authorization_request_id",
        "peer_note"
    ];

    const INSERT_COLUMNS: &[&str] = &[
        "authorization_request_id",
        "peer_note"
    ];

    type InsertParams<'a> = (&'a u32, &'a str);

    fn from_row(row: &rusqlite::Row<'_>) -> Result<Self, rusqlite::Error> {
        Ok(Self {
            id: row.get(0)?,
            authorization_request_id: row.get(1)?,
            peer_note: row.get(2)?
        })
    }

}
@@ -1,39 +0,0 @@
use caretta_id::SingleId;
use chrono::{DateTime, Local, NaiveDateTime};
use iroh::{NodeId, PublicKey};
use rusqlite::types::FromSqlError;

use crate::{data::local::LocalRecord, global::LOCAL_DATABASE_CONNECTION};

/// Request of node authentication.
#[derive(Debug, Clone)]
pub struct SentAuthorizationRequestRecord {
    id: u32,
    authorization_request_id: u32,
    passcode: String,
}

impl LocalRecord for SentAuthorizationRequestRecord {

    const TABLE_NAME: &str = "sent_authorization_request";
    const SELECT_COLUMNS: &[&str] = &[
        "id",
        "authorization_request_id",
        "passcode",
    ];
    const INSERT_COLUMNS: &[&str] = &[
        "authorization_request_id",
        "passcode"
    ];

    type InsertParams<'a> = (&'a u32, &'a str);

    fn from_row(row: &rusqlite::Row<'_>) -> Result<Self, rusqlite::Error> {
        Ok(Self{
            id: row.get(0)?,
            authorization_request_id: row.get(0)?,
            passcode: row.get(2)?
        })
    }

}
@@ -1,17 +0,0 @@
mod v1;

use rusqlite::{Error, Connection};
use tracing::{event, Level};

pub fn migrate(con: &mut Connection) -> Result<(), Error>{
    let version: u32 = con.pragma_query_value(None,"user_version", |row| row.get(0)).expect("Failed to get user_version");
    if version < 1 {
        let tx = con.transaction()?;
        event!(Level::INFO, "Migrate local db to version 1");
        v1::migrate(&tx)?;
        tx.pragma_update(None, "user_version", 1)?;
        tx.commit()?;
        event!(Level::INFO, "Migration done.");
    }
    Ok(())
}
@@ -1,45 +0,0 @@
use rusqlite::{Connection, Error, Transaction};

pub fn migrate(tx: &Transaction) -> Result<(), Error>{
    tx.execute_batch(
        "CREATE TABLE remote_node (
            id INTEGER PRIMARY KEY,
            public_id INTEGER NOT NULL UNIQUE,
            public_key BLOB UNIQUE NOT NULL
        );
        CREATE TABLE authorization_request (
            id INTEGER PRIMARY KEY,
            uuid BLOB NOT NULL UNIQUE,
            public_id INTEGER NOT NULL UNIQUE,
            remote_node_id INTEGER NOT NULL UNIQUE,
            created_at TEXT NOT NULL,
            closed_at TEXT,
            FOREIGN KEY(remote_node_id) REFERENCES remote_node(id)
        );
        CREATE TABLE received_authorization_request (
            id INTEGER PRIMARY KEY,
            authorization_request_id INTEGER NOT NULL UNIQUE,
            node_note TEXT,
            FOREIGN KEY(authorization_request_id) REFERENCES authorization_request(id)
        );
        CREATE TABLE sent_authorization_request (
            id INTEGER PRIMARY KEY,
            authorization_request_id INTEGER NOT NULL UNIQUE,
            passcode TEXT NOT NULL,
            FOREIGN KEY(authorization_request_id) REFERENCES authorization_request(id)
        );
        CREATE TABLE authorized_remote_node (
            id INTEGER PRIMARY KEY,
            uuid BLOB UNIQUE NOT NULL,
            public_id INTEGER NOT NULL UNIQUE,
            public_key BLOB NOT NULL UNIQUE,
            note TEXT NOT NULL,
            last_synced_at TEXT,
            last_sent_version_vector BLOB,
            created_at TEXT NOT NULL,
            updated_at TEXT NOT NULL
        );",
    )?;

    Ok(())
}
|
@@ -1,121 +0,0 @@
// mod authorization_request;
mod remote_node;
pub use remote_node::RemoteNodeRecord;
pub mod migration;

use std::{cell::OnceCell, convert::Infallible, iter::Map, path::Path, sync::{LazyLock, OnceLock}};

use migration::migrate;
use rusqlite::{ffi::Error, params, types::FromSql, Connection, MappedRows, OptionalExtension, Params, Row, ToSql};

use crate::{config::StorageConfig, global::{CONFIG, LOCAL_DATABASE_CONNECTION}};

// pub use authorization_request::*;
pub type LocalRecordError = rusqlite::Error;

/// Id type for a local database record.
pub type LocalRecordId = u32;

/// Id placeholder for a record that has not been inserted yet.
pub type NoLocalRecordId = rusqlite::types::Null;

/// Model trait for local database data.
/// Uses LOCAL_DATABASE_CONNECTION for the database connection.
pub trait LocalRecord: Sized {
    const TABLE_NAME: &str;
    const COLUMNS: &[&str];

    /// Tuple form of the record.
    /// The order of the fields must match COLUMNS.
    type RowValues;
}

pub trait SelectableLocalRecord: LocalRecord<RowValues: TryInto<Self>> {
    const SELECT_STATEMENT: LazyLock<String> = LazyLock::new(|| {
        String::from("SELECT ") + &Self::COLUMNS.join(", ") + " FROM " + Self::TABLE_NAME
    });

    const SELECT_PLACEHOLDER: LazyLock<String> = LazyLock::new(|| {
        let mut result: Vec<String> = Vec::new();
        for i in 0..Self::COLUMNS.len() {
            result.push(String::from("?") + &(i + 1).to_string());
        }
        result.join(", ")
    });

    fn get_one_where<P>(where_statement: &str, params: P) -> Result<Self, rusqlite::Error>
    where
        P: Params,
    {
        let connection = LOCAL_DATABASE_CONNECTION.get_unchecked();
        Ok(connection.query_row(
            &(String::new() + &Self::SELECT_STATEMENT + " " + where_statement),
            params,
            Self::from_row
        )?)
    }

    fn get_one_by_field<T>(field_name: &str, field_value: T) -> Result<Self, rusqlite::Error>
    where
        T: ToSql,
    {
        let connection = LOCAL_DATABASE_CONNECTION.get_unchecked();
        // SELECT_STATEMENT already contains "FROM <table>", so only the WHERE clause is appended here.
        Ok(connection.query_row(
            &([&Self::SELECT_STATEMENT, "WHERE", field_name, "= ?1"].join(" ")),
            params![field_value],
            Self::from_row
        )?)
    }

    fn get_one_by_id(id: u32) -> Result<Self, rusqlite::Error> {
        Self::get_one_by_field("id", id)
    }

    fn from_row(row: &Row<'_>) -> Result<Self, rusqlite::Error>;

    fn get_all() -> Result<Vec<Self>, rusqlite::Error> {
        let connection = LOCAL_DATABASE_CONNECTION.get_unchecked();
        let mut stmt = connection.prepare(&("SELECT ".to_string() + &Self::COLUMNS.join(", ") + " FROM " + Self::TABLE_NAME))?;
        let rows = stmt.query_map([], Self::from_row)?;
        let mut result = Vec::new();
        for row in rows {
            result.push(row?);
        }
        Ok(result)
    }
}

pub trait InsertableLocalRecord: LocalRecord<RowValues: From<Self> + Params> {
    type LocalRecord: Sized + SelectableLocalRecord;

    /// Placeholder list for insertion, generated from COLUMNS.
    const INSERT_PLACEHOLDER: LazyLock<String> = LazyLock::new(|| {
        let mut result: Vec<String> = Vec::new();
        for i in 0..Self::COLUMNS.len() {
            result.push(String::from("?") + &(i + 1).to_string());
        }
        result.join(", ")
    });

    /// Insert and return the inserted record.
    fn insert(self) -> Result<Self::LocalRecord, rusqlite::Error> {
        let params = Self::RowValues::from(self);
        let connection = LOCAL_DATABASE_CONNECTION.get_unchecked();

        Ok(connection.query_row(
            &[
                "INSERT INTO ", Self::TABLE_NAME, "(", &Self::COLUMNS.join(", "), ")",
                "VALUES (", &*Self::INSERT_PLACEHOLDER, ")",
                "RETURNING", &Self::COLUMNS.join(", ")
            ].join(" "),
            params,
            Self::LocalRecord::from_row
        )?)
    }
}
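A hypothetical record type sketching how these traits fit together, mirroring the RemoteNodeRecord pattern below; the `note` table and the `NoteRecord` type are illustrative only and do not exist in this crate.

// Illustrative select-only record over a hypothetical `note(id, body)` table.
#[derive(Debug)]
pub struct NoteRecord<T> {
    pub id: T,
    pub body: String,
}

impl<T> LocalRecord for NoteRecord<T> {
    const TABLE_NAME: &str = "note";
    const COLUMNS: &[&str] = &["id", "body"];
    type RowValues = (T, String);
}

// Required so RowValues satisfies the `TryInto<Self>` bound on SelectableLocalRecord.
impl TryFrom<(LocalRecordId, String)> for NoteRecord<LocalRecordId> {
    type Error = rusqlite::Error;
    fn try_from(value: (LocalRecordId, String)) -> Result<Self, Self::Error> {
        Ok(Self { id: value.0, body: value.1 })
    }
}

impl SelectableLocalRecord for NoteRecord<LocalRecordId> {
    fn from_row(row: &Row<'_>) -> Result<Self, rusqlite::Error> {
        Ok(Self { id: row.get(0)?, body: row.get(1)? })
    }
}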
@@ -1,120 +0,0 @@
//! Structs for cached remote nodes.

use std::os::unix::raw::time_t;

use tripod_id::Double;
use chrono::{DateTime, Local, NaiveDateTime};
use iroh::{NodeId, PublicKey};
use rusqlite::{params, types::{FromSqlError, Null}, Connection};
use uuid::Uuid;

use crate::{data::local::{self, InsertableLocalRecord, LocalRecord, LocalRecordId, NoLocalRecordId, SelectableLocalRecord}, global::LOCAL_DATABASE_CONNECTION};

/// Remote node information cached in the local database.
///
/// - Currently this only contains the local id and the iroh public key (= node id).
/// - This is a junction table that makes it possible to use a caretta-id to refer to items in the UI, especially on the CLI.
/// - The actual remote node information is managed by the iroh endpoint and is not stored in this model.
/// - Once a remote node is authorized it is assigned a global (= synced) id as an authorized_remote_node, so this local id essentially targets unauthorized remote nodes.
#[derive(Clone, Debug, PartialEq)]
pub struct RemoteNodeRecord<T> {
    /// Serial primary key.
    pub id: T,

    /// Public tripod id of the remote node.
    /// This id is used only by the node itself and is not synced, so another node has a different local id even when the public_key is the same.
    pub public_id: Double,

    /// Iroh public key.
    pub public_key: PublicKey,
}

impl RemoteNodeRecord<LocalRecordId> {
    pub fn get_or_insert_by_public_key(public_key: &PublicKey) -> Result<Self, rusqlite::Error> {
        match Self::get_by_public_key(public_key) {
            Ok(x) => Ok(x),
            Err(rusqlite::Error::QueryReturnedNoRows) => {
                let new = RemoteNodeRecord {
                    id: NoLocalRecordId {},
                    public_id: rand::random(),
                    public_key: public_key.clone()
                };
                Ok(new.insert()?)
            },
            Err(e) => Err(e)
        }
    }

    pub fn get_by_public_id(public_id: &Double) -> Result<Self, rusqlite::Error> {
        Self::get_one_where("WHERE public_id = ?1", (public_id,))
    }

    pub fn get_by_public_key(public_key: &PublicKey) -> Result<Self, rusqlite::Error> {
        Self::get_one_where("WHERE public_key = ?1", (public_key.as_bytes(),))
    }
}

impl<T> LocalRecord for RemoteNodeRecord<T> {
    const TABLE_NAME: &str = "remote_node";
    const COLUMNS: &[&str] = &[
        "id",
        "public_id",
        "public_key"
    ];

    type RowValues = (T, Double, [u8; 32]);
}

impl SelectableLocalRecord for RemoteNodeRecord<LocalRecordId> {
    fn from_row(row: &rusqlite::Row<'_>) -> Result<Self, rusqlite::Error> {
        Ok(Self {
            id: row.get(0)?,
            public_id: row.get(1)?,
            public_key: PublicKey::from_bytes(&row.get(2)?).map_err(|e| FromSqlError::Other(Box::new(e)))?
        })
    }
}

impl TryFrom<(LocalRecordId, Double, [u8; 32])> for RemoteNodeRecord<LocalRecordId> {
    type Error = rusqlite::Error;
    fn try_from(value: (LocalRecordId, Double, [u8; 32])) -> Result<Self, Self::Error> {
        Ok(Self {
            id: value.0,
            public_id: value.1,
            public_key: PublicKey::from_bytes(&value.2).map_err(|x| FromSqlError::Other(Box::new(x)))?
        })
    }
}

impl InsertableLocalRecord for RemoteNodeRecord<NoLocalRecordId> {
    type LocalRecord = RemoteNodeRecord<LocalRecordId>;
}

impl From<RemoteNodeRecord<NoLocalRecordId>> for (NoLocalRecordId, Double, [u8; 32]) {
    fn from(value: RemoteNodeRecord<NoLocalRecordId>) -> Self {
        (value.id, value.public_id, value.public_key.as_bytes().to_owned())
    }
}

#[cfg(test)]
mod tests {
    use iroh::SecretKey;

    use crate::tests::TEST_CONFIG;

    use super::*;

    #[test]
    fn insert_get_remote_node_record() {
        LOCAL_DATABASE_CONNECTION.get_or_init(&TEST_CONFIG.storage.get_local_database_path());
        let key = SecretKey::generate(&mut rand::rngs::OsRng);
        let pubkey = key.public();
        let record = RemoteNodeRecord::get_or_insert_by_public_key(&pubkey).unwrap();
        assert_eq!(record, RemoteNodeRecord::get_by_public_id(&record.public_id).unwrap());
        assert_eq!(record, RemoteNodeRecord::get_by_public_key(&record.public_key).unwrap());
    }
}
@@ -1,2 +0,0 @@
pub mod distributed;
pub mod local;
@@ -1,63 +0,0 @@
use std::{array::TryFromSliceError, ffi::OsString};
use tonic::Status;

use crate::proto::ProtoDeserializeError;

#[derive(thiserror::Error, Debug)]
pub enum Error {
    #[error("Base64 decode error: {0}")]
    Base64Decode(#[from] base64::DecodeError),
    #[error(transparent)]
    CiborDeserialize(#[from] ciborium::de::Error<std::io::Error>),
    #[error(transparent)]
    CiborSerialize(#[from] ciborium::ser::Error<std::io::Error>),
    #[error("Config error: {0}")]
    Config(#[from] crate::config::error::ConfigError),
    #[error("Infallible: {0}")]
    Infallible(#[from] std::convert::Infallible),
    #[error("IO Error: {0}")]
    Io(#[from] std::io::Error),
    #[error("Iroh bind error: {0}")]
    IrohBind(#[from] iroh::endpoint::BindError),
    #[error("mandatory config `{0}` is missing")]
    MissingConfig(&'static str),
    #[error("Parse OsString error: {0:?}")]
    OsStringConvert(std::ffi::OsString),
    #[cfg(feature="cli")]
    #[error("Parse args error: {0}")]
    ParseCommand(#[from] clap::Error),
    #[error("Signature error: {0}")]
    Signature(#[from] ed25519_dalek::SignatureError),
    #[error("slice parse error: {0}")]
    SliceTryFrom(#[from] TryFromSliceError),
    #[error("toml deserialization error: {0}")]
    TomlDe(#[from] toml::de::Error),
    #[error("toml serialization error: {0}")]
    TomlSer(#[from] toml::ser::Error),
    #[error("protobuf serialization error: {0}")]
    ProtoSerialize(#[from] crate::proto::ProtoSerializeError),
    #[error("protobuf deserialization error: {0}")]
    ProtoDeserialize(#[from] crate::proto::ProtoDeserializeError),
    #[error("Local record error: {0}")]
    LocalRecord(#[from] crate::data::local::LocalRecordError),
    #[error("Tripod id error: {0}")]
    TripodId(#[from] tripod_id::Error),
}

impl From<std::ffi::OsString> for Error {
    fn from(s: OsString) -> Error {
        Self::OsStringConvert(s)
    }
}

impl From<Error> for Status {
    fn from(value: Error) -> Self {
        match value {
            Error::ProtoDeserialize(x) => match x {
                ProtoDeserializeError::MissingField(x) => Self::invalid_argument(format!("{} is required", x)),
                _ => Status::unimplemented("Unimplemented protobuf deserialize error status")
            },
            _ => Status::unimplemented("Unimplemented error status")
        }
    }
}
@@ -1,38 +0,0 @@
#[cfg(any(test, feature="test"))]
use tempfile::TempDir;
use tokio::sync::OnceCell;

use crate::{config::{Config, ConfigError, PartialIrohConfig, PartialRpcConfig, PartialStorageConfig, StorageConfig}, error::Error};

pub static CONFIG: GlobalConfig = GlobalConfig::const_new();

pub struct GlobalConfig {
    inner: OnceCell<Config>
}

impl GlobalConfig {
    pub const fn const_new() -> Self {
        Self {
            inner: OnceCell::const_new()
        }
    }

    pub async fn get_or_init<T>(&'static self, config: T) -> &'static Config
    where
        T: Into<Config>,
    {
        self.inner.get_or_init(|| async {
            config.into()
        }).await
    }

    pub async fn get_or_try_init<T>(&'static self, config: T) -> Result<&'static Config, <T as TryInto<Config>>::Error>
    where
        T: TryInto<Config>,
    {
        self.inner.get_or_try_init(|| async {
            config.try_into()
        }).await
    }

    pub fn get(&'static self) -> Option<&'static Config> {
        self.inner.get()
    }

    pub fn get_unchecked(&'static self) -> &'static Config {
        self.get().expect("Config must be initialized before use!")
    }
}
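A minimal startup sketch, assuming a `Config` value has already been loaded elsewhere (for example from a TOML file); the wrapper function is hypothetical and only shows the initialize-once call pattern.

// Hypothetical call site: the first call stores the config, later calls get the same reference.
async fn init_global_config(config: Config) {
    let cfg: &'static Config = CONFIG.get_or_init(config).await;
    let _ = cfg;
}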
@@ -1,27 +0,0 @@
use std::sync::OnceLock;

use iroh::Endpoint;

pub static IROH_ENDPOINT: GlobalIrohEndpoint = GlobalIrohEndpoint::const_new();

pub struct GlobalIrohEndpoint {
    inner: OnceLock<Endpoint>
}

impl GlobalIrohEndpoint {
    const fn const_new() -> Self {
        Self {
            inner: OnceLock::new()
        }
    }

    pub fn get_or_init(&self, endpoint: &Endpoint) -> Endpoint {
        self.inner.get_or_init(|| endpoint.clone()).clone()
    }

    pub fn get(&self) -> Option<Endpoint> {
        self.inner.get().map(|x| x.clone())
    }

    pub fn get_unchecked(&self) -> Endpoint {
        self.get().expect("Global Iroh Endpoint must be initialized before use")
    }
}
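A hedged initialization sketch, assuming the iroh endpoint is built with the default builder; the exact builder options used by this crate are not shown in the diff, so the function below is illustrative only.

// Hypothetical call site: bind an endpoint and publish it through the global holder.
async fn init_global_endpoint() -> Result<(), iroh::endpoint::BindError> {
    let endpoint = Endpoint::builder().bind().await?;
    // The first caller wins; subsequent callers receive a clone of the stored endpoint.
    let _ep = IROH_ENDPOINT.get_or_init(&endpoint);
    Ok(())
}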
@@ -1,49 +0,0 @@
use std::{fs::create_dir_all, path::{Path, PathBuf}, sync::OnceLock};

use rusqlite::Connection;

use crate::{data::local::migration::migrate, error::Error};

pub static LOCAL_DATABASE_CONNECTION: GlobalLocalDatabaseConnection = GlobalLocalDatabaseConnection::const_new();

pub struct GlobalLocalDatabaseConnection {
    path: OnceLock<PathBuf>
}

fn path_to_connection_or_panic<P>(path: &P) -> Connection
where
    P: AsRef<Path>
{
    Connection::open(path.as_ref()).expect("Failed to open database connection for local data")
}

impl GlobalLocalDatabaseConnection {
    const fn const_new() -> Self {
        Self {
            path: OnceLock::new()
        }
    }

    pub fn get_or_init<P>(&self, path: &P) -> Connection
    where
        P: AsRef<Path>,
    {
        path_to_connection_or_panic(self.path.get_or_init(|| {
            let path = path.as_ref();
            let parent = path.parent().expect("Database path should have parent directory");
            create_dir_all(parent).expect("Failed to create parent directory of database");
            let mut conn = path_to_connection_or_panic(&path);
            migrate(&mut conn).expect("Local database migration should be done correctly");
            path.to_path_buf()
        }))
    }

    pub fn get(&self) -> Option<Connection> {
        self.path.get().map(|path| {
            path_to_connection_or_panic(path)
        })
    }

    pub fn get_unchecked(&self) -> Connection {
        self.get().expect("Global database for local data must be initialized before use")
    }
}
@@ -1,22 +0,0 @@
use std::{any::type_name, collections::HashMap, net::{IpAddr, Ipv4Addr}, path::{Path, PathBuf}, sync::LazyLock};

use crate::{config::StorageConfig, error::Error};
use tokio::sync::{OnceCell, RwLock, RwLockReadGuard, RwLockWriteGuard};

mod config;
mod iroh_endpoint;
mod local_database_connection;

pub use config::*;
pub use iroh_endpoint::*;
pub use local_database_connection::*;
use uuid::{ContextV7, Timestamp, Uuid};

pub fn generate_uuid() -> Uuid {
    Uuid::new_v7(Timestamp::now(ContextV7::new()))
}

fn uninitialized_message<T>(_var: T) -> String {
    // Use the type name so the message identifies which global was missing;
    // stringify!(var) would always print the literal string "var".
    format!("{} is uninitialized!", type_name::<T>())
}
@ -1,27 +0,0 @@
|
|||
use std::pin::Pin;
|
||||
|
||||
use futures::Stream;
|
||||
use tonic::{Request, Response, Streaming};
|
||||
|
||||
tonic::include_proto!("caretta_sync.authorization_request");
|
||||
pub struct AuthorizationRequestService {}
|
||||
|
||||
#[tonic::async_trait]
|
||||
impl authorization_request_server::AuthorizationRequest for AuthorizationRequestService {
|
||||
type ListStream = Pin<Box<dyn Stream<Item = Result<ListResponse, tonic::Status>> + Send>>;
|
||||
async fn send(&self, request: Request<SendRequest>) -> Result<Response<SendResponse>, tonic::Status> {
|
||||
todo!()
|
||||
}
|
||||
async fn accept(&self, request: Request<AcceptRequest>) -> Result<Response<AcceptResponse>, tonic::Status>{
|
||||
todo!()
|
||||
}
|
||||
async fn reject(&self, request: Request<RejectRequest>) -> Result<Response<RejectResponse>, tonic::Status>{
|
||||
todo!()
|
||||
}
|
||||
async fn info(&self, request: Request<InfoRequest>) -> Result<Response<InfoResponse>, tonic::Status>{
|
||||
todo!()
|
||||
}
|
||||
async fn list(&self, request: Request<Streaming<ListRequest>>) -> Result<Response<Self::ListStream>, tonic::Status> {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
|
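A hedged sketch of exposing this service over tonic; the listening address and the `AuthorizationRequestServer` wrapper name follow tonic's usual code-generation conventions and are assumptions, not something confirmed by this diff.

// Hypothetical serving function for the generated AuthorizationRequest service.
async fn serve_authorization_request() -> Result<(), Box<dyn std::error::Error>> {
    tonic::transport::Server::builder()
        .add_service(authorization_request_server::AuthorizationRequestServer::new(AuthorizationRequestService {}))
        .serve("127.0.0.1:50051".parse()?)
        .await?;
    Ok(())
}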
@@ -1,19 +0,0 @@
use std::pin::Pin;

use futures::Stream;
use tonic::{Request, Response, Streaming, Status};

tonic::include_proto!("caretta_sync.authorized_node");

pub struct AuthorizedNodeService {}

#[tonic::async_trait]
impl authorized_node_server::AuthorizedNode for AuthorizedNodeService {
    type ListStream = Pin<Box<dyn Stream<Item = Result<ListResponse, tonic::Status>> + Send>>;

    async fn info(&self, request: Request<InfoRequest>) -> Result<Response<InfoResponse>, tonic::Status> {
        todo!()
    }
    async fn list(&self, request: Request<Streaming<ListRequest>>) -> Result<Response<Self::ListStream>, tonic::Status> {
        todo!()
    }
}
@@ -1,54 +0,0 @@
use super::*;

tonic::include_proto!("caretta_sync.common");

use crate::proto::error::{ProtoDeserializeError, ProtoSerializeError};

impl From<uuid::Uuid> for Uuid {
    fn from(value: uuid::Uuid) -> Self {
        let (first_half, second_half) = value.as_u64_pair();
        Self {
            high_bits: first_half,
            low_bits: second_half
        }
    }
}

impl From<Uuid> for uuid::Uuid {
    fn from(value: Uuid) -> Self {
        uuid::Uuid::from_u64_pair(value.high_bits, value.low_bits)
    }
}

impl From<url::Url> for Url {
    fn from(value: url::Url) -> Self {
        todo!()
    }
}

impl TryFrom<Url> for url::Url {
    type Error = ProtoDeserializeError;
    fn try_from(value: Url) -> Result<Self, Self::Error> {
        todo!()
    }
}

#[cfg(test)]
mod tests {
    use std::{net::{self, Ipv4Addr}, u16};

    use super::*;

    fn validate_uuid_conversion(uuid: uuid::Uuid) -> bool {
        let message = Uuid::from(uuid);
        uuid == uuid::Uuid::from(message)
    }

    #[test]
    fn uuid_conversion() {
        assert!(validate_uuid_conversion(uuid::Uuid::nil()));
        assert!(validate_uuid_conversion(uuid::Uuid::max()));
        assert!(validate_uuid_conversion(uuid::Uuid::now_v7()));
    }
}
@@ -1,17 +0,0 @@
use iroh::endpoint::DirectAddrInfo;

use crate::proto::{error::ProtoSerializeError, DirectAddrInfoMessage, LastControlMessage, SourceMessage};

impl TryFrom<DirectAddrInfo> for DirectAddrInfoMessage {
    type Error = ProtoSerializeError;
    fn try_from(value: DirectAddrInfo) -> Result<Self, Self::Error> {
        Ok(DirectAddrInfoMessage {
            addr: value.addr.to_string(),
            latency: value.latency.map(|x| x.try_into()).transpose()?,
            last_control: value.last_control.map(|x| LastControlMessage::try_from(x)).transpose()?,
            last_payload: value.last_payload.map(|x| x.try_into()).transpose()?,
            last_alive: value.last_alive.map(|x| x.try_into()).transpose()?,
            sources: value.sources.into_iter().map(|x| SourceMessage::try_from(x)).collect::<Result<Vec<SourceMessage>, Self::Error>>()?
        })
    }
}
@@ -1,16 +0,0 @@
use std::time::Duration;

use iroh::endpoint::ControlMsg;
use prost_types::DurationError;

use crate::proto::{error::ProtoSerializeError, LastControlMessage};

impl TryFrom<(Duration, ControlMsg)> for LastControlMessage {
    type Error = ProtoSerializeError;
    fn try_from(value: (Duration, ControlMsg)) -> Result<Self, Self::Error> {
        Ok(LastControlMessage {
            duration: Some(value.0.try_into()?),
            control_msg: value.1.to_string()
        })
    }
}
@@ -1,8 +0,0 @@
mod direct_addr_info_message;
mod last_control_message;
mod node_id_message;
mod remote_info_iter_request;
mod remote_info_message;
mod remote_info_request;
mod remote_info_response;
mod source_message;
@@ -1,19 +0,0 @@
use iroh::endpoint::RemoteInfo;

use crate::{error::Error, proto::{error::ProtoSerializeError, DirectAddrInfoMessage, RemoteInfoMessage}};

impl TryFrom<RemoteInfo> for RemoteInfoMessage {
    type Error = ProtoSerializeError;
    fn try_from(value: RemoteInfo) -> Result<Self, Self::Error> {
        Ok(Self {
            node_id: Some(value.node_id.into()),
            relay_url: value.relay_url.map_or(String::from(""), |x| x.relay_url.to_string()),
            addrs: value.addrs.into_iter()
                .map(|x| DirectAddrInfoMessage::try_from(x))
                .collect::<Result<Vec<DirectAddrInfoMessage>, Self::Error>>()?,
            conn_type: value.conn_type.to_string(),
            latency: value.latency.map(|x| x.try_into()).transpose()?,
            last_used: value.last_used.map(|x| x.try_into()).transpose()?,
        })
    }
}
@@ -1,17 +0,0 @@
#[derive(thiserror::Error, Debug)]
pub enum ProtoSerializeError {
    #[error("Duration parse error: {0}")]
    Duration(#[from] prost_types::DurationError),
}

#[derive(thiserror::Error, Debug)]
pub enum ProtoDeserializeError {
    #[error("Missing field: {0}")]
    MissingField(&'static str),
    #[error("Signature error: {0}")]
    Signature(#[from] ed25519_dalek::SignatureError),
    #[error("slice parse error: {0}")]
    SliceTryFrom(#[from] std::array::TryFromSliceError),
    #[error("Int parse error: {0}")]
    IntTryFrom(#[from] std::num::TryFromIntError),
}
@@ -1,120 +0,0 @@
use std::pin::Pin;

use futures::Stream;
use tonic::{async_trait, Request, Response, Status};

use crate::proto::{net::SocketAddr, remote_node_server, ProtoDeserializeError, ProtoSerializeError};

tonic::include_proto!("caretta_sync.iroh");

impl From<iroh::endpoint::ConnectionType> for ConnectionType {
    fn from(value: iroh::endpoint::ConnectionType) -> Self {
        use connection_type::*;
        Self {
            connection_type_value: Some(match value {
                iroh::endpoint::ConnectionType::Direct(socket_addr) => {
                    connection_type::ConnectionTypeValue::Direct(connection_type::Direct { direct_value: Some(SocketAddr::from(socket_addr)) })
                },
                iroh::endpoint::ConnectionType::Relay(relay_url) => {
                    connection_type::ConnectionTypeValue::Relay(connection_type::Relay { relay_value: Some(super::common::Url::from((*relay_url).clone())) })
                },
                iroh::endpoint::ConnectionType::Mixed(socket_addr, relay_url) => {
                    connection_type::ConnectionTypeValue::Mixed(connection_type::Mixed { socket_addr: Some(SocketAddr::from(socket_addr)), relay_url: Some(super::common::Url::from((*relay_url).clone())) })
                },
                iroh::endpoint::ConnectionType::None => {
                    ConnectionTypeValue::None(None {})
                }
            })
        }
    }
}

impl From<iroh::endpoint::ControlMsg> for ControlMsg {
    fn from(value: iroh::endpoint::ControlMsg) -> Self {
        use control_msg::*;
        Self {
            control_msg_vaue: Some(match value {
                iroh::endpoint::ControlMsg::Ping => ControlMsgVaue::Ping(Ping {}),
                iroh::endpoint::ControlMsg::Pong => ControlMsgVaue::Pong(Pong {}),
                iroh::endpoint::ControlMsg::CallMeMaybe => ControlMsgVaue::CallMeMaybe(CallMeMayBe {}),
            })
        }
    }
}

impl TryFrom<iroh::endpoint::DirectAddrInfo> for DirectAddrInfo {
    type Error = ProtoSerializeError;
    fn try_from(value: iroh::endpoint::DirectAddrInfo) -> Result<Self, Self::Error> {
        use direct_addr_info::*;
        let last_control: Option<DurationControlMsg> = if let Some((duration, control_msg)) = value.last_control {
            Some(DurationControlMsg {
                control_msg: Some(control_msg.into()),
                duration: Some(duration.try_into()?)
            })
        } else {
            None
        };
        Ok(Self {
            addr: Some(value.addr.into()),
            latency: value.latency.map(|x| x.try_into()).transpose()?,
            last_control,
            last_payload: value.last_payload.map(|x| x.try_into()).transpose()?,
            last_alive: value.last_alive.map(|x| x.try_into()).transpose()?,
            sources: value.sources.into_iter().map(|(s, d)| {
                Ok::<SourceDuration, ProtoSerializeError>(SourceDuration {
                    source: Some(s.into()),
                    duration: Some(d.try_into()?)
                })
            }).collect::<Result<Vec<SourceDuration>, ProtoSerializeError>>()?,
        })
    }
}

impl From<iroh::PublicKey> for PublicKey {
    fn from(value: iroh::PublicKey) -> Self {
        Self { key: Vec::from(value.as_bytes()) }
    }
}

impl TryFrom<PublicKey> for iroh::PublicKey {
    type Error = ProtoDeserializeError;
    fn try_from(value: PublicKey) -> Result<Self, Self::Error> {
        let slice: [u8; 32] = value.key[0..32].try_into()?;
        Ok(iroh::PublicKey::from_bytes(&slice)?)
    }
}

impl TryFrom<iroh::endpoint::RemoteInfo> for RemoteInfo {
    type Error = ProtoSerializeError;
    fn try_from(value: iroh::endpoint::RemoteInfo) -> Result<Self, Self::Error> {
        Ok(Self {
            node_id: Some(value.node_id.into()),
            relay_url: value.relay_url.map(|x| {
                Ok::<RelayUrlInfo, ProtoSerializeError>(RelayUrlInfo {
                    relay_url: Some((*x.relay_url).clone().into()),
                    last_alive: x.last_alive.map(|x| x.try_into()).transpose()?,
                    latency: x.latency.map(|x| x.try_into()).transpose()?
                })
            }).transpose()?,
            addrs: value.addrs.into_iter().map(|x| {
                x.try_into()
            }).collect::<Result<Vec<DirectAddrInfo>, ProtoSerializeError>>()?,
            conn_type: Some(value.conn_type.into()),
            latency: value.latency.map(|x| x.try_into()).transpose()?,
            last_used: value.last_used.map(|x| x.try_into()).transpose()?
        })
    }
}

impl From<iroh::endpoint::Source> for Source {
    fn from(value: iroh::endpoint::Source) -> Self {
        use source::*;
        Self {
            source_value: Some(match value {
                iroh::endpoint::Source::Saved => SourceValue::Saved(Saved {}),
                iroh::endpoint::Source::Udp => SourceValue::Udp(Udp {}),
                iroh::endpoint::Source::Relay => SourceValue::Relay(Relay {}),
                iroh::endpoint::Source::App => SourceValue::App(App {}),
                iroh::endpoint::Source::Discovery { name } => SourceValue::Discovery(Discovery { value: name }),
                iroh::endpoint::Source::NamedApp { name } => SourceValue::NamedApp(NamedApp { value: name }),
            })
        }
    }
}
@@ -1,12 +0,0 @@
mod authorization_request;
mod authorized_node;
mod remote_node;
mod common;
mod error;
mod iroh;
mod net;

pub use common::*;
pub use error::*;
pub use remote_node::*;
@@ -1,140 +0,0 @@
tonic::include_proto!("caretta_sync.net");

use crate::proto::error::{ProtoDeserializeError, ProtoSerializeError};

type Ipv4AddrMessage = Ipv4Addr;
type Ipv6AddrMessage = Ipv6Addr;
type SocketAddrMessage = SocketAddr;
type SocketAddrV4Message = SocketAddrV4;
type SocketAddrV6Message = SocketAddrV6;

impl From<std::net::SocketAddr> for SocketAddrMessage {
    fn from(value: std::net::SocketAddr) -> Self {
        Self {
            socket_addr_value: Some(match value {
                std::net::SocketAddr::V4(x) => socket_addr::SocketAddrValue::V4(SocketAddrV4Message::from(x)),
                std::net::SocketAddr::V6(x) => socket_addr::SocketAddrValue::V6(SocketAddrV6Message::from(x)),
            })
        }
    }
}

impl TryFrom<SocketAddrMessage> for std::net::SocketAddr {
    type Error = ProtoDeserializeError;
    fn try_from(value: SocketAddrMessage) -> Result<Self, Self::Error> {
        Ok(match value.socket_addr_value.ok_or(Self::Error::MissingField("SocketAddr.socket_addr"))? {
            socket_addr::SocketAddrValue::V4(x) => std::net::SocketAddr::V4(x.try_into()?),
            socket_addr::SocketAddrValue::V6(x) => std::net::SocketAddr::V6(x.try_into()?),
        })
    }
}

impl From<std::net::SocketAddrV4> for SocketAddrV4Message {
    fn from(value: std::net::SocketAddrV4) -> Self {
        Self {
            ip: Some(value.ip().clone().into()),
            port: value.port().into(),
        }
    }
}

impl TryFrom<SocketAddrV4Message> for std::net::SocketAddrV4 {
    type Error = ProtoDeserializeError;
    fn try_from(value: SocketAddrV4Message) -> Result<Self, Self::Error> {
        Ok(Self::new(value.ip.ok_or(ProtoDeserializeError::MissingField("SocketAddrV4.ip"))?.into(), value.port.try_into()?))
    }
}

impl From<std::net::Ipv4Addr> for Ipv4AddrMessage {
    fn from(value: std::net::Ipv4Addr) -> Self {
        Self {
            bits: value.to_bits()
        }
    }
}

impl From<Ipv4AddrMessage> for std::net::Ipv4Addr {
    fn from(value: Ipv4AddrMessage) -> Self {
        Self::from_bits(value.bits)
    }
}

impl From<std::net::SocketAddrV6> for SocketAddrV6Message {
    fn from(value: std::net::SocketAddrV6) -> Self {
        Self {
            ip: Some(value.ip().clone().into()),
            port: value.port().into()
        }
    }
}

impl TryFrom<SocketAddrV6Message> for std::net::SocketAddrV6 {
    type Error = ProtoDeserializeError;
    fn try_from(value: SocketAddrV6Message) -> Result<Self, Self::Error> {
        Ok(Self::new(
            value.ip.ok_or(ProtoDeserializeError::MissingField("SocketAddrV6.ip"))?.into(),
            value.port.try_into()?,
            0,
            0
        ))
    }
}

impl From<std::net::Ipv6Addr> for Ipv6AddrMessage {
    fn from(value: std::net::Ipv6Addr) -> Self {
        let bits = value.to_bits();

        Self {
            high_bits: (bits >> 64) as u64,
            low_bits: bits as u64,
        }
    }
}

impl From<Ipv6AddrMessage> for std::net::Ipv6Addr {
    fn from(value: Ipv6AddrMessage) -> Self {
        Self::from_bits(
            ((value.high_bits as u128) << 64) + (value.low_bits as u128)
        )
    }
}

#[cfg(test)]
mod tests {
    use std::{net::{self, Ipv4Addr}, u16, u8};

    use rand::random;

    use super::*;

    fn validate_socket_addr_conversion(socket_addr: net::SocketAddr) -> Result<bool, ProtoDeserializeError> {
        let message = SocketAddrMessage::from(socket_addr);
        Ok(socket_addr == message.try_into()?)
    }

    #[test]
    fn socket_addr_conversion_ipv4_min() {
        assert!(validate_socket_addr_conversion(net::SocketAddr::new(net::IpAddr::V4(net::Ipv4Addr::new(0, 0, 0, 0)), u16::MIN)).unwrap());
    }
    #[test]
    fn socket_addr_conversion_ipv4_max() {
        assert!(validate_socket_addr_conversion(net::SocketAddr::new(net::IpAddr::V4(net::Ipv4Addr::new(u8::MAX, u8::MAX, u8::MAX, u8::MAX)), u16::MAX)).unwrap());
    }
    #[test]
    fn socket_addr_conversion_ipv4_random() {
        for _ in 0..10 {
            assert!(validate_socket_addr_conversion(net::SocketAddr::new(
                net::IpAddr::V4(net::Ipv4Addr::new(random(), random(), random(), random())),
                random()
            )).unwrap())
        }
    }
    #[test]
    fn socket_addr_conversion_ipv6_min() {
        assert!(validate_socket_addr_conversion(net::SocketAddr::new(net::IpAddr::V6(net::Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0)), u16::MIN)).unwrap());
    }
    #[test]
    fn socket_addr_conversion_ipv6_max() {
        assert!(validate_socket_addr_conversion(net::SocketAddr::new(net::IpAddr::V6(net::Ipv6Addr::new(u16::MAX, u16::MAX, u16::MAX, u16::MAX, u16::MAX, u16::MAX, u16::MAX, u16::MAX)), u16::MAX)).unwrap());
    }
}
@@ -1,31 +0,0 @@
use std::{pin::Pin, time::Duration};

use futures::{future::Remote, Stream};
use iroh::{endpoint::{DirectAddrInfo, RemoteInfo}, PublicKey};
use tonic::{Request, Response, Status, Streaming};
use tripod_id::Double;

use crate::{data::local::{LocalRecordId, RemoteNodeRecord}, error::Error, global::IROH_ENDPOINT, proto::error::{ProtoDeserializeError, ProtoSerializeError}};

tonic::include_proto!("caretta_sync.remote_node");

pub struct RemoteNodeServer {}

#[tonic::async_trait]
impl remote_node_server::RemoteNode for RemoteNodeServer {
    type ListStream = Pin<Box<dyn Stream<Item = Result<ListResponse, Status>> + Send>>;

    async fn info(&self, request: Request<InfoRequest>) -> Result<Response<InfoResponse>, Status> {
        todo!()
    }
    async fn list(&self, request: Request<Streaming<ListRequest>>) -> Result<Response<Self::ListStream>, Status> {
        let iter = IROH_ENDPOINT.get_unchecked().remote_info_iter()
            .map(|x| {
                todo!();
            });
        let stream = futures::stream::iter(iter);
        Ok(Response::new(Box::pin(stream)))
    }
}
@@ -1,34 +0,0 @@
use std::pin::Pin;

use iroh::{endpoint, Endpoint, NodeId};
use tonic::{Response, Request, Status};
use tokio_stream::Stream;

use crate::{global::IROH_ENDPOINT, proto::{error::ProtoDeserializeError, RemoteInfoIterRequest, RemoteInfoMessage, RemoteInfoRequest, RemoteInfoResponse}};

pub struct CarettaSyncServer {}

#[tonic::async_trait]
impl crate::proto::caretta_sync_server::CarettaSync for CarettaSyncServer {
    type RemoteInfoIterStream = Pin<Box<dyn Stream<Item = Result<RemoteInfoResponse, Status>> + Send>>;

    async fn remote_info(&self, request: Request<RemoteInfoRequest>) -> Result<Response<RemoteInfoResponse>, Status> {
        let node_id = NodeId::try_from(request.into_inner().node_id.ok_or(Status::from_error(Box::new(ProtoDeserializeError::MissingField("node_id"))))?)
            .map_err(|e| Status::from_error(Box::new(e)))?;
        let remote_info: Option<RemoteInfoMessage> = IROH_ENDPOINT.get_unchecked().remote_info(node_id)
            .map(|x| x.try_into())
            .transpose()
            .map_err(|e| Status::from_error(Box::new(e)))?;
        Ok(Response::new(RemoteInfoResponse::from(remote_info)))
    }

    async fn remote_info_iter(&self, _: Request<RemoteInfoIterRequest>) -> Result<Response<Self::RemoteInfoIterStream>, Status> {
        let iter = IROH_ENDPOINT.get_unchecked().remote_info_iter()
            .map(|x| {
                RemoteInfoMessage::try_from(x)
                    .map(RemoteInfoResponse::from)
                    .map_err(|e| Status::from_error(Box::new(e)))
            });
        let stream = futures::stream::iter(iter);
        Ok(Response::new(Box::pin(stream)))
    }
}
@@ -1,17 +0,0 @@
use crate::{config::{Config, IrohConfig, RpcConfig}, error::Error};

pub trait ServerTrait {
    async fn serve_p2p<T>(config: &T) -> Result<(), Error>
    where T: AsRef<IrohConfig>;
    async fn serve_rpc<T>(config: &T) -> Result<(), Error>
    where T: AsRef<RpcConfig>;
    async fn serve_all<T>(config: &T) -> Result<(), Error>
    where
        T: AsRef<IrohConfig> + AsRef<RpcConfig>
    {
        tokio::try_join!(
            Self::serve_p2p(config),
            Self::serve_rpc(config)
        )?;
        Ok(())
    }
}
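A hypothetical implementor sketch showing how the default serve_all drives both halves concurrently; `MyServer` and its method bodies are illustrative, not part of the crate.

struct MyServer;

impl ServerTrait for MyServer {
    async fn serve_p2p<T>(_config: &T) -> Result<(), Error>
    where T: AsRef<IrohConfig> {
        // Bind and run the iroh endpoint here.
        Ok(())
    }
    async fn serve_rpc<T>(_config: &T) -> Result<(), Error>
    where T: AsRef<RpcConfig> {
        // Serve the tonic RPC endpoint here.
        Ok(())
    }
    // serve_all is inherited: it runs both futures with tokio::try_join! and
    // returns the first error, if any.
}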
@@ -1,25 +0,0 @@
use std::{path::PathBuf, sync::LazyLock};

use tempfile::TempDir;
use url::Url;
use crate::config::{Config, PartialConfig, PartialIrohConfig, PartialRpcConfig, RpcConfig, StorageConfig};

use serde::{de::DeserializeOwned, Deserialize, Serialize};

pub static TEST_CONFIG: LazyLock<Config> = LazyLock::new(|| {
    let test_dir = TempDir::new().unwrap().keep();
    let data_dir = test_dir.join("data");
    let cache_dir = test_dir.join("cache");

    Config {
        iroh: PartialIrohConfig::default().with_new_secret_key().try_into().unwrap(),
        storage: StorageConfig {
            data_directory: data_dir,
            cache_directory: cache_dir,
        },
        rpc: RpcConfig {
            endpoint_url: Url::parse(&(String::from("unix://") + test_dir.join("socket.sock").to_str().unwrap())).unwrap(),
        },
    }
});
@@ -1,5 +0,0 @@
#[cfg(feature="macros")]
pub use caretta_sync_macros::Mergeable;

pub trait Mergeable: Sized {
    fn merge(&mut self, other: Self);
}
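A minimal hand-written sketch of the Mergeable contract for the common partial-config style of use; `PartialExample` is illustrative and not a real type in this crate.

struct PartialExample {
    name: Option<String>,
    port: Option<u16>,
}

impl Mergeable for PartialExample {
    fn merge(&mut self, other: Self) {
        // Fields set in `other` win; unset fields keep the existing value.
        if other.name.is_some() {
            self.name = other.name;
        }
        if other.port.is_some() {
            self.port = other.port;
        }
    }
}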
@@ -1,47 +0,0 @@
use prost_types::Timestamp;
use chrono::{DateTime, TimeZone, Timelike, Utc};
pub mod async_convert;
pub mod emptiable;
pub mod mergeable;
pub mod runnable;

/// ## Examples
/// ```
/// use chrono::Utc;
/// use std::time::SystemTime;
/// use prost_types::Timestamp;
/// use caretta_sync_core::utils::utc_to_timestamp;
///
/// let now_utc = Utc::now();
/// let now_timestamp = utc_to_timestamp(&now_utc);
/// assert_eq!(SystemTime::try_from(now_utc).unwrap(), SystemTime::try_from(now_timestamp).unwrap());
/// ```
pub fn utc_to_timestamp(utc: &DateTime<Utc>) -> Timestamp {
    Timestamp {
        seconds: utc.timestamp(),
        nanos: i32::try_from(utc.nanosecond()).unwrap(),
    }
}

/// ## Examples
/// ```
/// use std::time::SystemTime;
/// use prost_types::Timestamp;
/// use caretta_sync_core::utils::timestamp_to_utc;
///
/// let now_timestamp = Timestamp::from(SystemTime::now());
/// let now_utc = timestamp_to_utc(&now_timestamp);
/// assert_eq!(SystemTime::try_from(now_utc).unwrap(), SystemTime::try_from(now_timestamp).unwrap());
/// ```
pub fn timestamp_to_utc(t: &Timestamp) -> DateTime<Utc> {
    Utc.timestamp_opt(t.seconds, u32::try_from(t.nanos).unwrap()).unwrap()
}

pub fn get_binary_name() -> Option<String> {
    std::env::current_exe()
        .ok()?
        .file_name()?
        .to_str()?
        .to_owned()
        .into()
}
@@ -1,3 +0,0 @@
pub trait Runnable {
    fn run(self, app_name: &'static str);
}
Some files were not shown because too many files have changed in this diff.