Add struct about authorization

parent b57734db33
commit f2e79d4cd0

11 changed files with 203 additions and 79 deletions
@@ -6,7 +6,6 @@ mod scan;
 pub use add::DeviceAddCommandArgs;
 use caretta_sync_core::utils::runnable::Runnable;
-use libp2p::{Multiaddr, PeerId};
 pub use list::DeviceListCommandArgs;
 pub use ping::DevicePingCommandArgs;
 pub use remove::DeviceRemoveCommandArgs;

@@ -1,8 +1,7 @@
 use std::marker::PhantomData;

 use clap::Args;
-use caretta_sync_core::{config::Config, data::migration::DataMigrator, global::{CONFIG, DATABASE_CONNECTIONS}, server::ServerTrait, utils::runnable::Runnable};
-use libp2p::{noise, ping, swarm::{NetworkBehaviour, SwarmEvent}, tcp, yamux, Swarm};
+use caretta_sync_core::{config::Config, global::{CONFIG, LOCAL_DATABASE_CONNECTION}, server::ServerTrait, utils::runnable::Runnable};

 use super::ConfigArgs;

@@ -23,7 +22,7 @@ where
     #[tokio::main]
     async fn run(self, app_name: &'static str) {
         let config = CONFIG.get_or_init::<Config>(self.config.into_config(app_name).await).await;
-        let _ = DATABASE_CONNECTIONS.get_or_init_unchecked(&config, DataMigrator).await;
+        let _ = LOCAL_DATABASE_CONNECTION.get_or_init(&config.storage.get_local_database_path());
         T::serve_all(config).await.unwrap();
     }
 }
@@ -22,7 +22,7 @@ futures.workspace = true
 iroh.workspace = true
 prost.workspace = true
 prost-types.workspace = true
-rusqlite = { version = "0.37.0", features = ["bundled"] }
+rusqlite = { version = "0.37.0", features = ["bundled", "chrono"] }
 serde.workspace = true
 sysinfo = "0.37.0"
 tempfile = { version = "3.20.0", optional = true }
@@ -6,7 +6,7 @@ use clap::Args;
 use rusqlite::Connection;
 #[cfg(any(test, feature="test"))]
 use tempfile::tempdir;
-use crate::{config::{ConfigError, PartialConfig}, data::local::LocalDatabaseConnection, utils::{emptiable::Emptiable, get_binary_name, mergeable::Mergeable}};
+use crate::{config::{ConfigError, PartialConfig}, utils::{emptiable::Emptiable, get_binary_name, mergeable::Mergeable}};
 use serde::{Deserialize, Serialize};

 #[derive(Clone, Debug)]
@@ -16,12 +16,18 @@ pub struct StorageConfig {
 }

 impl StorageConfig {
+    pub fn get_global_data_directory(&self) -> PathBuf {
+        self.data_directory.join("global")
+    }
+    pub fn get_global_root_document_path(&self) -> PathBuf {
+        self.data_directory.join("global.bin")
+    }
+    pub fn get_local_data_directory(&self) -> PathBuf {
+        self.data_directory.join("local")
+    }
     pub fn get_local_database_path(&self) -> PathBuf {
         self.data_directory.join("local.sqlite")
     }
-    pub fn create_local_database_connection(&self) -> Connection {
-        Connection::from_storage_config(self)
-    }
 }

 impl TryFrom<PartialStorageConfig> for StorageConfig {
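Note: all of the new path helpers hang off `data_directory`. A minimal usage sketch, not part of this commit; it assumes `StorageConfig` is reachable under `caretta_sync_core::config` the same way `Config` is elsewhere in this diff:

    // Sketch only (assumed): resolve the storage locations introduced above.
    use std::path::PathBuf;
    use caretta_sync_core::config::StorageConfig;

    fn storage_layout(storage: &StorageConfig) -> [PathBuf; 4] {
        [
            storage.get_global_data_directory(),     // <data_directory>/global
            storage.get_global_root_document_path(), // <data_directory>/global.bin
            storage.get_local_data_directory(),      // <data_directory>/local
            storage.get_local_database_path(),       // <data_directory>/local.sqlite
        ]
    }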
core/src/data/local/authorization/mod.rs (new file)

//! Structs about authorization.

mod request;
mod response;

use chrono::{DateTime, Local, NaiveDateTime};
use iroh::NodeId;
pub use request::*;
pub use response::*;
use rusqlite::{params, types::FromSqlError, Connection};

use crate::data::local::RusqliteRecord;

/// An ongoing authorization.
pub struct Authorization {
    node_id: NodeId,
    passcode: String,
    created_at: DateTime<Local>,
    updated_at: DateTime<Local>,
}

static TABLE_NAME: &str = "authorization";
static DEFAULT_COLUMNS: [&str; 4] = [
    "node_id",
    "passcode",
    "created_at",
    "updated_at",
];

impl Authorization {
    pub fn new(node_id: NodeId, passcode: String) -> Self {
        let timestamp = Local::now();
        Self {
            node_id,
            passcode,
            created_at: timestamp,
            updated_at: timestamp,
        }
    }

    pub fn get_by_node_id(node_id: NodeId, connection: &Connection) -> Result<Self, rusqlite::Error> {
        connection.query_row(
            "SELECT node_id, passcode, created_at, updated_at FROM authorization WHERE node_id=(?1)",
            params![node_id.as_bytes()],
            Self::from_row,
        )
    }
}

impl RusqliteRecord for Authorization {
    fn from_row(row: &rusqlite::Row<'_>) -> Result<Self, rusqlite::Error> {
        let created_at: NaiveDateTime = row.get(2)?;
        let updated_at: NaiveDateTime = row.get(3)?;
        let node_id: Vec<u8> = row.get(0)?;
        Ok(Self {
            node_id: NodeId::from_bytes(
                node_id.as_slice().try_into().map_err(|_| FromSqlError::InvalidBlobSize {
                    expected_size: 32,
                    blob_size: node_id.len(),
                })?,
            )
            .map_err(|_| FromSqlError::InvalidType)?,
            passcode: row.get(1)?,
            created_at: DateTime::from(created_at.and_utc()),
            updated_at: DateTime::from(updated_at.and_utc()),
        })
    }

    fn insert(&self, connection: &rusqlite::Connection) -> Result<(), rusqlite::Error> {
        connection.execute(
            "INSERT INTO authorization (node_id, passcode, created_at, updated_at) VALUES (?1, ?2, ?3, ?4)",
            (&self.node_id.as_bytes(), &self.passcode, &self.created_at.naive_utc(), &self.updated_at.naive_utc()),
        )?;
        Ok(())
    }

    fn get_all(connection: &rusqlite::Connection) -> Result<Vec<Self>, rusqlite::Error> {
        let mut stmt = connection.prepare(&format!("SELECT {} FROM {}", DEFAULT_COLUMNS.join(", "), TABLE_NAME))?;
        let rows = stmt.query_map([], Self::from_row)?;
        let mut result = Vec::new();
        for row in rows {
            result.push(row?);
        }
        Ok(result)
    }
}
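Note: a short usage sketch of the new record type, not part of this commit. It is written as if inside this module, so `Authorization` and the `RusqliteRecord` trait are already in scope; it takes an explicit connection, matching the trait methods above:

    // Sketch only (assumed): persist and reload a pending authorization.
    use rusqlite::Connection;

    fn store_and_reload(node_id: iroh::NodeId, conn: &Connection) -> Result<(), rusqlite::Error> {
        // Record the incoming peer with a freshly issued passcode.
        Authorization::new(node_id, "123456".to_string()).insert(conn)?;
        // Later, look the pending authorization up again by node id.
        let _pending = Authorization::get_by_node_id(node_id, conn)?;
        Ok(())
    }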
core/src/data/local/authorization/request.rs (new file)

use iroh::NodeId;

/// Request for node authorization.
#[derive(Debug, Clone)]
pub struct AuthorizationRequest {
    sender_id: NodeId,
    sender_info: String,
}
core/src/data/local/authorization/response.rs (new file)

use iroh::NodeId;

/// Response to a node authorization request.
#[derive(Debug, Clone)]
pub struct AuthorizationResponse {
    sender_id: NodeId,
    passcode: String,
}
@@ -4,33 +4,22 @@ pub fn migrate(con: &mut Connection) -> Result<(), Error>{
     let tx = con.transaction()?;
     tx.execute_batch(
         "BEGIN;
-        CREATE TABLE peer (
-            id INTEGER PRIMARY KEY,
-            libp2p_peer_id TEXT UNIQUE NOT NULL,
-            created_at TEXT NOT NULL,
-            updated_at TEXT NOT NULL,
-        );
-        CREATE INDEX idx_peer_created_at ON peer(created_at);
-        CREATE INDEX idx_peer_updated_at ON peer(updated_at);
-        CREATE TABLE address (
-            id INTEGER PRIMARY KEY,
-            peer_id INTEGER NOT NULL,
-            multiaddr TEXT UNIQUE NOT NULL,
-            created_at TEXT NOT NULL,
-            updated_at TEXT NOT NULL,
-            protocol TEXT NOT NULL,
-            FOREIGN KEY(peer_id) REFERENCES peer(id)
-        );
-        CREATE INDEX idx_address_created_at ON address(created_at);
-        CREATE INDEX idx_address_updated_at ON address(updated_at);
         CREATE TABLE authorized_peer (
             id INTEGER PRIMARY KEY,
-            peer_id INTEGER NOT NULL UNIQUE,
-            synced_at TEXT,
+            node_id BLOB NOT NULL UNIQUE,
+            last_synced_at TEXT,
+            last_sent_version_vector BLOB,
             created_at TEXT NOT NULL,
-            updated_at TEXT NOT NULL,
-            FOREIGN KEY(peer_id) REFERENCES peer(id)
-        )",
+            updated_at TEXT NOT NULL
+        );
+        CREATE TABLE authorization (
+            id INTEGER PRIMARY KEY,
+            node_id BLOB UNIQUE NOT NULL,
+            passcode TEXT NOT NULL,
+            created_at TEXT NOT NULL,
+            updated_at TEXT NOT NULL
+        );
+        COMMIT;",
     )?;
     tx.pragma_update(None, "user_version", 1)?;
     tx.commit()?;
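Note: a standalone, test-style sketch of the new `authorization` table definition against an in-memory database. This is assumed, not part of the commit, and creates the table directly rather than going through `migrate`; the timestamp literals are placeholders:

    // Sketch only (assumed): create the new table and insert one row.
    use rusqlite::Connection;

    fn authorization_table_smoke_test() -> Result<(), rusqlite::Error> {
        let conn = Connection::open_in_memory()?;
        conn.execute_batch(
            "CREATE TABLE authorization (
                id INTEGER PRIMARY KEY,
                node_id BLOB UNIQUE NOT NULL,
                passcode TEXT NOT NULL,
                created_at TEXT NOT NULL,
                updated_at TEXT NOT NULL
            );",
        )?;
        conn.execute(
            "INSERT INTO authorization (node_id, passcode, created_at, updated_at) VALUES (?1, ?2, ?3, ?4)",
            (&[0u8; 32][..], "000000", "2024-01-01T00:00:00", "2024-01-01T00:00:00"),
        )?;
        let count: i64 = conn.query_row("SELECT COUNT(*) FROM authorization", [], |row| row.get(0))?;
        assert_eq!(count, 1);
        Ok(())
    }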
@@ -1,56 +1,27 @@
+mod authorization;
 pub mod migration;

-use std::{cell::OnceCell, path::Path, sync::{LazyLock, OnceLock}};
+use std::{cell::OnceCell, iter::Map, path::Path, sync::{LazyLock, OnceLock}};

 use migration::migrate;
-use rusqlite::{ffi::Error, Connection};
+use rusqlite::{ffi::Error, Connection, MappedRows, Row};

-use crate::{config::StorageConfig, global::CONFIG};
+use crate::{config::StorageConfig, global::{CONFIG, LOCAL_DATABASE_CONNECTION}};

-static INITIALIZE_PARENT_DIRECTORY_RESULT: OnceLock<()> = OnceLock::new();
-
-static MIGRATE_RESULT: OnceLock<()> = OnceLock::new();
-
-fn initialize_parent_directory<P>(path: &P)
-where
-    P: AsRef<Path>,
-{
-    *INITIALIZE_PARENT_DIRECTORY_RESULT.get_or_init(|| {
-        let path2: &Path = path.as_ref();
-        if let Some(x) = path2.parent() {
-            if !x.exists() {
-                std::fs::create_dir_all(x).expect("Parent directory of the local database must be created.");
-            }
-        }
-    })
-}
-
-fn migrate_once(conn: &mut Connection) -> () {
-    *MIGRATE_RESULT.get_or_init(|| {
-        migrate(conn).expect("Local database migration should be done correctly")
-    })
-}
-
-pub trait LocalDatabaseConnection: Sized {
-    fn from_path<P>(path: &P) -> Self
-    where
-        P: AsRef<Path>;
-    fn from_storage_config(config: &StorageConfig) -> Self {
-        Self::from_path(&config.get_local_database_path())
-    }
-    fn from_global_storage_config() -> Self {
-        Self::from_storage_config(&CONFIG.get_unchecked().storage)
-    }
-}
-
-impl LocalDatabaseConnection for Connection {
-    fn from_path<P>(path: &P) -> Self
-    where
-        P: AsRef<Path>
-    {
-        initialize_parent_directory(path);
-        let mut conn = Connection::open(path).expect("local database connection must be opened without error");
-        migrate_once(&mut conn);
-        conn
-    }
-}
+pub use authorization::*;
+
+pub trait RusqliteRecord: Sized {
+    fn insert(&self, connection: &Connection) -> Result<(), rusqlite::Error>;
+    fn from_row(row: &Row<'_>) -> Result<Self, rusqlite::Error>;
+    fn get_all(connection: &Connection) -> Result<Vec<Self>, rusqlite::Error>;
+}
+
+pub trait LocalRecord: RusqliteRecord {
+    fn insert_global(&self) -> Result<(), rusqlite::Error> {
+        self.insert(&LOCAL_DATABASE_CONNECTION.get_unchecked())
+    }
+    fn get_all_global() -> Result<Vec<Self>, rusqlite::Error> {
+        let connection = LOCAL_DATABASE_CONNECTION.get_unchecked();
+        Self::get_all(&connection)
+    }
+}
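Note: with this split, a record type only implements the three `RusqliteRecord` methods, and opting into `LocalRecord` adds the global-connection helpers on top. A sketch of how that is meant to be consumed; the `impl LocalRecord for Authorization` line is an assumption and is not added by this commit:

    // Sketch only (assumed): opt Authorization into the LocalRecord helpers.
    impl LocalRecord for Authorization {}

    fn remember_pending_authorization(node_id: iroh::NodeId) -> Result<(), rusqlite::Error> {
        // Both helpers open the connection via LOCAL_DATABASE_CONNECTION.get_unchecked(),
        // so the global path must have been initialized during startup.
        Authorization::new(node_id, "654321".to_string()).insert_global()?;
        let _all = Authorization::get_all_global()?;
        Ok(())
    }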
core/src/global/local_database_connection.rs (new file)

use std::{fs::create_dir_all, path::{Path, PathBuf}, sync::OnceLock};

use rusqlite::Connection;

use crate::{data::local::migration::migrate, error::Error};

pub static LOCAL_DATABASE_CONNECTION: GlobalLocalDatabaseConnection = GlobalLocalDatabaseConnection::const_new();

pub struct GlobalLocalDatabaseConnection {
    path: OnceLock<PathBuf>
}

fn path_to_connection_or_panic<P>(path: &P) -> Connection
where
    P: AsRef<Path>
{
    Connection::open(path.as_ref()).expect("Failed to open database connection for local data")
}

impl GlobalLocalDatabaseConnection {
    const fn const_new() -> Self {
        Self {
            path: OnceLock::new()
        }
    }

    pub fn get_or_init<P>(&self, path: &P) -> Connection
    where
        P: AsRef<Path>,
    {
        path_to_connection_or_panic(self.path.get_or_init(|| {
            let path = path.as_ref();
            let parent = path.parent().expect("Database path should have parent directory");
            create_dir_all(parent).expect("Failed to create parent directory of database");
            let mut conn = path_to_connection_or_panic(&path);
            migrate(&mut conn).expect("Local database migration should be done correctly");
            path.to_path_buf()
        }))
    }

    pub fn get(&self) -> Option<Connection> {
        self.path.get().map(|path| {
            path_to_connection_or_panic(path)
        })
    }

    pub fn get_unchecked(&self) -> Connection {
        self.get().expect("Global database for local data must be initialized before use")
    }
}
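Note: the serve command change above is the intended initialization point for this handle; a minimal startup sketch, assumed and not part of this commit:

    // Sketch only (assumed): initialize the global handle once at startup,
    // then reopen connections wherever they are needed.
    use caretta_sync_core::config::Config;
    use caretta_sync_core::global::LOCAL_DATABASE_CONNECTION;

    fn init_local_database(config: &Config) {
        // The first call creates the parent directory, runs the migration and remembers the path.
        let _conn = LOCAL_DATABASE_CONNECTION.get_or_init(&config.storage.get_local_database_path());
        // Subsequent callers just reopen a connection to the remembered path.
        let _again = LOCAL_DATABASE_CONNECTION.get_unchecked();
    }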
@@ -5,8 +5,11 @@ use tokio::sync::{OnceCell, RwLock, RwLockReadGuard, RwLockWriteGuard};
 mod config;
 mod iroh_endpoint;
+mod local_database_connection;

 pub use config::*;
 pub use iroh_endpoint::*;
+pub use local_database_connection::*;
 use uuid::{ContextV7, Timestamp, Uuid};

 pub fn generate_uuid() -> Uuid {
@@ -16,3 +19,4 @@ pub fn generate_uuid() -> Uuid {
 fn uninitialized_message<T>(var: T) -> String {
     format!("{} is uninitialized!", &stringify!(var))
 }