Add rusqlite migration

parent 069ce417df
commit 0092cfec49

22 changed files with 49 additions and 667 deletions
@@ -44,8 +44,6 @@ clap = { version = "4.5.38", features = ["derive"] }
 caretta-sync-core.path = "core"
 futures = { version = "0.3.31", features = ["executor"] }
 libp2p = { version = "0.55.0", features = ["macros", "mdns", "noise", "ping", "tcp", "tokio", "yamux" ] }
-sea-orm = { version = "1.1.11", features = ["sqlx-sqlite", "runtime-tokio-native-tls", "macros", "with-chrono", "with-uuid"] }
-sea-orm-migration = { version = "1.1.0", features = ["runtime-tokio-rustls", "sqlx-postgres"] }
 serde = { version = "1.0.219", features = ["derive"] }
 thiserror = "2.0.12"
 tokio = { version = "1.45.0", features = ["macros", "rt", "rt-multi-thread"] }
@@ -10,6 +10,5 @@ repository.workspace = true
 bevy.workspace = true
 caretta-sync-core.workspace = true
 futures.workspace = true
-sea-orm.workspace = true
 tokio.workspace = true
 tonic.workspace = true
@@ -16,7 +16,6 @@ clap.workspace = true
 dirs = "6.0.0"
 caretta-sync-core = { workspace = true, features = ["cli"] }
 libp2p.workspace = true
-sea-orm.workspace = true
 serde.workspace = true
 thiserror.workspace = true
 tokio.workspace = true
@@ -24,8 +24,6 @@ libp2p-core = { version = "0.43.0", features = ["serde"] }
 libp2p-identity = { version = "0.2.11", features = ["ed25519", "peerid", "rand", "serde"] }
 prost = "0.14.1"
 prost-types = "0.14.1"
-sea-orm.workspace = true
-sea-orm-migration.workspace = true
 serde.workspace = true
 tempfile = { version = "3.20.0", optional = true }
 thiserror.workspace = true
@@ -39,6 +37,7 @@ uuid.workspace = true
 url.workspace = true
 sysinfo = "0.37.0"
 whoami = "1.6.1"
+rusqlite = { version = "0.37.0", features = ["bundled"] }
 
 [target.'cfg(target_os="android")'.dependencies]
 jni = "0.21.1"
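Editor's note: rusqlite's `bundled` feature compiles SQLite from source and links it statically, so no system libsqlite3 is required on any target. A minimal sketch (not part of this commit) to sanity-check the dependency:

use rusqlite::Connection;

fn main() -> rusqlite::Result<()> {
    // "bundled" ships its own SQLite, so this runs without a system library.
    let con = Connection::open_in_memory()?;
    let version: String = con.query_row("SELECT sqlite_version()", [], |row| row.get(0))?;
    println!("bundled SQLite version: {version}");
    Ok(())
}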
0 core/src/data/distributed/mod.rs Normal file
@@ -1,35 +0,0 @@
-mod trusted_node;
-mod record_deletion;
-
-pub use trusted_node::{
-    ActiveModel as TrustedNodeActiveModel,
-    Column as TrustedNodeColumn,
-    Entity as TrustedNodeEntity,
-    Model as TrustedNodeModel,
-};
-
-pub use record_deletion::{
-    ActiveModel as RecordDeletionActiveModel,
-    Column as RecordDeletionColumn,
-    Entity as RecordDeletionEntity,
-    Model as RecordDeletionModel,
-};
-
-#[cfg(test)]
-mod tests {
-    use crate::{data::{migration::DataMigrator, value::PeerIdValue}, global::{generate_uuid, DATABASE_CONNECTIONS}, tests::TEST_CONFIG};
-
-    use super::*;
-
-    use libp2p::{identity, PeerId};
-    use sea_orm::ActiveModelTrait;
-
-    #[tokio::test]
-    async fn check_insert() {
-        let db = DATABASE_CONNECTIONS.get_or_init_unchecked(&*TEST_CONFIG, DataMigrator).await.cache;
-
-        let node = TrustedNodeActiveModel::new(PeerId::random(), "test note".to_owned()).insert(db).await.unwrap();
-        let _ = RecordDeletionActiveModel::new(node.id, "test_table".to_string(), generate_uuid()).insert(db).await.unwrap();
-    }
-
-}
@@ -1,42 +0,0 @@
-use chrono::Local;
-use sea_orm::{entity::{
-    prelude::*, *
-}, sea_query::table};
-use serde::{Deserialize, Serialize};
-use crate::data::syncable::*;
-
-
-#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
-#[cfg_attr(feature="macros", derive(SyncableModel))]
-#[sea_orm(table_name = "record_deletion")]
-pub struct Model {
-    #[sea_orm(primary_key, auto_increment = false)]
-    #[cfg_attr(feature="macros", syncable(id))]
-    pub id: Uuid,
-    #[sea_orm(indexed)]
-    #[cfg_attr(feature="macros", syncable(timestamp))]
-    pub created_at: DateTimeUtc,
-    #[cfg_attr(feature="macros", syncable(author_id))]
-    pub created_by: Uuid,
-    pub table_name: String,
-    pub record_id: Uuid,
-}
-
-#[derive(Copy, Clone, Debug, DeriveRelation, EnumIter)]
-pub enum Relation{}
-
-impl ActiveModelBehavior for ActiveModel {}
-
-impl ActiveModel {
-    pub fn new(node_id: Uuid, table_name: String, record_id: Uuid) -> Self {
-        let timestamp: DateTimeUtc = Local::now().to_utc();
-        Self{
-            id: Set(crate::global::generate_uuid()),
-            created_at: Set(timestamp),
-            created_by: Set(node_id),
-            table_name: Set(table_name),
-            record_id: Set(record_id),
-        }
-    }
-}
-
@@ -1,49 +0,0 @@
-use chrono::Local;
-use libp2p::PeerId;
-use sea_orm::entity::{
-    *,
-    prelude::*
-};
-use serde::{Deserialize, Serialize};
-
-use crate::data::value::PeerIdValue;
-
-
-#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
-#[sea_orm(table_name = "trusted_node")]
-pub struct Model {
-    #[sea_orm(primary_key, auto_increment = false)]
-    pub id: Uuid,
-    #[sea_orm(indexed)]
-    pub created_at: DateTimeUtc,
-    #[sea_orm(indexed)]
-    pub updated_at: DateTimeUtc,
-    #[sea_orm(indexed)]
-    pub synced_at: Option<DateTimeUtc>,
-    #[sea_orm(indexed)]
-    pub peer_id: PeerIdValue,
-    #[sea_orm(column_type = "Text")]
-    pub note: String,
-    pub is_prefered: bool,
-}
-
-#[derive(Copy, Clone, Debug, DeriveRelation, EnumIter)]
-pub enum Relation {}
-
-impl ActiveModelBehavior for ActiveModel {}
-
-impl ActiveModel {
-    pub fn new(peer_id: PeerId, note: String) -> Self {
-        let timestamp: DateTimeUtc = Local::now().to_utc();
-        Self{
-            id: Set(crate::global::generate_uuid()),
-            peer_id: Set(PeerIdValue::from(peer_id)),
-            created_at: Set(timestamp),
-            updated_at: Set(timestamp),
-            synced_at: Set(None),
-            is_prefered: Set(false),
-            note: Set(note),
-        }
-    }
-}
-
45 core/src/data/local/migration.rs Normal file
@@ -0,0 +1,45 @@
+use rusqlite::{Connection, Error};
+use tracing::{event, Level};
+
+// Schema versions are tracked with SQLite's `user_version` pragma.
+pub fn migrate(con: &mut Connection) -> Result<(), Error> {
+    let version: u32 = con.pragma_query_value(None, "user_version", |row| row.get(0))?;
+    if version < 1 {
+        event!(Level::INFO, "Migrate local db to version 1");
+        let tx = con.transaction()?;
+        tx.execute(
+            "CREATE TABLE known_peer (
+                id INTEGER PRIMARY KEY,
+                peer_id TEXT UNIQUE NOT NULL,
+                created_at TEXT NOT NULL,
+                updated_at TEXT NOT NULL
+            )",
+            (),
+        )?;
+        tx.execute(
+            "CREATE TABLE known_address (
+                id INTEGER PRIMARY KEY,
+                known_peer_id INTEGER NOT NULL,
+                multiaddr TEXT UNIQUE NOT NULL,
+                created_at TEXT NOT NULL,
+                updated_at TEXT NOT NULL,
+                protocol TEXT NOT NULL,
+                FOREIGN KEY(known_peer_id) REFERENCES known_peer(id)
+            )",
+            (),
+        )?;
+        tx.execute(
+            "CREATE TABLE authorized_peer (
+                id INTEGER PRIMARY KEY,
+                known_peer_id INTEGER NOT NULL UNIQUE,
+                synced_at TEXT,
+                created_at TEXT NOT NULL,
+                updated_at TEXT NOT NULL,
+                FOREIGN KEY(known_peer_id) REFERENCES known_peer(id)
+            )",
+            (),
+        )?;
+        tx.pragma_update(None, "user_version", 1)?;
+        tx.commit()?;
+        event!(Level::INFO, "Migration done.");
+    }
+    Ok(())
+}
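Editor's note: a hedged sketch of a call site for the new `migrate` function. The module path is assumed from the file layout above (core/src/data/local/migration.rs in the caretta-sync-core crate) and is not part of this commit:

use rusqlite::Connection;
use caretta_sync_core::data::local::migration::migrate;

fn open_local_db(path: &std::path::Path) -> Result<Connection, rusqlite::Error> {
    let mut con = Connection::open(path)?; // creates the database file if it does not exist
    migrate(&mut con)?;                    // no-op once user_version has reached the latest schema
    Ok(con)
}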
1 core/src/data/local/mod.rs Normal file
@@ -0,0 +1 @@
+pub mod migration;
@@ -1,98 +0,0 @@
-use sea_orm_migration::{prelude::*, schema::*};
-
-use crate::migration::TableMigration;
-
-#[derive(DeriveMigrationName)]
-pub struct Migration;
-
-#[async_trait::async_trait]
-impl MigrationTrait for Migration {
-    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
-        TrustedNode::up(manager).await?;
-        RecordDeletion::up(manager).await?;
-        Ok(())
-    }
-
-    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
-        TrustedNode::down(manager).await?;
-        RecordDeletion::down(manager).await?;
-        Ok(())
-    }
-}
-
-#[derive(DeriveIden)]
-enum TrustedNode {
-    Table,
-    Id,
-    CreatedAt,
-    UpdatedAt,
-    SyncedAt,
-    PeerId,
-    Note,
-    IsPrefered,
-}
-
-#[async_trait::async_trait]
-impl TableMigration for TrustedNode {
-    async fn up<'a>(manager: &'a SchemaManager<'a>) -> Result<(), DbErr> {
-        manager.create_table(
-            Table::create()
-                .table(Self::Table)
-                .if_not_exists()
-                .col(pk_uuid(Self::Id))
-                .col(timestamp(Self::CreatedAt))
-                .col(timestamp(Self::UpdatedAt))
-                .col(timestamp_null(Self::SyncedAt))
-                .col(string_len(Self::PeerId, 255))
-                .col(text(Self::Note))
-                .col(boolean(Self::IsPrefered))
-                .to_owned()
-        ).await?;
-        Ok(())
-
-
-    }
-    async fn down<'a>(manager: &'a SchemaManager<'a>) -> Result<(), DbErr>{
-        manager.drop_table(Table::drop().table(Self::Table).to_owned()).await
-    }
-}
-
-#[derive(DeriveIden, DeriveMigrationName)]
-enum RecordDeletion {
-    Table,
-    Id,
-    CreatedAt,
-    CreatedBy,
-    TableName,
-    RecordId,
-}
-
-static FK_RECORD_DELETION_TRUSTED_NODE: &str = "fk_record_deletion_trusted_node";
-
-#[async_trait::async_trait]
-impl TableMigration for RecordDeletion {
-    async fn up<'a>(manager: &'a SchemaManager<'a>) -> Result<(), DbErr> {
-        manager.create_table(
-            Table::create()
-                .table(Self::Table)
-                .if_not_exists()
-                .col(pk_uuid(Self::Id))
-                .col(timestamp_with_time_zone(Self::CreatedAt))
-                .col(uuid(Self::CreatedBy))
-                .foreign_key(ForeignKey::create()
-                    .name(FK_RECORD_DELETION_TRUSTED_NODE)
-                    .from(Self::Table,Self::CreatedBy)
-                    .to(TrustedNode::Table, TrustedNode::Id)
-                    .on_delete(ForeignKeyAction::Cascade)
-                    .on_update(ForeignKeyAction::Cascade)
-                )
-                .col(string(Self::TableName))
-                .col(uuid(Self::RecordId))
-                .to_owned()
-        ).await?;
-        Ok(())
-    }
-    async fn down<'a>(manager: &'a SchemaManager<'a>) -> Result<(), DbErr>{
-        manager.drop_table(Table::drop().table(Self::Table).to_owned()).await
-    }
-}
@@ -1,14 +0,0 @@
-use sea_orm_migration::prelude::*;
-
-pub mod m20220101_000001_create_main_tables;
-
-#[cfg(any(test, feature="test"))]
-pub struct DataMigrator;
-
-#[cfg(any(test, feature="test"))]
-#[async_trait::async_trait]
-impl MigratorTrait for DataMigrator {
-    fn migrations() -> Vec<Box<dyn MigrationTrait>> {
-        vec![Box::new(m20220101_000001_create_main_tables::Migration)]
-    }
-}
@@ -1,4 +1,2 @@
-pub mod entity;
+pub mod distributed;
-pub mod migration;
+pub mod local;
-pub mod syncable;
-pub mod value;
@@ -1,79 +0,0 @@
-use sea_orm::{prelude::*, query::*, sea_query::SimpleExpr, *};
-#[cfg(feature="macros")]
-pub use caretta_sync_macros::SyncableModel;
-pub trait SyncableModel: ModelTrait<Entity = Self::SyncableEntity> {
-    type SyncableEntity: SyncableEntity<SyncableModel = Self>;
-    fn get_timestamp(&self) -> DateTimeUtc;
-    fn get_id(&self) -> Uuid;
-    fn get_author_id(&self) -> Uuid;
-}
-
-pub trait SyncableEntity: EntityTrait<
-    Model = Self::SyncableModel,
-    ActiveModel = Self::SyncableActiveModel,
-    Column = Self::SyncableColumn,
->{
-    type SyncableModel: SyncableModel<SyncableEntity = Self> + FromQueryResult;
-    type SyncableActiveModel: SyncableActiveModel<SyncableEntity= Self>;
-    type SyncableColumn: SyncableColumn;
-
-    async fn get_updated(from: DateTimeUtc, db: &DatabaseConnection) -> Result<Vec<<Self as EntityTrait>::Model>, SyncableError> {
-        let result: Vec<Self::SyncableModel> = <Self as EntityTrait>::find()
-            .filter(Self::SyncableColumn::timestamp_after(from))
-            .all(db)
-            .await.unwrap();
-        Ok(result)
-    }
-    async fn get_updated_by(author: Uuid, from: DateTimeUtc, db: &DatabaseConnection) -> Result<Vec<<Self as EntityTrait>::Model>, SyncableError> {
-        let result: Vec<Self::SyncableModel> = <Self as EntityTrait>::find()
-            .filter(Self::SyncableColumn::timestamp_after(from))
-            .filter(Self::SyncableColumn::author_id_eq(author))
-            .all(db)
-            .await.unwrap();
-        Ok(result)
-    }
-    fn apply_updated(models: Vec<<Self as EntityTrait>::Model>, db: &DatabaseConnection) {
-        todo!()
-    }
-}
-
-pub trait SyncableActiveModel: ActiveModelTrait<Entity = Self::SyncableEntity> {
-
-    type SyncableEntity: SyncableEntity<SyncableActiveModel = Self>;
-    fn get_id(&self) -> Option<Uuid>;
-    fn get_timestamp(&self) -> Option<DateTimeUtc>;
-    fn get_author_id(&self) -> Option<Uuid>;
-    fn try_merge(&mut self, other: <Self::SyncableEntity as SyncableEntity>::SyncableModel) -> Result<(), SyncableError> {
-        if self.get_id().ok_or(SyncableError::MissingField("uuid"))? != other.get_id() {
-            return Err(SyncableError::MismatchUuid)
-        }
-        if self.get_timestamp().ok_or(SyncableError::MissingField("updated_at"))? < other.get_timestamp() {
-            for column in <<<Self as ActiveModelTrait>::Entity as EntityTrait>::Column as Iterable>::iter() {
-                if column.should_synced(){
-                    self.take(column).set_if_not_equals(other.get(column));
-                }
-            }
-        }
-        Ok(())
-    }
-
-}
-
-pub trait SyncableColumn: ColumnTrait {
-    fn is_id(&self) -> bool;
-    fn is_timestamp(&self) -> bool;
-    fn should_synced(&self) -> bool;
-    fn timestamp_after(from: DateTimeUtc) -> SimpleExpr;
-    fn author_id_eq(author_id: Uuid) -> SimpleExpr;
-    fn is_author_id(&self) -> bool;
-}
-
-
-#[derive(Debug, thiserror::Error)]
-pub enum SyncableError {
-    #[error("Invalid UUID")]
-    MismatchUuid,
-    #[error("mandatory field {0} is missing")]
-    MissingField(&'static str),
-
-}
@@ -1,5 +0,0 @@
-mod multiaddr;
-mod peer_id;
-
-pub use multiaddr::MultiaddrValue;
-pub use peer_id::PeerIdValue;
@@ -1,68 +0,0 @@
-use std::str::FromStr;
-
-use libp2p::Multiaddr;
-use sea_orm::{sea_query::ValueTypeErr, DbErr};
-use serde::{Deserialize, Serialize};
-
-#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
-pub struct MultiaddrValue(Multiaddr);
-
-impl From<Multiaddr> for MultiaddrValue {
-    fn from(source: Multiaddr) -> Self {
-        Self(source)
-    }
-}
-impl From<MultiaddrValue> for Multiaddr {
-    fn from(source: MultiaddrValue) -> Self {
-        source.0
-    }
-}
-
-impl From<MultiaddrValue> for sea_orm::Value {
-    fn from(value: MultiaddrValue) -> Self {
-        Self::from(value.0.to_string())
-    }
-}
-
-impl sea_orm::TryGetable for MultiaddrValue {
-    fn try_get_by<I: sea_orm::ColIdx>(res: &sea_orm::QueryResult, idx: I)
-        -> std::result::Result<Self, sea_orm::TryGetError> {
-        match <String as sea_orm::TryGetable>::try_get_by(res, idx){
-            Ok(x) => match Multiaddr::from_str(&x) {
-                Ok(y) => Ok(Self(y)),
-                Err(_) => Err(DbErr::Type("Multiaddr".to_string()).into()),
-            },
-            Err(x) => Err(x),
-        }
-    }
-}
-
-impl sea_orm::sea_query::ValueType for MultiaddrValue {
-    fn try_from(v: sea_orm::Value) -> std::result::Result<Self, sea_orm::sea_query::ValueTypeErr> {
-        match <String as sea_orm::sea_query::ValueType>::try_from(v) {
-            Ok(x) => match Multiaddr::from_str(&x) {
-                Ok(y) => Ok(Self(y)),
-                Err(_) => Err(ValueTypeErr{}),
-            },
-            Err(e) => Err(e)
-        }
-    }
-
-    fn type_name() -> std::string::String {
-        stringify!(MultiaddrValue).to_owned()
-    }
-
-    fn array_type() -> sea_orm::sea_query::ArrayType {
-        sea_orm::sea_query::ArrayType::String
-    }
-
-    fn column_type() -> sea_orm::sea_query::ColumnType {
-        sea_orm::sea_query::ColumnType::Text
-    }
-}
-
-impl sea_orm::sea_query::Nullable for MultiaddrValue {
-    fn null() -> sea_orm::Value {
-        <String as sea_orm::sea_query::Nullable>::null()
-    }
-}
@@ -1,101 +0,0 @@
-use std::str::FromStr;
-
-use libp2p::PeerId;
-use sea_orm::{sea_query::ValueTypeErr, DbErr};
-use serde::{Deserialize, Serialize};
-
-use crate::error::Error;
-
-#[derive(Clone, Debug, PartialEq)]
-pub struct PeerIdValue(PeerId);
-
-impl<'de> Deserialize<'de> for PeerIdValue {
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: serde::Deserializer<'de> {
-        Self::from_str(&String::deserialize(deserializer)?).or(Err(<D::Error as serde::de::Error>::custom("fail to parse PeerId")))
-
-    }
-}
-
-impl Serialize for PeerIdValue {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: serde::Serializer {
-        serializer.serialize_str(&self.0.to_string())
-    }
-}
-
-impl FromStr for PeerIdValue{
-    type Err = libp2p::identity::ParseError;
-
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        Ok(Self(PeerId::from_str(s)?))
-    }
-}
-
-impl ToString for PeerIdValue {
-    fn to_string(&self) -> String {
-        self.0.to_string()
-    }
-}
-
-impl From<PeerId> for PeerIdValue {
-    fn from(source: PeerId) -> Self {
-        Self(source)
-    }
-}
-impl From<PeerIdValue> for PeerId {
-    fn from(source: PeerIdValue) -> Self {
-        source.0
-    }
-}
-
-impl From<PeerIdValue> for sea_orm::Value {
-    fn from(value: PeerIdValue) -> Self {
-        Self::from(value.0.to_string())
-    }
-}
-
-impl sea_orm::TryGetable for PeerIdValue {
-    fn try_get_by<I: sea_orm::ColIdx>(res: &sea_orm::QueryResult, idx: I)
-        -> std::result::Result<Self, sea_orm::TryGetError> {
-        match <String as sea_orm::TryGetable>::try_get_by(res, idx){
-            Ok(x) => match PeerId::from_str(&x) {
-                Ok(y) => Ok(Self(y)),
-                Err(_) => Err(DbErr::Type("PeerId".to_string()).into()),
-            },
-            Err(x) => Err(x),
-        }
-    }
-}
-
-impl sea_orm::sea_query::ValueType for PeerIdValue {
-    fn try_from(v: sea_orm::Value) -> std::result::Result<Self, sea_orm::sea_query::ValueTypeErr> {
-        match <String as sea_orm::sea_query::ValueType>::try_from(v) {
-            Ok(x) => match PeerId::from_str(&x) {
-                Ok(y) => Ok(Self(y)),
-                Err(_) => Err(ValueTypeErr{}),
-            },
-            Err(e) => Err(e)
-        }
-    }
-
-    fn type_name() -> std::string::String {
-        stringify!(PeerIdValue).to_owned()
-    }
-
-    fn array_type() -> sea_orm::sea_query::ArrayType {
-        sea_orm::sea_query::ArrayType::String
-    }
-
-    fn column_type() -> sea_orm::sea_query::ColumnType {
-        sea_orm::sea_query::ColumnType::Text
-    }
-}
-
-impl sea_orm::sea_query::Nullable for PeerIdValue {
-    fn null() -> sea_orm::Value {
-        <String as sea_orm::sea_query::Nullable>::null()
-    }
-}
@@ -1,52 +0,0 @@
-mod node;
-use serde::{de::DeserializeOwned, Serialize};
-use uuid::Uuid;
-
-use crate::{utils::async_convert::{AsyncTryFrom, AsyncTryInto}, error::Error};
-
-pub trait Message: DeserializeOwned + Sized + Serialize {
-    fn into_writer<W: std::io::Write>(&self, writer: W) -> Result<(), ciborium::ser::Error<std::io::Error>> {
-        ciborium::into_writer(self, writer)
-    }
-    fn into_vec_u8(&self) -> Result<Vec<u8>, ciborium::ser::Error<std::io::Error>> {
-        let mut buf: Vec<u8> = Vec::new();
-        self.into_writer(&mut buf)?;
-        Ok(buf)
-    }
-    fn from_reader<R: std::io::Read>(reader: R) -> Result<Self, ciborium::de::Error<std::io::Error>> {
-        ciborium::from_reader(reader)
-    }
-}
-
-pub trait Request<T>: Into<T> + From<T> + AsyncTryInto<Self::Response>
-where T: Message {
-    type Response: Response<T, Request = Self>;
-    async fn send_p2p(self) -> Result<Self::Response, Error>;
-}
-
-pub trait Response<T>: Into<T> + From<T> + AsyncTryFrom<Self::Request>
-where T: Message{
-    type Request: Request<T, Response = Self>;
-    async fn from_request_with_local(req: Self::Request) -> Result<Self,Error>;
-    async fn from_request_with_p2p(req: Self::Request) -> Result<Self, Error> {
-        todo!()
-    }
-}
-
-pub trait FromDatabase {
-    async fn from_storage();
-}
-
-
-pub trait P2pRequest<T>: Into<T> + From<T>
-where T: Message {
-    type P2pResponse: P2pResponse<T, P2pRequest = Self>;
-    async fn send_p2p(&self) -> Result<Self::P2pResponse, crate::p2p::error::P2pError>{
-        todo!()
-    }
-}
-pub trait P2pResponse<T>: Into<T> + From<T> + AsyncTryFrom<(Self::P2pRequest)>
-where T: Message {
-    type P2pRequest: P2pRequest<T, P2pResponse = Self>;
-    async fn try_from_p2p_request(source: Self::P2pRequest) -> Result<Self, crate::p2p::error::P2pError>;
-}
@@ -1,10 +0,0 @@
-use serde::{Deserialize, Serialize};
-
-
-#[derive(Debug, Deserialize, Serialize)]
-pub struct ListTrustedNodeRequest;
-
-#[derive(Debug, Deserialize, Serialize)]
-pub struct ListTrustedNodeResponse {
-    node: Vec<crate::data::entity::TrustedNodeModel>
-}
@@ -18,6 +18,5 @@ syn = { version = "2.0.104", features = ["full"] }
 [dev-dependencies]
 chrono.workspace = true
 caretta-sync-core.workspace = true
-sea-orm.workspace = true
 tokio.workspace = true
 uuid.workspace = true
@@ -7,74 +7,6 @@ use quote::{format_ident, quote, ToTokens};
 use syn::{parse_macro_input, Data, DataStruct, DeriveInput, Expr, ExprTuple, Field, Fields, FieldsNamed, Ident};
 use derive::*;
 
-#[proc_macro_derive(SyncableModel, attributes(syncable))]
-pub fn syncable_model(input: TokenStream) -> TokenStream {
-    let input = parse_macro_input!(input as DeriveInput);
-    let struct_name = input.ident;
-    assert_eq!(format_ident!("{}", struct_name), "Model");
-    let fields = extract_fields(&input.data);
-    let id_snake = extract_unique_field_ident(&fields, "id");
-    let id_camel = Ident::new(&id_snake.to_string().to_upper_camel_case(), Span::call_site());
-    let timestamp_snake = extract_unique_field_ident(&fields, "timestamp");
-    let timestamp_camel = Ident::new(&timestamp_snake.to_string().to_upper_camel_case(), Span::call_site());
-    let author_id_snake = extract_unique_field_ident(&fields, "author_id");
-    let author_id_camel = Ident::new(&author_id_snake.to_string().to_upper_camel_case(), Span::call_site());
-    let skips_snake = extract_field_idents(&fields, "skip");
-    let output = quote!{
-        impl SyncableModel for #struct_name {
-            type SyncableEntity = Entity;
-            fn get_id(&self) -> Uuid {
-                self.#id_snake
-            }
-            fn get_timestamp(&self) -> DateTimeUtc {
-                self.#timestamp_snake
-            }
-            fn get_author_id(&self) -> Uuid {
-                self.#author_id_snake
-            }
-        }
-        impl SyncableEntity for Entity {
-            type SyncableModel = Model;
-            type SyncableActiveModel = ActiveModel;
-            type SyncableColumn = Column;
-        }
-
-        impl SyncableActiveModel for ActiveModel {
-            type SyncableEntity = Entity;
-            fn get_id(&self) -> Option<Uuid> {
-                self.#id_snake.try_as_ref().cloned()
-            }
-            fn get_timestamp(&self) -> Option<DateTimeUtc> {
-                self.#timestamp_snake.try_as_ref().cloned()
-            }
-            fn get_author_id(&self) -> Option<Uuid> {
-                self.#author_id_snake.try_as_ref().cloned()
-            }
-        }
-        impl SyncableColumn for Column {
-            fn is_id(&self) -> bool {
-                matches!(self, Column::#id_camel)
-            }
-            fn is_timestamp(&self) -> bool {
-                matches!(self, Column::#timestamp_camel)
-            }
-            fn is_author_id(&self) -> bool {
-                matches!(self, Column::#author_id_camel)
-            }
-            fn should_synced(&self) -> bool {
-                todo!()
-            }
-            fn timestamp_after(timestamp: DateTimeUtc) -> sea_orm::sea_query::expr::SimpleExpr {
-                Column::#timestamp_camel.gte(timestamp)
-            }
-            fn author_id_eq(author_id: Uuid) -> sea_orm::sea_query::expr::SimpleExpr {
-                Column::#author_id_camel.eq(author_id)
-            }
-
-        }
-    };
-    output.into()
-}
 fn extract_unique_field_ident<'a>(fields: &'a FieldsNamed, attribute_arg: &'static str) -> &'a Ident {
     let mut fields = extract_field_idents(fields, attribute_arg);
     if fields.len() == 1 {
@@ -1,35 +0,0 @@
-use chrono::Local;
-use sea_orm::{
-    prelude::*,
-    entity::{
-        *,
-        prelude::*
-    }
-};
-use caretta_sync_core::data::syncable::*;
-use caretta_sync_macros::SyncableModel;
-
-#[derive(Clone, Debug, PartialEq, DeriveEntityModel, SyncableModel)]
-#[sea_orm(table_name = "syncable")]
-pub struct Model {
-    #[sea_orm(primary_key, auto_increment = false)]
-    #[syncable(id)]
-    pub id: Uuid,
-    #[sea_orm(indexed)]
-    #[syncable(timestamp)]
-    pub created_at: DateTimeUtc,
-    #[syncable(author_id)]
-    pub created_by: Uuid,
-}
-
-#[derive(Copy, Clone, Debug, DeriveRelation, EnumIter)]
-pub enum Relation{}
-
-impl ActiveModelBehavior for ActiveModel {}
-
-#[test]
-fn test_columns() {
-    assert!(Column::Id.is_id());
-    assert!(Column::CreatedAt.is_timestamp());
-    assert!(Column::CreatedBy.is_author_id());
-}