Add Syncable trait and derive

This commit is contained in:
fluo10 2025-06-26 08:14:27 +09:00
parent ebbe3d82d6
commit 0e1227da85
5 changed files with 152 additions and 7 deletions

View file

@@ -18,7 +18,10 @@ chrono-tz = "0.10.3"
ciborium.workspace = true
clap = {workspace = true, optional = true}
futures = "0.3.31"
lazy-supplements-macros.path = "../lazy-supplements-macros"
libp2p.workspace = true
libp2p-core = { version = "0.43.0", features = ["serde"] }
libp2p-identity = { version = "0.2.11", features = ["ed25519", "peerid", "rand", "serde"] }
sea-orm = { version = "1.1.11", features = ["sqlx-sqlite", "runtime-tokio-native-tls", "macros", "with-chrono", "with-uuid"] }
sea-orm-migration.workspace = true
serde.workspace = true

View file

@@ -4,14 +4,17 @@ use sea_orm::entity::{
    prelude::*
};
use serde::{Deserialize, Serialize};
use crate::data::syncable::*;
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize, SyncableModel)]
#[sea_orm(table_name = "record_deletion")]
pub struct Model {
    #[sea_orm(primary_key, auto_increment = false)]
    #[syncable(uuid)]
    pub id: Uuid,
    #[sea_orm(indexed)]
    #[syncable(timestamp)]
    pub created_at: DateTimeUtc,
    pub table_name: String,
    pub record_id: Uuid,
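
Based on the derive implementation added later in this commit, the SyncableModel derive on this model is expected to expand to roughly the following (a sketch of the generated trait impl, not literal macro output; the ActiveModel and Column impls are omitted):

impl SyncableModel for Model {
    type SyncableEntity = Entity;
    fn get_uuid(&self) -> Uuid {
        self.id
    }
    fn get_timestamp(&self) -> DateTimeUtc {
        self.created_at
    }
}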

View file

@@ -1,8 +1,8 @@
use sea_orm::{*, prelude::*, query::*};
pub use lazy_supplements_macros::SyncableModel;
pub trait SyncableModel: ModelTrait<Entity = Self::SyncableEntity> {
    type SyncableEntity: SyncableEntity<SyncableModel = Self>;
    fn get_updated_at(&self) -> DateTimeUtc;
    fn get_timestamp(&self) -> DateTimeUtc;
    fn get_uuid(&self) -> Uuid;
}
@@ -31,12 +31,12 @@ pub trait SyncableActiveModel: ActiveModelTrait<Entity = Self::SyncableEntity> {
    type SyncableEntity: SyncableEntity<SyncableActiveModel = Self>;
    fn get_uuid(&self) -> Option<Uuid>;
    fn get_updated_at(&self) -> Option<DateTimeUtc>;
    fn get_timestamp(&self) -> Option<DateTimeUtc>;
    fn try_merge(&mut self, other: <Self::SyncableEntity as SyncableEntity>::SyncableModel) -> Result<(), SyncableError> {
        if self.get_uuid().ok_or(SyncableError::MissingField("uuid"))? != other.get_uuid() {
            return Err(SyncableError::MismatchUuid)
        }
        if self.get_updated_at().ok_or(SyncableError::MissingField("updated_at"))? < other.get_updated_at() {
        if self.get_timestamp().ok_or(SyncableError::MissingField("updated_at"))? < other.get_timestamp() {
            for column in <<<Self as ActiveModelTrait>::Entity as EntityTrait>::Column as Iterable>::iter() {
                self.take(column).set_if_not_equals(other.get(column));
            }
@@ -48,9 +48,9 @@ pub trait SyncableActiveModel: ActiveModelTrait<Entity = Self::SyncableEntity> {
pub trait SyncableColumn: ColumnTrait {
    fn is_uuid(&self) -> bool;
    fn is_updated_at(&self) -> bool;
    fn is_timestamp(&self) -> bool;
    fn updated_at() -> Self;
    fn should_not_sync(&self);
    fn should_skipped(&self);
}
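
For context, try_merge is meant to reconcile a local row with a copy received from a peer: it errors on a UUID mismatch and only overwrites columns when the incoming timestamp is newer. A minimal usage sketch, assuming the record_deletion entity above; the helper name, module paths, and surrounding plumbing are hypothetical and not part of this commit:

use sea_orm::{ActiveModelTrait, DatabaseConnection, DbErr, IntoActiveModel};
use crate::data::syncable::SyncableActiveModel;
use crate::entity::record_deletion; // hypothetical path to the model shown above

// Hypothetical sync step: keep whichever copy of the record is newer.
async fn merge_remote_deletion(
    db: &DatabaseConnection,
    local: record_deletion::Model,
    remote: record_deletion::Model,
) -> Result<(), DbErr> {
    let mut active = local.into_active_model();
    // try_merge copies columns from `remote` only when its timestamp is newer.
    if active.try_merge(remote).is_ok() {
        active.update(db).await?;
    }
    Ok(())
}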

View file

@@ -0,0 +1,16 @@
[package]
name = "lazy-supplements-macros"
edition.workspace = true
version.workspace = true
description.workspace = true
license.workspace = true
repository.workspace = true

[lib]
proc-macro = true

[dependencies]
heck = "0.5.0"
proc-macro2 = "1.0.95"
quote = "1.0.40"
syn = { version = "2.0.104", features = ["full"] }

View file

@@ -0,0 +1,123 @@
use heck::ToUpperCamelCase;
use proc_macro::{self, TokenStream};
use proc_macro2::Span;
use quote::{format_ident, quote, ToTokens};
use syn::{parse_macro_input, Data, DeriveInput, Expr, ExprTuple, Field, Fields, FieldsNamed, Ident};

// `attributes(syncable)` registers the #[syncable(...)] helper attribute used on fields.
#[proc_macro_derive(SyncableModel, attributes(syncable))]
pub fn syncable_model(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    let struct_name = input.ident;
    assert_eq!(format_ident!("{}", struct_name), "Model", "SyncableModel can only be derived on a sea-orm `Model` struct");
    let fields = extract_fields(&input.data);
    let uuid_field = extract_uuid_field(&fields);
    let uuid_field_camel = Ident::new(&uuid_field.to_string().to_upper_camel_case(), Span::call_site());
    let timestamp_field = extract_timestamp_field(&fields);
    let timestamp_field_camel = Ident::new(&timestamp_field.to_string().to_upper_camel_case(), Span::call_site());
    let _skip_fields = extract_skip_fields(&fields); // collected but not yet used by the generated impls
    let output = quote! {
        impl SyncableModel for #struct_name {
            type SyncableEntity = Entity;
            fn get_uuid(&self) -> Uuid {
                self.#uuid_field
            }
            fn get_timestamp(&self) -> DateTimeUtc {
                self.#timestamp_field
            }
        }
        impl SyncableEntity for Entity {
            type SyncableModel = Model;
            type SyncableActiveModel = ActiveModel;
            type SyncableColumn = Column;
        }
        impl SyncableActiveModel for ActiveModel {
            type SyncableEntity = Entity;
            fn get_uuid(&self) -> Option<Uuid> {
                // ActiveValue cannot be moved out of &self, so read it by reference and copy.
                self.#uuid_field.try_as_ref().copied()
            }
            fn get_timestamp(&self) -> Option<DateTimeUtc> {
                self.#timestamp_field.try_as_ref().copied()
            }
        }
        impl SyncableColumn for Column {
            fn is_uuid(&self) -> bool {
                self == &Column::#uuid_field_camel
            }
            fn is_timestamp(&self) -> bool {
                self == &Column::#timestamp_field_camel
            }
        }
    };
    output.into()
}

fn extract_skip_fields(fields: &FieldsNamed) -> Vec<&Ident> {
    extract_fields_with_attribute(fields, "skip")
}

fn extract_timestamp_field(fields: &FieldsNamed) -> &Ident {
    let mut timestamp_fields = extract_fields_with_attribute(fields, "timestamp");
    if timestamp_fields.len() == 1 {
        timestamp_fields.pop().unwrap()
    } else {
        panic!("Model must have exactly one field marked #[syncable(timestamp)]")
    }
}

fn extract_uuid_field(fields: &FieldsNamed) -> &Ident {
    let mut uuid_fields = extract_fields_with_attribute(fields, "uuid");
    if uuid_fields.len() == 1 {
        uuid_fields.pop().unwrap()
    } else {
        panic!("Model must have exactly one field marked #[syncable(uuid)]")
    }
}

// Return the idents of fields whose #[syncable(...)] attribute names `attribute_arg` (e.g. "uuid").
fn extract_fields_with_attribute<'a>(fields: &'a FieldsNamed, attribute_arg: &'static str) -> Vec<&'a Ident> {
    fields.named.iter()
        .filter_map(|field| {
            field.attrs.iter()
                .find_map(|attr| {
                    if attr.path().is_ident("syncable") {
                        let args: Expr = attr.parse_args().unwrap();
                        match args {
                            Expr::Tuple(arg_tuple) => {
                                arg_tuple.elems.iter()
                                    .find_map(|arg| {
                                        if let Expr::Path(arg_path) = arg {
                                            if arg_path.path.is_ident(attribute_arg) {
                                                Some(field.ident.as_ref().unwrap())
                                            } else {
                                                None
                                            }
                                        } else {
                                            None
                                        }
                                    })
                            },
                            Expr::Path(arg_path) => {
                                if arg_path.path.is_ident(attribute_arg) {
                                    Some(field.ident.as_ref().unwrap())
                                } else {
                                    None
                                }
                            },
                            _ => None
                        }
                    } else {
                        None
                    }
                })
        }).collect()
}

fn extract_fields(data: &Data) -> &FieldsNamed {
    match *data {
        Data::Struct(ref data) => match data.fields {
            Fields::Named(ref fields) => fields,
            _ => panic!("SyncableModel requires a struct with named fields"),
        },
        _ => panic!("SyncableModel can only be derived for structs"),
    }
}
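
As a usage sketch, the derive sits next to DeriveEntityModel and reads the #[syncable(...)] helper attributes on fields. The entity below is hypothetical (the real consumer in this commit is the record_deletion model), and it assumes the generated impls compile as sketched:

use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
use crate::data::syncable::*;

#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize, SyncableModel)]
#[sea_orm(table_name = "example_note")]
pub struct Model {
    #[sea_orm(primary_key, auto_increment = false)]
    #[syncable(uuid)]
    pub id: Uuid,
    #[sea_orm(indexed)]
    #[syncable(timestamp)]
    pub updated_at: DateTimeUtc,
    pub body: String,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}

// The derive then provides, among others:
//   model.get_uuid()                 -> Uuid         (reads `id`)
//   model.get_timestamp()            -> DateTimeUtc  (reads `updated_at`)
//   Column::Id.is_uuid()             -> true
//   Column::UpdatedAt.is_timestamp() -> true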