Remove automerge and implement migration
This commit is contained in:
parent
fa6591ab87
commit
d71f9e5007
8 changed files with 169 additions and 32 deletions
18
lazy-supplements-migration/Cargo.toml
Normal file
18
lazy-supplements-migration/Cargo.toml
Normal file
|
@ -0,0 +1,18 @@
|
||||||
|
[package]
name = "lazy-supplements-migration"
version = "0.1.0"
edition = "2021"
# Internal migration crate; never published to crates.io.
publish = false

[dependencies]
# async-std drives the migrator CLI's `main`; the "tokio1" feature provides
# tokio compatibility for the sqlx/sea-orm runtime selected below.
async-std = { version = "1", features = ["attributes", "tokio1"] }

[dependencies.sea-orm-migration]
version = "1.1.0"
features = [
    # Enable at least one `ASYNC_RUNTIME` and `DATABASE_DRIVER` feature if you want to run migration via CLI.
    # View the list of supported features at https://www.sea-ql.org/SeaORM/docs/install-and-config/database-and-async-runtime.
    # e.g.
    "runtime-tokio-rustls", # `ASYNC_RUNTIME` feature
    "sqlx-postgres",        # `DATABASE_DRIVER` feature
]
|
41
lazy-supplements-migration/README.md
Normal file
41
lazy-supplements-migration/README.md
Normal file
|
@ -0,0 +1,41 @@
|
||||||
|
# Running Migrator CLI

- Generate a new migration file

    ```sh
    cargo run -- generate MIGRATION_NAME
    ```

- Apply all pending migrations

    ```sh
    cargo run
    ```

    ```sh
    cargo run -- up
    ```

- Apply first 10 pending migrations

    ```sh
    cargo run -- up -n 10
    ```

- Rollback last applied migrations

    ```sh
    cargo run -- down
    ```

- Rollback last 10 applied migrations

    ```sh
    cargo run -- down -n 10
    ```

- Drop all tables from the database, then reapply all migrations

    ```sh
    cargo run -- fresh
    ```

- Rollback all applied migrations, then reapply all migrations

    ```sh
    cargo run -- refresh
    ```

- Rollback all applied migrations

    ```sh
    cargo run -- reset
    ```

- Check the status of all migrations

    ```sh
    cargo run -- status
    ```
|
18
lazy-supplements-migration/src/lib.rs
Normal file
18
lazy-supplements-migration/src/lib.rs
Normal file
|
@ -0,0 +1,18 @@
|
||||||
|
use sea_orm_migration::{prelude::*, schema::*};
|
||||||
|
|
||||||
|
mod m20220101_000001_create_lazy_supplements_tables;
|
||||||
|
|
||||||
|
pub struct Migrator;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
pub trait TableMigration {
|
||||||
|
async fn up<'a>(manager: &'a SchemaManager<'a>) -> Result<(), DbErr> ;
|
||||||
|
async fn down<'a>(manager: &'a SchemaManager<'a>) -> Result<(), DbErr>;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl MigratorTrait for Migrator {
|
||||||
|
fn migrations() -> Vec<Box<dyn MigrationTrait>> {
|
||||||
|
vec![Box::new(m20220101_000001_create_lazy_supplements_tables::Migration)]
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,85 @@
|
||||||
|
use sea_orm_migration::{prelude::*, schema::*};
|
||||||
|
|
||||||
|
use crate::TableMigration;
|
||||||
|
|
||||||
|
#[derive(DeriveMigrationName)]
|
||||||
|
pub struct Migration;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl MigrationTrait for Migration {
|
||||||
|
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||||
|
Device::up(manager).await?;
|
||||||
|
RecordDeletion::up(manager).await?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||||
|
Device::down(manager).await?;
|
||||||
|
RecordDeletion::down(manager).await?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(DeriveIden)]
|
||||||
|
enum Device {
|
||||||
|
Table,
|
||||||
|
Id,
|
||||||
|
CreatedAt,
|
||||||
|
UpdatedAt,
|
||||||
|
SyncedAt,
|
||||||
|
Name,
|
||||||
|
Note,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl TableMigration for Device {
|
||||||
|
async fn up<'a>(manager: &'a SchemaManager<'a>) -> Result<(), DbErr> {
|
||||||
|
manager.create_table(
|
||||||
|
Table::create()
|
||||||
|
.table(Self::Table)
|
||||||
|
.if_not_exists()
|
||||||
|
.col(pk_uuid(Self::Id))
|
||||||
|
.col(timestamp_with_time_zone(Self::CreatedAt))
|
||||||
|
.col(timestamp(Self::UpdatedAt))
|
||||||
|
.col(timestamp_with_time_zone_null(Self::SyncedAt))
|
||||||
|
.col(string(Self::Name))
|
||||||
|
.col(text(Self::Note))
|
||||||
|
.to_owned()
|
||||||
|
).await?;
|
||||||
|
Ok(())
|
||||||
|
|
||||||
|
|
||||||
|
}
|
||||||
|
async fn down<'a>(manager: &'a SchemaManager<'a>) -> Result<(), DbErr>{
|
||||||
|
manager.drop_table(Table::drop().table(Self::Table).to_owned()).await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(DeriveIden, DeriveMigrationName)]
|
||||||
|
enum RecordDeletion {
|
||||||
|
Table,
|
||||||
|
Id,
|
||||||
|
CreatedAt,
|
||||||
|
TableName,
|
||||||
|
RecordId,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl TableMigration for RecordDeletion {
|
||||||
|
async fn up<'a>(manager: &'a SchemaManager<'a>) -> Result<(), DbErr> {
|
||||||
|
manager.create_table(
|
||||||
|
Table::create()
|
||||||
|
.table(Self::Table)
|
||||||
|
.if_not_exists()
|
||||||
|
.col(pk_uuid(Self::Id))
|
||||||
|
.col(timestamp_with_time_zone(Self::CreatedAt))
|
||||||
|
.col(string(Self::TableName))
|
||||||
|
.col(uuid(Self::RecordId))
|
||||||
|
.to_owned()
|
||||||
|
).await?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
async fn down<'a>(manager: &'a SchemaManager<'a>) -> Result<(), DbErr>{
|
||||||
|
manager.drop_table(Table::drop().table(Self::Table).to_owned()).await
|
||||||
|
}
|
||||||
|
}
|
6
lazy-supplements-migration/src/main.rs
Normal file
6
lazy-supplements-migration/src/main.rs
Normal file
|
@ -0,0 +1,6 @@
|
||||||
|
use sea_orm_migration::prelude::*;
|
||||||
|
|
||||||
|
#[async_std::main]
|
||||||
|
async fn main() {
|
||||||
|
cli::run_cli(lazy_supplements_migration::Migrator).await;
|
||||||
|
}
|
|
@ -7,11 +7,10 @@ license.workspace = true
|
||||||
repository.workspace = true
|
repository.workspace = true
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
automerge = "0.6.1"
|
|
||||||
autosurgeon = "0.8.7"
|
|
||||||
base64 = "0.22.1"
|
base64 = "0.22.1"
|
||||||
clap = { version = "4.5.38", features = ["derive"] }
|
clap = { version = "4.5.38", features = ["derive"] }
|
||||||
libp2p.workspace = true
|
libp2p.workspace = true
|
||||||
|
sea-orm = "1.1.11"
|
||||||
serde = { version = "1.0.219", features = ["derive"] }
|
serde = { version = "1.0.219", features = ["derive"] }
|
||||||
thiserror = "2.0.12"
|
thiserror = "2.0.12"
|
||||||
tokio = { version = "1.45.0", features = ["macros", "rt"] }
|
tokio = { version = "1.45.0", features = ["macros", "rt"] }
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
use automerge::ActorId;
|
|
||||||
use libp2p::identity::{self, Keypair};
|
use libp2p::identity::{self, Keypair};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
@ -6,8 +5,6 @@ use serde::{Deserialize, Serialize};
|
||||||
pub struct NodeConfig {
|
pub struct NodeConfig {
|
||||||
#[serde(with = "keypair")]
|
#[serde(with = "keypair")]
|
||||||
secret: Keypair,
|
secret: Keypair,
|
||||||
#[serde(with = "actor_id")]
|
|
||||||
actor_id: ActorId
|
|
||||||
}
|
}
|
||||||
|
|
||||||
mod keypair {
|
mod keypair {
|
||||||
|
@ -31,31 +28,8 @@ mod keypair {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
mod actor_id {
|
|
||||||
use automerge::ActorId;
|
|
||||||
use base64::{prelude::BASE64_STANDARD, Engine};
|
|
||||||
use serde::{Deserialize, Deserializer, Serializer};
|
|
||||||
|
|
||||||
pub fn serialize<S>(actor_id: &ActorId, serializer: S) -> Result<S::Ok, S::Error>
|
|
||||||
where S: Serializer
|
|
||||||
{
|
|
||||||
let bytes = actor_id.to_bytes();
|
|
||||||
let base64 = BASE64_STANDARD.encode(bytes);
|
|
||||||
serializer.serialize_str(&base64)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn deserialize<'de, D>(deserializer: D) -> Result<ActorId, D::Error>
|
|
||||||
where D: Deserializer<'de>
|
|
||||||
{
|
|
||||||
let base64 = String::deserialize(deserializer)?;
|
|
||||||
let vec = BASE64_STANDARD.decode(base64).unwrap();
|
|
||||||
Ok(ActorId::from(vec))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use automerge::ActorId;
|
|
||||||
use libp2p::identity;
|
use libp2p::identity;
|
||||||
use super::*;
|
use super::*;
|
||||||
|
|
||||||
|
@ -63,15 +37,12 @@ mod tests {
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn serialize_deserialize() {
|
async fn serialize_deserialize() {
|
||||||
let keypair = identity::Keypair::generate_ed25519();
|
let keypair = identity::Keypair::generate_ed25519();
|
||||||
let actor_id = ActorId::random();
|
|
||||||
let config = NodeConfig {
|
let config = NodeConfig {
|
||||||
secret: keypair.clone(),
|
secret: keypair.clone(),
|
||||||
actor_id: actor_id.clone(),
|
|
||||||
};
|
};
|
||||||
let string = toml::to_string(&config).unwrap();
|
let string = toml::to_string(&config).unwrap();
|
||||||
println!("Parsed config: {}", &string);
|
println!("Parsed config: {}", &string);
|
||||||
let parsed_config: NodeConfig = toml::from_str(&string).unwrap();
|
let parsed_config: NodeConfig = toml::from_str(&string).unwrap();
|
||||||
assert_eq!(keypair.public(), parsed_config.secret.public());
|
assert_eq!(keypair.public(), parsed_config.secret.public());
|
||||||
assert_eq!(actor_id, parsed_config.actor_id);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
use std::{collections::HashSet, net::{IpAddr, Ipv4Addr}, str::FromStr, sync::LazyLock};
|
use std::{collections::HashSet, net::{IpAddr, Ipv4Addr}, str::FromStr, sync::LazyLock};
|
||||||
|
|
||||||
use automerge::hydrate::List;
|
|
||||||
use clap::Args;
|
use clap::Args;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
|
Loading…
Add table
Reference in a new issue