diff --git a/.gitignore b/.gitignore
index d8541d34..821b8191 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,6 @@
 .vscode
 target
+*.bak.db
 *.db-shm
 *.db-wal
 Cargo.lock
diff --git a/Cargo.toml b/Cargo.toml
index 1f7f3348..61cfbcce 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -25,7 +25,7 @@ no-default-features = true
 rustdoc-args = ["--cfg", "docsrs"]
 
 [features]
-default = ["all_backends", "ffi", "logger"]
+default = ["all_backends", "ffi", "logger", "migration"]
 all_backends = ["any", "postgres", "sqlite"]
 any = []
 ffi = ["any", "ffi-support", "logger"]
@@ -34,6 +34,7 @@ logger = ["env_logger", "log"]
 postgres = ["sqlx", "sqlx/postgres", "sqlx/tls"]
 sqlite = ["num_cpus", "sqlx", "sqlx/sqlite"]
 pg_test = ["postgres"]
+migration = ["rmp", "rmp-serde"]
 
 [dev-dependencies]
 hex-literal = "0.3"
@@ -57,6 +58,8 @@ num_cpus = { version = "1.0", optional = true }
 once_cell = "1.5"
 percent-encoding = "2.0"
 rand = { version = "0.8", default-features = false }
+rmp = { version = "0.8.11", optional = true }
+rmp-serde = { version = "1.1.1", optional = true }
 serde = { version = "1.0", features = ["derive"] }
 serde_bytes = "0.11"
 serde_cbor = "0.11"
@@ -75,7 +78,7 @@ features = ["all_keys", "any_key", "argon2", "crypto_box", "std"]
 [dependencies.sqlx]
 version = "0.6.2"
 default-features = false
-features = ["chrono", "runtime-tokio-rustls"]
+features = ["chrono", "runtime-tokio-rustls", "macros"]
 optional = true
 
 [profile.release]
diff --git a/src/ffi/migration.rs b/src/ffi/migration.rs
new file mode 100644
index 00000000..c520527f
--- /dev/null
+++ b/src/ffi/migration.rs
@@ -0,0 +1,53 @@
+use ffi_support::FfiStr;
+
+use crate::{future::spawn_ok, migration::IndySdkToAriesAskarMigration};
+
+use super::{
+    error::{set_last_error, ErrorCode},
+    CallbackId, EnsureCallback,
+};
+
+/// Migrate an sqlite wallet from an indy-sdk structure to an aries-askar structure.
+/// It is important to note that this does not do any post-processing. If the record values, tags,
+/// names, etc. have changed, it must be processed manually afterwards. This script does the following:
+///
+/// 1. Create and rename the required tables
+/// 2. Fetch the indy key from the wallet
+/// 3. Create a new configuration
+/// 4. Initialize a profile
+/// 5. Update the items from the indy-sdk
+/// 6. Clean up (drop tables and add a version of "1")
+#[no_mangle]
+pub extern "C" fn askar_migrate_indy_sdk(
+    spec_uri: FfiStr<'_>,
+    wallet_name: FfiStr<'_>,
+    wallet_key: FfiStr<'_>,
+    kdf_level: FfiStr<'_>,
+    cb: Option<extern "C" fn(cb_id: CallbackId, err: ErrorCode)>,
+    cb_id: CallbackId,
+) -> ErrorCode {
+    catch_err!(
+        trace!("Migrate sqlite wallet from indy-sdk structure to aries-askar");
+        let cb = cb.ok_or_else(|| err_msg!("No callback provided"))?;
+        let spec_uri = spec_uri.into_opt_string().ok_or_else(|| err_msg!("No provision spec URI provided"))?;
+        let wallet_name = wallet_name.into_opt_string().ok_or_else(|| err_msg!("No wallet name provided"))?;
+        let wallet_key = wallet_key.into_opt_string().ok_or_else(|| err_msg!("No wallet key provided"))?;
+        let kdf_level = kdf_level.into_opt_string().ok_or_else(|| err_msg!("No KDF level provided"))?;
+
+        let cb = EnsureCallback::new(move |result|
+            match result {
+                Ok(_) => cb(cb_id, ErrorCode::Success),
+                Err(err) => cb(cb_id, set_last_error(Some(err))),
+            });
+
+        spawn_ok(async move {
+            let result = async {
+                let migrator = IndySdkToAriesAskarMigration::connect(&spec_uri, &wallet_name, &wallet_key, &kdf_level).await?;
+                migrator.migrate().await?;
+                Ok(())
+            }.await;
+            cb.resolve(result);
+        });
+        Ok(ErrorCode::Success)
+    )
+}
diff --git a/src/ffi/mod.rs b/src/ffi/mod.rs
index e1ecc942..806bf845 100644
--- a/src/ffi/mod.rs
+++ b/src/ffi/mod.rs
@@ -24,6 +24,9 @@ mod result_list;
 mod secret;
 mod store;
 
+#[cfg(all(feature = "migration", feature = "sqlite"))]
+mod migration;
+
 use self::error::ErrorCode;
 use crate::error::Error;
 
diff --git a/src/lib.rs b/src/lib.rs
index 938b793a..c5f518e9 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -45,6 +45,9 @@ extern crate serde_json;
 #[cfg(feature = "ffi")]
 mod ffi;
 
+#[cfg(all(feature = "migration", feature = "sqlite"))]
+pub mod migration;
+
 pub mod kms;
 
 mod protect;
diff --git a/src/migration/mod.rs b/src/migration/mod.rs
new file mode 100644
index 00000000..8005ddf0
--- /dev/null
+++ b/src/migration/mod.rs
@@ -0,0 +1,404 @@
+//! Support for migration from Indy-SDK wallets.
+
+use sha2::Sha256;
+use sqlx::sqlite::SqliteRow;
+use sqlx::{Connection, Row, SqliteConnection};
+use std::fmt::{Display, Formatter};
+use std::str::FromStr;
+
+use self::strategy::Strategy;
+use crate::backend::sqlite::SqliteStoreOptions;
+use crate::crypto::alg::chacha20::{Chacha20Key, C20P};
+use crate::crypto::generic_array::typenum::U32;
+use crate::error::Error;
+use crate::protect::kdf::Argon2Level;
+use crate::protect::{ProfileKey, StoreKey, StoreKeyReference};
+use crate::storage::EncEntryTag;
+
+mod strategy;
+
+const CHACHAPOLY_NONCE_LEN: u8 = 12;
+
+#[derive(Serialize, Deserialize, Debug, Default)]
+pub(crate) struct IndyKeyMetadata {
+    keys: Vec<u8>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    master_key_salt: Option<Vec<u8>>,
+}
+
+pub(crate) type EncryptionKey = Chacha20Key<C20P>;
+pub(crate) type MacKey = crate::protect::hmac_key::HmacKey<Sha256, U32>;
+
+/// Copies: https://github.com/hyperledger/indy-sdk/blob/83547c4c01162f6323cf138f8b071da2e15f0c90/libindy/indy-wallet/src/wallet.rs#L18
+#[derive(Serialize, Deserialize)]
+pub(crate) struct IndyKey {
+    type_key: EncryptionKey,
+    name_key: EncryptionKey,
+    value_key: EncryptionKey,
+    item_hmac_key: MacKey,
+    tag_name_key: EncryptionKey,
+    tag_value_key: EncryptionKey,
+    tag_hmac_key: MacKey,
+}
+
+#[derive(Default)]
+pub(crate) struct UpdatedIndyItem {
+    pub id: u32,
+    pub category: Vec<u8>,
+    pub name: Vec<u8>,
+    pub value: Vec<u8>,
+    pub tags: Vec<EncEntryTag>,
+}
+
+pub(crate) struct UpdatedKey {
+    master: StoreKey,
+    key_ref: StoreKeyReference,
+}
+
+#[derive(Debug)]
+pub(crate) enum KdfMethod {
+    Argon2i(Argon2Level),
+    Raw,
+}
+
+impl KdfMethod {
+    pub(crate) fn to_store_key_reference(
+        &self,
+        salt: Option<&[u8]>,
+    ) -> Result<StoreKeyReference, Error> {
+        match self {
+            KdfMethod::Raw => Ok(StoreKeyReference::RawKey),
+            KdfMethod::Argon2i(level) => {
+                let detail = salt
+                    .map(|s| format!("?salt={}", hex::encode(s)))
+                    .ok_or_else(|| err_msg!("Salt must be provided for argon2i kdf method"))?;
+                Ok(StoreKeyReference::DeriveKey(
+                    crate::protect::kdf::KdfMethod::Argon2i(*level),
+                    detail,
+                ))
+            }
+        }
+    }
+}
+
+impl FromStr for KdfMethod {
+    type Err = Error;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        match s {
+            "ARGON2I_MOD" => Ok(Self::Argon2i(Argon2Level::Moderate)),
+            "ARGON2I_INT" => Ok(Self::Argon2i(Argon2Level::Interactive)),
+            "RAW" => Ok(Self::Raw),
+            _ => Err(err_msg!("Invalid key derivation method")),
+        }
+    }
+}
+
+/// Indy-SDK migrator implementation
+#[derive(Debug)]
+pub struct IndySdkToAriesAskarMigration {
+    conn: SqliteConnection,
+    spec_uri: String,
+    wallet_key: String,
+    wallet_name: String,
+    kdf_method: KdfMethod,
+}
+
+impl IndySdkToAriesAskarMigration {
+    /// Create a new migrator connected to a database
+    pub async fn connect(
+        spec_uri: &str,
+        wallet_name: &str,
+        wallet_key: &str,
+        kdf_method: &str,
+    ) -> Result<Self, Error> {
+        let kdf_method = KdfMethod::from_str(kdf_method)?;
+        let conn = SqliteConnection::connect(spec_uri).await?;
+        Ok(Self {
+            conn,
+            spec_uri: spec_uri.into(),
+            wallet_key: wallet_key.to_owned(),
+            wallet_name: wallet_name.to_owned(),
+            kdf_method,
+        })
+    }
+
+    /// Close the instance without migrating
+    pub async fn close(self) -> Result<(), Error> {
+        Ok(self.conn.close().await?)
+    }
+
+    /// Perform the migration
+    pub async fn migrate(mut self) -> Result<(), Error> {
+        if self.is_migrated().await? {
+            self.close().await?;
+            return Err(err_msg!(Backend, "Database is already migrated"));
+        }
+
+        self.pre_upgrade().await?;
+        debug!("Completed wallet pre-upgrade");
+
+        let (indy_key, upd_key) = self.fetch_indy_key().await?;
+        self.create_config(&upd_key).await?;
+        let profile_key = self.init_profile(&upd_key).await?;
+        debug!("Created wallet profile");
+
+        self.update_items(&indy_key, &profile_key).await?;
+        self.finish_upgrade().await?;
+        self.conn.close().await?;
+        debug!("Completed wallet upgrade");
+
+        debug!("Re-opening wallet");
+        let db_opts = SqliteStoreOptions::new(self.spec_uri.as_str())?;
+        let key_method = upd_key.key_ref.into();
+        let db = db_opts
+            .open(Some(key_method), self.wallet_key.as_str().into(), None)
+            .await?;
+        db.close().await?;
+        debug!("Verified wallet upgrade");
+
+        Ok(())
+    }
+
+    #[inline]
+    async fn is_migrated(&mut self) -> Result<bool, Error> {
+        let res: Option<SqliteRow> =
+            sqlx::query("SELECT name FROM sqlite_master WHERE type='table' AND name='metadata'")
+                .fetch_optional(&mut self.conn)
+                .await?;
+        Ok(res.is_none())
+    }
+
+    async fn pre_upgrade(&mut self) -> Result<(), Error> {
+        sqlx::query(
+            "
+            BEGIN EXCLUSIVE TRANSACTION;
+            CREATE TABLE config (
+                name TEXT NOT NULL,
+                value TEXT,
+                PRIMARY KEY (name)
+            );
+            CREATE TABLE profiles (
+                id INTEGER NOT NULL,
+                name TEXT NOT NULL,
+                reference TEXT NULL,
+                profile_key BLOB NULL,
+                PRIMARY KEY (id)
+            );
+            CREATE UNIQUE INDEX ix_profile_name ON profiles (name);
+            ALTER TABLE items RENAME TO items_old;
+            CREATE TABLE items (
+                id INTEGER NOT NULL,
+                profile_id INTEGER NOT NULL,
+                kind INTEGER NOT NULL,
+                category BLOB NOT NULL,
+                name BLOB NOT NULL,
+                value BLOB NOT NULL,
+                expiry DATETIME NULL,
+                PRIMARY KEY (id),
+                FOREIGN KEY (profile_id) REFERENCES profiles (id)
+                    ON DELETE CASCADE ON UPDATE CASCADE
+            );
+            CREATE UNIQUE INDEX ix_items_uniq ON items
+                (profile_id, kind, category, name);
+            CREATE TABLE items_tags (
+                id INTEGER NOT NULL,
+                item_id INTEGER NOT NULL,
+                name BLOB NOT NULL,
+                value BLOB NOT NULL,
+                plaintext BOOLEAN NOT NULL,
+                PRIMARY KEY (id),
+                FOREIGN KEY (item_id) REFERENCES items (id)
+                    ON DELETE CASCADE ON UPDATE CASCADE
+            );
+            CREATE INDEX ix_items_tags_item_id ON items_tags (item_id);
+            CREATE INDEX ix_items_tags_name_enc ON items_tags
+                (name, SUBSTR(value, 1, 12)) WHERE plaintext=0;
+            CREATE INDEX ix_items_tags_name_plain ON items_tags
+                (name, value) WHERE plaintext=1;
+            COMMIT;
+            ",
+        )
+        .execute(&mut self.conn)
+        .await?;
+        Ok(())
+    }
+
+    async fn fetch_indy_key(&mut self) -> Result<(IndyKey, UpdatedKey), Error> {
+        let metadata_row: Vec<u8> = sqlx::query("SELECT value FROM metadata")
+            .fetch_one(&mut self.conn)
+            .await?
+            .try_get(0)?;
+        let metadata_json = String::from_utf8_lossy(&metadata_row);
+        let metadata: IndyKeyMetadata = serde_json::from_str(&metadata_json).map_err(err_map!(
+            Input,
+            "Could not convert value from metadata to IndyKey",
+        ))?;
+        let keys_enc = metadata.keys;
+        let salt = metadata.master_key_salt.map(|s| s[..16].to_vec());
+
+        let key_ref = self.kdf_method.to_store_key_reference(salt.as_deref())?;
+        let master = key_ref.resolve(self.wallet_key.as_str().into())?;
+
+        let keys_mpk = master
+            .unwrap_data(keys_enc)
+            .map_err(err_map!(Input, "Error decrypting wallet key"))?;
+        let indy_key = rmp_serde::from_slice(&keys_mpk)
+            .map_err(err_map!(Input, "indy key not valid msgpack"))?;
+
+        Ok((indy_key, UpdatedKey { master, key_ref }))
+    }
+
+    async fn init_profile(&mut self, key: &UpdatedKey) -> Result<ProfileKey, Error> {
+        let profile_row: Option<SqliteRow> = sqlx::query("SELECT profile_key FROM profiles")
+            .fetch_optional(&mut self.conn)
+            .await?;
+        let profile_row: Option<Vec<u8>> = match profile_row {
+            Some(row) => row.try_get(0).ok(),
+            None => None,
+        };
+
+        let profile_key = match profile_row {
+            Some(profile_row) => serde_cbor::from_slice(&profile_row)
+                .map_err(err_map!(Input, "Invalid cbor encoding for profile_key"))?,
+            None => {
+                let pk = ProfileKey::new()?;
+                let enc_pk = key.master.wrap_data(pk.to_bytes()?)?;
+                self.insert_profile(enc_pk.as_slice()).await?;
+                pk
+            }
+        };
+
+        Ok(profile_key)
+    }
+
+    async fn update_items(
+        &mut self,
+        indy_key: &IndyKey,
+        profile_key: &ProfileKey,
+    ) -> Result<(), Error> {
+        Strategy::update_items(self, indy_key, profile_key).await?;
+        Ok(())
+    }
+
+    async fn finish_upgrade(&mut self) -> Result<(), Error> {
+        sqlx::query(
+            r#"
+            BEGIN EXCLUSIVE TRANSACTION;
+            DROP TABLE items_old;
+            DROP TABLE metadata;
+            DROP TABLE tags_encrypted;
+            DROP TABLE tags_plaintext;
+            INSERT INTO config (name, value) VALUES ("version", "1");
+            COMMIT;"#,
+        )
+        .execute(&mut self.conn)
+        .await?;
+        Ok(())
+    }
+
+    async fn update_items_in_db(&mut self, items: Vec<UpdatedIndyItem>) -> Result<(), Error> {
+        let mut del_ids = vec![];
+
+        for item in items {
+            del_ids.push(item.id);
+            let ins = sqlx::query(
+                "INSERT INTO items (profile_id, kind, category, name, value)
+                VALUES (1, 2, ?1, ?2, ?3)",
+            )
+            .bind(item.category)
+            .bind(item.name)
+            .bind(item.value)
+            .execute(&mut self.conn)
+            .await?;
+            let item_id = ins.last_insert_rowid();
+            for EncEntryTag {
+                name,
+                value,
+                plaintext,
+            } in item.tags
+            {
+                sqlx::query("INSERT INTO items_tags (item_id, plaintext, name, value) VALUES (?1, ?2, ?3, ?4)")
+                    .bind(item_id)
+                    .bind(plaintext)
+                    .bind(name)
+                    .bind(value)
+                    .execute(&mut self.conn)
+                    .await?;
+            }
+        }
+        sqlx::query("DELETE FROM items_old WHERE id IN (?1)")
+            .bind(Separated(&del_ids, ",").to_string())
+            .execute(&mut self.conn)
+            .await?;
+        Ok(())
+    }
+
+    async fn create_config(&mut self, key: &UpdatedKey) -> Result<(), Error> {
+        let pass_key = key.key_ref.clone().into_uri();
+
+        sqlx::query("INSERT INTO config (name, value) VALUES (?1, ?2)")
+            .bind("default_profile")
+            .bind(&self.wallet_name)
+            .execute(&mut self.conn)
+            .await?;
+
+        sqlx::query("INSERT INTO config (name, value) VALUES (?1, ?2)")
+            .bind("key")
+            .bind(pass_key)
+            .execute(&mut self.conn)
+            .await?;
+
+        Ok(())
+    }
+
+    async fn insert_profile(&mut self, key: &[u8]) -> Result<(), Error> {
+        sqlx::query("INSERT INTO profiles (name, profile_key) VALUES (?1, ?2)")
+            .bind(&self.wallet_name)
+            .bind(key.to_vec())
+            .execute(&mut self.conn)
+            .await?;
+
+        Ok(())
+    }
+
+    async fn fetch_pending_items<
+        T: Send + Unpin + for<'r> sqlx::FromRow<'r, sqlx::sqlite::SqliteRow>,
+    >(
+        &mut self,
+        limit: u8,
+    ) -> Result<Option<Vec<T>>, Error> {
+        let res = sqlx::query_as(
+            "SELECT i.id, i.type, i.name, i.value, i.key,
+                (SELECT GROUP_CONCAT(HEX(te.name) || ':' || HEX(te.value))
+                    FROM tags_encrypted te WHERE te.item_id = i.id) AS tags_enc,
+                (SELECT GROUP_CONCAT(HEX(tp.name) || ':' || HEX(tp.value))
+                    FROM tags_plaintext tp WHERE tp.item_id = i.id) AS tags_plain
+            FROM items_old i LIMIT ?1",
+        )
+        .bind(limit)
+        .fetch_all(&mut self.conn)
+        .await?;
+
+        match res.len() {
+            0 => Ok(None),
+            _ => Ok(Some(res)),
+        }
+    }
+}
+
+struct Separated<'a, T>(&'a [T], &'static str);
+
+impl<T: Display> Display for Separated<'_, T> {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        let mut first = true;
+        for item in self.0 {
+            if !first {
+                f.write_str(self.1)?;
+            }
+            item.fmt(f)?;
+            first = false;
+        }
+        Ok(())
+    }
+}
diff --git a/src/migration/strategy.rs b/src/migration/strategy.rs
new file mode 100644
index 00000000..b7be6b49
--- /dev/null
+++ b/src/migration/strategy.rs
@@ -0,0 +1,152 @@
+use super::{
+    EncryptionKey, IndyKey, IndySdkToAriesAskarMigration, ProfileKey, UpdatedIndyItem,
+    CHACHAPOLY_NONCE_LEN,
+};
+use crate::crypto::buffer::SecretBytes;
+use crate::crypto::encrypt::KeyAeadInPlace;
+use crate::crypto::repr::KeySecretBytes;
+use crate::protect::EntryEncryptor;
+use crate::storage::EntryTag;
+use crate::Error;
+
+#[derive(Default)]
+pub(crate) struct IndyItem {
+    id: u32,
+    typ: Vec<u8>,
+    name: Vec<u8>,
+    value: Option<Vec<u8>>,
+    tags: Vec<EntryTag>,
+}
+
+// TODO: should tags_enc and tags_plain be empty in the example?
+#[derive(sqlx::FromRow, Debug)]
+pub(crate) struct IndyRow {
+    id: u32,
+    #[sqlx(rename = "type")]
+    typ: Vec<u8>,
+    name: Vec<u8>,
+    value: Option<Vec<u8>>,
+    key: Vec<u8>,
+    tags_enc: Option<String>,
+    tags_plain: Option<String>,
+}
+
+pub(crate) struct Strategy {}
+
+impl Strategy {
+    pub fn decrypt_merged(enc_value: &[u8], key: &EncryptionKey) -> Result<Vec<u8>, Error> {
+        let (nonce, ciphertext) = enc_value.split_at(CHACHAPOLY_NONCE_LEN.into());
+
+        let mut buffer = SecretBytes::from_slice(ciphertext);
+
+        key.decrypt_in_place(&mut buffer, nonce, &[])?;
+
+        Ok(buffer.to_vec())
+    }
+
+    pub fn decrypt_tags(
+        tags: &str,
+        name_key: &EncryptionKey,
+        value_key: Option<&EncryptionKey>,
+    ) -> Result<Vec<(String, String)>, Error> {
+        let mut ret = vec![];
+        for tag in tags.split(',') {
+            let mut t = tag.split(':');
+
+            let tag_name = hex::decode(t.next().unwrap())
+                .map_err(err_map!(Input, "tag is not valid hex encoded"))?;
+            let tag_value = hex::decode(t.next().unwrap())
+                .map_err(err_map!(Input, "tag is not valid hex encoded"))?;
+
+            let name = String::from_utf8(Self::decrypt_merged(&tag_name, name_key)?)
+                .map_err(err_map!(Input, "tag name is not valid utf-8"))?;
+            let value = String::from_utf8(match value_key {
+                None => tag_value,
+                Some(value_key) => Self::decrypt_merged(&tag_value, value_key)?,
+            })
+            .map_err(err_map!(Input, "tag value is not valid utf-8"))?;
+            ret.push((name, value));
+        }
+        Ok(ret)
+    }
+
+    pub fn decrypt_item(row: IndyRow, keys: &IndyKey) -> Result<IndyItem, Error> {
+        let value_key = Self::decrypt_merged(&row.key, &keys.value_key)?;
+        let value_key = EncryptionKey::from_secret_bytes(&value_key)?;
+        let value = match row.value {
+            Some(ref value) => Some(Self::decrypt_merged(value, &value_key)?),
+            None => None,
+        };
+        let mut tags: Vec<EntryTag> = vec![];
+
+        let resp = match row.tags_enc {
+            None => vec![],
+            Some(tags_enc) => Self::decrypt_tags(
+                tags_enc.as_str(),
+                &keys.tag_name_key,
+                Some(&keys.tag_value_key),
+            )?,
+        };
+        for (name, value) in resp {
+            tags.push(EntryTag::Encrypted(name, value));
+        }
+
+        let resp_plain = match row.tags_plain {
+            None => vec![],
+            Some(tags_plain) => Self::decrypt_tags(tags_plain.as_str(), &keys.tag_name_key, None)?,
+        };
+        for (name, value) in resp_plain {
+            tags.push(EntryTag::Plaintext(name, value));
+        }
+
+        let indy_item = IndyItem {
+            id: row.id,
+            typ: Self::decrypt_merged(&row.typ, &keys.type_key)?,
+            name: Self::decrypt_merged(&row.name, &keys.name_key)?,
+            value,
+            tags,
+        };
+
+        Ok(indy_item)
+    }
+
+    pub fn update_item(item: IndyItem, key: &ProfileKey) -> Result<UpdatedIndyItem, Error> {
+        let value = match item.value {
+            Some(v) => key.encrypt_entry_value(&item.typ, &item.name, v.into())?,
+            None => Default::default(),
+        };
+
+        let updated_indy_item = UpdatedIndyItem {
+            id: item.id,
+            category: key.encrypt_entry_category(item.typ.into())?,
+            name: key.encrypt_entry_name(item.name.into())?,
+            value,
+            tags: key.encrypt_entry_tags(item.tags)?,
+        };
+
+        Ok(updated_indy_item)
+    }
+
+    pub async fn update_items(
+        conn: &mut IndySdkToAriesAskarMigration,
+        indy_key: &IndyKey,
+        profile_key: &ProfileKey,
+    ) -> Result<(), Error> {
+        loop {
+            let rows = conn.fetch_pending_items::<IndyRow>(1).await?;
+            match rows {
+                None => break,
+                Some(rows) => {
+                    let mut upd = vec![];
+                    for row in rows {
+                        let result = Self::decrypt_item(row, &indy_key)?;
+                        upd.push(Self::update_item(result, &profile_key)?);
+                    }
+                    conn.update_items_in_db(upd).await?;
+                }
+            }
+        }
+
+        Ok(())
+    }
+}
diff --git a/src/protect/mod.rs b/src/protect/mod.rs
index c4f5a632..d4c4941e 100644
--- a/src/protect/mod.rs
+++ b/src/protect/mod.rs
@@ -4,7 +4,7 @@ use async_lock::RwLock;
 
 pub mod kdf;
 
-mod hmac_key;
+pub mod hmac_key;
 
 mod pass_key;
 pub use self::pass_key::PassKey;
diff --git a/src/protect/store_key.rs b/src/protect/store_key.rs
index 6272ab54..11d4e36c 100644
--- a/src/protect/store_key.rs
+++ b/src/protect/store_key.rs
@@ -164,6 +164,16 @@ impl Default for StoreKeyMethod {
     }
 }
 
+impl From<StoreKeyReference> for StoreKeyMethod {
+    fn from(key_ref: StoreKeyReference) -> Self {
+        match key_ref {
+            StoreKeyReference::DeriveKey(method, _) => Self::DeriveKey(method),
+            StoreKeyReference::RawKey => Self::RawKey,
+            StoreKeyReference::Unprotected => Self::Unprotected,
+        }
+    }
+}
+
 #[derive(Clone, Debug, PartialEq, Eq, Hash)]
 pub enum StoreKeyReference {
     // ManagedKey(String),
diff --git a/tests/.gitignore b/tests/.gitignore
new file mode 100644
index 00000000..654cceab
--- /dev/null
+++ b/tests/.gitignore
@@ -0,0 +1 @@
+*upgraded.db*
diff --git a/tests/indy_wallet_sqlite.db b/tests/indy_wallet_sqlite.db
new file mode 100644
index 00000000..97c3f25a
Binary files /dev/null and b/tests/indy_wallet_sqlite.db differ
diff --git a/tests/migration.rs b/tests/migration.rs
new file mode 100644
index 00000000..d1c20add
--- /dev/null
+++ b/tests/migration.rs
@@ -0,0 +1,46 @@
+use std::path::PathBuf;
+
+use aries_askar::migration::IndySdkToAriesAskarMigration;
+use aries_askar::{future::block_on, Error};
+
+const DB_TEMPLATE_PATH: &str = "./tests/indy_wallet_sqlite.db";
+const DB_UPGRADE_PATH: &str = "./tests/indy_wallet_sqlite_upgraded.db";
+
+/// Create a copy of the input DB for migration
+fn prepare_db() {
+    let tpl_paths = [
+        PathBuf::from(DB_TEMPLATE_PATH),
+        PathBuf::from(format!("{}-shm", DB_TEMPLATE_PATH)),
+        PathBuf::from(format!("{}-wal", DB_TEMPLATE_PATH)),
+    ];
+    let upd_paths = [
+        PathBuf::from(DB_UPGRADE_PATH),
+        PathBuf::from(format!("{}-shm", DB_UPGRADE_PATH)),
+        PathBuf::from(format!("{}-wal", DB_UPGRADE_PATH)),
+    ];
+    for (tpl, upd) in tpl_paths.iter().zip(upd_paths) {
+        if tpl.exists() {
+            std::fs::copy(tpl, upd).expect("Error copying wallet database");
+        } else {
+            std::fs::remove_file(upd).ok();
+        }
+    }
+}
+
+#[test]
+fn test_migration() {
+    prepare_db();
+
+    let res = block_on(async {
+        let wallet_name = "walletwallet.0";
+        let wallet_key = "GfwU1DC7gEZNs3w41tjBiZYj7BNToDoFEqKY6wZXqs1A";
+        let migrator =
+            IndySdkToAriesAskarMigration::connect(DB_UPGRADE_PATH, wallet_name, &wallet_key, "RAW")
+                .await?;
+        migrator.migrate().await?;
+        Result::<_, Error>::Ok(())
+    });
+
+    // We still need some indication if something returned with an error
+    res.expect("Migration failed");
+}
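
For reference, a minimal sketch (not part of the patch) of how a caller could confirm the outcome of the test above by re-opening the upgraded wallet, mirroring the "Re-opening wallet" step inside `migrate()`. It assumes the test fixture path and RAW wallet key from `tests/migration.rs`, and that `backend::sqlite::SqliteStoreOptions` plus the `StoreKeyMethod` re-export are reachable from the crate's public API; adjust the `use` paths if the actual exports differ.

```rust
// Hypothetical follow-up check, assuming the paths/keys used by tests/migration.rs.
use aries_askar::backend::sqlite::SqliteStoreOptions;
use aries_askar::{future::block_on, Error, StoreKeyMethod};

fn verify_upgraded_wallet() -> Result<(), Error> {
    block_on(async {
        let db_opts = SqliteStoreOptions::new("./tests/indy_wallet_sqlite_upgraded.db")?;
        // The test wallet uses the "RAW" KDF level, so the wallet key is used directly
        // rather than being derived with Argon2i.
        let store = db_opts
            .open(
                Some(StoreKeyMethod::RawKey),
                "GfwU1DC7gEZNs3w41tjBiZYj7BNToDoFEqKY6wZXqs1A".into(),
                None,
            )
            .await?;
        // A successful open implies the new config/profiles/items tables exist and the
        // store key reference written by create_config() resolves with this pass key.
        store.close().await?;
        Ok(())
    })
}
```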