diff --git a/consistency_check/src/bin/compressed_assets/main.rs b/consistency_check/src/bin/compressed_assets/main.rs
index cc1e3901..02af5bcb 100644
--- a/consistency_check/src/bin/compressed_assets/main.rs
+++ b/consistency_check/src/bin/compressed_assets/main.rs
@@ -64,11 +64,8 @@ pub async fn main() {
     let file = File::open(config.trees_file_path).unwrap();
     let mut rdr = csv::Reader::from_reader(file);
 
-    let keys: Vec<String> = rdr
-        .records()
-        .filter_map(Result::ok)
-        .map(|r| r.as_slice().to_string())
-        .collect();
+    let keys: Vec<String> =
+        rdr.records().filter_map(Result::ok).map(|r| r.as_slice().to_string()).collect();
 
     let rpc_client = Arc::new(RpcClient::new(config.rpc_endpoint));
 
@@ -138,11 +135,7 @@ pub async fn main() {
     let shutdown_token_clone = shutdown_token.clone();
 
     // update rate on the background
-    tokio::spawn(update_rate(
-        shutdown_token_clone,
-        assets_processed_clone,
-        rate_clone,
-    ));
+    tokio::spawn(update_rate(shutdown_token_clone, assets_processed_clone, rate_clone));
 
     // write found problematic assets to the files
     writers.spawn(async move {
@@ -208,7 +201,7 @@ async fn verify_tree_batch(
                 let mut f_ch = failed_check.lock().await;
                 f_ch.insert(tree.clone());
                 continue;
-            }
+            },
         };
 
         let tree_config_key = Pubkey::find_program_address(&[t_key.as_ref()], &mpl_bubblegum::ID).0;
@@ -337,7 +330,7 @@ async fn process_assets_batch(
                         )
                         .await;
                     }
-                }
+                },
                 Err(e) => {
                     save_asset_w_inv_proofs(
                         assets_with_failed_proofs.clone(),
@@ -347,7 +340,7 @@ async fn process_assets_batch(
                         Some(e),
                     )
                     .await;
-                }
+                },
             }
         } else {
             save_asset_w_inv_proofs(
@@ -355,10 +348,7 @@ async fn process_assets_batch(
                 failed_proofs.clone(),
                 tree.clone(),
                 asset.clone(),
-                Some(format!(
-                    "API did not return any proofs for asset: {:?}",
-                    asset
-                )),
+                Some(format!("API did not return any proofs for asset: {:?}", asset)),
             )
             .await;
         }
@@ -407,11 +397,8 @@ async fn check_if_asset_proofs_valid(asset_proofs_response: AssetProof) -> Resul
         .map_err(|e| e.to_string())?
        .to_bytes();
 
-    let recomputed_root = recompute(
-        leaf_key,
-        asset_proofs.as_ref(),
-        asset_proofs_response.node_index as u32,
-    );
+    let recomputed_root =
+        recompute(leaf_key, asset_proofs.as_ref(), asset_proofs_response.node_index as u32);
 
     Ok(recomputed_root == root_key)
 }
@@ -508,9 +495,8 @@ mod tests {
             tree_id: "AxM84SgtLjS51ffA9DucZpGZc3xKDF7H4zU7U6hJQYbR".to_string(),
         };
 
-        let proofs_valid = check_if_asset_proofs_valid(correct_asset_proofs_response.clone())
-            .await
-            .unwrap();
+        let proofs_valid =
+            check_if_asset_proofs_valid(correct_asset_proofs_response.clone()).await.unwrap();
 
         assert!(proofs_valid);
 
@@ -519,9 +505,7 @@ mod tests {
         invalid_first_proof_hash.proof[0] =
             "GuR1VgjoFvHU1vkh81LK1znDyWGjf1B2e4rQ4zQivAvT".to_string();
 
-        let proofs_valid = check_if_asset_proofs_valid(invalid_first_proof_hash)
-            .await
-            .unwrap();
+        let proofs_valid = check_if_asset_proofs_valid(invalid_first_proof_hash).await.unwrap();
 
         assert_eq!(proofs_valid, false);
 
@@ -529,9 +513,7 @@ mod tests {
         // change leaf hash to incorrect one
         invalid_leaf_hash.leaf = "GuR1VgjoFvHU1vkh81LK1znDyWGjf1B2e4rQ4zQivAvT".to_string();
 
-        let proofs_valid = check_if_asset_proofs_valid(invalid_leaf_hash)
-            .await
-            .unwrap();
+        let proofs_valid = check_if_asset_proofs_valid(invalid_leaf_hash).await.unwrap();
 
         assert_eq!(proofs_valid, false);
     }
diff --git a/consistency_check/src/bin/jsons/main.rs b/consistency_check/src/bin/jsons/main.rs
index 2017387f..0c081e8a 100644
--- a/consistency_check/src/bin/jsons/main.rs
+++ b/consistency_check/src/bin/jsons/main.rs
@@ -66,19 +66,12 @@ pub async fn main() {
     let args = Args::parse();
 
     match args.cmd {
-        Commands::CheckConsistency {
-            rocks_path,
-            postgre_creds,
-            batch,
-        } => {
+        Commands::CheckConsistency { rocks_path, postgre_creds, batch } => {
             check_jsons_consistency(rocks_path, postgre_creds, batch).await;
-        }
-        Commands::ChangeStatus {
-            postgre_creds,
-            file_path,
-        } => {
+        },
+        Commands::ChangeStatus { postgre_creds, file_path } => {
             change_jsons_status(postgre_creds, file_path).await;
-        }
+        },
     }
 }
 
@@ -101,11 +94,8 @@ async fn change_jsons_status(postgre_creds: String, file_path: String) {
     let spinner_style =
         ProgressStyle::with_template("{prefix:>10.bold.dim} {spinner} total={human_pos} {msg}")
             .unwrap();
-    let links_spinner = Arc::new(
-        ProgressBar::new_spinner()
-            .with_style(spinner_style)
-            .with_prefix("links"),
-    );
+    let links_spinner =
+        Arc::new(ProgressBar::new_spinner().with_style(spinner_style).with_prefix("links"));
     let mut links_processed = 0;
 
     let mut missed_jsons = csv::Reader::from_path(file_path).unwrap();
@@ -223,20 +213,13 @@ async fn check_jsons_consistency(rocks_path: String, postgre_creds: String, batc
     let assets_processed_clone = assets_processed.clone();
     let shutdown_token_clone = shutdown_token.clone();
     let rate_clone = rate.clone();
-    tokio::spawn(update_rate(
-        shutdown_token_clone,
-        assets_processed_clone,
-        rate_clone,
-    ));
+    tokio::spawn(update_rate(shutdown_token_clone, assets_processed_clone, rate_clone));
 
     let mut last_key_in_batch = None;
 
     info!("Launching main loop...");
 
     loop {
-        match index_pg_storage
-            .get_tasks(batch as i64, last_key_in_batch.clone())
-            .await
-        {
+        match index_pg_storage.get_tasks(batch as i64, last_key_in_batch.clone()).await {
             Ok(tasks) => {
                 if tasks.is_empty() {
                     info!(
@@ -256,11 +239,7 @@ async fn check_jsons_consistency(rocks_path: String, postgre_creds: String, batc
                     .map(|t| t.metadata_url.clone())
                     .collect();
 
-                match db_client
-                    .asset_offchain_data
-                    .batch_get(keys_to_check.clone())
-                    .await
-                {
+                match db_client.asset_offchain_data.batch_get(keys_to_check.clone()).await {
                    Ok(jsons) => {
                        let mut ms_jn = missed_jsons.lock().await;
@@ -280,17 +259,14 @@ async fn check_jsons_consistency(rocks_path: String, postgre_creds: String, batc
                                count_of_missed_jsons.fetch_add(1, Ordering::Relaxed);
                            }
                        }
-                    }
+                    },
                    Err(e) => {
-                        error!(
-                            "Error during selecting data from the Rocks: {}",
-                            e.to_string()
-                        );
+                        error!("Error during selecting data from the Rocks: {}", e.to_string());
                        count_of_missed_jsons
                            .fetch_add(keys_to_check.len() as u64, Ordering::Relaxed);
                        let mut ms_jn = missed_jsons.lock().await;
                        ms_jn.extend(keys_to_check);
-                    }
+                    },
                }
 
                assets_processed.fetch_add(tasks.len() as u64, Ordering::Relaxed);
@@ -300,14 +276,11 @@ async fn check_jsons_consistency(rocks_path: String, postgre_creds: String, batc
                    info!("Selected from the Postgre less jSONs that expected - meaning it's finished");
                    break;
                }
-            }
+            },
            Err(e) => {
-                error!(
-                    "Error during selecting data from the Postgre: {}",
-                    e.to_string()
-                );
+                error!("Error during selecting data from the Postgre: {}", e.to_string());
                tokio::time::sleep(Duration::from_secs(5)).await;
-            }
+            },
        }
 
        let current_missed_jsons = count_of_missed_jsons.load(Ordering::Relaxed);
diff --git a/consistency_check/src/bin/regular_assets/main.rs b/consistency_check/src/bin/regular_assets/main.rs
index 7b8c8aeb..842d3052 100644
--- a/consistency_check/src/bin/regular_assets/main.rs
+++ b/consistency_check/src/bin/regular_assets/main.rs
@@ -96,11 +96,8 @@ pub async fn main() {
     let spinner_style =
         ProgressStyle::with_template("{prefix:>10.bold.dim} {spinner} total={human_pos} {msg}")
             .unwrap();
-    let accounts_spinner = Arc::new(
-        ProgressBar::new_spinner()
-            .with_style(spinner_style)
-            .with_prefix("accs"),
-    );
+    let accounts_spinner =
+        Arc::new(ProgressBar::new_spinner().with_style(spinner_style).with_prefix("accs"));
     let assets_processed = Arc::new(AtomicU64::new(0));
     let rate = Arc::new(Mutex::new(0.0));
 
@@ -156,11 +153,7 @@ pub async fn main() {
     let assets_processed_clone = assets_processed.clone();
     let shutdown_token_clone = shutdown_token.clone();
     let rate_clone = rate.clone();
-    tokio::spawn(update_rate(
-        shutdown_token_clone,
-        assets_processed_clone,
-        rate_clone,
-    ));
+    tokio::spawn(update_rate(shutdown_token_clone, assets_processed_clone, rate_clone));
 
     'outer: for append_vec in snapshot_loader.iter() {
         match append_vec {
@@ -173,9 +166,8 @@ pub async fn main() {
                     let account = account.access().unwrap();
 
                     if account.account_meta.owner == *CORE_KEY {
-                        if let Err(e) = nfts_channel_tx
-                            .send((AccountType::Core, account.meta.pubkey))
-                            .await
+                        if let Err(e) =
+                            nfts_channel_tx.send((AccountType::Core, account.meta.pubkey)).await
                         {
                             error!("Could not send core key to the channel: {}", e.to_string());
                         }
@@ -184,9 +176,8 @@ pub async fn main() {
                    {
                        // there only 2 types of accounts for that programs, so if it's not mint it's token account
                        if account.data.len() == MINT_ACC_DATA_SIZE {
-                            if let Err(e) = nfts_channel_tx
-                                .send((AccountType::Mint, account.meta.pubkey))
-                                .await
+                            if let Err(e) =
+                                nfts_channel_tx.send((AccountType::Mint, account.meta.pubkey)).await
                            {
                                error!("Could not send mint key to the channel: {}", e.to_string());
                            }
@@ -199,7 +190,7 @@ pub async fn main() {
                    }
                }
            }
-            }
+            },
            Err(error) => error!("append_vec: {:?}", error),
        };
    }
@@ -270,16 +261,16 @@ async fn process_nfts(
                        m_d.insert(key.to_string());
                        drop(m_d);
                    }
-                }
+                },
                Err(e) => {
                    error!(
                        "Error during checking asset data key existence: {}",
                        e.to_string()
                    );
-                }
+                },
            }
 
            match acc_type {
-                AccountType::Core => {} // already checked above
+                AccountType::Core => {}, // already checked above
                // if we've got mint account we also should check spl_mints column
                AccountType::Mint => match rocks_db_cloned.spl_mints.has_key(key).await {
                    Ok(exist) => {
@@ -290,13 +281,13 @@ async fn process_nfts(
                            m_d.insert(key.to_string());
                            drop(m_d);
                        }
-                    }
+                    },
                    Err(e) => {
                        error!(
                            "Error during checking mint key existence: {}",
                            e.to_string()
                        );
-                    }
+                    },
                },
            }
@@ -322,11 +313,11 @@ async fn process_nfts(
                    drop(permit);
                });
-            }
+            },
            None => {
                // if None is received - channel was closed
                break;
-            }
+            },
        }
    }
    Ok(())
@@ -377,13 +368,13 @@ async fn process_fungibles(
                    m_d.insert(key.to_string());
                    drop(m_d);
                }
-            }
+            },
            Err(e) => {
                error!(
                    "Error during checking token accounts key existence: {}",
                    e.to_string()
                );
-            }
+            },
        }
 
        let current_assets_processed =
@@ -408,11 +399,11 @@ async fn process_fungibles(
                    drop(permit);
                });
-            }
+            },
            None => {
                // if None is received - channel was closed
                break;
-            }
+            },
        }
    }
    Ok(())
}
diff --git a/consistency_check/src/bin/regular_assets/snapshot_reader.rs b/consistency_check/src/bin/regular_assets/snapshot_reader.rs
index 4e059260..5c9deb19 100644
--- a/consistency_check/src/bin/regular_assets/snapshot_reader.rs
+++ b/consistency_check/src/bin/regular_assets/snapshot_reader.rs
@@ -18,43 +18,46 @@
 // This file contains code vendored from https://github.com/solana-labs/solana
 
+use std::{
+    collections::{HashMap, HashSet},
+    ffi::OsStr,
+    fs::OpenOptions,
+    io,
+    io::{BufReader, Read},
+    mem,
+    path::{Component, Path},
+    pin::Pin,
+    rc::Rc,
+    str::FromStr,
+    time::Instant,
+};
+
 use bincode::Options;
 use memmap2::{Mmap, MmapMut};
-use serde::de::DeserializeOwned;
-use serde::{Deserialize, Serialize};
-use solana_accounts_db::account_storage::meta::StoredMetaWriteVersion;
-use solana_accounts_db::accounts_db::BankHashStats;
-use solana_accounts_db::ancestors::AncestorsForSerialization;
-use solana_accounts_db::blockhash_queue::BlockhashQueue;
+use serde::{de::DeserializeOwned, Deserialize, Serialize};
+use solana_accounts_db::{
+    account_storage::meta::StoredMetaWriteVersion, accounts_db::BankHashStats,
+    ancestors::AncestorsForSerialization, blockhash_queue::BlockhashQueue,
+};
 use solana_frozen_abi_macro::AbiExample;
-use solana_runtime::epoch_stakes::EpochStakes;
-use solana_runtime::stakes::Stakes;
-use solana_sdk::account::{Account, AccountSharedData, ReadableAccount};
-use solana_sdk::clock::{Epoch, UnixTimestamp};
-use solana_sdk::deserialize_utils::default_on_eof;
-use solana_sdk::epoch_schedule::EpochSchedule;
-use solana_sdk::fee_calculator::{FeeCalculator, FeeRateGovernor};
-use solana_sdk::hard_forks::HardForks;
-use solana_sdk::hash::Hash;
-use solana_sdk::inflation::Inflation;
-use solana_sdk::pubkey::Pubkey;
-use solana_sdk::slot_history::Slot;
-use solana_sdk::stake::state::Delegation;
-use std::collections::{HashMap, HashSet};
-use std::rc::Rc;
-use std::str::FromStr;
-use std::{io, mem};
+use solana_runtime::{epoch_stakes::EpochStakes, stakes::Stakes};
+use solana_sdk::{
+    account::{Account, AccountSharedData, ReadableAccount},
+    clock::{Epoch, UnixTimestamp},
+    deserialize_utils::default_on_eof,
+    epoch_schedule::EpochSchedule,
+    fee_calculator::{FeeCalculator, FeeRateGovernor},
+    hard_forks::HardForks,
+    hash::Hash,
+    inflation::Inflation,
+    pubkey::Pubkey,
+    slot_history::Slot,
+    stake::state::Delegation,
+};
+use tar::{Archive, Entries, Entry};
 use thiserror::Error;
 use tracing::info;
 
-use std::ffi::OsStr;
-use std::fs::OpenOptions;
-use std::io::{BufReader, Read};
-use std::path::{Component, Path};
-use std::pin::Pin;
-use std::time::Instant;
-use tar::{Archive, Entries, Entry};
-
 #[derive(Deserialize)]
 pub struct RentCollector {
     pub epoch: Epoch,
@@ -215,10 +218,7 @@ where
     let accounts_db_fields_post_time = Instant::now();
     drop(snapshot_file);
 
-    println!(
-        "Read bank fields in {:?}",
-        versioned_bank_post_time - pre_unpack
-    );
+    println!("Read bank fields in {:?}", versioned_bank_post_time - pre_unpack);
     println!(
         "Read accounts DB fields in {:?}",
         accounts_db_fields_post_time - versioned_bank_post_time
@@ -241,11 +241,7 @@ where
        _ => return false,
    };
    // Check if slot number file is valid u64.
-    if slot_number_str_1
-        .to_str()
-        .and_then(|s| s.parse::<u64>().ok())
-        .is_none()
-    {
+    if slot_number_str_1.to_str().and_then(|s| s.parse::<u64>().ok()).is_none() {
        return false;
    }
    let slot_number_str_2 = match components.next() {
@@ -268,22 +264,18 @@ where
    }
 
    fn unboxed_iter(&mut self) -> impl Iterator<Item = Result<AppendVec, SnapshotError>> + '_ {
-        self.entries
-            .take()
-            .into_iter()
-            .flatten()
-            .filter_map(|entry| {
-                let mut entry = match entry {
-                    Ok(x) => x,
-                    Err(e) => return Some(Err(e.into())),
-                };
-                let path = match entry.path() {
-                    Ok(x) => x,
-                    Err(e) => return Some(Err(e.into())),
-                };
-                let (slot, id) = path.file_name().and_then(parse_append_vec_name)?;
-                Some(self.process_entry(&mut entry, slot, id))
-            })
+        self.entries.take().into_iter().flatten().filter_map(|entry| {
+            let mut entry = match entry {
+                Ok(x) => x,
+                Err(e) => return Some(Err(e.into())),
+            };
+            let path = match entry.path() {
+                Ok(x) => x,
+                Err(e) => return Some(Err(e.into())),
+            };
+            let (slot, id) = path.file_name().and_then(parse_append_vec_name)?;
+            Some(self.process_entry(&mut entry, slot, id))
+        })
    }
 
    fn process_entry(
@@ -292,22 +284,13 @@ where
        slot: u64,
        id: u64,
    ) -> Result<AppendVec, SnapshotError> {
-        let known_vecs = self
-            .accounts_db_fields
-            .0
-            .get(&slot)
-            .map(|v| &v[..])
-            .unwrap_or(&[]);
+        let known_vecs = self.accounts_db_fields.0.get(&slot).map(|v| &v[..]).unwrap_or(&[]);
        let known_vec = known_vecs.iter().find(|entry| entry.id == (id as usize));
        let known_vec = match known_vec {
            None => return Err(SnapshotError::UnexpectedAppendVec),
            Some(v) => v,
        };
-        Ok(AppendVec::new_from_reader(
-            entry,
-            known_vec.accounts_current_len,
-            slot,
-        )?)
+        Ok(AppendVec::new_from_reader(entry, known_vec.accounts_current_len, slot)?)
    }
 
    pub fn iter(&mut self) -> AppendVecIterator<'_> {
@@ -424,7 +407,7 @@ pub fn append_vec_iter(append_vec: Rc<AppendVec>) -> impl Iterator {
            offsets.push(offset);
            offset = next_offset;
-        }
+        },
    }
 }
 let append_vec = Rc::clone(&append_vec);
@@ -495,11 +478,7 @@ impl AppendVec {
        current_len: usize,
        slot: u64,
    ) -> io::Result<Self> {
-        let data = OpenOptions::new()
-            .read(true)
-            .write(false)
-            .create(false)
-            .open(&path)?;
+        let data = OpenOptions::new().read(true).write(false).create(false).open(&path)?;
        let file_size = std::fs::metadata(&path)?.len();
        AppendVec::sanitize_len_and_size(current_len, file_size as usize)?;
@@ -513,12 +492,7 @@ impl AppendVec {
            result?
        };
 
-        let new = AppendVec {
-            map,
-            current_len,
-            file_size,
-            slot,
-        };
+        let new = AppendVec { map, current_len, file_size, slot };
 
         Ok(new)
     }
@@ -578,17 +552,7 @@ impl AppendVec {
         let (hash, next): (&'a Hash, _) = self.get_type(next)?;
         let (data, next) = self.get_slice(next, meta.data_len as usize)?;
         let stored_size = next - offset;
-        Some((
-            StoredAccountMeta {
-                meta,
-                account_meta,
-                data,
-                offset,
-                stored_size,
-                hash,
-            },
-            next,
-        ))
+        Some((StoredAccountMeta { meta, account_meta, data, offset, stored_size, hash }, next))
     }
 
     pub fn get_slot(&self) -> u64 {
diff --git a/consistency_check/src/lib.rs b/consistency_check/src/lib.rs
index 8cc6b72a..55141fe6 100644
--- a/consistency_check/src/lib.rs
+++ b/consistency_check/src/lib.rs
@@ -28,11 +28,7 @@ pub async fn update_rate(
         let elapsed = current_time.duration_since(last_time).as_secs_f64();
         let count = current_count - last_count;
 
-        let current_rate = if elapsed > 0.0 {
-            (count as f64) / elapsed
-        } else {
-            0.0
-        };
+        let current_rate = if elapsed > 0.0 { (count as f64) / elapsed } else { 0.0 };
 
         // Update rate
         {
diff --git a/entities/src/enums.rs b/entities/src/enums.rs
index d22c69aa..fc181b6c 100644
--- a/entities/src/enums.rs
+++ b/entities/src/enums.rs
@@ -32,16 +32,7 @@ pub enum SpecificationAssetClass {
     FungibleToken,
     FungibleAsset,
     Nft,
-    PrintableNft,
     ProgrammableNft,
-    // legacy, doesn't exist
-    Print,
-    // legacy, doesn't exist
-    TransferRestrictedNft,
-    // legacy, doesn't exist
-    NonTransferableNft,
-    // legacy, doesn't exist
-    IdentityNft,
     MplCoreAsset,
     MplCoreCollection,
 }
@@ -161,11 +152,12 @@ impl From<Interface> for SpecificationAssetClass {
         match interface {
             Interface::FungibleAsset => Self::FungibleAsset,
             Interface::FungibleToken => Self::FungibleToken,
-            Interface::Identity => Self::IdentityNft,
             Interface::Nft | Interface::V1NFT | Interface::LegacyNft => Self::Nft,
-            Interface::V1PRINT => Self::Print,
             Interface::ProgrammableNFT => Self::ProgrammableNft,
-            Interface::Custom | Interface::Executable => Self::Unknown,
+            Interface::V1PRINT
+            | Interface::Custom
+            | Interface::Executable
+            | Interface::Identity => Self::Unknown,
             Interface::MplCoreAsset => Self::MplCoreAsset,
             Interface::MplCoreCollection => Self::MplCoreCollection,
         }
@@ -176,7 +168,6 @@ impl From<(&SpecificationVersions, &SpecificationAssetClass)> for Interface {
     fn from(i: (&SpecificationVersions, &SpecificationAssetClass)) -> Self {
         match i {
             (SpecificationVersions::V1, SpecificationAssetClass::Nft) => Interface::V1NFT,
-            (SpecificationVersions::V1, SpecificationAssetClass::PrintableNft) => Interface::V1NFT,
             (SpecificationVersions::V0, SpecificationAssetClass::Nft) => Interface::LegacyNft,
             (SpecificationVersions::V1, SpecificationAssetClass::ProgrammableNft) => {
                 Interface::ProgrammableNFT
@@ -198,7 +189,6 @@ impl From<Interface> for (SpecificationVersions, SpecificationAssetClass) {
             Interface::ProgrammableNFT => {
                 (SpecificationVersions::V1, SpecificationAssetClass::ProgrammableNft)
             },
-            Interface::V1PRINT => (SpecificationVersions::V1, SpecificationAssetClass::Print),
             Interface::FungibleAsset => {
                 (SpecificationVersions::V1, SpecificationAssetClass::FungibleAsset)
             },
diff --git a/grpc/proto/gap_filler.proto b/grpc/proto/gap_filler.proto
index 3d6fb26d..6e1ce216 100644
--- a/grpc/proto/gap_filler.proto
+++ b/grpc/proto/gap_filler.proto
@@ -22,14 +22,9 @@ enum SpecificationAssetClass {
   SPECIFICATION_ASSET_CLASS_FUNGIBLE_TOKEN = 1;
   SPECIFICATION_ASSET_CLASS_FUNGIBLE_ASSET = 2;
   SPECIFICATION_ASSET_CLASS_NFT = 3;
-  SPECIFICATION_ASSET_CLASS_PRINTABLE_NFT = 4;
-  SPECIFICATION_ASSET_CLASS_PROGRAMMABLE_NFT = 5;
-  SPECIFICATION_ASSET_CLASS_PRINT = 6;
-  SPECIFICATION_ASSET_CLASS_TRANSFER_RESTRICTED_NFT = 7;
-  SPECIFICATION_ASSET_CLASS_NON_TRANSFERABLE_NFT = 8;
-  SPECIFICATION_ASSET_CLASS_IDENTITY_NFT = 9;
-  SPECIFICATION_ASSET_CLASS_MPL_CORE_ASSET = 10;
-  SPECIFICATION_ASSET_CLASS_MPL_CORE_COLLECTION = 11;
+  SPECIFICATION_ASSET_CLASS_PROGRAMMABLE_NFT = 4;
+  SPECIFICATION_ASSET_CLASS_MPL_CORE_ASSET = 5;
+  SPECIFICATION_ASSET_CLASS_MPL_CORE_COLLECTION = 6;
 }
 
 enum OwnerType {
diff --git a/grpc/src/gapfiller.rs b/grpc/src/gapfiller.rs
index 8aa4b33a..5af3f74a 100644
--- a/grpc/src/gapfiller.rs
+++ b/grpc/src/gapfiller.rs
@@ -434,10 +434,6 @@ pub enum SpecificationAssetClass {
     Nft = 3,
     PrintableNft = 4,
     ProgrammableNft = 5,
-    Print = 6,
-    TransferRestrictedNft = 7,
-    NonTransferableNft = 8,
-    IdentityNft = 9,
     MplCoreAsset = 10,
     MplCoreCollection = 11,
 }
@@ -456,14 +452,6 @@ impl SpecificationAssetClass {
             SpecificationAssetClass::ProgrammableNft => {
                 "SPECIFICATION_ASSET_CLASS_PROGRAMMABLE_NFT"
             },
-            SpecificationAssetClass::Print => "SPECIFICATION_ASSET_CLASS_PRINT",
-            SpecificationAssetClass::TransferRestrictedNft => {
-                "SPECIFICATION_ASSET_CLASS_TRANSFER_RESTRICTED_NFT"
-            },
-            SpecificationAssetClass::NonTransferableNft => {
-                "SPECIFICATION_ASSET_CLASS_NON_TRANSFERABLE_NFT"
-            },
-            SpecificationAssetClass::IdentityNft => "SPECIFICATION_ASSET_CLASS_IDENTITY_NFT",
             SpecificationAssetClass::MplCoreAsset => "SPECIFICATION_ASSET_CLASS_MPL_CORE_ASSET",
             SpecificationAssetClass::MplCoreCollection => {
                 "SPECIFICATION_ASSET_CLASS_MPL_CORE_COLLECTION"
@@ -479,12 +467,6 @@ impl SpecificationAssetClass {
             "SPECIFICATION_ASSET_CLASS_NFT" => Some(Self::Nft),
             "SPECIFICATION_ASSET_CLASS_PRINTABLE_NFT" => Some(Self::PrintableNft),
             "SPECIFICATION_ASSET_CLASS_PROGRAMMABLE_NFT" => Some(Self::ProgrammableNft),
-            "SPECIFICATION_ASSET_CLASS_PRINT" => Some(Self::Print),
-            "SPECIFICATION_ASSET_CLASS_TRANSFER_RESTRICTED_NFT" => {
-                Some(Self::TransferRestrictedNft)
-            },
-            "SPECIFICATION_ASSET_CLASS_NON_TRANSFERABLE_NFT" => Some(Self::NonTransferableNft),
-            "SPECIFICATION_ASSET_CLASS_IDENTITY_NFT" => Some(Self::IdentityNft),
             "SPECIFICATION_ASSET_CLASS_MPL_CORE_ASSET" => Some(Self::MplCoreAsset),
             "SPECIFICATION_ASSET_CLASS_MPL_CORE_COLLECTION" => Some(Self::MplCoreCollection),
             _ => None,
diff --git a/grpc/src/mapper.rs b/grpc/src/mapper.rs
index 04c89de7..083775c0 100644
--- a/grpc/src/mapper.rs
+++ b/grpc/src/mapper.rs
@@ -822,12 +822,7 @@ impl_from_enum!(
     FungibleToken,
     FungibleAsset,
     Nft,
-    PrintableNft,
     ProgrammableNft,
-    Print,
-    TransferRestrictedNft,
-    NonTransferableNft,
-    IdentityNft,
     MplCoreAsset,
     MplCoreCollection
 );
diff --git a/nft_ingester/tests/api_tests.rs b/nft_ingester/tests/api_tests.rs
index f636ee93..3e47a114 100644
--- a/nft_ingester/tests/api_tests.rs
+++ b/nft_ingester/tests/api_tests.rs
@@ -693,12 +693,8 @@ mod tests {
             ..Default::default()
         };
 
-        let asset_authority = AssetAuthority {
-            pubkey: pb,
-            authority,
-            slot_updated: 12,
-            write_version: Some(1),
-        };
+        let asset_authority =
+            AssetAuthority { pubkey: pb, authority, slot_updated: 12, write_version: Some(1) };
 
         let owner = AssetOwner {
             pubkey: pb,
@@ -733,11 +729,7 @@ mod tests {
             .storage
             .db
             .put_cf(
-                &env.rocks_env
-                    .storage
-                    .db
-                    .cf_handle(AssetCompleteDetails::NAME)
-                    .unwrap(),
+                &env.rocks_env.storage.db.cf_handle(AssetCompleteDetails::NAME).unwrap(),
                 pb,
                 asset_complete_details.convert_to_fb_bytes(),
             )
@@ -745,10 +737,7 @@ mod tests {
 
         let payload = GetAsset {
             id: pb.to_string(),
-            options: Options {
-                show_unverified_collections: true,
-                ..Default::default()
-            },
+            options: Options { show_unverified_collections: true, ..Default::default() },
         };
         let response = api.get_asset(payload, mutexed_tasks.clone()).await.unwrap();
         assert_eq!(
diff --git a/postgre-client/src/converters.rs b/postgre-client/src/converters.rs
index a510dc81..4065cd8f 100644
--- a/postgre-client/src/converters.rs
+++ b/postgre-client/src/converters.rs
@@ -33,22 +33,9 @@ impl From<entities::enums::SpecificationAssetClass> for SpecificationAssetClass
                 SpecificationAssetClass::FungibleAsset
             },
             entities::enums::SpecificationAssetClass::Nft => SpecificationAssetClass::Nft,
-            entities::enums::SpecificationAssetClass::PrintableNft => {
-                SpecificationAssetClass::PrintableNft
-            },
             entities::enums::SpecificationAssetClass::ProgrammableNft => {
                 SpecificationAssetClass::ProgrammableNft
             },
-            entities::enums::SpecificationAssetClass::Print => SpecificationAssetClass::Print,
-            entities::enums::SpecificationAssetClass::TransferRestrictedNft => {
-                SpecificationAssetClass::TransferRestrictedNft
-            },
-            entities::enums::SpecificationAssetClass::NonTransferableNft => {
-                SpecificationAssetClass::NonTransferableNft
-            },
-            entities::enums::SpecificationAssetClass::IdentityNft => {
-                SpecificationAssetClass::IdentityNft
-            },
             entities::enums::SpecificationAssetClass::MplCoreAsset => {
                 SpecificationAssetClass::MplCoreAsset
             },
diff --git a/postgre-client/src/model.rs b/postgre-client/src/model.rs
index 31c18b2d..4cf9e273 100644
--- a/postgre-client/src/model.rs
+++ b/postgre-client/src/model.rs
@@ -23,17 +23,7 @@ pub enum SpecificationAssetClass {
     FungibleToken,
     FungibleAsset,
     Nft,
-    // legacy code, never ever existed in the first place
-    PrintableNft,
     ProgrammableNft,
-    // legacy code, never ever existed in the first place
-    Print,
-    // legacy code, never ever existed in the first place
-    TransferRestrictedNft,
-    // legacy code, never ever existed in the first place
-    NonTransferableNft,
-    // legacy code, never ever existed in the first place
-    IdentityNft,
     MplCoreAsset,
     MplCoreCollection,
 }
diff --git a/postgre-client/src/tasks.rs b/postgre-client/src/tasks.rs
index 2525596e..4718ab96 100644
--- a/postgre-client/src/tasks.rs
+++ b/postgre-client/src/tasks.rs
@@ -195,9 +195,7 @@ impl PgClient {
     }
 
     pub async fn get_tasks_count(&self) -> Result {
-        let resp = sqlx::query("SELECT COUNT(*) FROM tasks")
-            .fetch_one(&self.pool)
-            .await?;
+        let resp = sqlx::query("SELECT COUNT(*) FROM tasks").fetch_one(&self.pool).await?;
         let count: i64 = resp.get(0);
 
         Ok(count)
@@ -232,11 +230,7 @@ impl PgClient {
         let metadata_url: String = row.get("tsk_metadata_url");
         let status: TaskStatus = row.get("tsk_status");
 
-        tasks.push(JsonTask {
-            tsk_id,
-            metadata_url,
-            status,
-        });
+        tasks.push(JsonTask { tsk_id, metadata_url, status });
     }
 
     Ok(tasks)
diff --git a/rocks-db/src/flatbuf/asset.fbs b/rocks-db/src/flatbuf/asset.fbs
index c039512e..e39c9714 100644
--- a/rocks-db/src/flatbuf/asset.fbs
+++ b/rocks-db/src/flatbuf/asset.fbs
@@ -6,12 +6,12 @@ enum SpecificationAssetClass : byte {
   FungibleToken,
   FungibleAsset,
   Nft,
-  PrintableNft,
+  PrintableNft (deprecated),
   ProgrammableNft,
-  Print,
-  TransferRestrictedNft,
-  NonTransferableNft,
-  IdentityNft,
+  Print (deprecated),
+  TransferRestrictedNft (deprecated),
+  NonTransferableNft (deprecated),
+  IdentityNft (deprecated),
   MplCoreAsset,
   MplCoreCollection
 }
diff --git a/rocks-db/src/mappers.rs b/rocks-db/src/mappers.rs
index 440a42d7..e8e2177d 100644
--- a/rocks-db/src/mappers.rs
+++ b/rocks-db/src/mappers.rs
@@ -37,12 +37,7 @@ impl_from_enum!(
     FungibleToken,
     FungibleAsset,
     Nft,
-    PrintableNft,
     ProgrammableNft,
-    Print,
-    TransferRestrictedNft,
-    NonTransferableNft,
-    IdentityNft,
     MplCoreAsset,
     MplCoreCollection
 );
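
Note on the check exercised in compressed_assets/main.rs above: check_if_asset_proofs_valid decodes the leaf, the proof path, and the root from the DAS response, recomputes the root with recompute(leaf, proofs, node_index), and compares it to the reported root. The sketch below shows that recomputation in a self-contained form, assuming the usual convention where bit i of the node index selects the left/right position at level i, as SPL concurrent Merkle trees do. toy_hash and recompute_root are illustrative names only, not this repository's API; the real recompute hashes with keccak256.

type Node = [u8; 32];

// Toy combiner, NOT cryptographic; it only makes the walk observable.
fn toy_hash(left: &Node, right: &Node) -> Node {
    let mut out = [0u8; 32];
    for i in 0..32 {
        out[i] = left[i].rotate_left(1) ^ right[i].wrapping_add(i as u8);
    }
    out
}

// Walk the proof bottom-up: the low bit of `index` says whether the current
// node is a left (0) or right (1) child, which fixes the argument order of
// the parent hash; shifting the index moves one level up the tree.
fn recompute_root(leaf: Node, proof: &[Node], mut index: u32) -> Node {
    let mut node = leaf;
    for sibling in proof {
        node = if index & 1 == 0 { toy_hash(&node, sibling) } else { toy_hash(sibling, &node) };
        index >>= 1;
    }
    node
}

fn main() {
    // Hand-build a 4-leaf tree, then verify leaf #2 (index 2) against its root.
    let leaves: [Node; 4] = [[1u8; 32], [2u8; 32], [3u8; 32], [4u8; 32]];
    let n01 = toy_hash(&leaves[0], &leaves[1]);
    let n23 = toy_hash(&leaves[2], &leaves[3]);
    let root = toy_hash(&n01, &n23);

    // Proof for leaf 2: its sibling (leaf 3), then the neighbouring subtree (n01).
    assert_eq!(recompute_root(leaves[2], &[leaves[3], n01], 2), root);
    // A tampered proof element changes the recomputed root, so the check fails.
    assert_ne!(recompute_root(leaves[2], &[leaves[0], n01], 2), root);
    println!("merkle recomputation sketch holds");
}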