Mesh 2188/migrations (#1699)
* Add asset migrations

* Add checkpoint migrations

* Add compliance manager migrations

* Add corporate-actions migrations

* Add external-agents migrations

* Add nft migrations

* Add portfolio migrations

* Add pallet-settlement migrations

* Add statistics migrations

* Add sto migrations

* Add identity migrations

* Add missing AssetIdentifiers migration

* Add missing on_runtime_upgrade

* Count migrated items

* Add missing Cargo.lock
HenriqueNogara committed Aug 14, 2024
1 parent 140b6e3 commit b4f0e84
Showing 33 changed files with 2,153 additions and 60 deletions.
9 changes: 8 additions & 1 deletion Cargo.lock


174 changes: 174 additions & 0 deletions pallets/asset/src/checkpoint/migrations.rs
@@ -0,0 +1,174 @@
use sp_runtime::runtime_logger::RuntimeLogger;
use sp_std::collections::btree_map::BTreeMap;

use super::*;

mod v1 {
    use super::*;
    use polymesh_primitives::Ticker;

    // Old (v1) storage layout, re-declared here so the Ticker-keyed entries
    // can be drained during the migration below.
    decl_storage! {
        trait Store for Module<T: Config> as Checkpoint {
            // This storage changed the Ticker key to AssetID.
            pub TotalSupply get(fn total_supply_at):
                double_map hasher(blake2_128_concat) Ticker, hasher(twox_64_concat) CheckpointId => polymesh_primitives::Balance;

            // This storage changed the Ticker key to AssetID.
            pub Balance get(fn balance_at_checkpoint):
                double_map hasher(blake2_128_concat) (Ticker, CheckpointId), hasher(twox_64_concat) IdentityId => polymesh_primitives::Balance;

            // This storage changed the Ticker key to AssetID.
            pub CheckpointIdSequence get(fn checkpoint_id_sequence):
                map hasher(blake2_128_concat) Ticker => CheckpointId;

            // This storage changed the Ticker key to AssetID.
            pub BalanceUpdates get(fn balance_updates):
                double_map hasher(blake2_128_concat) Ticker, hasher(twox_64_concat) IdentityId => Vec<CheckpointId>;

            // This storage changed the Ticker key to AssetID.
            pub Timestamps get(fn timestamps):
                double_map hasher(blake2_128_concat) Ticker, hasher(twox_64_concat) CheckpointId => Moment;

            // This storage changed the Ticker key to AssetID.
            pub ScheduleIdSequence get(fn schedule_id_sequence):
                map hasher(blake2_128_concat) Ticker => ScheduleId;

            // This storage changed the Ticker key to AssetID.
            pub CachedNextCheckpoints get(fn cached_next_checkpoints):
                map hasher(blake2_128_concat) Ticker => Option<NextCheckpoints>;

            // This storage changed the Ticker key to AssetID.
            pub ScheduledCheckpoints get(fn scheduled_checkpoints):
                double_map hasher(blake2_128_concat) Ticker, hasher(twox_64_concat) ScheduleId => Option<ScheduleCheckpoints>;

            // This storage changed the Ticker key to AssetID.
            pub ScheduleRefCount get(fn schedule_ref_count):
                double_map hasher(blake2_128_concat) Ticker, hasher(twox_64_concat) ScheduleId => u32;

            // This storage changed the Ticker key to AssetID.
            pub SchedulePoints get(fn schedule_points):
                double_map hasher(blake2_128_concat) Ticker, hasher(twox_64_concat) ScheduleId => Vec<CheckpointId>;
        }
    }

    decl_module! {
        pub struct Module<T: Config> for enum Call where origin: T::RuntimeOrigin { }
    }
}

pub(crate) fn migrate_to_v2<T: Config>() {
    RuntimeLogger::init();
    // Ticker -> AssetID mappings collected while draining, shared across all the maps below.
    let mut ticker_to_asset_id = BTreeMap::new();

    // Removes all elements from the old storage and inserts them into the new storage.

    let mut count = 0;
    log::info!("Updating types for the TotalSupply storage");
    v1::TotalSupply::drain().for_each(|(ticker, checkpoint_id, balance)| {
        count += 1;
        let asset_id = ticker_to_asset_id
            .entry(ticker)
            .or_insert(AssetID::from(ticker));
        TotalSupply::insert(asset_id, checkpoint_id, balance);
    });
    log::info!("{:?} items migrated", count);

    let mut count = 0;
    log::info!("Updating types for the Balance storage");
    v1::Balance::drain().for_each(|((ticker, checkpoint_id), did, balance)| {
        count += 1;
        let asset_id = ticker_to_asset_id
            .entry(ticker)
            .or_insert(AssetID::from(ticker));
        Balance::insert((asset_id, checkpoint_id), did, balance);
    });
    log::info!("{:?} items migrated", count);

    let mut count = 0;
    log::info!("Updating types for the CheckpointIdSequence storage");
    v1::CheckpointIdSequence::drain().for_each(|(ticker, checkpoint_id)| {
        count += 1;
        let asset_id = ticker_to_asset_id
            .entry(ticker)
            .or_insert(AssetID::from(ticker));
        CheckpointIdSequence::insert(asset_id, checkpoint_id);
    });
    log::info!("{:?} items migrated", count);

    let mut count = 0;
    log::info!("Updating types for the BalanceUpdates storage");
    v1::BalanceUpdates::drain().for_each(|(ticker, did, checkpoint_id)| {
        count += 1;
        let asset_id = ticker_to_asset_id
            .entry(ticker)
            .or_insert(AssetID::from(ticker));
        BalanceUpdates::insert(asset_id, did, checkpoint_id);
    });
    log::info!("{:?} items migrated", count);

    let mut count = 0;
    log::info!("Updating types for the Timestamps storage");
    v1::Timestamps::drain().for_each(|(ticker, checkpoint_id, when)| {
        count += 1;
        let asset_id = ticker_to_asset_id
            .entry(ticker)
            .or_insert(AssetID::from(ticker));
        Timestamps::insert(asset_id, checkpoint_id, when);
    });
    log::info!("{:?} items migrated", count);

    let mut count = 0;
    log::info!("Updating types for the ScheduleIdSequence storage");
    v1::ScheduleIdSequence::drain().for_each(|(ticker, schedule_id)| {
        count += 1;
        let asset_id = ticker_to_asset_id
            .entry(ticker)
            .or_insert(AssetID::from(ticker));
        ScheduleIdSequence::insert(asset_id, schedule_id);
    });
    log::info!("{:?} items migrated", count);

    let mut count = 0;
    log::info!("Updating types for the CachedNextCheckpoints storage");
    v1::CachedNextCheckpoints::drain().for_each(|(ticker, next_checkpoint)| {
        count += 1;
        let asset_id = ticker_to_asset_id
            .entry(ticker)
            .or_insert(AssetID::from(ticker));
        CachedNextCheckpoints::insert(asset_id, next_checkpoint);
    });
    log::info!("{:?} items migrated", count);

    let mut count = 0;
    log::info!("Updating types for the ScheduledCheckpoints storage");
    v1::ScheduledCheckpoints::drain().for_each(|(ticker, schedule_id, next_checkpoint)| {
        count += 1;
        let asset_id = ticker_to_asset_id
            .entry(ticker)
            .or_insert(AssetID::from(ticker));
        ScheduledCheckpoints::insert(asset_id, schedule_id, next_checkpoint);
    });
    log::info!("{:?} items migrated", count);

    let mut count = 0;
    log::info!("Updating types for the ScheduleRefCount storage");
    v1::ScheduleRefCount::drain().for_each(|(ticker, schedule_id, ref_count)| {
        count += 1;
        let asset_id = ticker_to_asset_id
            .entry(ticker)
            .or_insert(AssetID::from(ticker));
        ScheduleRefCount::insert(asset_id, schedule_id, ref_count);
    });
    log::info!("{:?} items migrated", count);

    let mut count = 0;
    log::info!("Updating types for the SchedulePoints storage");
    v1::SchedulePoints::drain().for_each(|(ticker, schedule_id, checkpoint_id)| {
        count += 1;
        let asset_id = ticker_to_asset_id
            .entry(ticker)
            .or_insert(AssetID::from(ticker));
        SchedulePoints::insert(asset_id, schedule_id, checkpoint_id);
    });
    log::info!("{:?} items migrated", count);
}
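Every block in migrate_to_v2 above applies the same re-keying pattern: drain an old Ticker-keyed map, derive the new key with AssetID::from(ticker), re-insert the value unchanged under the new key, and log how many items were moved. Below is a minimal, self-contained sketch of that pattern with hypothetical stand-in types (illustrative only, not part of the commit):

// Illustrative sketch of the drain/re-key pattern used by migrate_to_v2,
// with hypothetical stand-in types instead of the runtime's storage macros.
use std::collections::BTreeMap;

type Ticker = [u8; 12];
type AssetId = [u8; 16]; // hypothetical stand-in for polymesh_primitives::asset::AssetID
type Balance = u128;

// Stand-in for `AssetID::from(ticker)`; the real derivation lives in the runtime.
fn derive_asset_id(ticker: &Ticker) -> AssetId {
    let mut id = [0u8; 16];
    id[..12].copy_from_slice(ticker);
    id
}

// Drain the old Ticker-keyed map and re-insert every value under the derived AssetId key.
fn rekey(old: BTreeMap<Ticker, Balance>) -> BTreeMap<AssetId, Balance> {
    let mut cache: BTreeMap<Ticker, AssetId> = BTreeMap::new();
    let mut migrated = BTreeMap::new();
    let mut count = 0u64;
    for (ticker, balance) in old {
        let asset_id = *cache
            .entry(ticker)
            .or_insert_with(|| derive_asset_id(&ticker));
        migrated.insert(asset_id, balance);
        count += 1;
    }
    println!("{count} items migrated");
    migrated
}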
17 changes: 14 additions & 3 deletions pallets/asset/src/checkpoint/mod.rs
@@ -41,13 +41,15 @@

#[cfg(feature = "runtime-benchmarks")]
pub mod benchmarking;
mod migrations;

use codec::{Decode, Encode};
use frame_support::{
    decl_error, decl_module, decl_storage,
    dispatch::{DispatchError, DispatchResult},
    ensure,
    traits::UnixTime,
    weights::Weight,
};
use frame_system::ensure_root;
use sp_runtime::traits::SaturatedConversion;
@@ -64,14 +66,16 @@ use polymesh_common_utilities::{
    GC_DID,
};
use polymesh_primitives::asset::AssetID;
use polymesh_primitives::{asset::CheckpointId, storage_migration_ver, IdentityId, Moment};
use polymesh_primitives::{
    asset::CheckpointId, storage_migrate_on, storage_migration_ver, IdentityId, Moment,
};

use crate::Config;

type Asset<T> = crate::Module<T>;
type ExternalAgents<T> = pallet_external_agents::Module<T>;

storage_migration_ver!(1);
storage_migration_ver!(2);

decl_storage! {
    trait Store for Module<T: Config> as Checkpoint {
@@ -155,7 +159,7 @@ decl_storage! {
            double_map hasher(blake2_128_concat) AssetID, hasher(twox_64_concat) ScheduleId => Vec<CheckpointId>;

        /// Storage version.
        StorageVersion get(fn storage_version) build(|_| Version::new(1)): Version;
        StorageVersion get(fn storage_version) build(|_| Version::new(2)): Version;
    }
}

@@ -165,6 +169,13 @@ decl_module! {

        fn deposit_event() = default;

        fn on_runtime_upgrade() -> Weight {
            storage_migrate_on!(StorageVersion, 2, {
                migrations::migrate_to_v2::<T>();
            });
            Weight::zero()
        }

        /// Creates a single checkpoint at the current time.
        ///
        /// # Arguments
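Both on_runtime_upgrade hooks added in this commit gate their migration behind a storage version: storage_migration_ver!(N) declares the Version type, and storage_migrate_on!(StorageVersion, N, { ... }) is expected to run the block only while the stored version is still below N and then record N, so the migration executes at most once. A rough sketch of that gating idea, with assumed semantics for the macro rather than its actual expansion:

// Assumed semantics of the version gate (hypothetical; the real behaviour comes from
// the storage_migrate_on!/storage_migration_ver! macros in polymesh_primitives).
struct StorageVersion(u8);

fn on_runtime_upgrade(stored: &mut StorageVersion) {
    const TARGET: u8 = 2;
    if stored.0 < TARGET {
        // The one-off migration body would run here, e.g. migrations::migrate_to_v2::<T>().
        stored.0 = TARGET; // record the new version so the migration never re-runs
    }
}

Note that in the diff both hooks return Weight::zero(), so the cost of draining and re-inserting the storage items is not reflected in the reported upgrade weight.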
25 changes: 17 additions & 8 deletions pallets/asset/src/lib.rs
@@ -79,13 +79,15 @@ pub mod benchmarking;
pub mod checkpoint;

mod error;
mod migrations;
mod types;

use codec::{Decode, Encode};
use core::mem;
use currency::*;
use frame_support::dispatch::{DispatchError, DispatchResult};
use frame_support::traits::Get;
use frame_support::weights::Weight;
use frame_support::BoundedBTreeSet;
use frame_support::{decl_module, decl_storage, ensure};
use frame_system::ensure_root;
@@ -115,9 +117,9 @@ use polymesh_primitives::asset_metadata::{
};
use polymesh_primitives::settlement::InstructionId;
use polymesh_primitives::{
    extract_auth, storage_migration_ver, AssetIdentifier, Balance, Document, DocumentId,
    IdentityId, Memo, PortfolioId, PortfolioKind, PortfolioUpdateReason, SecondaryKey, Ticker,
    WeightMeter,
    extract_auth, storage_migrate_on, storage_migration_ver, AssetIdentifier, Balance, Document,
    DocumentId, IdentityId, Memo, PortfolioId, PortfolioKind, PortfolioUpdateReason, SecondaryKey,
    Ticker, WeightMeter,
};

pub use error::Error;
@@ -132,7 +134,7 @@ type Identity<T> = pallet_identity::Module<T>;
type Portfolio<T> = pallet_portfolio::Module<T>;
type Statistics<T> = pallet_statistics::Module<T>;

storage_migration_ver!(4);
storage_migration_ver!(5);

decl_storage! {
    trait Store for Module<T: Config> as Asset {
@@ -249,7 +251,7 @@ decl_storage! {
        pub AssetNonce: map hasher(identity) T::AccountId => u64;

        /// Storage version.
        StorageVersion get(fn storage_version) build(|_| Version::new(4)): Version;
        StorageVersion get(fn storage_version) build(|_| Version::new(5)): Version;
    }

    add_extra_genesis {
@@ -284,16 +286,23 @@ decl_module! {

        type Error = Error<T>;

        /// initialize the default event for this module
        fn deposit_event() = default;

        const AssetNameMaxLength: u32 = T::AssetNameMaxLength::get();
        const FundingRoundNameMaxLength: u32 = T::FundingRoundNameMaxLength::get();
        const AssetMetadataNameMaxLength: u32 = T::AssetMetadataNameMaxLength::get();
        const AssetMetadataValueMaxLength: u32 = T::AssetMetadataValueMaxLength::get();
        const AssetMetadataTypeDefMaxLength: u32 = T::AssetMetadataTypeDefMaxLength::get();
        const MaxAssetMediators: u32 = T::MaxAssetMediators::get();

        /// initialize the default event for this module
        fn deposit_event() = default;

        fn on_runtime_upgrade() -> Weight {
            storage_migrate_on!(StorageVersion, 5, {
                migrations::migrate_to_v5::<T>();
            });
            Weight::zero()
        }

        /// Registers a unique ticker or extends validity of an existing ticker.
        /// NB: Ticker validity does not get carry forward when renewing ticker.
        ///