diff --git a/.changelog/unreleased/features/346-implement-ics721-nft-transfer.md b/.changelog/unreleased/features/346-implement-ics721-nft-transfer.md new file mode 100644 index 0000000000..0a68909ffd --- /dev/null +++ b/.changelog/unreleased/features/346-implement-ics721-nft-transfer.md @@ -0,0 +1,2 @@ +- [ibc-app-nft-transfer] Implement ICS-721 NFT transfer application + ([\#346](https://github.com/cosmos/ibc-rs/issues/346)) diff --git a/Cargo.toml b/Cargo.toml index d32711f978..1fce9be1ae 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -25,6 +25,8 @@ members = [ "ibc-clients", "ibc-apps/ics20-transfer/types", "ibc-apps/ics20-transfer", + "ibc-apps/ics721-nft-transfer/types", + "ibc-apps/ics721-nft-transfer", "ibc-apps", "ibc-core/ics24-host/cosmos", "ibc-data-types", @@ -74,6 +76,7 @@ ibc-core-handler = { version = "0.49.1", path = "./ibc-core/ics25-handler", ibc-core-router = { version = "0.49.1", path = "./ibc-core/ics26-routing", default-features = false } ibc-client-tendermint = { version = "0.49.1", path = "./ibc-clients/ics07-tendermint", default-features = false } ibc-app-transfer = { version = "0.49.1", path = "./ibc-apps/ics20-transfer", default-features = false } +ibc-app-nft-transfer = { version = "0.49.1", path = "./ibc-apps/ics721-nft-transfer", default-features = false } ibc-core-client-context = { version = "0.49.1", path = "./ibc-core/ics02-client/context", default-features = false } ibc-core-client-types = { version = "0.49.1", path = "./ibc-core/ics02-client/types", default-features = false } @@ -87,6 +90,7 @@ ibc-core-router-types = { version = "0.49.1", path = "./ibc-core/ics26-rou ibc-client-tendermint-types = { version = "0.49.1", path = "./ibc-clients/ics07-tendermint/types", default-features = false } ibc-client-wasm-types = { version = "0.49.1", path = "./ibc-clients/ics08-wasm/types", default-features = false } ibc-app-transfer-types = { version = "0.49.1", path = "./ibc-apps/ics20-transfer/types", default-features = false } +ibc-app-nft-transfer-types = { version = "0.49.1", path = "./ibc-apps/ics721-nft-transfer/types", default-features = false } ibc-proto = { version = "0.41.0", default-features = false } diff --git a/ci/no-std-check/Cargo.lock b/ci/no-std-check/Cargo.lock index f41f4ff9c5..e0a8bf90de 100644 --- a/ci/no-std-check/Cargo.lock +++ b/ci/no-std-check/Cargo.lock @@ -281,9 +281,9 @@ checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" [[package]] name = "base64" -version = "0.21.6" +version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c79fed4cdb43e993fcdadc7e58a09fd0e3e649c4436fa11da71c9f1f3ee7feb9" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" [[package]] name = "base64ct" @@ -327,9 +327,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.4.1" +version = "2.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" +checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf" [[package]] name = "bitvec" @@ -518,14 +518,14 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.31" +version = "0.4.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38" +checksum = 
"41daef31d7a747c5c847246f36de49ced6f7403b4cdabc807a97b5cc184cda7a" dependencies = [ "android-tzdata", "iana-time-zone", "num-traits", - "windows-targets 0.48.5", + "windows-targets 0.52.0", ] [[package]] @@ -697,41 +697,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "darling" -version = "0.20.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0209d94da627ab5605dcccf08bb18afa5009cfbef48d8a8b7d7bdbc79be25c5e" -dependencies = [ - "darling_core", - "darling_macro", -] - -[[package]] -name = "darling_core" -version = "0.20.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "177e3443818124b357d8e76f53be906d60937f0d3a90773a664fa63fa253e621" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "strsim", - "syn 2.0.48", -] - -[[package]] -name = "darling_macro" -version = "0.20.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" -dependencies = [ - "darling_core", - "quote", - "syn 2.0.48", -] - [[package]] name = "der" version = "0.7.8" @@ -1009,12 +974,6 @@ dependencies = [ "paste", ] -[[package]] -name = "fnv" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" - [[package]] name = "form_urlencoded" version = "1.2.1" @@ -1228,9 +1187,9 @@ checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" [[package]] name = "hermit-abi" -version = "0.3.3" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7" +checksum = "5d3d0e0f38255e7fa3cf31335b3a56f05febd18025f4db5ef7a0cfb4f8da651f" [[package]] name = "hex" @@ -1368,7 +1327,7 @@ dependencies = [ name = "ibc-client-wasm-types" version = "0.49.1" dependencies = [ - "base64 0.21.6", + "base64 0.21.7", "displaydoc", "ibc-core-client", "ibc-core-host-types", @@ -1396,6 +1355,7 @@ dependencies = [ "ibc-core-handler", "ibc-core-host", "ibc-core-router", + "ibc-derive", "ibc-primitives", ] @@ -1453,7 +1413,6 @@ dependencies = [ "ibc-core-commitment-types", "ibc-core-handler-types", "ibc-core-host-types", - "ibc-derive", "ibc-primitives", "subtle-encoding", "tendermint", @@ -1627,7 +1586,6 @@ dependencies = [ name = "ibc-derive" version = "0.5.0" dependencies = [ - "darling", "proc-macro2", "quote", "syn 2.0.48", @@ -1652,7 +1610,7 @@ version = "0.41.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd4ee32b22d3b06f31529b956f4928e5c9a068d71e46cf6abfa19c31ca550553" dependencies = [ - "base64 0.21.6", + "base64 0.21.7", "borsh", "bytes", "flex-error", @@ -1683,12 +1641,6 @@ dependencies = [ "sha3", ] -[[package]] -name = "ident_case" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" - [[package]] name = "idna" version = "0.5.0" @@ -1765,7 +1717,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9aa4a0980c8379295100d70854354e78df2ee1c6ca0f96ffe89afeb3140e3a3d" dependencies = [ - "base64 0.21.6", + "base64 0.21.7", "serde", ] @@ -1815,18 +1767,18 @@ checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" [[package]] name = "js-sys" -version = "0.3.66" +version = "0.3.67" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "cee9c64da59eae3b50095c18d3e74f8b73c0b86d2792824ff01bbce68ba229ca" +checksum = "9a1d36f1235bc969acba30b7f5990b864423a6068a10f7c90ae8f0112e3a59d1" dependencies = [ "wasm-bindgen", ] [[package]] name = "keccak" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f6d5ed8676d904364de097082f4e7d240b571b67989ced0240f08b7f966f940" +checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" dependencies = [ "cpufeatures", ] @@ -1899,9 +1851,9 @@ checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" [[package]] name = "linux-raw-sys" -version = "0.4.12" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456" +checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" [[package]] name = "lock_api" @@ -1949,7 +1901,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b2cffa4ad52c6f791f4f8b15f0c05f9824b2ced1160e88cc393d64fff9a8ac64" dependencies = [ - "rustix 0.38.28", + "rustix 0.38.30", ] [[package]] @@ -2268,9 +2220,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.76" +version = "1.0.78" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c" +checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" dependencies = [ "unicode-ident", ] @@ -2433,13 +2385,13 @@ dependencies = [ [[package]] name = "regex" -version = "1.10.2" +version = "1.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" +checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.3", + "regex-automata 0.4.4", "regex-syntax 0.8.2", ] @@ -2454,9 +2406,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f" +checksum = "3b7fa1134405e2ec9353fd416b17f8dacd46c473d7d3fd1cf202706a14eb792a" dependencies = [ "aho-corasick", "memchr", @@ -2521,14 +2473,14 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.28" +version = "0.38.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72e572a5e8ca657d7366229cdde4bd14c4eb5499a9573d4d366fe1b599daa316" +checksum = "322394588aaf33c24007e8bb3238ee3e4c5c09c084ab32bc73890b99ff326bca" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.4.2", "errno", "libc", - "linux-raw-sys 0.4.12", + "linux-raw-sys 0.4.13", "windows-sys 0.52.0", ] @@ -2797,9 +2749,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.11.2" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970" +checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7" [[package]] name = "sp-application-crypto" @@ -3143,9 +3095,9 @@ dependencies = [ [[package]] name = "ss58-registry" -version = "1.45.0" +version = "1.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c0c74081753a8ce1c8eb10b9f262ab6f7017e5ad3317c17a54c7ab65fcb3c6e" +checksum = 
"b1114ee5900b8569bbc8b1a014a942f937b752af4b44f4607430b5f86cedaac0" dependencies = [ "Inflector", "num-format", @@ -3168,12 +3120,6 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" -[[package]] -name = "strsim" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" - [[package]] name = "substrate-bip39" version = "0.4.5" @@ -3566,9 +3512,9 @@ dependencies = [ [[package]] name = "unicode-bidi" -version = "0.3.14" +version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f2528f27a9eb2b21e69c95319b30bd0efd85d09c379741b0f78ea1d86be2416" +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" [[package]] name = "unicode-ident" @@ -3662,9 +3608,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.89" +version = "0.2.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ed0d4f68a3015cc185aff4db9506a015f4b96f95303897bfa23f846db54064e" +checksum = "b1223296a201415c7fad14792dbefaace9bd52b62d33453ade1c5b5f07555406" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -3672,9 +3618,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.89" +version = "0.2.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b56f625e64f3a1084ded111c4d5f477df9f8c92df113852fa5a374dbda78826" +checksum = "fcdc935b63408d58a32f8cc9738a0bffd8f05cc7c002086c6ef20b7312ad9dcd" dependencies = [ "bumpalo", "log", @@ -3687,9 +3633,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.89" +version = "0.2.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0162dbf37223cd2afce98f3d0785506dcb8d266223983e4b5b525859e6e182b2" +checksum = "3e4c238561b2d428924c49815533a8b9121c664599558a5d9ec51f8a1740a999" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -3697,9 +3643,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.89" +version = "0.2.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283" +checksum = "bae1abb6806dc1ad9e560ed242107c0f6c84335f1749dd4e8ddb012ebd5e25a7" dependencies = [ "proc-macro2", "quote", @@ -3710,9 +3656,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.89" +version = "0.2.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ab9b36309365056cd639da3134bf87fa8f3d86008abf99e612384a6eecd459f" +checksum = "4d91413b1c31d7539ba5ef2451af3f0b833a005eb27a631cec32bc0635a8602b" [[package]] name = "wasmparser" @@ -4096,9 +4042,9 @@ checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" [[package]] name = "winnow" -version = "0.5.33" +version = "0.5.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7520bbdec7211caa7c4e682eb1fbe07abe20cee6756b6e00f537c82c11816aa" +checksum = "b7cf47b659b318dccbd69cc4797a39ae128f533dce7902a1096044d1967b9c16" dependencies = [ "memchr", ] diff --git a/ibc-apps/Cargo.toml b/ibc-apps/Cargo.toml index 9057c5e7fa..a6fe583b3b 100644 --- a/ibc-apps/Cargo.toml +++ b/ibc-apps/Cargo.toml @@ -17,12 +17,14 @@ description = """ all-features = true [dependencies] 
-ibc-app-transfer = { workspace = true } +ibc-app-transfer = { workspace = true } +ibc-app-nft-transfer = { workspace = true, optional = true, features = [ "std", "serde", "schema", "borsh", "parity-scale-codec" ] } [features] default = ["std"] std = [ "ibc-app-transfer/std", + "nft-transfer", ] serde = [ "ibc-app-transfer/serde", @@ -38,3 +40,6 @@ borsh = [ parity-scale-codec = [ "ibc-app-transfer/parity-scale-codec", ] +nft-transfer = [ + "ibc-app-nft-transfer" +] diff --git a/ibc-apps/README.md b/ibc-apps/README.md index 71c8848dfc..103e68bc2b 100644 --- a/ibc-apps/README.md +++ b/ibc-apps/README.md @@ -26,6 +26,11 @@ applications: - [ibc-app-transfer](./../ibc-apps/ics20-transfer) - [ibc-app-transfer-types](./../ibc-apps/ics20-transfer/types) +### ICS-721: Non-Fungible Token Transfer Application + +- [ibc-app-nft-transfer](./../ibc-apps/ics721-nft-transfer) +- [ibc-app-nft-transfer-types](./../ibc-apps/ics721-nft-transfer/types) + ## Contributing IBC is specified in English in the [cosmos/ibc diff --git a/ibc-apps/ics721-nft-transfer/Cargo.toml b/ibc-apps/ics721-nft-transfer/Cargo.toml new file mode 100644 index 0000000000..aaba188625 --- /dev/null +++ b/ibc-apps/ics721-nft-transfer/Cargo.toml @@ -0,0 +1,53 @@ +[package] +name = "ibc-app-nft-transfer" +version = { workspace = true } +authors = { workspace = true } +edition = { workspace = true } +rust-version = { workspace = true } +license = { workspace = true } +repository = { workspace = true } +keywords = ["cosmos", "ibc", "nft", "transfer", "ics721"] +readme = "./../README.md" +description = """ + Maintained by `ibc-rs`, contains the implementation of the ICS-721 Non-Fungible Token Transfer + application logic and re-exports essential data structures and domain types from + `ibc-app-nft-transfer-types` crate. +""" + +[package.metadata.docs.rs] +all-features = true + +[dependencies] +# external dependencies +serde_json = { workspace = true, optional = true } + +# ibc dependencies +ibc-app-nft-transfer-types = { workspace = true } +ibc-core = { workspace = true } + +[features] +default = ["std"] +std = [ + "ibc-app-nft-transfer-types/std", + "ibc-core/std", + "serde_json/std", +] +serde = [ + "ibc-app-nft-transfer-types/serde", + "ibc-core/serde", + "serde_json" +] +schema = [ + "ibc-app-nft-transfer-types/schema", + "ibc-core/schema", + "serde", + "std", +] +borsh = [ + "ibc-app-nft-transfer-types/borsh", + "ibc-core/borsh", +] +parity-scale-codec = [ + "ibc-app-nft-transfer-types/parity-scale-codec", + "ibc-core/parity-scale-codec", +] diff --git a/ibc-apps/ics721-nft-transfer/src/context.rs b/ibc-apps/ics721-nft-transfer/src/context.rs new file mode 100644 index 0000000000..34fec88dd3 --- /dev/null +++ b/ibc-apps/ics721-nft-transfer/src/context.rs @@ -0,0 +1,199 @@ +//! Defines the required context traits for ICS-721 to interact with host +//! machine. 
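// Editor's note — illustrative sketch, not part of this patch. It shows one way a
// host chain might back the `NftContext` trait defined below with its own storage
// record; the `HostNft` type and its field names are hypothetical.
struct HostNft {
    class_id: ClassId,
    token_id: TokenId,
    uri: Option<TokenUri>,
    data: Option<TokenData>,
}

impl NftContext for HostNft {
    fn get_class_id(&self) -> &ClassId {
        &self.class_id
    }

    fn get_id(&self) -> &TokenId {
        &self.token_id
    }

    fn get_uri(&self) -> Option<&TokenUri> {
        self.uri.as_ref()
    }

    fn get_data(&self) -> Option<&TokenData> {
        self.data.as_ref()
    }
}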
+use ibc_core::host::types::identifiers::{ChannelId, PortId}; +use ibc_core::primitives::prelude::*; +use ibc_core::primitives::Signer; + +use crate::types::error::NftTransferError; +use crate::types::{ + ClassData, ClassId, ClassUri, Memo, PrefixedClassId, TokenData, TokenId, TokenUri, +}; + +pub trait NftContext { + /// Get the class ID of the token + fn get_class_id(&self) -> &ClassId; + + /// Get the token ID + fn get_id(&self) -> &TokenId; + + /// Get the token URI + fn get_uri(&self) -> Option<&TokenUri>; + + /// Get the token Data + fn get_data(&self) -> Option<&TokenData>; +} + +pub trait NftClassContext { + /// Get the class ID + fn get_id(&self) -> &ClassId; + + /// Get the class URI + fn get_uri(&self) -> Option<&ClassUri>; + + /// Get the class Data + fn get_data(&self) -> Option<&ClassData>; +} + +/// Read-only methods required in NFT transfer validation context. +pub trait NftTransferValidationContext { + type AccountId: TryFrom + PartialEq; + type Nft: NftContext; + type NftClass: NftClassContext; + + /// get_port returns the portID for the transfer module. + fn get_port(&self) -> Result; + + /// Returns Ok() if the host chain supports sending NFTs. + fn can_send_nft(&self) -> Result<(), NftTransferError>; + + /// Returns Ok() if the host chain supports receiving NFTs. + fn can_receive_nft(&self) -> Result<(), NftTransferError>; + + /// Validates that the NFT can be created or updated successfully. + /// + /// Note: some existing ICS-721 implementations may not strictly adhere to + /// the ICS-721 class data structure. The + /// [`ClassData`] associated with this + /// implementation can take any valid JSON format. If your project requires + /// ICS-721 format for the `ClassData`, ensure correctness by checking with + /// [`parse_as_ics721_data()`](crate::types::Data::parse_as_ics721_data). + fn create_or_update_class_validate( + &self, + class_id: &PrefixedClassId, + class_uri: Option<&ClassUri>, + class_data: Option<&ClassData>, + ) -> Result<(), NftTransferError>; + + /// Validates that the tokens can be escrowed successfully. + /// + /// The owner of the NFT should be checked in this validation. + /// `memo` field allows to incorporate additional contextual details in the + /// escrow validation. + fn escrow_nft_validate( + &self, + from_account: &Self::AccountId, + port_id: &PortId, + channel_id: &ChannelId, + class_id: &PrefixedClassId, + token_id: &TokenId, + memo: &Memo, + ) -> Result<(), NftTransferError>; + + /// Validates that the NFT can be unescrowed successfully. + fn unescrow_nft_validate( + &self, + to_account: &Self::AccountId, + port_id: &PortId, + channel_id: &ChannelId, + class_id: &PrefixedClassId, + token_id: &TokenId, + ) -> Result<(), NftTransferError>; + + /// Validates the receiver account and the NFT input + /// + /// Note: some existing ICS-721 implementations may not strictly adhere to + /// the ICS-721 token data structure. The + /// [`TokenData`] associated with this + /// implementation can take any valid JSON format. If your project requires + /// ICS-721 format for `TokenData`, ensure correctness by checking with + /// [`parse_as_ics721_data()`](crate::types::Data::parse_as_ics721_data). + fn mint_nft_validate( + &self, + account: &Self::AccountId, + class_id: &PrefixedClassId, + token_id: &TokenId, + token_uri: Option<&TokenUri>, + token_data: Option<&TokenData>, + ) -> Result<(), NftTransferError>; + + /// Validates the sender account and the coin input before burning. + /// + /// The owner of the NFT should be checked in this validation. 
+ /// `memo` field allows to incorporate additional contextual details in the + /// burn validation. + fn burn_nft_validate( + &self, + account: &Self::AccountId, + class_id: &PrefixedClassId, + token_id: &TokenId, + memo: &Memo, + ) -> Result<(), NftTransferError>; + + /// Returns a hash of the prefixed class ID and the token ID. + /// Implement only if the host chain supports hashed class ID and token ID. + fn token_hash_string( + &self, + _class_id: &PrefixedClassId, + _token_id: &TokenId, + ) -> Option { + None + } + + /// Returns the NFT + fn get_nft( + &self, + class_id: &PrefixedClassId, + token_id: &TokenId, + ) -> Result; + + /// Returns the NFT class + fn get_nft_class(&self, class_id: &PrefixedClassId) + -> Result; +} + +/// Read-write methods required in NFT transfer execution context. +pub trait NftTransferExecutionContext: NftTransferValidationContext { + /// Creates a new NFT Class identified by classId. If the class ID already exists, it updates the class metadata. + fn create_or_update_class_execute( + &self, + class_id: &PrefixedClassId, + class_uri: Option<&ClassUri>, + class_data: Option<&ClassData>, + ) -> Result<(), NftTransferError>; + + /// Executes the escrow of the NFT in a user account. + /// + /// `memo` field allows to incorporate additional contextual details in the + /// escrow execution. + fn escrow_nft_execute( + &mut self, + from_account: &Self::AccountId, + port_id: &PortId, + channel_id: &ChannelId, + class_id: &PrefixedClassId, + token_id: &TokenId, + memo: &Memo, + ) -> Result<(), NftTransferError>; + + /// Executes the unescrow of the NFT in a user account. + fn unescrow_nft_execute( + &mut self, + to_account: &Self::AccountId, + port_id: &PortId, + channel_id: &ChannelId, + class_id: &PrefixedClassId, + token_id: &TokenId, + ) -> Result<(), NftTransferError>; + + /// Executes minting of the NFT in a user account. + fn mint_nft_execute( + &mut self, + account: &Self::AccountId, + class_id: &PrefixedClassId, + token_id: &TokenId, + token_uri: Option<&TokenUri>, + token_data: Option<&TokenData>, + ) -> Result<(), NftTransferError>; + + /// Executes burning of the NFT in a user account. + /// + /// `memo` field allows to incorporate additional contextual details in the + /// burn execution. + fn burn_nft_execute( + &mut self, + account: &Self::AccountId, + class_id: &PrefixedClassId, + token_id: &TokenId, + memo: &Memo, + ) -> Result<(), NftTransferError>; +} diff --git a/ibc-apps/ics721-nft-transfer/src/handler/mod.rs b/ibc-apps/ics721-nft-transfer/src/handler/mod.rs new file mode 100644 index 0000000000..2adbb47f6d --- /dev/null +++ b/ibc-apps/ics721-nft-transfer/src/handler/mod.rs @@ -0,0 +1,85 @@ +//! Implements IBC handlers responsible for processing Non-Fungible Token +//! Transfers (ICS-721) messages. 
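// Editor's note — illustrative sketch, not part of this patch. The refund helpers
// defined below are normally driven from the packet lifecycle callbacks (see
// `src/module.rs` in this same diff): the raw packet bytes are decoded into
// `PacketData` with `serde_json`, and a refund is performed when a transfer times
// out or is acknowledged with an error. The `refund_on_timeout` function name here
// is hypothetical.
fn refund_on_timeout(
    ctx: &mut impl NftTransferExecutionContext,
    packet: &Packet,
) -> Result<(), NftTransferError> {
    let data: PacketData = serde_json::from_slice(&packet.data)
        .map_err(|_| NftTransferError::PacketDataDeserialization)?;
    // NFTs escrowed on send are released back to the sender; vouchers burned on
    // send are minted back to the sender.
    refund_packet_nft_execute(ctx, packet, &data)
}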
+mod on_recv_packet; +mod send_transfer; + +use ibc_core::channel::types::packet::Packet; +pub use on_recv_packet::*; +pub use send_transfer::*; + +use crate::context::{NftTransferExecutionContext, NftTransferValidationContext}; +use crate::types::error::NftTransferError; +use crate::types::is_sender_chain_source; +use crate::types::packet::PacketData; + +pub fn refund_packet_nft_execute( + ctx_a: &mut impl NftTransferExecutionContext, + packet: &Packet, + data: &PacketData, +) -> Result<(), NftTransferError> { + let sender = data + .sender + .clone() + .try_into() + .map_err(|_| NftTransferError::ParseAccountFailure)?; + + if is_sender_chain_source( + packet.port_id_on_a.clone(), + packet.chan_id_on_a.clone(), + &data.class_id, + ) { + data.token_ids.as_ref().iter().try_for_each(|token_id| { + ctx_a.unescrow_nft_execute( + &sender, + &packet.port_id_on_a, + &packet.chan_id_on_a, + &data.class_id, + token_id, + ) + }) + } + // mint vouchers back to sender + else { + for (i, token_id) in data.token_ids.0.iter().enumerate() { + let token_uri = data.token_uris.as_ref().and_then(|uris| uris.get(i)); + let token_data = data.token_data.as_ref().and_then(|data| data.get(i)); + ctx_a.mint_nft_execute(&sender, &data.class_id, token_id, token_uri, token_data)?; + } + Ok(()) + } +} + +pub fn refund_packet_nft_validate( + ctx_a: &impl NftTransferValidationContext, + packet: &Packet, + data: &PacketData, +) -> Result<(), NftTransferError> { + let sender = data + .sender + .clone() + .try_into() + .map_err(|_| NftTransferError::ParseAccountFailure)?; + + if is_sender_chain_source( + packet.port_id_on_a.clone(), + packet.chan_id_on_a.clone(), + &data.class_id, + ) { + data.token_ids.0.iter().try_for_each(|token_id| { + ctx_a.unescrow_nft_validate( + &sender, + &packet.port_id_on_a, + &packet.chan_id_on_a, + &data.class_id, + token_id, + ) + }) + } else { + for (i, token_id) in data.token_ids.0.iter().enumerate() { + let token_uri = data.token_uris.as_ref().and_then(|uris| uris.get(i)); + let token_data = data.token_data.as_ref().and_then(|data| data.get(i)); + ctx_a.mint_nft_validate(&sender, &data.class_id, token_id, token_uri, token_data)?; + } + Ok(()) + } +} diff --git a/ibc-apps/ics721-nft-transfer/src/handler/on_recv_packet.rs b/ibc-apps/ics721-nft-transfer/src/handler/on_recv_packet.rs new file mode 100644 index 0000000000..8782bf7a2f --- /dev/null +++ b/ibc-apps/ics721-nft-transfer/src/handler/on_recv_packet.rs @@ -0,0 +1,137 @@ +use ibc_core::channel::types::packet::Packet; +use ibc_core::primitives::prelude::*; +use ibc_core::router::types::module::ModuleExtras; + +use crate::context::NftTransferExecutionContext; +use crate::types::error::NftTransferError; +use crate::types::events::TokenTraceEvent; +use crate::types::packet::PacketData; +use crate::types::{is_receiver_chain_source, TracePrefix}; + +/// This function handles the transfer receiving logic. +/// +/// Note that `send/mint_nft_validate` steps are performed on the host chain +/// to validate accounts and NFT info. But the result is then used for execution +/// on the IBC side, including storing acknowledgements and emitting events. 
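///
/// # Example (editor's illustrative sketch, not part of the original patch)
///
/// How the source/sink decision below plays out for a packet sent from
/// `("transfer", "channel-7")` on chain A and received on `("transfer", "channel-0")`
/// of chain B (the port and channel values are example values):
///
/// ```ignore
/// use ibc_core::host::types::identifiers::{ChannelId, PortId};
///
/// // Class carried in the packet data, as stored on the sending chain A.
/// let mut class_id: PrefixedClassId = "transfer/channel-7/myclass".parse()?;
///
/// // The trace starts with the sender's (port, channel), meaning the receiver B
/// // originally sent the token: strip the prefix and unescrow `myclass` on B.
/// let prefix = TracePrefix::new(PortId::transfer(), ChannelId::new(7));
/// class_id.remove_trace_prefix(&prefix);
/// assert_eq!(class_id.to_string(), "myclass");
///
/// // Otherwise chain A is the source: B prepends its own (port, channel) and
/// // mints a voucher for "transfer/channel-0/myclass".
/// ```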
+pub fn process_recv_packet_execute( + ctx_b: &mut Ctx, + packet: &Packet, + data: PacketData, +) -> Result> +where + Ctx: NftTransferExecutionContext, +{ + ctx_b + .can_receive_nft() + .map_err(|err| (ModuleExtras::empty(), err))?; + + let receiver_account = data + .receiver + .clone() + .try_into() + .map_err(|_| (ModuleExtras::empty(), NftTransferError::ParseAccountFailure))?; + + let extras = if is_receiver_chain_source( + packet.port_id_on_a.clone(), + packet.chan_id_on_a.clone(), + &data.class_id, + ) { + // sender chain is not the source, unescrow the NFT + let prefix = TracePrefix::new(packet.port_id_on_a.clone(), packet.chan_id_on_a.clone()); + let class_id = { + let mut c = data.class_id; + c.remove_trace_prefix(&prefix); + c + }; + + // Note: the validation is called before the execution. + // Refer to ICS-20 `process_recv_packet_execute()`. + for token_id in data.token_ids.as_ref() { + ctx_b + .unescrow_nft_validate( + &receiver_account, + &packet.port_id_on_b, + &packet.chan_id_on_b, + &class_id, + token_id, + ) + .map_err(|nft_error| (ModuleExtras::empty(), nft_error))?; + ctx_b + .unescrow_nft_execute( + &receiver_account, + &packet.port_id_on_b, + &packet.chan_id_on_b, + &class_id, + token_id, + ) + .map_err(|nft_error| (ModuleExtras::empty(), nft_error))?; + } + + ModuleExtras::empty() + } else { + // sender chain is the source, mint vouchers + let prefix = TracePrefix::new(packet.port_id_on_b.clone(), packet.chan_id_on_b.clone()); + let class_id = { + let mut c = data.class_id; + c.add_trace_prefix(prefix); + c + }; + + let mut extras = ModuleExtras { + events: vec![], + log: Vec::new(), + }; + for (i, token_id) in data.token_ids.0.iter().enumerate() { + let token_uri = data.token_uris.as_ref().and_then(|uris| uris.get(i)); + let token_data = data.token_data.as_ref().and_then(|data| data.get(i)); + + let trace_event = TokenTraceEvent { + trace_hash: ctx_b.token_hash_string(&class_id, token_id), + class: class_id.clone(), + token: token_id.clone(), + }; + extras.events.push(trace_event.into()); + + // Note: the validation is called before the execution. + // Refer to ICS-20 `process_recv_packet_execute()`. 
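            // Editor's note: the class create/update that follows runs once per token
            // in the batch with identical `class_id`, `class_uri`, and `class_data`
            // arguments, so hosts may want to make it idempotent.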
+ + ctx_b + .create_or_update_class_validate( + &class_id, + data.class_uri.as_ref(), + data.class_data.as_ref(), + ) + .map_err(|nft_error| (ModuleExtras::empty(), nft_error))?; + ctx_b + .create_or_update_class_execute( + &class_id, + data.class_uri.as_ref(), + data.class_data.as_ref(), + ) + .map_err(|nft_error| (ModuleExtras::empty(), nft_error))?; + + ctx_b + .mint_nft_validate( + &receiver_account, + &class_id, + token_id, + token_uri, + token_data, + ) + .map_err(|nft_error| (extras.clone(), nft_error))?; + ctx_b + .mint_nft_execute( + &receiver_account, + &class_id, + token_id, + token_uri, + token_data, + ) + .map_err(|nft_error| (extras.clone(), nft_error))?; + } + + extras + }; + + Ok(extras) +} diff --git a/ibc-apps/ics721-nft-transfer/src/handler/send_transfer.rs b/ibc-apps/ics721-nft-transfer/src/handler/send_transfer.rs new file mode 100644 index 0000000000..2701647017 --- /dev/null +++ b/ibc-apps/ics721-nft-transfer/src/handler/send_transfer.rs @@ -0,0 +1,253 @@ +use ibc_core::channel::context::{SendPacketExecutionContext, SendPacketValidationContext}; +use ibc_core::channel::handler::{send_packet_execute, send_packet_validate}; +use ibc_core::channel::types::packet::Packet; +use ibc_core::handler::types::events::MessageEvent; +use ibc_core::host::types::path::{ChannelEndPath, SeqSendPath}; +use ibc_core::primitives::prelude::*; +use ibc_core::router::types::event::ModuleEvent; + +use crate::context::{ + NftClassContext, NftContext, NftTransferExecutionContext, NftTransferValidationContext, +}; +use crate::types::error::NftTransferError; +use crate::types::events::TransferEvent; +use crate::types::msgs::transfer::MsgTransfer; +use crate::types::{is_sender_chain_source, MODULE_ID_STR}; + +/// Initiate a token transfer. Equivalent to calling [`send_nft_transfer_validate`], followed by [`send_nft_transfer_execute`]. +pub fn send_nft_transfer( + send_packet_ctx_a: &mut SendPacketCtx, + transfer_ctx: &mut TransferCtx, + msg: MsgTransfer, +) -> Result<(), NftTransferError> +where + SendPacketCtx: SendPacketExecutionContext, + TransferCtx: NftTransferExecutionContext, +{ + send_nft_transfer_validate(send_packet_ctx_a, transfer_ctx, msg.clone())?; + send_nft_transfer_execute(send_packet_ctx_a, transfer_ctx, msg) +} + +/// Validates the NFT transfer +pub fn send_nft_transfer_validate( + send_packet_ctx_a: &SendPacketCtx, + transfer_ctx: &TransferCtx, + msg: MsgTransfer, +) -> Result<(), NftTransferError> +where + SendPacketCtx: SendPacketValidationContext, + TransferCtx: NftTransferValidationContext, +{ + transfer_ctx.can_send_nft()?; + + let chan_end_path_on_a = ChannelEndPath::new(&msg.port_id_on_a, &msg.chan_id_on_a); + let chan_end_on_a = send_packet_ctx_a.channel_end(&chan_end_path_on_a)?; + + let port_id_on_b = chan_end_on_a.counterparty().port_id().clone(); + let chan_id_on_b = chan_end_on_a + .counterparty() + .channel_id() + .ok_or_else(|| NftTransferError::DestinationChannelNotFound { + port_id: msg.port_id_on_a.clone(), + channel_id: msg.chan_id_on_a.clone(), + })? 
+ .clone(); + + let seq_send_path_on_a = SeqSendPath::new(&msg.port_id_on_a, &msg.chan_id_on_a); + let sequence = send_packet_ctx_a.get_next_sequence_send(&seq_send_path_on_a)?; + + let sender: TransferCtx::AccountId = msg + .packet_data + .sender + .clone() + .try_into() + .map_err(|_| NftTransferError::ParseAccountFailure)?; + + let mut packet_data = msg.packet_data; + let class_id = &packet_data.class_id; + let token_ids = &packet_data.token_ids; + // overwrite even if they are set in MsgTransfer + if let Some(uris) = &mut packet_data.token_uris { + uris.clear(); + } + if let Some(data) = &mut packet_data.token_data { + data.clear(); + } + for token_id in token_ids.as_ref() { + if is_sender_chain_source(msg.port_id_on_a.clone(), msg.chan_id_on_a.clone(), class_id) { + transfer_ctx.escrow_nft_validate( + &sender, + &msg.port_id_on_a, + &msg.chan_id_on_a, + class_id, + token_id, + &packet_data.memo.clone().unwrap_or_default(), + )?; + } else { + transfer_ctx.burn_nft_validate( + &sender, + class_id, + token_id, + &packet_data.memo.clone().unwrap_or_default(), + )?; + } + let nft = transfer_ctx.get_nft(class_id, token_id)?; + // Set the URI and the data if both exists + if let (Some(uri), Some(data)) = (nft.get_uri(), nft.get_data()) { + match &mut packet_data.token_uris { + Some(uris) => uris.push(uri.clone()), + None => packet_data.token_uris = Some(vec![uri.clone()]), + } + match &mut packet_data.token_data { + Some(token_data) => token_data.push(data.clone()), + None => packet_data.token_data = Some(vec![data.clone()]), + } + } + } + + packet_data.validate_basic()?; + + let nft_class = transfer_ctx.get_nft_class(class_id)?; + packet_data.class_uri = nft_class.get_uri().cloned(); + packet_data.class_data = nft_class.get_data().cloned(); + + let packet = { + let data = serde_json::to_vec(&packet_data) + .expect("PacketData's infallible Serialize impl failed"); + + Packet { + seq_on_a: sequence, + port_id_on_a: msg.port_id_on_a, + chan_id_on_a: msg.chan_id_on_a, + port_id_on_b, + chan_id_on_b, + data, + timeout_height_on_b: msg.timeout_height_on_b, + timeout_timestamp_on_b: msg.timeout_timestamp_on_b, + } + }; + + send_packet_validate(send_packet_ctx_a, &packet)?; + + Ok(()) +} + +/// Executes the token transfer. A prior call to [`send_nft_transfer_validate`] MUST have succeeded. +pub fn send_nft_transfer_execute( + send_packet_ctx_a: &mut SendPacketCtx, + transfer_ctx: &mut TransferCtx, + msg: MsgTransfer, +) -> Result<(), NftTransferError> +where + SendPacketCtx: SendPacketExecutionContext, + TransferCtx: NftTransferExecutionContext, +{ + let chan_end_path_on_a = ChannelEndPath::new(&msg.port_id_on_a, &msg.chan_id_on_a); + let chan_end_on_a = send_packet_ctx_a.channel_end(&chan_end_path_on_a)?; + + let port_on_b = chan_end_on_a.counterparty().port_id().clone(); + let chan_on_b = chan_end_on_a + .counterparty() + .channel_id() + .ok_or_else(|| NftTransferError::DestinationChannelNotFound { + port_id: msg.port_id_on_a.clone(), + channel_id: msg.chan_id_on_a.clone(), + })? 
+ .clone(); + + // get the next sequence + let seq_send_path_on_a = SeqSendPath::new(&msg.port_id_on_a, &msg.chan_id_on_a); + let sequence = send_packet_ctx_a.get_next_sequence_send(&seq_send_path_on_a)?; + + let sender = msg + .packet_data + .sender + .clone() + .try_into() + .map_err(|_| NftTransferError::ParseAccountFailure)?; + + let mut packet_data = msg.packet_data; + let class_id = &packet_data.class_id; + let token_ids = &packet_data.token_ids; + // overwrite even if they are set in MsgTransfer + if let Some(uris) = &mut packet_data.token_uris { + uris.clear(); + } + if let Some(data) = &mut packet_data.token_data { + data.clear(); + } + for token_id in token_ids.as_ref() { + if is_sender_chain_source(msg.port_id_on_a.clone(), msg.chan_id_on_a.clone(), class_id) { + transfer_ctx.escrow_nft_execute( + &sender, + &msg.port_id_on_a, + &msg.chan_id_on_a, + class_id, + token_id, + &packet_data.memo.clone().unwrap_or_default(), + )?; + } else { + transfer_ctx.burn_nft_execute( + &sender, + class_id, + token_id, + &packet_data.memo.clone().unwrap_or_default(), + )?; + } + let nft = transfer_ctx.get_nft(class_id, token_id)?; + // Set the URI and the data if both exists + if let (Some(uri), Some(data)) = (nft.get_uri(), nft.get_data()) { + match &mut packet_data.token_uris { + Some(uris) => uris.push(uri.clone()), + None => packet_data.token_uris = Some(vec![uri.clone()]), + } + match &mut packet_data.token_data { + Some(token_data) => token_data.push(data.clone()), + None => packet_data.token_data = Some(vec![data.clone()]), + } + } + } + + let nft_class = transfer_ctx.get_nft_class(class_id)?; + packet_data.class_uri = nft_class.get_uri().cloned(); + packet_data.class_data = nft_class.get_data().cloned(); + + let packet = { + let data = { + serde_json::to_vec(&packet_data).expect("PacketData's infallible Serialize impl failed") + }; + + Packet { + seq_on_a: sequence, + port_id_on_a: msg.port_id_on_a, + chan_id_on_a: msg.chan_id_on_a, + port_id_on_b: port_on_b, + chan_id_on_b: chan_on_b, + data, + timeout_height_on_b: msg.timeout_height_on_b, + timeout_timestamp_on_b: msg.timeout_timestamp_on_b, + } + }; + + send_packet_execute(send_packet_ctx_a, packet)?; + + { + send_packet_ctx_a.log_message(format!( + "IBC NFT transfer: {} --({}, [{}])--> {}", + packet_data.sender, class_id, token_ids, packet_data.receiver + ))?; + + let transfer_event = TransferEvent { + sender: packet_data.sender, + receiver: packet_data.receiver, + class: packet_data.class_id, + tokens: packet_data.token_ids, + memo: packet_data.memo.unwrap_or_default(), + }; + send_packet_ctx_a.emit_ibc_event(ModuleEvent::from(transfer_event).into())?; + + send_packet_ctx_a.emit_ibc_event(MessageEvent::Module(MODULE_ID_STR.to_string()).into())?; + } + + Ok(()) +} diff --git a/ibc-apps/ics721-nft-transfer/src/lib.rs b/ibc-apps/ics721-nft-transfer/src/lib.rs new file mode 100644 index 0000000000..66693ac1d6 --- /dev/null +++ b/ibc-apps/ics721-nft-transfer/src/lib.rs @@ -0,0 +1,33 @@ +//! Implementation of the IBC [Non-Fungible Token +//! Transfer](https://github.com/cosmos/ibc/blob/main/spec/app/ics-721-nft-transfer/README.md) +//! (ICS-721) application logic. 
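//!
//! # Example (editor's illustrative sketch, not part of the original patch)
//!
//! A host that implements the context traits in `context` can initiate an outgoing
//! transfer through the handler re-exported from `handler`; the helper runs the
//! validation step followed by the execution step. The variable names below are
//! hypothetical.
//!
//! ```ignore
//! use ibc_app_nft_transfer::handler::send_nft_transfer;
//!
//! // `send_packet_ctx` implements `SendPacketExecutionContext`, `nft_ctx`
//! // implements `NftTransferExecutionContext`, and `msg` is a `MsgTransfer`
//! // built from the user's request.
//! send_nft_transfer(&mut send_packet_ctx, &mut nft_ctx, msg)?;
//! ```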
+#![no_std] +#![forbid(unsafe_code)] +#![cfg_attr(not(test), deny(clippy::unwrap_used))] +#![cfg_attr(not(test), deny(clippy::disallowed_methods, clippy::disallowed_types))] +#![deny( + warnings, + trivial_casts, + trivial_numeric_casts, + unused_import_braces, + unused_qualifications, + rust_2018_idioms +)] + +#[cfg(any(test, feature = "std"))] +extern crate std; + +#[cfg(feature = "serde")] +pub mod context; +#[cfg(feature = "serde")] +pub mod handler; +#[cfg(feature = "serde")] +pub mod module; + +/// Re-exports the implementation of the IBC [Non-Fungible Token +/// Transfer](https://github.com/cosmos/ibc/blob/main/spec/app/ics-020-fungible-token-transfer/README.md) +/// (ICS-721) data structures. +pub mod types { + #[doc(inline)] + pub use ibc_app_nft_transfer_types::*; +} diff --git a/ibc-apps/ics721-nft-transfer/src/module.rs b/ibc-apps/ics721-nft-transfer/src/module.rs new file mode 100644 index 0000000000..fc4097016f --- /dev/null +++ b/ibc-apps/ics721-nft-transfer/src/module.rs @@ -0,0 +1,386 @@ +//! Provides IBC module callbacks implementation for the ICS-721 transfer. + +use ibc_core::channel::types::acknowledgement::{Acknowledgement, AcknowledgementStatus}; +use ibc_core::channel::types::channel::{Counterparty, Order}; +use ibc_core::channel::types::packet::Packet; +use ibc_core::channel::types::Version; +use ibc_core::handler::types::error::ContextError; +use ibc_core::host::types::identifiers::{ChannelId, ConnectionId, PortId}; +use ibc_core::primitives::prelude::*; +use ibc_core::primitives::Signer; +use ibc_core::router::types::module::ModuleExtras; + +use crate::context::{NftTransferExecutionContext, NftTransferValidationContext}; +use crate::handler::{ + process_recv_packet_execute, refund_packet_nft_execute, refund_packet_nft_validate, +}; +use crate::types::error::NftTransferError; +use crate::types::events::{AckEvent, AckStatusEvent, RecvEvent, TimeoutEvent}; +use crate::types::packet::PacketData; +use crate::types::{ack_success_b64, VERSION}; + +pub fn on_chan_open_init_validate( + ctx: &impl NftTransferValidationContext, + order: Order, + _connection_hops: &[ConnectionId], + port_id: &PortId, + _channel_id: &ChannelId, + _counterparty: &Counterparty, + version: &Version, +) -> Result<(), NftTransferError> { + if order != Order::Unordered { + return Err(NftTransferError::ChannelNotUnordered { + expect_order: Order::Unordered, + got_order: order, + }); + } + let bound_port = ctx.get_port()?; + if port_id != &bound_port { + return Err(NftTransferError::InvalidPort { + port_id: port_id.clone(), + exp_port_id: bound_port, + }); + } + + if !version.is_empty() { + version + .verify_is_expected(Version::new(VERSION.to_string())) + .map_err(ContextError::from)?; + } + + Ok(()) +} + +pub fn on_chan_open_init_execute( + _ctx: &mut impl NftTransferExecutionContext, + _order: Order, + _connection_hops: &[ConnectionId], + _port_id: &PortId, + _channel_id: &ChannelId, + _counterparty: &Counterparty, + _version: &Version, +) -> Result<(ModuleExtras, Version), NftTransferError> { + Ok((ModuleExtras::empty(), Version::new(VERSION.to_string()))) +} + +pub fn on_chan_open_try_validate( + _ctx: &impl NftTransferValidationContext, + order: Order, + _connection_hops: &[ConnectionId], + _port_id: &PortId, + _channel_id: &ChannelId, + _counterparty: &Counterparty, + counterparty_version: &Version, +) -> Result<(), NftTransferError> { + if order != Order::Unordered { + return Err(NftTransferError::ChannelNotUnordered { + expect_order: Order::Unordered, + got_order: order, + }); + } + + 
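    // Editor's note: unlike `on_chan_open_init_validate` above, where an empty
    // proposed version is tolerated (the module proposes its own version in the
    // `init` execute step), the counterparty version received on `ChanOpenTry`
    // must match the ICS-721 application version exactly.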
counterparty_version + .verify_is_expected(Version::new(VERSION.to_string())) + .map_err(ContextError::from)?; + + Ok(()) +} + +pub fn on_chan_open_try_execute( + _ctx: &mut impl NftTransferExecutionContext, + _order: Order, + _connection_hops: &[ConnectionId], + _port_id: &PortId, + _channel_id: &ChannelId, + _counterparty: &Counterparty, + _counterparty_version: &Version, +) -> Result<(ModuleExtras, Version), NftTransferError> { + Ok((ModuleExtras::empty(), Version::new(VERSION.to_string()))) +} + +pub fn on_chan_open_ack_validate( + _ctx: &impl NftTransferExecutionContext, + _port_id: &PortId, + _channel_id: &ChannelId, + counterparty_version: &Version, +) -> Result<(), NftTransferError> { + counterparty_version + .verify_is_expected(Version::new(VERSION.to_string())) + .map_err(ContextError::from)?; + + Ok(()) +} + +pub fn on_chan_open_ack_execute( + _ctx: &mut impl NftTransferExecutionContext, + _port_id: &PortId, + _channel_id: &ChannelId, + _counterparty_version: &Version, +) -> Result { + Ok(ModuleExtras::empty()) +} + +pub fn on_chan_open_confirm_validate( + _ctx: &impl NftTransferValidationContext, + _port_id: &PortId, + _channel_id: &ChannelId, +) -> Result<(), NftTransferError> { + Ok(()) +} + +pub fn on_chan_open_confirm_execute( + _ctx: &mut impl NftTransferExecutionContext, + _port_id: &PortId, + _channel_id: &ChannelId, +) -> Result { + Ok(ModuleExtras::empty()) +} + +pub fn on_chan_close_init_validate( + _ctx: &impl NftTransferValidationContext, + _port_id: &PortId, + _channel_id: &ChannelId, +) -> Result<(), NftTransferError> { + Err(NftTransferError::CantCloseChannel) +} + +pub fn on_chan_close_init_execute( + _ctx: &mut impl NftTransferExecutionContext, + _port_id: &PortId, + _channel_id: &ChannelId, +) -> Result { + Err(NftTransferError::CantCloseChannel) +} + +pub fn on_chan_close_confirm_validate( + _ctx: &impl NftTransferValidationContext, + _port_id: &PortId, + _channel_id: &ChannelId, +) -> Result<(), NftTransferError> { + Ok(()) +} + +pub fn on_chan_close_confirm_execute( + _ctx: &mut impl NftTransferExecutionContext, + _port_id: &PortId, + _channel_id: &ChannelId, +) -> Result { + Ok(ModuleExtras::empty()) +} + +pub fn on_recv_packet_execute( + ctx_b: &mut impl NftTransferExecutionContext, + packet: &Packet, +) -> (ModuleExtras, Acknowledgement) { + let data = match serde_json::from_slice::(&packet.data) { + Ok(data) => data, + Err(_) => { + let ack = + AcknowledgementStatus::error(NftTransferError::PacketDataDeserialization.into()); + return (ModuleExtras::empty(), ack.into()); + } + }; + + let (mut extras, ack) = match process_recv_packet_execute(ctx_b, packet, data.clone()) { + Ok(extras) => (extras, AcknowledgementStatus::success(ack_success_b64())), + Err(boxed_error) => { + let (extras, error) = *boxed_error; + (extras, AcknowledgementStatus::error(error.into())) + } + }; + + let recv_event = RecvEvent { + sender: data.sender, + receiver: data.receiver, + class: data.class_id, + tokens: data.token_ids, + memo: data.memo.unwrap_or_default(), + success: ack.is_successful(), + }; + extras.events.push(recv_event.into()); + + (extras, ack.into()) +} + +pub fn on_acknowledgement_packet_validate( + ctx: &impl NftTransferValidationContext, + packet: &Packet, + acknowledgement: &Acknowledgement, + _relayer: &Signer, +) -> Result<(), NftTransferError> { + let data = serde_json::from_slice::(&packet.data) + .map_err(|_| NftTransferError::PacketDataDeserialization)?; + + let acknowledgement = serde_json::from_slice::(acknowledgement.as_ref()) + .map_err(|_| 
NftTransferError::AckDeserialization)?; + + if !acknowledgement.is_successful() { + refund_packet_nft_validate(ctx, packet, &data)?; + } + + Ok(()) +} + +pub fn on_acknowledgement_packet_execute( + ctx: &mut impl NftTransferExecutionContext, + packet: &Packet, + acknowledgement: &Acknowledgement, + _relayer: &Signer, +) -> (ModuleExtras, Result<(), NftTransferError>) { + let data = match serde_json::from_slice::(&packet.data) { + Ok(data) => data, + Err(_) => { + return ( + ModuleExtras::empty(), + Err(NftTransferError::PacketDataDeserialization), + ); + } + }; + + let acknowledgement = + match serde_json::from_slice::(acknowledgement.as_ref()) { + Ok(ack) => ack, + Err(_) => { + return ( + ModuleExtras::empty(), + Err(NftTransferError::AckDeserialization), + ); + } + }; + + if !acknowledgement.is_successful() { + if let Err(err) = refund_packet_nft_execute(ctx, packet, &data) { + return (ModuleExtras::empty(), Err(err)); + } + } + + let ack_event = AckEvent { + sender: data.sender, + receiver: data.receiver, + class: data.class_id, + tokens: data.token_ids, + memo: data.memo.unwrap_or_default(), + acknowledgement: acknowledgement.clone(), + }; + + let extras = ModuleExtras { + events: vec![ack_event.into(), AckStatusEvent { acknowledgement }.into()], + log: Vec::new(), + }; + + (extras, Ok(())) +} + +pub fn on_timeout_packet_validate( + ctx: &impl NftTransferValidationContext, + packet: &Packet, + _relayer: &Signer, +) -> Result<(), NftTransferError> { + let data = serde_json::from_slice::(&packet.data) + .map_err(|_| NftTransferError::PacketDataDeserialization)?; + + refund_packet_nft_validate(ctx, packet, &data)?; + + Ok(()) +} + +pub fn on_timeout_packet_execute( + ctx: &mut impl NftTransferExecutionContext, + packet: &Packet, + _relayer: &Signer, +) -> (ModuleExtras, Result<(), NftTransferError>) { + let data = match serde_json::from_slice::(&packet.data) { + Ok(data) => data, + Err(_) => { + return ( + ModuleExtras::empty(), + Err(NftTransferError::PacketDataDeserialization), + ); + } + }; + + if let Err(err) = refund_packet_nft_execute(ctx, packet, &data) { + return (ModuleExtras::empty(), Err(err)); + } + + let timeout_event = TimeoutEvent { + refund_receiver: data.sender, + refund_class: data.class_id, + refund_tokens: data.token_ids, + memo: data.memo.unwrap_or_default(), + }; + + let extras = ModuleExtras { + events: vec![timeout_event.into()], + log: Vec::new(), + }; + + (extras, Ok(())) +} + +#[cfg(test)] +mod test { + use super::*; + use crate::types::ack_success_b64; + use crate::types::error::NftTransferError; + + #[test] + fn test_ack_ser() { + fn ser_json_assert_eq(ack: AcknowledgementStatus, json_str: &str) { + let ser = serde_json::to_string(&ack).unwrap(); + assert_eq!(ser, json_str) + } + + ser_json_assert_eq( + AcknowledgementStatus::success(ack_success_b64()), + r#"{"result":"AQ=="}"#, + ); + ser_json_assert_eq( + AcknowledgementStatus::error(NftTransferError::PacketDataDeserialization.into()), + r#"{"error":"failed to deserialize packet data"}"#, + ); + } + + #[test] + fn test_ack_success_to_vec() { + let ack_success: Vec = AcknowledgementStatus::success(ack_success_b64()).into(); + + // Check that it's the same output as ibc-go + // Note: this also implicitly checks that the ack bytes are non-empty, + // which would make the conversion to `Acknowledgement` panic + assert_eq!(ack_success, r#"{"result":"AQ=="}"#.as_bytes()); + } + + #[test] + fn test_ack_error_to_vec() { + let ack_error: Vec = + 
AcknowledgementStatus::error(NftTransferError::PacketDataDeserialization.into()).into(); + + // Check that it's the same output as ibc-go + // Note: this also implicitly checks that the ack bytes are non-empty, + // which would make the conversion to `Acknowledgement` panic + assert_eq!( + ack_error, + r#"{"error":"failed to deserialize packet data"}"#.as_bytes() + ); + } + + #[test] + fn test_ack_de() { + fn de_json_assert_eq(json_str: &str, ack: AcknowledgementStatus) { + let de = serde_json::from_str::(json_str).unwrap(); + assert_eq!(de, ack) + } + + de_json_assert_eq( + r#"{"result":"AQ=="}"#, + AcknowledgementStatus::success(ack_success_b64()), + ); + de_json_assert_eq( + r#"{"error":"failed to deserialize packet data"}"#, + AcknowledgementStatus::error(NftTransferError::PacketDataDeserialization.into()), + ); + + assert!(serde_json::from_str::(r#"{"success":"AQ=="}"#).is_err()); + } +} diff --git a/ibc-apps/ics721-nft-transfer/types/Cargo.toml b/ibc-apps/ics721-nft-transfer/types/Cargo.toml new file mode 100644 index 0000000000..b63d0fb14e --- /dev/null +++ b/ibc-apps/ics721-nft-transfer/types/Cargo.toml @@ -0,0 +1,76 @@ +[package] +name = "ibc-app-nft-transfer-types" +version = { workspace = true } +authors = { workspace = true } +edition = { workspace = true } +rust-version = { workspace = true } +license = { workspace = true } +repository = { workspace = true } +keywords = ["cosmos", "ibc", "transfer", "nft", "ics721"] +readme = "./../../README.md" +description = """ + Maintained by `ibc-rs`, encapsulates essential ICS-721 Non-Fungible Token Transfer data structures and + domain types, as specified in the Inter-Blockchain Communication (IBC) protocol. Designed for universal + applicability to facilitate development and integration across diverse IBC-enabled projects. +""" + +[package.metadata.docs.rs] +all-features = true + +[dependencies] +# external dependencies +borsh = { workspace = true, optional = true } +base64 = { version = "0.21.6", default-features = false } +derive_more = { workspace = true } +displaydoc = { workspace = true } +http = "1.0.0" +mime = "0.3.17" +schemars = { workspace = true, optional = true } +serde = { workspace = true, optional = true } +serde_json = { workspace = true } + +# ibc dependencies +ibc-core = { workspace = true } +ibc-proto = { workspace = true } + +## parity dependencies +parity-scale-codec = { workspace = true , optional = true } +scale-info = { workspace = true , optional = true } + +[dev-dependencies] +rstest = { workspace = true } + +[features] +default = ["std"] +std = [ + "serde/std", + "serde_json/std", + "base64/std", + "displaydoc/std", + "http/std", + "ibc-core/std", + "ibc-proto/std", +] +serde = [ + "dep:serde", + "ibc-core/serde", + "ibc-proto/serde", +] +schema = [ + "dep:schemars", + "ibc-core/schema", + "ibc-proto/json-schema", + "serde", + "std" +] +borsh = [ + "dep:borsh", + "ibc-core/borsh", + "ibc-proto/borsh" +] +parity-scale-codec = [ + "dep:parity-scale-codec", + "dep:scale-info", + "ibc-core/parity-scale-codec", + "ibc-proto/parity-scale-codec" +] diff --git a/ibc-apps/ics721-nft-transfer/types/src/class.rs b/ibc-apps/ics721-nft-transfer/types/src/class.rs new file mode 100644 index 0000000000..278a90a073 --- /dev/null +++ b/ibc-apps/ics721-nft-transfer/types/src/class.rs @@ -0,0 +1,602 @@ +//! Defines Non-Fungible Token Transfer (ICS-721) class types. 
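// Editor's note — illustrative sketch, not part of this patch; the module and test
// names are hypothetical. It demonstrates the ordering convention used by
// `TracePath` and `PrefixedClassId` below: in the string form, the left-most
// `{port}/{channel}` pair is the most recent hop, and it is the pair that
// `starts_with`, `remove_prefix`, and the source/sink checks operate on.
#[cfg(test)]
mod editor_trace_ordering_sketch {
    use super::*;

    #[test]
    fn leftmost_pair_is_most_recent_hop() {
        let class: PrefixedClassId = "transfer/channel-1/transfer/channel-0/myclass"
            .parse()
            .expect("valid prefixed class ID");

        // The token last hopped in over ("transfer", "channel-1"), so that is the
        // prefix the trace path "starts with".
        let latest = TracePrefix::new(PortId::transfer(), ChannelId::new(1));
        assert!(class.trace_path.starts_with(&latest));

        // The string form round-trips through `Display`.
        assert_eq!(
            class.to_string(),
            "transfer/channel-1/transfer/channel-0/myclass"
        );
    }
}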
+use core::fmt::{self, Display, Error as FmtError, Formatter}; +use core::str::FromStr; + +use derive_more::From; +use http::Uri; +use ibc_core::host::types::identifiers::{ChannelId, PortId}; +use ibc_core::primitives::prelude::*; +use ibc_proto::ibc::applications::nft_transfer::v1::ClassTrace as RawClassTrace; + +use crate::data::Data; +use crate::error::NftTransferError; +use crate::serializers; + +/// Class ID for an NFT +#[cfg_attr( + feature = "parity-scale-codec", + derive( + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_info::TypeInfo + ) +)] +#[cfg_attr( + feature = "borsh", + derive(borsh::BorshSerialize, borsh::BorshDeserialize) +)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub struct ClassId(String); + +impl AsRef for ClassId { + fn as_ref(&self) -> &str { + &self.0 + } +} + +impl Display for ClassId { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +impl FromStr for ClassId { + type Err = NftTransferError; + + fn from_str(class_id: &str) -> Result { + if class_id.trim().is_empty() { + Err(NftTransferError::EmptyBaseClassId) + } else { + Ok(Self(class_id.to_string())) + } + } +} + +/// Class prefix, the same as ICS-20 TracePrefix +#[cfg_attr( + feature = "parity-scale-codec", + derive( + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_info::TypeInfo + ) +)] +#[cfg_attr( + feature = "borsh", + derive(borsh::BorshSerialize, borsh::BorshDeserialize) +)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] +#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)] +pub struct TracePrefix { + port_id: PortId, + channel_id: ChannelId, +} + +impl TracePrefix { + pub fn new(port_id: PortId, channel_id: ChannelId) -> Self { + Self { + port_id, + channel_id, + } + } +} + +impl Display for TracePrefix { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), FmtError> { + write!(f, "{}/{}", self.port_id, self.channel_id) + } +} + +/// Class trace path, the same as ICS-20 TracePath +#[cfg_attr( + feature = "parity-scale-codec", + derive( + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_info::TypeInfo + ) +)] +#[cfg_attr( + feature = "borsh", + derive(borsh::BorshSerialize, borsh::BorshDeserialize) +)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] +#[derive(Clone, Debug, Default, Eq, PartialEq, PartialOrd, Ord, From)] +pub struct TracePath(Vec); + +impl TracePath { + /// Returns true iff this path starts with the specified prefix + pub fn starts_with(&self, prefix: &TracePrefix) -> bool { + self.0.last().map(|p| p == prefix).unwrap_or(false) + } + + /// Removes the specified prefix from the path if there is a match, otherwise does nothing. + pub fn remove_prefix(&mut self, prefix: &TracePrefix) { + if self.starts_with(prefix) { + self.0.pop(); + } + } + + /// Adds the specified prefix to the path. + pub fn add_prefix(&mut self, prefix: TracePrefix) { + self.0.push(prefix) + } + + /// Returns true if the path is empty and false otherwise. 
+ pub fn is_empty(&self) -> bool { + self.0.is_empty() + } +} + +impl<'a> TryFrom> for TracePath { + type Error = NftTransferError; + + fn try_from(v: Vec<&'a str>) -> Result { + if v.len() % 2 != 0 { + return Err(NftTransferError::InvalidTraceLength { + len: v.len() as u64, + }); + } + + let mut trace = vec![]; + let id_pairs = v.chunks_exact(2).map(|paths| (paths[0], paths[1])); + for (pos, (port_id, channel_id)) in id_pairs.rev().enumerate() { + let port_id = + PortId::from_str(port_id).map_err(|e| NftTransferError::InvalidTracePortId { + pos: pos as u64, + validation_error: e, + })?; + let channel_id = ChannelId::from_str(channel_id).map_err(|e| { + NftTransferError::InvalidTraceChannelId { + pos: pos as u64, + validation_error: e, + } + })?; + trace.push(TracePrefix { + port_id, + channel_id, + }); + } + + Ok(trace.into()) + } +} + +impl FromStr for TracePath { + type Err = NftTransferError; + + fn from_str(s: &str) -> Result { + let parts = { + let parts: Vec<&str> = s.split('/').collect(); + if parts.len() == 1 && parts[0].trim().is_empty() { + vec![] + } else { + parts + } + }; + parts.try_into() + } +} + +impl Display for TracePath { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), FmtError> { + let path = self + .0 + .iter() + .rev() + .map(|prefix| prefix.to_string()) + .collect::>() + .join("/"); + write!(f, "{path}") + } +} + +/// Prefixed class to trace sources like ICS-20 PrefixedDenom +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] +#[cfg_attr( + feature = "parity-scale-codec", + derive( + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_info::TypeInfo + ) +)] +#[cfg_attr( + feature = "borsh", + derive(borsh::BorshSerialize, borsh::BorshDeserialize) +)] +#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord)] +pub struct PrefixedClassId { + /// A series of `{port-id}/{channel-id}`s for tracing the source of the class. + #[cfg_attr(feature = "serde", serde(with = "serializers"))] + #[cfg_attr(feature = "schema", schemars(with = "String"))] + pub trace_path: TracePath, + /// Base class of the relayed non-fungible token. + pub base_class_id: ClassId, +} + +impl PrefixedClassId { + /// Removes the specified prefix from the trace path if there is a match, otherwise does nothing. + pub fn remove_trace_prefix(&mut self, prefix: &TracePrefix) { + self.trace_path.remove_prefix(prefix) + } + + /// Adds the specified prefix to the trace path. + pub fn add_trace_prefix(&mut self, prefix: TracePrefix) { + self.trace_path.add_prefix(prefix) + } +} + +/// Returns true if the class ID originally came from the sender chain and false otherwise. +pub fn is_sender_chain_source( + source_port: PortId, + source_channel: ChannelId, + class_id: &PrefixedClassId, +) -> bool { + !is_receiver_chain_source(source_port, source_channel, class_id) +} + +/// Returns true if the class ID originally came from the receiving chain and false otherwise. +pub fn is_receiver_chain_source( + source_port: PortId, + source_channel: ChannelId, + class_id: &PrefixedClassId, +) -> bool { + // For example, let + // A: sender chain in this transfer, port "transfer" and channel "c2b" (to B) + // B: receiver chain in this transfer, port "transfer" and channel "c2a" (to A) + // + // If B had originally sent the token in a previous transfer, then A would have stored the token as + // "transfer/c2b/{token_denom}". 
Now, A is sending to B, so to check if B is the source of the token, + // we need to check if the token starts with "transfer/c2b". + let prefix = TracePrefix::new(source_port, source_channel); + class_id.trace_path.starts_with(&prefix) +} + +impl FromStr for PrefixedClassId { + type Err = NftTransferError; + + fn from_str(s: &str) -> Result { + let mut parts: Vec<&str> = s.split('/').collect(); + let last_part = parts.pop().expect("split() returned an empty iterator"); + + let (base_class_id, trace_path) = { + if last_part == s { + (ClassId::from_str(s)?, TracePath::default()) + } else { + let base_class_id = ClassId::from_str(last_part)?; + let trace_path = TracePath::try_from(parts)?; + (base_class_id, trace_path) + } + }; + + Ok(Self { + trace_path, + base_class_id, + }) + } +} + +impl TryFrom for PrefixedClassId { + type Error = NftTransferError; + + fn try_from(value: RawClassTrace) -> Result { + let base_class_id = ClassId::from_str(&value.base_class_id)?; + let trace_path = TracePath::from_str(&value.path)?; + Ok(Self { + trace_path, + base_class_id, + }) + } +} + +impl From for RawClassTrace { + fn from(value: PrefixedClassId) -> Self { + Self { + path: value.trace_path.to_string(), + base_class_id: value.base_class_id.to_string(), + } + } +} + +impl From for PrefixedClassId { + fn from(class_id: ClassId) -> Self { + Self { + trace_path: Default::default(), + base_class_id: class_id, + } + } +} + +impl Display for PrefixedClassId { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), FmtError> { + if self.trace_path.0.is_empty() { + write!(f, "{}", self.base_class_id) + } else { + write!(f, "{}/{}", self.trace_path, self.base_class_id) + } + } +} + +/// Class URI for an NFT +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct ClassUri( + #[cfg_attr(feature = "serde", serde(with = "serializers"))] + #[cfg_attr(feature = "schema", schemars(with = "String"))] + Uri, +); + +#[cfg(feature = "borsh")] +impl borsh::BorshSerialize for ClassUri { + fn serialize( + &self, + writer: &mut W, + ) -> borsh::maybestd::io::Result<()> { + borsh::BorshSerialize::serialize(&self.to_string(), writer) + } +} + +#[cfg(feature = "borsh")] +impl borsh::BorshDeserialize for ClassUri { + fn deserialize_reader( + reader: &mut R, + ) -> borsh::maybestd::io::Result { + let uri = String::deserialize_reader(reader)?; + Ok(ClassUri::from_str(&uri).map_err(|_| borsh::maybestd::io::ErrorKind::Other)?) 
+ } +} + +#[cfg(feature = "parity-scale-codec")] +impl parity_scale_codec::Encode for ClassUri { + fn encode_to(&self, writer: &mut T) { + self.to_string().encode_to(writer); + } +} + +#[cfg(feature = "parity-scale-codec")] +impl parity_scale_codec::Decode for ClassUri { + fn decode( + input: &mut I, + ) -> Result { + let uri = String::decode(input)?; + ClassUri::from_str(&uri).map_err(|_| parity_scale_codec::Error::from("from str error")) + } +} + +#[cfg(feature = "parity-scale-codec")] +impl scale_info::TypeInfo for ClassUri { + type Identity = Self; + + fn type_info() -> scale_info::Type { + scale_info::Type::builder() + .path(scale_info::Path::new("ClassUri", module_path!())) + .composite( + scale_info::build::Fields::unnamed() + .field(|f| f.ty::().type_name("String")), + ) + } +} + +impl Display for ClassUri { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +impl FromStr for ClassUri { + type Err = NftTransferError; + + fn from_str(class_uri: &str) -> Result { + match Uri::from_str(class_uri) { + Ok(uri) => Ok(Self(uri)), + Err(err) => Err(NftTransferError::InvalidUri { + uri: class_uri.to_string(), + validation_error: err, + }), + } + } +} + +/// Class data for an NFT +#[cfg_attr( + feature = "parity-scale-codec", + derive( + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_info::TypeInfo + ) +)] +#[cfg_attr( + feature = "borsh", + derive(borsh::BorshSerialize, borsh::BorshDeserialize) +)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] +#[derive(Clone, Debug, PartialEq, Eq, derive_more::AsRef)] +pub struct ClassData(Data); + +impl Display for ClassData { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +impl FromStr for ClassData { + type Err = NftTransferError; + + fn from_str(class_data: &str) -> Result { + // validate the data + let data = Data::from_str(class_data)?; + Ok(Self(data)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_class_id_validation() -> Result<(), NftTransferError> { + assert!(ClassId::from_str("").is_err(), "empty base class ID"); + assert!(ClassId::from_str("myclass").is_ok(), "valid base class ID"); + assert!(PrefixedClassId::from_str("").is_err(), "empty class trace"); + assert!( + PrefixedClassId::from_str("transfer/channel-0/").is_err(), + "empty base class ID with trace" + ); + assert!( + PrefixedClassId::from_str("/myclass").is_err(), + "empty prefix" + ); + assert!(PrefixedClassId::from_str("//myclass").is_err(), "empty ids"); + assert!( + PrefixedClassId::from_str("transfer/").is_err(), + "single trace" + ); + assert!( + PrefixedClassId::from_str("transfer/myclass").is_err(), + "single trace with base class ID" + ); + assert!( + PrefixedClassId::from_str("transfer/channel-0/myclass").is_ok(), + "valid single trace info" + ); + assert!( + PrefixedClassId::from_str("transfer/channel-0/transfer/channel-1/myclass").is_ok(), + "valid multiple trace info" + ); + assert!( + PrefixedClassId::from_str("(transfer)/channel-0/myclass").is_err(), + "invalid port" + ); + assert!( + PrefixedClassId::from_str("transfer/(channel-0)/myclass").is_err(), + "invalid channel" + ); + + Ok(()) + } + + #[test] + fn test_class_id_trace() -> Result<(), NftTransferError> { + assert_eq!( + PrefixedClassId::from_str("transfer/channel-0/myclass")?, + PrefixedClassId { + trace_path: "transfer/channel-0".parse()?, + base_class_id: "myclass".parse()? 
+ }, + "valid single trace info" + ); + assert_eq!( + PrefixedClassId::from_str("transfer/channel-0/transfer/channel-1/myclass")?, + PrefixedClassId { + trace_path: "transfer/channel-0/transfer/channel-1".parse()?, + base_class_id: "myclass".parse()? + }, + "valid multiple trace info" + ); + + Ok(()) + } + + #[test] + fn test_class_id_serde() -> Result<(), NftTransferError> { + let dt_str = "transfer/channel-0/myclass"; + let dt = PrefixedClassId::from_str(dt_str)?; + assert_eq!(dt.to_string(), dt_str, "valid single trace info"); + + let dt_str = "transfer/channel-0/transfer/channel-1/myclass"; + let dt = PrefixedClassId::from_str(dt_str)?; + assert_eq!(dt.to_string(), dt_str, "valid multiple trace info"); + + Ok(()) + } + + #[test] + fn test_trace_path() -> Result<(), NftTransferError> { + assert!(TracePath::from_str("").is_ok(), "empty trace path"); + assert!( + TracePath::from_str("transfer/myclass").is_err(), + "invalid trace path: bad ChannelId" + ); + assert!( + TracePath::from_str("transfer//myclass").is_err(), + "malformed trace path: missing ChannelId" + ); + assert!( + TracePath::from_str("transfer/channel-0/").is_err(), + "malformed trace path: trailing delimiter" + ); + + let prefix_1 = TracePrefix::new("transfer".parse().unwrap(), "channel-1".parse().unwrap()); + let prefix_2 = TracePrefix::new("transfer".parse().unwrap(), "channel-0".parse().unwrap()); + let mut trace_path = TracePath(vec![prefix_1.clone()]); + + trace_path.add_prefix(prefix_2.clone()); + assert_eq!( + TracePath::from_str("transfer/channel-0/transfer/channel-1")?, + trace_path + ); + assert_eq!( + TracePath(vec![prefix_1.clone(), prefix_2.clone()]), + trace_path + ); + + trace_path.remove_prefix(&prefix_2); + assert_eq!(TracePath::from_str("transfer/channel-1")?, trace_path); + assert_eq!(TracePath(vec![prefix_1.clone()]), trace_path); + + trace_path.remove_prefix(&prefix_1); + assert!(trace_path.is_empty()); + + Ok(()) + } + + #[test] + fn test_serde_json_roundtrip() { + fn serde_roundtrip(class_uri: ClassUri) { + let serialized = + serde_json::to_string(&class_uri).expect("failed to serialize ClassUri"); + let deserialized = serde_json::from_str::(&serialized) + .expect("failed to deserialize ClassUri"); + + assert_eq!(deserialized, class_uri); + } + + let uri = "/foo/bar?baz".parse::().unwrap(); + serde_roundtrip(ClassUri(uri)); + + let uri = "https://www.rust-lang.org/install.html" + .parse::() + .unwrap(); + serde_roundtrip(ClassUri(uri)); + } + + #[cfg(feature = "borsh")] + #[test] + fn test_borsh_roundtrip() { + fn borsh_roundtrip(class_uri: ClassUri) { + use borsh::{BorshDeserialize, BorshSerialize}; + + let class_uri_bytes = class_uri.try_to_vec().unwrap(); + let res = ClassUri::try_from_slice(&class_uri_bytes).unwrap(); + + assert_eq!(class_uri, res); + } + + let uri = "/foo/bar?baz".parse::().unwrap(); + borsh_roundtrip(ClassUri(uri)); + + let uri = "https://www.rust-lang.org/install.html" + .parse::() + .unwrap(); + borsh_roundtrip(ClassUri(uri)); + } +} diff --git a/ibc-apps/ics721-nft-transfer/types/src/data.rs b/ibc-apps/ics721-nft-transfer/types/src/data.rs new file mode 100644 index 0000000000..098c8c094b --- /dev/null +++ b/ibc-apps/ics721-nft-transfer/types/src/data.rs @@ -0,0 +1,309 @@ +//! Defines Non-Fungible Token Transfer (ICS-721) data types. 
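Annotation (not part of the diff): the comments in `class.rs` above describe how a class ID accumulates `{port-id}/{channel-id}` hops as it moves between chains. The sketch below exercises that API only with items shown in this patch; the `ibc::apps::nft_transfer::types` and `ibc::core::host` re-export paths are assumed from the testkit imports that appear later in this PR.

```rust
// Minimal sketch of the class-trace round trip using the types defined above.
use core::str::FromStr;

use ibc::apps::nft_transfer::types::{is_receiver_chain_source, PrefixedClassId, TracePrefix};
use ibc::core::host::types::identifiers::{ChannelId, PortId};

fn trace_round_trip() {
    let source_port = PortId::from_str("transfer").expect("valid port");
    let source_channel = ChannelId::from_str("channel-0").expect("valid channel");
    let prefix = TracePrefix::new(source_port.clone(), source_channel.clone());

    // An unprefixed class, i.e. one with an empty trace path.
    let mut class_id = PrefixedClassId::from_str("myclass").expect("valid class ID");
    assert!(!is_receiver_chain_source(
        source_port.clone(),
        source_channel.clone(),
        &class_id
    ));

    // Record a hop over (transfer, channel-0); Display renders the prefixed form.
    class_id.add_trace_prefix(prefix.clone());
    assert_eq!(class_id.to_string(), "transfer/channel-0/myclass");
    assert!(is_receiver_chain_source(source_port, source_channel, &class_id));

    // Stripping the same prefix restores the base class ID.
    class_id.remove_trace_prefix(&prefix);
    assert_eq!(class_id.to_string(), "myclass");
}
```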
+use core::fmt::{self, Display, Formatter}; +use core::str::FromStr; + +use base64::prelude::BASE64_STANDARD; +use base64::Engine; +use ibc_core::primitives::prelude::*; +use mime::Mime; + +use crate::error::NftTransferError; + +#[cfg_attr( + feature = "parity-scale-codec", + derive( + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_info::TypeInfo + ) +)] +#[cfg_attr( + feature = "borsh", + derive(borsh::BorshSerialize, borsh::BorshDeserialize) +)] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] +#[derive(Clone, Debug, Default, PartialEq, Eq, derive_more::From)] +pub struct Data(String); + +impl Data { + /// Parses the data in the format specified by ICS-721. + pub fn parse_as_ics721_data(&self) -> Result { + self.0.parse::() + } +} + +impl Display for Data { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +impl FromStr for Data { + type Err = NftTransferError; + + fn from_str(s: &str) -> Result { + Ok(Self(s.to_string())) + } +} + +impl serde::Serialize for Data { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + serializer.serialize_str(&BASE64_STANDARD.encode(&self.0)) + } +} + +impl<'de> serde::Deserialize<'de> for Data { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let encoded = String::deserialize(deserializer)?; + let decoded = BASE64_STANDARD + .decode(encoded) + .map_err(serde::de::Error::custom)?; + let decoded_str = String::from_utf8(decoded).map_err(serde::de::Error::custom)?; + Ok(Data(decoded_str)) + } +} + +#[cfg_attr( + feature = "parity-scale-codec", + derive( + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_info::TypeInfo + ) +)] +#[cfg_attr( + feature = "borsh", + derive(borsh::BorshSerialize, borsh::BorshDeserialize) +)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] +#[derive(Clone, Debug, Default, PartialEq, Eq)] +pub struct Ics721Data(BTreeMap); + +impl FromStr for Ics721Data { + type Err = NftTransferError; + + fn from_str(s: &str) -> Result { + serde_json::from_str(s).map_err(|_| NftTransferError::InvalidIcs721Data) + } +} + +#[derive(Clone, Debug, Default, PartialEq, Eq)] +pub struct DataValue { + value: String, + mime: Option, +} + +#[cfg(feature = "serde")] +impl serde::Serialize for DataValue { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut state = serializer.serialize_struct("DataValue", 2)?; + state.serialize_field("value", &self.value)?; + match &self.mime { + Some(mime) if *mime != "" => { + state.serialize_field("mime", &mime.to_string())?; + } + _ => {} + } + state.end() + } +} + +#[cfg(feature = "serde")] +impl<'de> serde::Deserialize<'de> for DataValue { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + #[derive(serde::Deserialize)] + struct StringDataValue { + value: String, + mime: Option, + } + + let data_value = StringDataValue::deserialize(deserializer)?; + let mime = data_value + .mime + .map(|s| Mime::from_str(&s).map_err(serde::de::Error::custom)) + .transpose()?; + + Ok(DataValue { + value: data_value.value, + mime, + }) + } +} + +#[cfg(feature = "borsh")] +impl borsh::BorshSerialize for DataValue { + fn serialize( + &self, + writer: &mut W, + ) -> borsh::maybestd::io::Result<()> { + borsh::BorshSerialize::serialize(&self.value, writer)?; + let mime = match 
&self.mime { + Some(mime) => mime.to_string(), + None => String::default(), + }; + borsh::BorshSerialize::serialize(&mime.to_string(), writer)?; + Ok(()) + } +} + +#[cfg(feature = "borsh")] +impl borsh::BorshDeserialize for DataValue { + fn deserialize_reader( + reader: &mut R, + ) -> borsh::maybestd::io::Result { + let value = String::deserialize_reader(reader)?; + let mime = String::deserialize_reader(reader)?; + let mime = if mime.is_empty() { + None + } else { + Some(Mime::from_str(&mime).map_err(|_| borsh::maybestd::io::ErrorKind::Other)?) + }; + + Ok(Self { value, mime }) + } +} + +#[cfg(feature = "parity-scale-codec")] +impl parity_scale_codec::Encode for DataValue { + fn encode_to(&self, writer: &mut T) { + self.value.encode_to(writer); + if let Some(mime) = &self.mime { + mime.to_string().encode_to(writer); + } else { + "".encode_to(writer); + } + } +} + +#[cfg(feature = "parity-scale-codec")] +impl parity_scale_codec::Decode for DataValue { + fn decode( + input: &mut I, + ) -> Result { + let value = String::decode(input)?; + let mime_str = String::decode(input)?; + let mime = if mime_str.is_empty() { + None + } else { + Some( + Mime::from_str(&mime_str) + .map_err(|_| parity_scale_codec::Error::from("from str error"))?, + ) + }; + + Ok(DataValue { value, mime }) + } +} + +#[cfg(feature = "parity-scale-codec")] +impl scale_info::TypeInfo for DataValue { + type Identity = Self; + + fn type_info() -> scale_info::Type { + scale_info::Type::builder() + .path(scale_info::Path::new("DataValue", module_path!())) + .composite( + scale_info::build::Fields::named() + .field(|f| f.ty::().name("value").type_name("String")) + .field(|f| f.ty::().name("mime").type_name("String")), + ) + } +} + +#[cfg(feature = "schema")] +impl schemars::JsonSchema for DataValue { + fn schema_name() -> String { + "DataValue".to_string() + } + + fn schema_id() -> std::borrow::Cow<'static, str> { + std::borrow::Cow::Borrowed(concat!(module_path!(), "::DataValue")) + } + + fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema { + gen.subschema_for::() + } +} + +#[cfg(test)] +mod tests { + use rstest::rstest; + + use super::*; + + #[cfg(feature = "serde")] + #[rstest] + #[case(r#"{"value":"foo"}"#)] + #[case(r#"{"value":"foo-42","mime":"multipart/form-data; boundary=ABCDEFG"}"#)] + fn test_valid_json_deserialization(#[case] data_value_json: &str) { + assert!(serde_json::from_str::(data_value_json).is_ok()); + } + + #[cfg(feature = "serde")] + #[rstest] + #[case(r#"{"value":"foo-42","mime":"invalid"}"#)] + #[case(r#"{"value":"invalid","mime":""}"#)] + fn test_invalid_json_deserialization(#[case] data_value_json: &str) { + assert!(serde_json::from_str::(data_value_json).is_err()); + } + + #[cfg(feature = "serde")] + #[test] + fn test_serde_json_roundtrip() { + fn serde_roundtrip(data_value: DataValue) { + let serialized = + serde_json::to_string(&data_value).expect("failed to serialize DataValue"); + let deserialized = serde_json::from_str::(&serialized) + .expect("failed to deserialize DataValue"); + + assert_eq!(deserialized, data_value); + } + + serde_roundtrip(DataValue { + value: String::from("foo"), + mime: None, + }); + + serde_roundtrip(DataValue { + value: String::from("foo"), + mime: Some(mime::TEXT_PLAIN_UTF_8), + }); + } + + #[cfg(feature = "borsh")] + #[test] + fn test_borsh_roundtrip() { + fn borsh_roundtrip(data_value: DataValue) { + use borsh::{BorshDeserialize, BorshSerialize}; + + let data_value_bytes = data_value.try_to_vec().unwrap(); + let res = 
DataValue::try_from_slice(&data_value_bytes).unwrap(); + + assert_eq!(data_value, res); + } + + borsh_roundtrip(DataValue { + value: String::from("foo"), + mime: None, + }); + + borsh_roundtrip(DataValue { + value: String::from("foo"), + mime: Some(mime::TEXT_PLAIN_UTF_8), + }); + } +} diff --git a/ibc-apps/ics721-nft-transfer/types/src/error.rs b/ibc-apps/ics721-nft-transfer/types/src/error.rs new file mode 100644 index 0000000000..08ab598e5c --- /dev/null +++ b/ibc-apps/ics721-nft-transfer/types/src/error.rs @@ -0,0 +1,140 @@ +//! Defines the Non-Fungible Token Transfer (ICS-721) error types. +use core::convert::Infallible; +use core::str::Utf8Error; + +use displaydoc::Display; +use ibc_core::channel::types::acknowledgement::StatusValue; +use ibc_core::channel::types::channel::Order; +use ibc_core::handler::types::error::ContextError; +use ibc_core::host::types::error::IdentifierError; +use ibc_core::host::types::identifiers::{ChannelId, PortId}; +use ibc_core::primitives::prelude::*; + +#[derive(Display, Debug)] +pub enum NftTransferError { + /// context error: `{0}` + ContextError(ContextError), + /// invalid identifier: `{0}` + InvalidIdentifier(IdentifierError), + /// invalid URI: `{uri}`, validation error: `{validation_error}`` + InvalidUri { + uri: String, + validation_error: http::uri::InvalidUri, + }, + /// destination channel not found in the counterparty of port_id `{port_id}` and channel_id `{channel_id}` + DestinationChannelNotFound { + port_id: PortId, + channel_id: ChannelId, + }, + /// base class ID is empty + EmptyBaseClassId, + /// invalid prot id n trace at position: `{pos}`, validation error: `{validation_error}` + InvalidTracePortId { + pos: u64, + validation_error: IdentifierError, + }, + /// invalid channel id in trace at position: `{pos}`, validation error: `{validation_error}` + InvalidTraceChannelId { + pos: u64, + validation_error: IdentifierError, + }, + /// trace length must be even but got: `{len}` + InvalidTraceLength { len: u64 }, + /// no token ID + NoTokenId, + /// invalid token ID + InvalidTokenId, + /// duplicated token IDs + DuplicatedTokenIds, + /// The length of token IDs mismatched that of token URIs or token data + TokenMismatched, + /// invalid json data + InvalidJsonData, + /// the data is not in the JSON format specified by ICS-721 + InvalidIcs721Data, + /// expected `{expect_order}` channel, got `{got_order}` + ChannelNotUnordered { + expect_order: Order, + got_order: Order, + }, + /// channel cannot be closed + CantCloseChannel, + /// `{sender}` doesn't own the NFT + InvalidOwner { sender: String }, + /// owner is not found + OwnerNotFound, + /// nft is not found + NftNotFound, + /// nft class is not found + NftClassNotFound, + /// failed to deserialize packet data + PacketDataDeserialization, + /// failed to deserialize acknowledgement + AckDeserialization, + /// receive is not enabled + ReceiveDisabled { reason: String }, + /// send is not enabled + SendDisabled { reason: String }, + /// failed to parse as AccountId + ParseAccountFailure, + /// invalid port: `{port_id}`, expected `{exp_port_id}` + InvalidPort { + port_id: PortId, + exp_port_id: PortId, + }, + /// decoding raw msg error: `{reason}` + DecodeRawMsg { reason: String }, + /// unknown msg type: `{msg_type}` + UnknownMsgType { msg_type: String }, + /// decoding raw bytes as UTF8 string error: `{0}` + Utf8Decode(Utf8Error), + /// other error: `{0}` + Other(String), +} + +#[cfg(feature = "std")] +impl std::error::Error for NftTransferError { + fn source(&self) -> Option<&(dyn 
std::error::Error + 'static)> { + match &self { + Self::ContextError(e) => Some(e), + Self::InvalidIdentifier(e) => Some(e), + Self::InvalidUri { + validation_error: e, + .. + } => Some(e), + Self::InvalidTracePortId { + validation_error: e, + .. + } => Some(e), + Self::InvalidTraceChannelId { + validation_error: e, + .. + } => Some(e), + _ => None, + } + } +} + +impl From for NftTransferError { + fn from(e: Infallible) -> Self { + match e {} + } +} + +impl From for NftTransferError { + fn from(err: ContextError) -> NftTransferError { + Self::ContextError(err) + } +} + +impl From for NftTransferError { + fn from(err: IdentifierError) -> NftTransferError { + Self::InvalidIdentifier(err) + } +} + +impl From for StatusValue { + fn from(err: NftTransferError) -> Self { + StatusValue::new(err.to_string()).expect("error message must not be empty") + } +} diff --git a/ibc-apps/ics721-nft-transfer/types/src/events.rs b/ibc-apps/ics721-nft-transfer/types/src/events.rs new file mode 100644 index 0000000000..6f0c90579d --- /dev/null +++ b/ibc-apps/ics721-nft-transfer/types/src/events.rs @@ -0,0 +1,214 @@ +//! Defines Non-Fungible Token Transfer (ICS-721) event types. +use ibc_core::channel::types::acknowledgement::AcknowledgementStatus; +use ibc_core::primitives::prelude::*; +use ibc_core::primitives::Signer; +use ibc_core::router::types::event::ModuleEvent; + +use super::Memo; +use crate::{PrefixedClassId, TokenId, TokenIds, MODULE_ID_STR}; + +const EVENT_TYPE_PACKET: &str = "non_fungible_token_packet"; +const EVENT_TYPE_TIMEOUT: &str = "timeout"; +const EVENT_TYPE_TOKEN_TRACE: &str = "token_trace"; +const EVENT_TYPE_TRANSFER: &str = "ibc_nft_transfer"; + +/// Contains all events variants that can be emitted from the NFT transfer application +pub enum Event { + Recv(RecvEvent), + Ack(AckEvent), + AckStatus(AckStatusEvent), + Timeout(TimeoutEvent), + TokenTrace(TokenTraceEvent), + Transfer(TransferEvent), +} + +/// Event emitted by the `onRecvPacket` module callback to indicate the that the +/// `RecvPacket` message was processed +pub struct RecvEvent { + pub sender: Signer, + pub receiver: Signer, + pub class: PrefixedClassId, + pub tokens: TokenIds, + pub memo: Memo, + pub success: bool, +} + +impl From for ModuleEvent { + fn from(ev: RecvEvent) -> Self { + let RecvEvent { + sender, + receiver, + class, + tokens, + memo, + success, + } = ev; + Self { + kind: EVENT_TYPE_PACKET.to_string(), + attributes: vec![ + ("module", MODULE_ID_STR).into(), + ("sender", sender).into(), + ("receiver", receiver).into(), + ("class", class).into(), + ("tokens", tokens).into(), + ("memo", memo).into(), + ("success", success).into(), + ], + } + } +} + +/// Event emitted in the `onAcknowledgePacket` module callback +pub struct AckEvent { + pub sender: Signer, + pub receiver: Signer, + pub class: PrefixedClassId, + pub tokens: TokenIds, + pub memo: Memo, + pub acknowledgement: AcknowledgementStatus, +} + +impl From for ModuleEvent { + fn from(ev: AckEvent) -> Self { + let AckEvent { + sender, + receiver, + class, + tokens, + memo, + acknowledgement, + } = ev; + Self { + kind: EVENT_TYPE_PACKET.to_string(), + attributes: vec![ + ("module", MODULE_ID_STR).into(), + ("sender", sender).into(), + ("receiver", receiver).into(), + ("class", class).into(), + ("tokens", tokens).into(), + ("memo", memo).into(), + ("acknowledgement", acknowledgement).into(), + ], + } + } +} + +/// Event emitted in the `onAcknowledgePacket` module callback to indicate +/// whether the acknowledgement is a success or a failure +pub struct 
AckStatusEvent { + pub acknowledgement: AcknowledgementStatus, +} + +impl From for ModuleEvent { + fn from(ev: AckStatusEvent) -> Self { + let AckStatusEvent { acknowledgement } = ev; + let attr_label = match acknowledgement { + AcknowledgementStatus::Success(_) => "success", + AcknowledgementStatus::Error(_) => "error", + }; + + Self { + kind: EVENT_TYPE_PACKET.to_string(), + attributes: vec![(attr_label, acknowledgement.to_string()).into()], + } + } +} + +/// Event emitted in the `onTimeoutPacket` module callback +pub struct TimeoutEvent { + pub refund_receiver: Signer, + pub refund_class: PrefixedClassId, + pub refund_tokens: TokenIds, + pub memo: Memo, +} + +impl From for ModuleEvent { + fn from(ev: TimeoutEvent) -> Self { + let TimeoutEvent { + refund_receiver, + refund_class, + refund_tokens, + memo, + } = ev; + Self { + kind: EVENT_TYPE_TIMEOUT.to_string(), + attributes: vec![ + ("module", MODULE_ID_STR).into(), + ("refund_receiver", refund_receiver).into(), + ("refund_class", refund_class).into(), + ("refund_tokens", refund_tokens).into(), + ("memo", memo).into(), + ], + } + } +} + +/// Event emitted in the `onRecvPacket` module callback when new tokens are minted +pub struct TokenTraceEvent { + pub trace_hash: Option, + pub class: PrefixedClassId, + pub token: TokenId, +} + +impl From for ModuleEvent { + fn from(ev: TokenTraceEvent) -> Self { + let TokenTraceEvent { + trace_hash, + class, + token, + } = ev; + let mut ev = Self { + kind: EVENT_TYPE_TOKEN_TRACE.to_string(), + attributes: vec![("class", class).into(), ("token", token).into()], + }; + if let Some(hash) = trace_hash { + ev.attributes.push(("trace_hash", hash).into()); + } + ev + } +} + +/// Event emitted after a successful `sendTransfer` +pub struct TransferEvent { + pub sender: Signer, + pub receiver: Signer, + pub class: PrefixedClassId, + pub tokens: TokenIds, + pub memo: Memo, +} + +impl From for ModuleEvent { + fn from(ev: TransferEvent) -> Self { + let TransferEvent { + sender, + receiver, + class, + tokens, + memo, + } = ev; + + Self { + kind: EVENT_TYPE_TRANSFER.to_string(), + attributes: vec![ + ("sender", sender).into(), + ("receiver", receiver).into(), + ("class", class).into(), + ("tokens", tokens).into(), + ("memo", memo).into(), + ], + } + } +} + +impl From for ModuleEvent { + fn from(ev: Event) -> Self { + match ev { + Event::Recv(ev) => ev.into(), + Event::Ack(ev) => ev.into(), + Event::AckStatus(ev) => ev.into(), + Event::Timeout(ev) => ev.into(), + Event::TokenTrace(ev) => ev.into(), + Event::Transfer(ev) => ev.into(), + } + } +} diff --git a/ibc-apps/ics721-nft-transfer/types/src/lib.rs b/ibc-apps/ics721-nft-transfer/types/src/lib.rs new file mode 100644 index 0000000000..b429ce1a37 --- /dev/null +++ b/ibc-apps/ics721-nft-transfer/types/src/lib.rs @@ -0,0 +1,69 @@ +//! Implementation of the IBC [Non-Fungible Token +//! Transfer](https://github.com/cosmos/ibc/blob/main/spec/app/ics-721-nft-transfer/README.md) +//! (ICS-721) data structures. 
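Annotation (not part of the diff): a short sketch of how a host module might emit the `ibc_nft_transfer` event defined in `events.rs` above. Field names and constructors come from this patch; the `ibc::...` re-export paths (including `types::events`) are assumptions based on the testkit imports later in this PR.

```rust
// Sketch: turning a TransferEvent into a ModuleEvent via the From impl above.
use ibc::apps::nft_transfer::types::events::TransferEvent;
use ibc::apps::nft_transfer::types::{Memo, PrefixedClassId, TokenIds};
use ibc::core::primitives::Signer;
use ibc::core::router::types::event::ModuleEvent;

fn transfer_event() -> ModuleEvent {
    let sender: Signer = "cosmos1sender".to_string().into();
    let receiver: Signer = "cosmos1receiver".to_string().into();

    let event = TransferEvent {
        sender,
        receiver,
        class: "transfer/channel-0/myclass"
            .parse::<PrefixedClassId>()
            .expect("valid class trace"),
        tokens: TokenIds::try_from(vec!["token_0".to_string()])
            .expect("non-empty, duplicate-free token IDs"),
        memo: Memo::from("via relayer".to_string()),
    };

    // The From impl above flattens the fields into `ibc_nft_transfer` event attributes.
    event.into()
}
```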
+#![no_std] +#![forbid(unsafe_code)] +#![cfg_attr(not(test), deny(clippy::unwrap_used))] +#![cfg_attr(not(test), deny(clippy::disallowed_methods, clippy::disallowed_types))] +#![deny( + warnings, + trivial_casts, + trivial_numeric_casts, + unused_import_braces, + unused_qualifications, + rust_2018_idioms +)] + +#[cfg(any(test, feature = "std"))] +extern crate std; + +#[cfg(feature = "serde")] +mod class; +#[cfg(feature = "serde")] +pub use class::*; +#[cfg(feature = "serde")] +mod data; +#[cfg(feature = "serde")] +pub use data::*; +#[cfg(feature = "serde")] +pub mod events; +#[cfg(feature = "serde")] +pub mod msgs; +#[cfg(feature = "serde")] +pub mod packet; +#[cfg(feature = "serde")] +mod token; +#[cfg(feature = "serde")] +pub use token::*; + +#[cfg(feature = "serde")] +pub(crate) mod serializers; + +pub mod error; +mod memo; +pub use memo::*; + +/// Re-exports ICS-721 NFT transfer proto types from the `ibc-proto` crate. +pub mod proto { + pub use ibc_proto::ibc::apps::nft_transfer; +} + +/// Module identifier for the ICS-721 application. +pub const MODULE_ID_STR: &str = "nft_transfer"; + +/// The port identifier that the ICS-721 applications typically bind with. +pub const PORT_ID_STR: &str = "nft-transfer"; + +/// ICS-721 application current version. +pub const VERSION: &str = "ics721-1"; + +/// The successful string used for creating an acknowledgement status, +/// equivalent to `base64::encode(0x01)`. +pub const ACK_SUCCESS_B64: &str = "AQ=="; + +use ibc_core::channel::types::acknowledgement::StatusValue; + +/// Returns a successful acknowledgement status for the NFT transfer application. +pub fn ack_success_b64() -> StatusValue { + StatusValue::new(ACK_SUCCESS_B64).expect("ack status value is never supposed to be empty") +} diff --git a/ibc-apps/ics721-nft-transfer/types/src/memo.rs b/ibc-apps/ics721-nft-transfer/types/src/memo.rs new file mode 100644 index 0000000000..432dd4fc6f --- /dev/null +++ b/ibc-apps/ics721-nft-transfer/types/src/memo.rs @@ -0,0 +1,54 @@ +//! Defines the memo type, which represents the string that users can include +//! with a Non-Fungible Token Transfer + +use core::convert::Infallible; +use core::fmt::{ + Display, {self}, +}; +use core::str::FromStr; + +use ibc_core::primitives::prelude::*; + +/// Represents the token transfer memo +#[cfg_attr( + feature = "parity-scale-codec", + derive( + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_info::TypeInfo + ) +)] +#[cfg_attr( + feature = "borsh", + derive(borsh::BorshSerialize, borsh::BorshDeserialize) +)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] +#[derive(Clone, Debug, Default, PartialEq, Eq)] +pub struct Memo(String); + +impl AsRef for Memo { + fn as_ref(&self) -> &str { + &self.0 + } +} + +impl Display for Memo { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +impl From for Memo { + fn from(memo: String) -> Self { + Self(memo) + } +} + +impl FromStr for Memo { + type Err = Infallible; + + fn from_str(memo: &str) -> Result { + Ok(Self(memo.to_owned())) + } +} diff --git a/ibc-apps/ics721-nft-transfer/types/src/msgs/mod.rs b/ibc-apps/ics721-nft-transfer/types/src/msgs/mod.rs new file mode 100644 index 0000000000..70f4adfef6 --- /dev/null +++ b/ibc-apps/ics721-nft-transfer/types/src/msgs/mod.rs @@ -0,0 +1,2 @@ +//! Defines the Non-Fungible Token Transfer (ICS-721) message types. 
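Annotation (not part of the diff): `lib.rs` above exposes `ACK_SUCCESS_B64` and `ack_success_b64()` for building the ICS-721 success acknowledgement, and `error.rs` converts any `NftTransferError` into a `StatusValue`. The sketch below shows one way those pieces could fit together; the `AcknowledgementStatus::success`/`error` constructors and the `ibc::...` paths are assumptions about ibc-core's channel types, not something this patch defines.

```rust
// Sketch: producing success and error acknowledgements for the NFT transfer app.
use ibc::apps::nft_transfer::types::ack_success_b64;
use ibc::apps::nft_transfer::types::error::NftTransferError;
use ibc::core::channel::types::acknowledgement::AcknowledgementStatus;

fn build_acks() -> (AcknowledgementStatus, AcknowledgementStatus) {
    // Successful receive: wraps ACK_SUCCESS_B64 ("AQ=="), the base64 encoding of 0x01.
    let success = AcknowledgementStatus::success(ack_success_b64());

    // Failed receive: any NftTransferError converts into a non-empty StatusValue.
    let failure = AcknowledgementStatus::error(NftTransferError::NoTokenId.into());

    assert!(matches!(success, AcknowledgementStatus::Success(_)));
    assert!(matches!(failure, AcknowledgementStatus::Error(_)));
    (success, failure)
}
```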
+pub mod transfer; diff --git a/ibc-apps/ics721-nft-transfer/types/src/msgs/transfer.rs b/ibc-apps/ics721-nft-transfer/types/src/msgs/transfer.rs new file mode 100644 index 0000000000..357723f614 --- /dev/null +++ b/ibc-apps/ics721-nft-transfer/types/src/msgs/transfer.rs @@ -0,0 +1,138 @@ +//! Defines the Non-Fungible Token Transfer message type + +use ibc_core::channel::types::error::PacketError; +use ibc_core::channel::types::timeout::TimeoutHeight; +use ibc_core::handler::types::error::ContextError; +use ibc_core::host::types::identifiers::{ChannelId, PortId}; +use ibc_core::primitives::prelude::*; +use ibc_core::primitives::Timestamp; +use ibc_proto::google::protobuf::Any; +use ibc_proto::ibc::applications::nft_transfer::v1::MsgTransfer as RawMsgTransfer; +use ibc_proto::Protobuf; + +use crate::error::NftTransferError; +use crate::packet::PacketData; + +pub(crate) const TYPE_URL: &str = "/ibc.applications.nft_transfer.v1.MsgTransfer"; + +/// Message used to build an ICS-721 Non-Fungible Token Transfer packet. +/// +/// Note that this message is not a packet yet, as it lacks the proper sequence +/// number, and destination port/channel. This is by design. The sender of the +/// packet, which might be the user of a command line application, should only +/// have to specify the information related to the transfer of the token, and +/// let the library figure out how to build the packet properly. +#[derive(Clone, Debug, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr( + feature = "parity-scale-codec", + derive(parity_scale_codec::Encode, parity_scale_codec::Decode,) +)] +#[cfg_attr( + feature = "borsh", + derive(borsh::BorshSerialize, borsh::BorshDeserialize) +)] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] +pub struct MsgTransfer { + /// the port on which the packet will be sent + pub port_id_on_a: PortId, + /// the channel by which the packet will be sent + pub chan_id_on_a: ChannelId, + /// NFT transfer packet data of the packet that will be sent + pub packet_data: PacketData, + /// Timeout height relative to the current block height. + /// The timeout is disabled when set to None. + pub timeout_height_on_b: TimeoutHeight, + /// Timeout timestamp relative to the current block timestamp. + /// The timeout is disabled when set to 0. + pub timeout_timestamp_on_b: Timestamp, +} + +impl TryFrom for MsgTransfer { + type Error = NftTransferError; + + fn try_from(raw_msg: RawMsgTransfer) -> Result { + let timeout_timestamp_on_b = Timestamp::from_nanoseconds(raw_msg.timeout_timestamp) + .map_err(PacketError::InvalidPacketTimestamp) + .map_err(ContextError::from)?; + + let timeout_height_on_b: TimeoutHeight = raw_msg + .timeout_height + .try_into() + .map_err(ContextError::from)?; + + // Packet timeout height and packet timeout timestamp cannot both be unset. 
+ if !timeout_height_on_b.is_set() && !timeout_timestamp_on_b.is_set() { + return Err(ContextError::from(PacketError::MissingTimeout))?; + } + + let memo = if raw_msg.memo.is_empty() { + None + } else { + Some(raw_msg.memo.into()) + }; + + Ok(MsgTransfer { + port_id_on_a: raw_msg.source_port.parse()?, + chan_id_on_a: raw_msg.source_channel.parse()?, + packet_data: PacketData { + class_id: raw_msg.class_id.parse()?, + class_uri: None, + class_data: None, + token_ids: raw_msg.token_ids.try_into()?, + token_uris: None, + token_data: None, + sender: raw_msg.sender.into(), + receiver: raw_msg.receiver.into(), + memo, + }, + timeout_height_on_b, + timeout_timestamp_on_b, + }) + } +} + +impl From for RawMsgTransfer { + fn from(domain_msg: MsgTransfer) -> Self { + RawMsgTransfer { + source_port: domain_msg.port_id_on_a.to_string(), + source_channel: domain_msg.chan_id_on_a.to_string(), + class_id: domain_msg.packet_data.class_id.to_string(), + token_ids: domain_msg + .packet_data + .token_ids + .as_ref() + .iter() + .map(|t| t.to_string()) + .collect(), + sender: domain_msg.packet_data.sender.to_string(), + receiver: domain_msg.packet_data.receiver.to_string(), + timeout_height: domain_msg.timeout_height_on_b.into(), + timeout_timestamp: domain_msg.timeout_timestamp_on_b.nanoseconds(), + memo: domain_msg + .packet_data + .memo + .map(|m| m.to_string()) + .unwrap_or_default(), + } + } +} + +impl Protobuf for MsgTransfer {} + +impl TryFrom for MsgTransfer { + type Error = NftTransferError; + + fn try_from(raw: Any) -> Result { + match raw.type_url.as_str() { + TYPE_URL => { + MsgTransfer::decode_vec(&raw.value).map_err(|e| NftTransferError::DecodeRawMsg { + reason: e.to_string(), + }) + } + _ => Err(NftTransferError::UnknownMsgType { + msg_type: raw.type_url, + }), + } + } +} diff --git a/ibc-apps/ics721-nft-transfer/types/src/packet.rs b/ibc-apps/ics721-nft-transfer/types/src/packet.rs new file mode 100644 index 0000000000..a870ae4364 --- /dev/null +++ b/ibc-apps/ics721-nft-transfer/types/src/packet.rs @@ -0,0 +1,324 @@ +//! 
Contains the `PacketData` type that defines the structure of NFT transfers' packet bytes + +use core::convert::TryFrom; + +use base64::prelude::BASE64_STANDARD; +use base64::Engine; +use ibc_core::primitives::prelude::*; +use ibc_core::primitives::Signer; +use ibc_proto::ibc::applications::nft_transfer::v1::NonFungibleTokenPacketData as RawPacketData; + +use crate::class::{ClassData, ClassUri, PrefixedClassId}; +use crate::error::NftTransferError; +use crate::memo::Memo; +use crate::serializers; +use crate::token::{TokenData, TokenIds, TokenUri}; + +/// Defines the structure of token transfers' packet bytes +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[serde(rename_all = "camelCase")] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] +#[cfg_attr( + feature = "parity-scale-codec", + derive(parity_scale_codec::Encode, parity_scale_codec::Decode,) +)] +#[cfg_attr( + feature = "borsh", + derive(borsh::BorshSerialize, borsh::BorshDeserialize) +)] +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct PacketData { + #[cfg_attr(feature = "serde", serde(with = "serializers"))] + #[cfg_attr(feature = "schema", schemars(with = "String"))] + pub class_id: PrefixedClassId, + pub class_uri: Option, + pub class_data: Option, + pub token_ids: TokenIds, + // Need `Option` to decode `null` value + pub token_uris: Option>, + // Need `Option` to decode `null` value + pub token_data: Option>, + pub sender: Signer, + pub receiver: Signer, + pub memo: Option, +} + +impl PacketData { + #[allow(clippy::too_many_arguments)] + pub fn new( + class_id: PrefixedClassId, + class_uri: Option, + class_data: Option, + token_ids: TokenIds, + token_uris: Vec, + token_data: Vec, + sender: Signer, + receiver: Signer, + memo: Memo, + ) -> Result { + let token_uris = if token_uris.is_empty() { + None + } else { + Some(token_uris) + }; + let token_data = if token_data.is_empty() { + None + } else { + Some(token_data) + }; + let memo = if memo.as_ref().is_empty() { + None + } else { + Some(memo) + }; + + let packet_data = Self { + class_id, + class_uri, + class_data, + token_ids, + token_uris, + token_data, + sender, + receiver, + memo, + }; + + packet_data.validate_basic()?; + + Ok(packet_data) + } + + /// Performs the basic validation of the packet data fields. + pub fn validate_basic(&self) -> Result<(), NftTransferError> { + if self.token_ids.0.is_empty() { + return Err(NftTransferError::NoTokenId); + } + let num = self.token_ids.0.len(); + let num_uri = self + .token_uris + .as_ref() + .map(|t| t.len()) + .unwrap_or_default(); + let num_data = self + .token_data + .as_ref() + .map(|t| t.len()) + .unwrap_or_default(); + if (num_uri != 0 && num_uri != num) || (num_data != 0 && num_data != num) { + return Err(NftTransferError::TokenMismatched); + } + Ok(()) + } +} + +impl TryFrom for PacketData { + type Error = NftTransferError; + + fn try_from(raw_pkt_data: RawPacketData) -> Result { + let class_uri = if raw_pkt_data.class_uri.is_empty() { + None + } else { + Some(raw_pkt_data.class_uri.parse()?) + }; + let class_data = if raw_pkt_data.class_data.is_empty() { + None + } else { + let decoded = BASE64_STANDARD + .decode(raw_pkt_data.class_data) + .map_err(|_| NftTransferError::InvalidJsonData)?; + let data_str = + String::from_utf8(decoded).map_err(|_| NftTransferError::InvalidJsonData)?; + Some(data_str.parse()?) 
+ }; + + let token_ids = raw_pkt_data.token_ids.try_into()?; + let token_uris: Result, _> = + raw_pkt_data.token_uris.iter().map(|t| t.parse()).collect(); + let token_data: Result, _> = raw_pkt_data + .token_data + .iter() + .map(|data| { + let decoded = BASE64_STANDARD + .decode(data) + .map_err(|_| NftTransferError::InvalidJsonData)?; + let data_str = + String::from_utf8(decoded).map_err(|_| NftTransferError::InvalidJsonData)?; + data_str.parse() + }) + .collect(); + Self::new( + raw_pkt_data.class_id.parse()?, + class_uri, + class_data, + token_ids, + token_uris?, + token_data?, + raw_pkt_data.sender.into(), + raw_pkt_data.receiver.into(), + raw_pkt_data.memo.into(), + ) + } +} + +impl From for RawPacketData { + fn from(pkt_data: PacketData) -> Self { + Self { + class_id: pkt_data.class_id.to_string(), + class_uri: pkt_data + .class_uri + .map(|c| c.to_string()) + .unwrap_or_default(), + class_data: pkt_data + .class_data + .map(|c| BASE64_STANDARD.encode(c.to_string())) + .unwrap_or_default(), + token_ids: pkt_data + .token_ids + .as_ref() + .iter() + .map(|t| t.to_string()) + .collect(), + token_uris: pkt_data + .token_uris + .map(|uris| uris.iter().map(|t| t.to_string()).collect()) + .unwrap_or_default(), + token_data: pkt_data + .token_data + .map(|data| { + data.iter() + .map(|t| BASE64_STANDARD.encode(t.to_string())) + .collect() + }) + .unwrap_or_default(), + sender: pkt_data.sender.to_string(), + receiver: pkt_data.receiver.to_string(), + memo: pkt_data.memo.map(|m| m.to_string()).unwrap_or_default(), + } + } +} + +#[cfg(test)] +mod tests { + use core::str::FromStr; + + use super::*; + + const DUMMY_ADDRESS: &str = "cosmos1wxeyh7zgn4tctjzs0vtqpc6p5cxq5t2muzl7ng"; + const DUMMY_CLASS_ID: &str = "class"; + const DUMMY_URI: &str = "http://example.com"; + const DUMMY_DATA: &str = + r#"{"image":{"value":"binary","mime":"image/png"},"name":{"value":"Crypto Creatures"}}"#; + + impl PacketData { + pub fn new_dummy(memo: Option<&str>) -> Self { + let address: Signer = DUMMY_ADDRESS.to_string().into(); + + Self { + class_id: PrefixedClassId::from_str(DUMMY_CLASS_ID).unwrap(), + class_uri: Some(ClassUri::from_str(DUMMY_URI).unwrap()), + class_data: Some(ClassData::from_str(DUMMY_DATA).unwrap()), + token_ids: TokenIds::try_from(vec!["token_0".to_string(), "token_1".to_string()]) + .unwrap(), + token_uris: Some(vec![ + TokenUri::from_str(DUMMY_URI).unwrap(), + TokenUri::from_str(DUMMY_URI).unwrap(), + ]), + token_data: Some(vec![ + TokenData::from_str(DUMMY_DATA).unwrap(), + TokenData::from_str(DUMMY_DATA).unwrap(), + ]), + sender: address.clone(), + receiver: address, + memo: memo.map(|m| m.to_string().into()), + } + } + + pub fn new_min_dummy() -> Self { + let address: Signer = DUMMY_ADDRESS.to_string().into(); + + Self { + class_id: PrefixedClassId::from_str(DUMMY_CLASS_ID).unwrap(), + class_uri: None, + class_data: None, + token_ids: TokenIds::try_from(vec!["token_0".to_string()]).unwrap(), + token_uris: None, + token_data: None, + sender: address.clone(), + receiver: address, + memo: None, + } + } + + pub fn ser_json_assert_eq(&self, json: &str) { + let ser = serde_json::to_string(&self).unwrap(); + assert_eq!(ser, json); + } + + pub fn deser_json_assert_eq(&self, json: &str) { + let deser: Self = serde_json::from_str(json).unwrap(); + + if let Some(data) = &deser.class_data { + assert!(data.as_ref().parse_as_ics721_data().is_ok()); + }; + + if let Some(token_data) = &deser.token_data { + for data in token_data.iter() { + assert!(data.as_ref().parse_as_ics721_data().is_ok()); + } + } + 
+ assert_eq!(&deser, self); + } + } + + fn dummy_min_json_packet_data() -> &'static str { + r#"{"classId":"class","tokenIds":["token_0"],"sender":"cosmos1wxeyh7zgn4tctjzs0vtqpc6p5cxq5t2muzl7ng","receiver":"cosmos1wxeyh7zgn4tctjzs0vtqpc6p5cxq5t2muzl7ng"}"# + } + + fn dummy_min_json_packet_data_with_null() -> &'static str { + r#"{"classId":"class","classUri":null,"classData":null,"tokenIds":["token_0"],"tokenUris":null,"tokenData":null,"sender":"cosmos1wxeyh7zgn4tctjzs0vtqpc6p5cxq5t2muzl7ng","receiver":"cosmos1wxeyh7zgn4tctjzs0vtqpc6p5cxq5t2muzl7ng"}"# + } + + fn dummy_json_packet_data() -> &'static str { + r#"{"classId":"class","classUri":"http://example.com/","classData":"eyJpbWFnZSI6eyJ2YWx1ZSI6ImJpbmFyeSIsIm1pbWUiOiJpbWFnZS9wbmcifSwibmFtZSI6eyJ2YWx1ZSI6IkNyeXB0byBDcmVhdHVyZXMifX0=","tokenIds":["token_0","token_1"],"tokenUris":["http://example.com/","http://example.com/"],"tokenData":["eyJpbWFnZSI6eyJ2YWx1ZSI6ImJpbmFyeSIsIm1pbWUiOiJpbWFnZS9wbmcifSwibmFtZSI6eyJ2YWx1ZSI6IkNyeXB0byBDcmVhdHVyZXMifX0=","eyJpbWFnZSI6eyJ2YWx1ZSI6ImJpbmFyeSIsIm1pbWUiOiJpbWFnZS9wbmcifSwibmFtZSI6eyJ2YWx1ZSI6IkNyeXB0byBDcmVhdHVyZXMifX0="],"sender":"cosmos1wxeyh7zgn4tctjzs0vtqpc6p5cxq5t2muzl7ng","receiver":"cosmos1wxeyh7zgn4tctjzs0vtqpc6p5cxq5t2muzl7ng","memo":"memo"}"# + } + + fn dummy_json_packet_data_without_memo() -> &'static str { + r#"{"classId":"class","classUri":"http://example.com/","classData":"eyJpbWFnZSI6eyJ2YWx1ZSI6ImJpbmFyeSIsIm1pbWUiOiJpbWFnZS9wbmcifSwibmFtZSI6eyJ2YWx1ZSI6IkNyeXB0byBDcmVhdHVyZXMifX0=","tokenIds":["token_0","token_1"],"tokenUris":["http://example.com/","http://example.com/"],"tokenData":["eyJpbWFnZSI6eyJ2YWx1ZSI6ImJpbmFyeSIsIm1pbWUiOiJpbWFnZS9wbmcifSwibmFtZSI6eyJ2YWx1ZSI6IkNyeXB0byBDcmVhdHVyZXMifX0=","eyJpbWFnZSI6eyJ2YWx1ZSI6ImJpbmFyeSIsIm1pbWUiOiJpbWFnZS9wbmcifSwibmFtZSI6eyJ2YWx1ZSI6IkNyeXB0byBDcmVhdHVyZXMifX0="],"sender":"cosmos1wxeyh7zgn4tctjzs0vtqpc6p5cxq5t2muzl7ng","receiver":"cosmos1wxeyh7zgn4tctjzs0vtqpc6p5cxq5t2muzl7ng"}"# + } + + /// Ensures `PacketData` properly encodes to JSON by first converting to a + /// `RawPacketData` and then serializing that. + #[test] + fn test_packet_data_ser() { + PacketData::new_dummy(Some("memo")).ser_json_assert_eq(dummy_json_packet_data()); + } + + /// Ensures `PacketData` properly decodes from JSON by first deserializing to a + /// `RawPacketData` and then converting from that. 
+ #[test] + fn test_packet_data_deser() { + PacketData::new_dummy(Some("memo")).deser_json_assert_eq(dummy_json_packet_data()); + PacketData::new_dummy(None).deser_json_assert_eq(dummy_json_packet_data_without_memo()); + PacketData::new_min_dummy().deser_json_assert_eq(dummy_min_json_packet_data()); + PacketData::new_min_dummy().deser_json_assert_eq(dummy_min_json_packet_data_with_null()); + } + + #[test] + fn test_invalid_packet_data() { + // the number of tokens is mismatched + let packet_data = r#"{"class_id":"class","token_ids":["token_0","token_1"],"token_uris":["http://example.com"],"token_data":["{\"image\":{\"value\":\"binary\",\"mime\":\"image/png\"},\"name\":{\"value\":\"Crypto Creatures\"}}"],"sender":"cosmos1wxeyh7zgn4tctjzs0vtqpc6p5cxq5t2muzl7ng","receiver":"cosmos1wxeyh7zgn4tctjzs0vtqpc6p5cxq5t2muzl7ng","memo":""}"#; + assert!( + serde_json::from_str::(packet_data).is_err(), + "num of token data is unmatched" + ); + + // No token ID + let packet_data = r#"{"class_id":"class","token_ids":[],"sender":"cosmos1wxeyh7zgn4tctjzs0vtqpc6p5cxq5t2muzl7ng","receiver":"cosmos1wxeyh7zgn4tctjzs0vtqpc6p5cxq5t2muzl7ng","memo":""}"#; + assert!( + serde_json::from_str::(packet_data).is_err(), + "no token ID" + ); + } +} diff --git a/ibc-apps/ics721-nft-transfer/types/src/serializers.rs b/ibc-apps/ics721-nft-transfer/types/src/serializers.rs new file mode 100644 index 0000000000..65c4d2ba61 --- /dev/null +++ b/ibc-apps/ics721-nft-transfer/types/src/serializers.rs @@ -0,0 +1,27 @@ +use core::fmt::Display; +use core::str::FromStr; + +use ibc_core::primitives::prelude::*; +use serde::{de, Deserialize, Deserializer, Serializer}; + +// Note: This method serializes to a String instead of a str +// in order to avoid a wasm compilation issue. Specifically, +// str (de)serialization hits some kind of f64/f32 case +// when compiled into wasm, but this fails validation on +// f32/f64 wasm runtimes. +pub fn serialize(value: &T, serializer: S) -> Result +where + T: Display, + S: Serializer, +{ + serializer.serialize_str(value.to_string().as_ref()) +} + +pub fn deserialize<'de, T, D>(deserializer: D) -> Result +where + T: FromStr, + T::Err: Display, + D: Deserializer<'de>, +{ + T::from_str(::deserialize(deserializer)?.as_str()).map_err(de::Error::custom) +} diff --git a/ibc-apps/ics721-nft-transfer/types/src/token.rs b/ibc-apps/ics721-nft-transfer/types/src/token.rs new file mode 100644 index 0000000000..4965d49bed --- /dev/null +++ b/ibc-apps/ics721-nft-transfer/types/src/token.rs @@ -0,0 +1,268 @@ +//! Defines Non-Fungible Token Transfer (ICS-721) token types. 
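Annotation (not part of the diff): `PacketData::new` in `packet.rs` above normalises empty URI/data vectors and an empty memo to `None`, and `validate_basic` rejects a missing token ID list or URI/data counts that do not match the number of token IDs. The sketch below builds a validated payload; the `ibc::...` re-export paths are assumed from the testkit imports later in this PR.

```rust
// Sketch: constructing a validated ICS-721 packet payload.
use ibc::apps::nft_transfer::types::error::NftTransferError;
use ibc::apps::nft_transfer::types::packet::PacketData;
use ibc::apps::nft_transfer::types::{Memo, PrefixedClassId, TokenIds};
use ibc::core::primitives::Signer;

fn build_packet_data() -> Result<PacketData, NftTransferError> {
    let sender: Signer = "cosmos1sender".to_string().into();
    let receiver: Signer = "cosmos1receiver".to_string().into();

    PacketData::new(
        "transfer/channel-0/myclass".parse::<PrefixedClassId>()?,
        None, // class_uri
        None, // class_data
        TokenIds::try_from(vec!["token_0".to_string(), "token_1".to_string()])?,
        vec![], // token_uris: either empty or one URI per token ID
        vec![], // token_data: either empty or one data entry per token ID
        sender,
        receiver,
        Memo::from(String::new()), // empty memo is normalised to `None`
    )
}
```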
+use core::fmt::{self, Display}; +use core::str::FromStr; + +use http::Uri; +use ibc_core::primitives::prelude::*; + +use crate::data::Data; +use crate::error::NftTransferError; +use crate::serializers; + +/// Token ID for an NFT +#[cfg_attr( + feature = "parity-scale-codec", + derive( + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_info::TypeInfo + ) +)] +#[cfg_attr( + feature = "borsh", + derive(borsh::BorshSerialize, borsh::BorshDeserialize) +)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub struct TokenId(String); + +impl AsRef for TokenId { + fn as_ref(&self) -> &str { + &self.0 + } +} + +impl Display for TokenId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +impl FromStr for TokenId { + type Err = NftTransferError; + + fn from_str(token_id: &str) -> Result { + if token_id.trim().is_empty() { + Err(NftTransferError::InvalidTokenId) + } else { + Ok(Self(token_id.to_string())) + } + } +} + +#[cfg_attr( + feature = "parity-scale-codec", + derive( + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_info::TypeInfo + ) +)] +#[cfg_attr( + feature = "borsh", + derive(borsh::BorshSerialize, borsh::BorshDeserialize) +)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct TokenIds(pub Vec); + +impl TokenIds { + pub fn as_ref(&self) -> Vec<&TokenId> { + self.0.iter().collect() + } +} + +impl Display for TokenIds { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "{}", + self.0 + .iter() + .map(|t| t.to_string()) + .collect::>() + .join(",") + ) + } +} + +impl TryFrom> for TokenIds { + type Error = NftTransferError; + + fn try_from(token_ids: Vec) -> Result { + if token_ids.is_empty() { + return Err(NftTransferError::NoTokenId); + } + let ids: Result, _> = token_ids.iter().map(|t| t.parse()).collect(); + let mut ids = ids?; + ids.sort(); + ids.dedup(); + if ids.len() != token_ids.len() { + return Err(NftTransferError::DuplicatedTokenIds); + } + Ok(Self(ids)) + } +} + +/// Token URI for an NFT +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct TokenUri( + #[cfg_attr(feature = "serde", serde(with = "serializers"))] + #[cfg_attr(feature = "schema", schemars(with = "String"))] + Uri, +); + +#[cfg(feature = "borsh")] +impl borsh::BorshSerialize for TokenUri { + fn serialize( + &self, + writer: &mut W, + ) -> borsh::maybestd::io::Result<()> { + borsh::BorshSerialize::serialize(&self.to_string(), writer) + } +} + +#[cfg(feature = "borsh")] +impl borsh::BorshDeserialize for TokenUri { + fn deserialize_reader( + reader: &mut R, + ) -> borsh::maybestd::io::Result { + let uri = String::deserialize_reader(reader)?; + Ok(TokenUri::from_str(&uri).map_err(|_| borsh::maybestd::io::ErrorKind::Other)?) 
+ } +} + +#[cfg(feature = "parity-scale-codec")] +impl parity_scale_codec::Encode for TokenUri { + fn encode_to(&self, writer: &mut T) { + self.to_string().encode_to(writer); + } +} + +#[cfg(feature = "parity-scale-codec")] +impl parity_scale_codec::Decode for TokenUri { + fn decode( + input: &mut I, + ) -> Result { + let uri = String::decode(input)?; + TokenUri::from_str(&uri).map_err(|_| parity_scale_codec::Error::from("from str error")) + } +} + +#[cfg(feature = "parity-scale-codec")] +impl scale_info::TypeInfo for TokenUri { + type Identity = Self; + + fn type_info() -> scale_info::Type { + scale_info::Type::builder() + .path(scale_info::Path::new("TokenUri", module_path!())) + .composite( + scale_info::build::Fields::unnamed() + .field(|f| f.ty::().type_name("String")), + ) + } +} + +impl Display for TokenUri { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +impl FromStr for TokenUri { + type Err = NftTransferError; + + fn from_str(token_uri: &str) -> Result { + match Uri::from_str(token_uri) { + Ok(uri) => Ok(Self(uri)), + Err(err) => Err(NftTransferError::InvalidUri { + uri: token_uri.to_string(), + validation_error: err, + }), + } + } +} + +/// Token data for an NFT +#[cfg_attr( + feature = "parity-scale-codec", + derive( + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_info::TypeInfo + ) +)] +#[cfg_attr( + feature = "borsh", + derive(borsh::BorshSerialize, borsh::BorshDeserialize) +)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] +#[derive(Clone, Debug, PartialEq, Eq, derive_more::AsRef)] +pub struct TokenData(Data); + +impl Display for TokenData { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +impl FromStr for TokenData { + type Err = NftTransferError; + + fn from_str(token_data: &str) -> Result { + let data = Data::from_str(token_data)?; + Ok(Self(data)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[cfg(feature = "serde")] + #[test] + fn test_serde_json_roundtrip() { + fn serde_roundtrip(token_uri: TokenUri) { + let serialized = + serde_json::to_string(&token_uri).expect("failed to serialize TokenUri"); + let deserialized = serde_json::from_str::(&serialized) + .expect("failed to deserialize TokenUri"); + + assert_eq!(deserialized, token_uri); + } + + let uri = "/foo/bar?baz".parse::().unwrap(); + serde_roundtrip(TokenUri(uri)); + + let uri = "https://www.rust-lang.org/install.html" + .parse::() + .unwrap(); + serde_roundtrip(TokenUri(uri)); + } + + #[cfg(feature = "borsh")] + #[test] + fn test_borsh_roundtrip() { + fn borsh_roundtrip(token_uri: TokenUri) { + use borsh::{BorshDeserialize, BorshSerialize}; + + let token_uri_bytes = token_uri.try_to_vec().unwrap(); + let res = TokenUri::try_from_slice(&token_uri_bytes).unwrap(); + + assert_eq!(token_uri, res); + } + + let uri = "/foo/bar?baz".parse::().unwrap(); + borsh_roundtrip(TokenUri(uri)); + + let uri = "https://www.rust-lang.org/install.html" + .parse::() + .unwrap(); + borsh_roundtrip(TokenUri(uri)); + } +} diff --git a/ibc-apps/src/lib.rs b/ibc-apps/src/lib.rs index 2467130118..306624bc0a 100644 --- a/ibc-apps/src/lib.rs +++ b/ibc-apps/src/lib.rs @@ -18,3 +18,12 @@ pub mod transfer { #[doc(inline)] pub use ibc_app_transfer::*; } + +/// Re-exports the implementation of the IBC [Non-Fungible Token +/// Transfer](https://github.com/cosmos/ibc/blob/main/spec/app/ics-721-nft-transfer/README.md) +/// 
(ICS-721) application logic. +pub mod nft_transfer { + #[doc(inline)] + #[cfg(feature = "nft-transfer")] + pub use ibc_app_nft_transfer::*; +} diff --git a/ibc-primitives/src/prelude.rs b/ibc-primitives/src/prelude.rs index 3662f7bb90..b7ac674e9f 100644 --- a/ibc-primitives/src/prelude.rs +++ b/ibc-primitives/src/prelude.rs @@ -2,6 +2,7 @@ // https://doc.rust-lang.org/src/alloc/prelude/v1.rs.html pub use alloc::borrow::ToOwned; pub use alloc::boxed::Box; +pub use alloc::collections::BTreeMap; pub use alloc::string::{String, ToString}; pub use alloc::vec::Vec; pub use alloc::{format, str, vec}; diff --git a/ibc-testkit/src/testapp/ibc/applications/mod.rs b/ibc-testkit/src/testapp/ibc/applications/mod.rs index 014e52f277..85baa285fa 100644 --- a/ibc-testkit/src/testapp/ibc/applications/mod.rs +++ b/ibc-testkit/src/testapp/ibc/applications/mod.rs @@ -1 +1,2 @@ +pub mod nft_transfer; pub mod transfer; diff --git a/ibc-testkit/src/testapp/ibc/applications/nft_transfer/context.rs b/ibc-testkit/src/testapp/ibc/applications/nft_transfer/context.rs new file mode 100644 index 0000000000..a526dcf45a --- /dev/null +++ b/ibc-testkit/src/testapp/ibc/applications/nft_transfer/context.rs @@ -0,0 +1,185 @@ +use ibc::apps::nft_transfer::context::{ + NftClassContext, NftContext, NftTransferExecutionContext, NftTransferValidationContext, +}; +use ibc::apps::nft_transfer::types::error::NftTransferError; +use ibc::apps::nft_transfer::types::{ + ClassData, ClassId, ClassUri, Memo, PrefixedClassId, TokenData, TokenId, TokenUri, +}; +use ibc::core::host::types::identifiers::{ChannelId, PortId}; +use ibc::core::primitives::prelude::*; +use ibc::core::primitives::Signer; + +use super::types::{DummyNft, DummyNftClass, DummyNftTransferModule}; + +impl NftContext for DummyNft { + fn get_class_id(&self) -> &ClassId { + &self.class_id + } + + fn get_id(&self) -> &TokenId { + &self.token_id + } + + fn get_uri(&self) -> Option<&TokenUri> { + self.token_uri.as_ref() + } + + fn get_data(&self) -> Option<&TokenData> { + self.token_data.as_ref() + } +} + +impl NftClassContext for DummyNftClass { + fn get_id(&self) -> &ClassId { + &self.class_id + } + + fn get_uri(&self) -> Option<&ClassUri> { + self.class_uri.as_ref() + } + + fn get_data(&self) -> Option<&ClassData> { + self.class_data.as_ref() + } +} + +impl NftTransferValidationContext for DummyNftTransferModule { + type AccountId = Signer; + type Nft = DummyNft; + type NftClass = DummyNftClass; + + fn get_port(&self) -> Result { + Ok(PortId::transfer()) + } + + fn can_send_nft(&self) -> Result<(), NftTransferError> { + Ok(()) + } + + fn can_receive_nft(&self) -> Result<(), NftTransferError> { + Ok(()) + } + + fn create_or_update_class_validate( + &self, + _class_id: &PrefixedClassId, + _class_uri: Option<&ClassUri>, + _class_data: Option<&ClassData>, + ) -> Result<(), NftTransferError> { + Ok(()) + } + + fn escrow_nft_validate( + &self, + _from_account: &Self::AccountId, + _port_id: &PortId, + _channel_id: &ChannelId, + _class_id: &PrefixedClassId, + _token_id: &TokenId, + _memo: &Memo, + ) -> Result<(), NftTransferError> { + Ok(()) + } + + fn unescrow_nft_validate( + &self, + _to_account: &Self::AccountId, + _port_id: &PortId, + _channel_id: &ChannelId, + _class_id: &PrefixedClassId, + _token_id: &TokenId, + ) -> Result<(), NftTransferError> { + Ok(()) + } + + fn mint_nft_validate( + &self, + _account: &Self::AccountId, + _class_id: &PrefixedClassId, + _token_id: &TokenId, + _token_uri: Option<&TokenUri>, + _token_data: Option<&TokenData>, + ) -> Result<(), 
+        Ok(())
+    }
+
+    fn burn_nft_validate(
+        &self,
+        _account: &Self::AccountId,
+        _class_id: &PrefixedClassId,
+        _token_id: &TokenId,
+        _memo: &Memo,
+    ) -> Result<(), NftTransferError> {
+        Ok(())
+    }
+
+    fn get_nft(
+        &self,
+        _class_id: &PrefixedClassId,
+        _token_id: &TokenId,
+    ) -> Result<Self::Nft, NftTransferError> {
+        Ok(DummyNft::default())
+    }
+
+    fn get_nft_class(
+        &self,
+        _class_id: &PrefixedClassId,
+    ) -> Result<Self::NftClass, NftTransferError> {
+        Ok(DummyNftClass::default())
+    }
+}
+
+impl NftTransferExecutionContext for DummyNftTransferModule {
+    fn create_or_update_class_execute(
+        &self,
+        _class_id: &PrefixedClassId,
+        _class_uri: Option<&ClassUri>,
+        _class_data: Option<&ClassData>,
+    ) -> Result<(), NftTransferError> {
+        Ok(())
+    }
+
+    fn escrow_nft_execute(
+        &mut self,
+        _from_account: &Self::AccountId,
+        _port_id: &PortId,
+        _channel_id: &ChannelId,
+        _class_id: &PrefixedClassId,
+        _token_id: &TokenId,
+        _memo: &Memo,
+    ) -> Result<(), NftTransferError> {
+        Ok(())
+    }
+
+    fn unescrow_nft_execute(
+        &mut self,
+        _to_account: &Self::AccountId,
+        _port_id: &PortId,
+        _channel_id: &ChannelId,
+        _class_id: &PrefixedClassId,
+        _token_id: &TokenId,
+    ) -> Result<(), NftTransferError> {
+        Ok(())
+    }
+
+    fn mint_nft_execute(
+        &mut self,
+        _account: &Self::AccountId,
+        _class_id: &PrefixedClassId,
+        _token_id: &TokenId,
+        _token_uri: Option<&TokenUri>,
+        _token_data: Option<&TokenData>,
+    ) -> Result<(), NftTransferError> {
+        Ok(())
+    }
+
+    fn burn_nft_execute(
+        &mut self,
+        _account: &Self::AccountId,
+        _class_id: &PrefixedClassId,
+        _token_id: &TokenId,
+        _memo: &Memo,
+    ) -> Result<(), NftTransferError> {
+        Ok(())
+    }
+}
diff --git a/ibc-testkit/src/testapp/ibc/applications/nft_transfer/mod.rs b/ibc-testkit/src/testapp/ibc/applications/nft_transfer/mod.rs
new file mode 100644
index 0000000000..a811b728e3
--- /dev/null
+++ b/ibc-testkit/src/testapp/ibc/applications/nft_transfer/mod.rs
@@ -0,0 +1,4 @@
+#[cfg(feature = "serde")]
+pub mod context;
+pub mod module;
+pub mod types;
diff --git a/ibc-testkit/src/testapp/ibc/applications/nft_transfer/module.rs b/ibc-testkit/src/testapp/ibc/applications/nft_transfer/module.rs
new file mode 100644
index 0000000000..06652a1dd6
--- /dev/null
+++ b/ibc-testkit/src/testapp/ibc/applications/nft_transfer/module.rs
@@ -0,0 +1,107 @@
+use ibc::core::channel::types::acknowledgement::Acknowledgement;
+use ibc::core::channel::types::channel::{Counterparty, Order};
+use ibc::core::channel::types::error::{ChannelError, PacketError};
+use ibc::core::channel::types::packet::Packet;
+use ibc::core::channel::types::Version;
+use ibc::core::host::types::identifiers::{ChannelId, ConnectionId, PortId};
+use ibc::core::primitives::prelude::*;
+use ibc::core::primitives::Signer;
+use ibc::core::router::module::Module;
+use ibc::core::router::types::module::ModuleExtras;
+
+use super::types::DummyNftTransferModule;
+
+impl Module for DummyNftTransferModule {
+    fn on_chan_open_init_validate(
+        &self,
+        _order: Order,
+        _connection_hops: &[ConnectionId],
+        _port_id: &PortId,
+        _channel_id: &ChannelId,
+        _counterparty: &Counterparty,
+        version: &Version,
+    ) -> Result<Version, ChannelError> {
+        Ok(version.clone())
+    }
+
+    fn on_chan_open_init_execute(
+        &mut self,
+        _order: Order,
+        _connection_hops: &[ConnectionId],
+        _port_id: &PortId,
+        _channel_id: &ChannelId,
+        _counterparty: &Counterparty,
+        version: &Version,
+    ) -> Result<(ModuleExtras, Version), ChannelError> {
+        Ok((ModuleExtras::empty(), version.clone()))
+    }
+
+    fn on_chan_open_try_validate(
+        &self,
+        _order: Order,
+        _connection_hops: &[ConnectionId],
+        _port_id: &PortId,
+        _channel_id: &ChannelId,
+        _counterparty: &Counterparty,
+        counterparty_version: &Version,
+    ) -> Result<Version, ChannelError> {
+        Ok(counterparty_version.clone())
+    }
+
+    fn on_chan_open_try_execute(
+        &mut self,
+        _order: Order,
+        _connection_hops: &[ConnectionId],
+        _port_id: &PortId,
+        _channel_id: &ChannelId,
+        _counterparty: &Counterparty,
+        counterparty_version: &Version,
+    ) -> Result<(ModuleExtras, Version), ChannelError> {
+        Ok((ModuleExtras::empty(), counterparty_version.clone()))
+    }
+
+    fn on_recv_packet_execute(
+        &mut self,
+        _packet: &Packet,
+        _relayer: &Signer,
+    ) -> (ModuleExtras, Acknowledgement) {
+        (
+            ModuleExtras::empty(),
+            Acknowledgement::try_from(vec![1u8]).expect("Never fails"),
+        )
+    }
+
+    fn on_timeout_packet_validate(
+        &self,
+        _packet: &Packet,
+        _relayer: &Signer,
+    ) -> Result<(), PacketError> {
+        Ok(())
+    }
+
+    fn on_timeout_packet_execute(
+        &mut self,
+        _packet: &Packet,
+        _relayer: &Signer,
+    ) -> (ModuleExtras, Result<(), PacketError>) {
+        (ModuleExtras::empty(), Ok(()))
+    }
+
+    fn on_acknowledgement_packet_validate(
+        &self,
+        _packet: &Packet,
+        _acknowledgement: &Acknowledgement,
+        _relayer: &Signer,
+    ) -> Result<(), PacketError> {
+        Ok(())
+    }
+
+    fn on_acknowledgement_packet_execute(
+        &mut self,
+        _packet: &Packet,
+        _acknowledgement: &Acknowledgement,
+        _relayer: &Signer,
+    ) -> (ModuleExtras, Result<(), PacketError>) {
+        (ModuleExtras::empty(), Ok(()))
+    }
+}
diff --git a/ibc-testkit/src/testapp/ibc/applications/nft_transfer/types.rs b/ibc-testkit/src/testapp/ibc/applications/nft_transfer/types.rs
new file mode 100644
index 0000000000..6e282fe61a
--- /dev/null
+++ b/ibc-testkit/src/testapp/ibc/applications/nft_transfer/types.rs
@@ -0,0 +1,61 @@
+use ibc::apps::nft_transfer::types::{ClassData, ClassId, ClassUri, TokenData, TokenId, TokenUri};
+
+#[derive(Debug)]
+pub struct DummyNftTransferModule;
+
+#[derive(Debug)]
+pub struct DummyNft {
+    pub class_id: ClassId,
+    pub token_id: TokenId,
+    pub token_uri: Option<TokenUri>,
+    pub token_data: Option<TokenData>,
+}
+
+impl Default for DummyNft {
+    fn default() -> Self {
+        let class_id = "class_0".parse().expect("infallible");
+        let token_id = "token_0".parse().expect("infallible");
+        let token_uri = Some("http://example.com".parse().expect("infallible"));
+        let data = r#"{"name":{"value":"Crypto Creatures"},"image":{"value":"binary","mime":"image/png"}}"#;
+        let token_data = Some(data.parse().expect("infallible"));
+        Self {
+            class_id,
+            token_id,
+            token_uri,
+            token_data,
+        }
+    }
+}
+
+#[derive(Debug)]
+pub struct DummyNftClass {
+    pub class_id: ClassId,
+    pub class_uri: Option<ClassUri>,
+    pub class_data: Option<ClassData>,
+}
+
+impl Default for DummyNftClass {
+    fn default() -> Self {
+        let class_id = "class_0".parse().expect("infallible");
+        let class_uri = Some("http://example.com".parse().expect("infallible"));
+        let data = r#"{"name":{"value":"Crypto Creatures"},"image":{"value":"binary","mime":"image/png"}}"#;
+        let class_data = Some(data.parse().expect("infallible"));
+        Self {
+            class_id,
+            class_uri,
+            class_data,
+        }
+    }
+}
+
+impl DummyNftTransferModule {
+    pub fn new() -> Self {
+        Self
+    }
+}
+
+impl Default for DummyNftTransferModule {
+    fn default() -> Self {
+        Self::new()
+    }
+}
diff --git a/ibc-testkit/tests/applications/mod.rs b/ibc-testkit/tests/applications/mod.rs
index 544cab2c51..2b046ccd56 100644
--- a/ibc-testkit/tests/applications/mod.rs
+++ b/ibc-testkit/tests/applications/mod.rs
@@ -1,2 +1,4 @@
 #[cfg(feature = "serde")]
+pub mod nft_transfer;
+#[cfg(feature = "serde")]
 pub mod transfer;
diff --git a/ibc-testkit/tests/applications/nft_transfer.rs b/ibc-testkit/tests/applications/nft_transfer.rs
new file mode 100644
index 0000000000..05996a534a
--- /dev/null
+++ b/ibc-testkit/tests/applications/nft_transfer.rs
@@ -0,0 +1,135 @@
+use ibc::apps::nft_transfer::module::{
+    on_chan_open_init_execute, on_chan_open_init_validate, on_chan_open_try_execute,
+    on_chan_open_try_validate,
+};
+use ibc::apps::nft_transfer::types::VERSION;
+use ibc::core::channel::types::channel::{Counterparty, Order};
+use ibc::core::channel::types::Version;
+use ibc::core::host::types::identifiers::{ChannelId, ConnectionId, PortId};
+use ibc::core::primitives::prelude::*;
+use ibc_testkit::testapp::ibc::applications::nft_transfer::types::DummyNftTransferModule;
+
+fn get_defaults() -> (
+    DummyNftTransferModule,
+    Order,
+    Vec<ConnectionId>,
+    PortId,
+    ChannelId,
+    Counterparty,
+) {
+    let order = Order::Unordered;
+    let connection_hops = vec![ConnectionId::new(1)];
+    let port_id = PortId::transfer();
+    let channel_id = ChannelId::new(1);
+    let counterparty = Counterparty::new(port_id.clone(), Some(channel_id.clone()));
+
+    (
+        DummyNftTransferModule,
+        order,
+        connection_hops,
+        port_id,
+        channel_id,
+        counterparty,
+    )
+}
+
+/// If the relayer passed "", indicating that it wants us to return the versions we support.
+#[test]
+fn test_on_chan_open_init_empty_version() {
+    let (mut ctx, order, connection_hops, port_id, channel_id, counterparty) = get_defaults();
+
+    let in_version = Version::new("".to_string());
+
+    let (_, out_version) = on_chan_open_init_execute(
+        &mut ctx,
+        order,
+        &connection_hops,
+        &port_id,
+        &channel_id,
+        &counterparty,
+        &in_version,
+    )
+    .unwrap();
+
+    assert_eq!(out_version, Version::new(VERSION.to_string()));
+}
+
+/// If the relayer passed in the only supported version (ics721-1), then return ics721-1
+#[test]
+fn test_on_chan_open_init_ics721_version() {
+    let (mut ctx, order, connection_hops, port_id, channel_id, counterparty) = get_defaults();
+
+    let in_version = Version::new(VERSION.to_string());
+    let (_, out_version) = on_chan_open_init_execute(
+        &mut ctx,
+        order,
+        &connection_hops,
+        &port_id,
+        &channel_id,
+        &counterparty,
+        &in_version,
+    )
+    .unwrap();
+
+    assert_eq!(out_version, Version::new(VERSION.to_string()));
+}
+
+/// If the relayer passed in an unsupported version, then fail
+#[test]
+fn test_on_chan_open_init_incorrect_version() {
+    let (ctx, order, connection_hops, port_id, channel_id, counterparty) = get_defaults();
+
+    let in_version = Version::new("some-unsupported-version".to_string());
+    let res = on_chan_open_init_validate(
+        &ctx,
+        order,
+        &connection_hops,
+        &port_id,
+        &channel_id,
+        &counterparty,
+        &in_version,
+    );
+
+    assert!(res.is_err());
+}
+
+/// If the counterparty supports ics721, then return ics721
+#[test]
+fn test_on_chan_open_try_counterparty_correct_version() {
+    let (mut ctx, order, connection_hops, port_id, channel_id, counterparty) = get_defaults();
+
+    let counterparty_version = Version::new(VERSION.to_string());
+
+    let (_, out_version) = on_chan_open_try_execute(
+        &mut ctx,
+        order,
+        &connection_hops,
+        &port_id,
+        &channel_id,
+        &counterparty,
+        &counterparty_version,
+    )
+    .unwrap();
+
+    assert_eq!(out_version, Version::new(VERSION.to_string()));
+}
+
+/// If the counterparty doesn't support ics721, then fail
+#[test]
+fn test_on_chan_open_try_counterparty_incorrect_version() {
+    let (ctx, order, connection_hops, port_id, channel_id, counterparty) = get_defaults();
+
Version::new("some-unsupported-version".to_string()); + + let res = on_chan_open_try_validate( + &ctx, + order, + &connection_hops, + &port_id, + &channel_id, + &counterparty, + &counterparty_version, + ); + + assert!(res.is_err()); +}