diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 7899e38af3..d62d29fa85 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -4,9 +4,11 @@ on: push: branches: - master + - dev pull_request: branches: - master + - dev defaults: run: @@ -97,9 +99,9 @@ jobs: strategy: matrix: os: - - macos-latest +# - macos-latest - ubuntu-latest - - windows-latest +# - windows-latest runs-on: ${{matrix.os}} diff --git a/Cargo.lock b/Cargo.lock index 25ff414367..3d2d00789c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -453,6 +453,19 @@ version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d86b93f97252c47b41663388e6d155714a9d0c398b99f1005cbc5f978b29f445" +[[package]] +name = "bigdecimal" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06619be423ea5bb86c95f087d5707942791a08a85530df0db2209a3ecfb8bc9" +dependencies = [ + "autocfg", + "libm", + "num-bigint", + "num-integer", + "num-traits", +] + [[package]] name = "bip39" version = "2.0.0" @@ -612,6 +625,7 @@ version = "1.0.83" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" dependencies = [ + "jobserver", "libc", ] @@ -747,6 +761,32 @@ dependencies = [ "windows-sys 0.45.0", ] +[[package]] +name = "const_fn" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbdcdcb6d86f71c5e97409ad45898af11cbc995b4ee8112d59095a28d376c935" + +[[package]] +name = "const_format" +version = "0.2.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3a214c7af3d04997541b18d432afaff4c455e79e2029079647e72fc2bd27673" +dependencies = [ + "const_format_proc_macros", +] + +[[package]] +name = "const_format_proc_macros" +version = "0.2.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7f6ff08fd20f4f299298a28e2dfa8a8ba1036e6cd2460ac1de7b425d76f2500" +dependencies = [ + "proc-macro2", + "quote", + "unicode-xid", +] + [[package]] name = "convert_case" version = "0.4.0" @@ -973,6 +1013,17 @@ dependencies = [ "powerfmt", ] +[[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "derive_builder" version = "0.12.0" @@ -1017,6 +1068,12 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "destructure_traitobject" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c877555693c14d2f84191cfd3ad8582790fc52b5e2274b40b59cf5f5cea25c7" + [[package]] name = "diff" version = "0.1.13" @@ -1382,6 +1439,19 @@ version = "0.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" +[[package]] +name = "git2" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbf97ba92db08df386e10c8ede66a2a0369bd277090afd8710e19e38de9ec0cd" +dependencies = [ + "bitflags 2.4.1", + "libc", + "libgit2-sys", + "log", + "url", +] + [[package]] name = "globset" version = "0.4.14" @@ -1432,7 +1502,7 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.11", - "indexmap", + "indexmap 2.1.0", "slab", "tokio", "tokio-util 0.7.10", @@ -1445,6 +1515,12 @@ version = "1.8.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + [[package]] name = "hashbrown" version = "0.14.3" @@ -1638,6 +1714,16 @@ dependencies = [ "unicode-normalization", ] +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", +] + [[package]] name = "indexmap" version = "2.1.0" @@ -1645,7 +1731,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" dependencies = [ "equivalent", - "hashbrown", + "hashbrown 0.14.3", + "serde", ] [[package]] @@ -1698,6 +1785,12 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "is_debug" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06d198e9919d9822d5f7083ba8530e04de87841eaf21ead9af8f2304efd57c89" + [[package]] name = "itertools" version = "0.10.5" @@ -1713,6 +1806,15 @@ version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" +[[package]] +name = "jobserver" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c37f63953c4c63420ed5fd3d6d398c719489b9f872b9fa683262f8edd363c7d" +dependencies = [ + "libc", +] + [[package]] name = "js-sys" version = "0.3.66" @@ -1772,7 +1874,7 @@ dependencies = [ "jsonrpc-server-utils", "log", "net2", - "parking_lot", + "parking_lot 0.11.2", "unicase", ] @@ -1815,6 +1917,24 @@ version = "0.2.151" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4" +[[package]] +name = "libgit2-sys" +version = "0.16.1+1.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2a2bb3680b094add03bb3732ec520ece34da31a8cd2d633d1389d0f0fb60d0c" +dependencies = [ + "cc", + "libc", + "libz-sys", + "pkg-config", +] + +[[package]] +name = "libm" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" + [[package]] name = "libredox" version = "0.0.1" @@ -1826,6 +1946,24 @@ dependencies = [ "redox_syscall 0.4.1", ] +[[package]] +name = "libz-sys" +version = "1.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d97137b25e321a73eef1418d1d5d2eda4d77e12813f8e6dead84bc52c5870a7b" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "linked-hash-map" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" + [[package]] name = "linux-raw-sys" version = "0.3.8" @@ -1853,6 +1991,42 @@ name = "log" version = "0.4.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" +dependencies = [ + "serde", +] + +[[package]] +name = "log-mdc" +version = "0.1.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a94d21414c1f4a51209ad204c1776a3d0765002c76c6abcb602a6f09f1e881c7" + +[[package]] +name = "log4rs" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d36ca1786d9e79b8193a68d480a0907b612f109537115c6ff655a3a1967533fd" +dependencies = [ + "anyhow", + "arc-swap", + "chrono", + "derivative", + "flate2", + "fnv", + "humantime", + "libc", + "log", + "log-mdc", + "parking_lot 0.12.1", + "serde", + "serde-value", + "serde_json", + "serde_yaml 0.8.26", + "thiserror", + "thread-id", + "typemap-ors", + "winapi", +] [[package]] name = "matchit" @@ -2018,6 +2192,12 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + [[package]] name = "num-integer" version = "0.1.45" @@ -2060,6 +2240,15 @@ dependencies = [ "libc", ] +[[package]] +name = "num_threads" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2819ce041d2ee131036f4fc9d6ae7ae125a3a40e97ba64d04fe799ad9dabbb44" +dependencies = [ + "libc", +] + [[package]] name = "number_prefix" version = "0.4.0" @@ -2156,6 +2345,7 @@ dependencies = [ "axum-server", "base64 0.21.5", "bech32", + "bigdecimal", "bip39", "bitcoin", "boilerplate", @@ -2178,32 +2368,41 @@ dependencies = [ "indicatif", "lazy_static", "log", + "log4rs", "mime", "mime_guess", "miniscript", "mp4", + "once_cell", "ord-bitcoincore-rpc", "pretty_assertions", "pulldown-cmark", + "rayon", "redb", "regex", "reqwest", + "rmp-serde", "rss", "rust-embed", "rustls 0.22.1", "rustls-acme", "serde", "serde_json", - "serde_yaml", + "serde_yaml 0.9.30", "sha3", + "shadow-rs", + "strum_macros 0.26.1", "sysinfo", "tempfile", "test-bitcoincore-rpc", + "thiserror", + "time", "tokio", "tokio-stream", "tokio-util 0.7.10", "tower-http", "unindent", + "utoipa", ] [[package]] @@ -2232,6 +2431,15 @@ dependencies = [ "serde_json", ] +[[package]] +name = "ordered-float" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" +dependencies = [ + "num-traits", +] + [[package]] name = "parking" version = "2.2.0" @@ -2246,7 +2454,17 @@ checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" dependencies = [ "instant", "lock_api", - "parking_lot_core", + "parking_lot_core 0.8.6", +] + +[[package]] +name = "parking_lot" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +dependencies = [ + "lock_api", + "parking_lot_core 0.9.9", ] [[package]] @@ -2263,6 +2481,25 @@ dependencies = [ "winapi", ] +[[package]] +name = "parking_lot_core" +version = "0.9.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "redox_syscall 0.4.1", + "smallvec", + "windows-targets 0.48.5", +] + +[[package]] +name = "paste" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" + [[package]] name = "pem" version = "1.1.1" @@ -2408,6 +2645,30 @@ dependencies = [ "toml", ] +[[package]] +name = "proc-macro-error" +version = 
"1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2", + "quote", + "syn 1.0.109", + "version_check", +] + +[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2", + "quote", + "version_check", +] + [[package]] name = "proc-macro2" version = "1.0.74" @@ -2646,6 +2907,28 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "rmp" +version = "0.8.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f9860a6cc38ed1da53456442089b4dfa35e7cedaa326df63017af88385e6b20" +dependencies = [ + "byteorder", + "num-traits", + "paste", +] + +[[package]] +name = "rmp-serde" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bffea85eea980d8a74453e5d02a8d93028f3c34725de143085a844ebe953258a" +dependencies = [ + "byteorder", + "rmp", + "serde", +] + [[package]] name = "rss" version = "2.0.6" @@ -2939,6 +3222,16 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "serde-value" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" +dependencies = [ + "ordered-float", + "serde", +] + [[package]] name = "serde_derive" version = "1.0.194" @@ -2956,7 +3249,7 @@ version = "1.0.110" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6fbd975230bada99c8bb618e0c365c2eefa219158d5c6c29610fd09ff1833257" dependencies = [ - "indexmap", + "indexmap 2.1.0", "itoa", "ryu", "serde", @@ -2984,13 +3277,25 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_yaml" +version = "0.8.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "578a7433b776b56a35785ed5ce9a7e777ac0598aac5a6dd1b4b18a307c7fc71b" +dependencies = [ + "indexmap 1.9.3", + "ryu", + "serde", + "yaml-rust", +] + [[package]] name = "serde_yaml" version = "0.9.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b1bf28c79a99f70ee1f1d83d10c875d2e70618417fda01ad1785e027579d9d38" dependencies = [ - "indexmap", + "indexmap 2.1.0", "itoa", "ryu", "serde", @@ -3029,6 +3334,19 @@ dependencies = [ "keccak", ] +[[package]] +name = "shadow-rs" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "615d846f7174a0850dca101bca72f6913e3376a64c5fda2b965d7fc3d1ff60cb" +dependencies = [ + "const_format", + "git2", + "is_debug", + "time", + "tzdb", +] + [[package]] name = "slab" version = "0.4.9" @@ -3088,7 +3406,7 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" dependencies = [ - "strum_macros", + "strum_macros 0.24.3", ] [[package]] @@ -3104,6 +3422,19 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "strum_macros" +version = "0.26.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a3417fc93d76740d974a01654a09777cb500428cc874ca9f45edfe0c4d4cd18" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "rustversion", + "syn 2.0.46", +] + [[package]] name = "subtle" version = "2.5.0" @@ -3244,14 +3575,27 @@ dependencies = [ "syn 2.0.46", ] +[[package]] +name = 
"thread-id" +version = "4.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0ec81c46e9eb50deaa257be2f148adf052d1fb7701cfd55ccfab2525280b70b" +dependencies = [ + "libc", + "winapi", +] + [[package]] name = "time" -version = "0.3.31" +version = "0.3.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f657ba42c3f86e7680e53c8cd3af8abbe56b5491790b46e22e19c0d57463583e" +checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" dependencies = [ "deranged", "itoa", + "libc", + "num-conv", + "num_threads", "powerfmt", "serde", "time-core", @@ -3266,10 +3610,11 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.16" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26197e33420244aeb70c3e8c78376ca46571bc4e701e4791c2cd9f57dcb3a43f" +checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" dependencies = [ + "num-conv", "time-core", ] @@ -3470,12 +3815,40 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" +[[package]] +name = "typemap-ors" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a68c24b707f02dd18f1e4ccceb9d49f2058c2fb86384ef9972592904d7a28867" +dependencies = [ + "unsafe-any-ors", +] + [[package]] name = "typenum" version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" +[[package]] +name = "tz-rs" +version = "0.6.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33851b15c848fad2cf4b105c6bb66eb9512b6f6c44a4b13f57c53c73c707e2b4" +dependencies = [ + "const_fn", +] + +[[package]] +name = "tzdb" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec758958f2fb5069cd7fae385be95cc8eceb8cdfd270c7d14de6034f0108d99e" +dependencies = [ + "iana-time-zone", + "tz-rs", +] + [[package]] name = "unicase" version = "2.7.0" @@ -3524,6 +3897,15 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7de7d73e1754487cb58364ee906a499937a0dfabd86bcb980fa99ec8c8fa2ce" +[[package]] +name = "unsafe-any-ors" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0a303d30665362d9680d7d91d78b23f5f899504d4f08b3c4cf08d055d87c0ad" +dependencies = [ + "destructure_traitobject", +] + [[package]] name = "unsafe-libyaml" version = "0.2.10" @@ -3565,6 +3947,30 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" +[[package]] +name = "utoipa" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ff05e3bac2c9428f57ade702667753ca3f5cf085e2011fe697de5bfd49aa72d" +dependencies = [ + "indexmap 2.1.0", + "serde", + "serde_json", + "utoipa-gen", +] + +[[package]] +name = "utoipa-gen" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f0b6f4667edd64be0e820d6631a60433a269710b6ee89ac39525b872b76d61d" +dependencies = [ + "proc-macro-error", + "proc-macro2", + "quote", + "syn 2.0.46", +] + [[package]] name = "vcpkg" version = "0.2.15" @@ -3966,6 +4372,15 @@ dependencies = [ "time", ] 
+[[package]] +name = "yaml-rust" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85" +dependencies = [ + "linked-hash-map", +] + [[package]] name = "yansi" version = "0.5.1" diff --git a/Cargo.toml b/Cargo.toml index 293755d3e5..5d1b17ddb4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -9,6 +9,7 @@ homepage = "https://github.com/ordinals/ord" repository = "https://github.com/ordinals/ord" autobins = false rust-version = "1.67" +build = "build.rs" [package.metadata.deb] copyright = "The Ord Maintainers" @@ -24,6 +25,7 @@ axum = { version = "0.6.1", features = ["headers", "http2"] } axum-server = "0.5.0" base64 = "0.21.0" bech32 = "0.9.1" +bigdecimal = "0.4.2" bip39 = "2.0.0" bitcoin = { version = "0.30.1", features = ["rand"] } boilerplate = { version = "1.0.0", features = ["axum"] } @@ -40,6 +42,7 @@ hex = "0.4.3" html-escaper = "0.2.0" http = "0.2.6" humantime = "2.1.0" +time = "0.3.36" hyper = { version = "0.14.24", features = ["client", "http2"] } indicatif = "0.17.1" lazy_static = "1.4.0" @@ -59,12 +62,20 @@ serde = { version = "1.0.137", features = ["derive"] } serde_json = { version = "1.0.81", features = ["preserve_order"] } serde_yaml = "0.9.17" sha3 = "0.10.8" +shadow-rs = "0.25.0" sysinfo = "0.30.3" tempfile = "3.2.0" tokio = { version = "1.17.0", features = ["rt-multi-thread"] } tokio-stream = "0.1.9" tokio-util = {version = "0.7.3", features = ["compat"] } tower-http = { version = "0.4.0", features = ["compression-br", "compression-gzip", "cors", "set-header"] } +utoipa = "4.1.0" +thiserror = "1.0.51" +log4rs = { version = "1.2.0", features = ["gzip"] } +once_cell = "1.19.0" +rmp-serde = "1.1.2" +rayon = "1.8.0" +strum_macros = "0.26.1" [dev-dependencies] criterion = "0.5.1" @@ -92,3 +103,8 @@ path = "tests/lib.rs" [build-dependencies] pulldown-cmark = "0.9.2" +shadow-rs = "0.25.0" + +[features] +default = [] +cache = [] diff --git a/README.md b/README.md index 8b38c20842..e1c89a62df 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,8 @@ `ord` ===== +Forks on [ordinals/ord](https://github.com/ordinals/ord) and adds the [BRC20 Protocol](https://domo-2.gitbook.io/brc-20-experiment/) feature. It can easily call the API, obtaining the BRC20 transaction and tick balance. +----------------------- `ord` is an index, block explorer, and command-line wallet. It is experimental software with no warranty. See [LICENSE](LICENSE) for more details. @@ -114,23 +116,6 @@ Once built, the `ord` binary can be found at `./target/release/ord`. `ord` requires `rustc` version 1.67.0 or later. Run `rustc --version` to ensure you have this version. Run `rustup update` to get the latest stable release. -### Homebrew - -`ord` is available in [Homebrew](https://brew.sh/): - -``` -brew install ord -``` - -### Debian Package - -To build a `.deb` package: - -``` -cargo install cargo-deb -cargo deb -``` - Contributing ------------ @@ -229,17 +214,43 @@ bitcoin_rpc_user: foo bitcoin_rpc_pass: bar ``` +Quick start +----------- +``` +./ord --data-dir=./_cache --log-level --rpc-url= --bitcoin-rpc-user --bitcoin-rpc-pass --enable-save-ord-receipts --enable-index-brc20 server --http-port +``` +For more commands, please check `./ord --help`. + +Features +-------- +- [x] Saving ord creation and transfer receipts.(with the `--enable-save-ord-receipts` command). +- [x] Indexing BRC20 protocol and saving states (with the `--enable-index-brc20` command). 
+- [x] Indexing the Bitmap collection and saving its state (with the `--enable-index-bitmap` flag). + +APIs +---- +For more information, see [openapi.json](./docs/api/openapi.json). + Logging -------- -`ord` uses [env_logger](https://docs.rs/env_logger/latest/env_logger/). Set the -`RUST_LOG` environment variable in order to turn on logging. For example, run +`ord` uses [log4rs](https://docs.rs/log4rs/latest/log4rs/) instead of [env_logger](https://docs.rs/env_logger/latest/env_logger/). Set the +`--log-level` argument to turn on logging. For example, run the server and show `info`-level log messages and above: ``` -$ RUST_LOG=info cargo run server +$ cargo run server --log-level info ``` +Snapshot +-------- +Use a snapshot to quickly synchronize the BRC20 indexer database. + +1. Download the snapshot database for the desired height from this page: +- + +2. Extract the `.tar.gz` archive and replace the database file. + New Releases ------------ diff --git a/bin/forbid b/bin/forbid index a5a82f116e..e3b4aaa03f 100755 --- a/bin/forbid +++ b/bin/forbid @@ -9,5 +9,5 @@ which rg > /dev/null --glob '!docs/src/bounty/frequency.tsv' \ --glob '!docs/po/*' \ --ignore-case \ - 'dbg!|fixme|todo|xxx' \ + 'dbg!|fixme|xxx' \ . diff --git a/build.rs b/build.rs index 419575640a..9e70ec03a0 100644 --- a/build.rs +++ b/build.rs @@ -24,7 +24,7 @@ fn git_commit() -> Option<String> { .map(|branch| branch.into()) } -fn main() { +fn main() -> shadow_rs::SdResult<()> { println!( "cargo:rustc-env=GIT_BRANCH={}", git_branch().unwrap_or_default() ); @@ -33,4 +33,5 @@ fn main() { "cargo:rustc-env=GIT_COMMIT={}", git_commit().unwrap_or_default() ); + shadow_rs::new() } diff --git a/docs/api/README.md b/docs/api/README.md new file mode 100644 index 0000000000..bd7403d4c2 --- /dev/null +++ b/docs/api/README.md @@ -0,0 +1,15 @@ +# BRC20 indexer API +This document defines a REST API specification for querying the BRC20 indexer. + + +See the spec source: +- [openapi.json](./openapi.json) + +## Sanity checks +Double-check that the spec looks good by running the server and then visiting http://127.0.0.1/api/v1/api-docs/openapi.json.
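+
+## Example query
+A minimal sketch of calling one of the documented endpoints with `curl`, assuming the server is reachable at the same address as the sanity-check URL above; the ticker name `ordi` is only a placeholder for a ticker that has actually been deployed:
+```bash
+# Query ticker info via the /api/v1/brc20/tick/{ticker} endpoint described in openapi.json.
+# `ordi` is an example ticker name; substitute any deployed BRC20 ticker.
+curl -s http://127.0.0.1/api/v1/brc20/tick/ordi
+# Per openapi.json, a successful response is wrapped as
+# {"code": ..., "msg": "ok", "data": { ...ticker info... }}.
+```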
+ + +How to running indexer server +```bash +ord --bitcoin-rpc-user foo --bitcoin-rpc-pass bar server +``` \ No newline at end of file diff --git a/docs/api/openapi.json b/docs/api/openapi.json new file mode 100644 index 0000000000..5fd37845e8 --- /dev/null +++ b/docs/api/openapi.json @@ -0,0 +1,2272 @@ +{ + "openapi": "3.0.3", + "info": { + "title": "ord", + "description": "◉ Ordinal wallet and block explorer", + "license": { + "name": "CC0-1.0" + }, + "version": "0.14.0" + }, + "paths": { + "/api/v1/brc20/address/{address}/balance": { + "get": { + "tags": [ + "brc20" + ], + "summary": "Get all ticker balances of the address.", + "description": "Get all ticker balances of the address.\n\nRetrieve all BRC20 protocol asset balances associated with a address.", + "operationId": "brc20_all_balance", + "parameters": [ + { + "name": "address", + "in": "path", + "description": "Address", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Obtain account balances by query address.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BRC20AllBalance" + } + } + } + }, + "400": { + "description": "Bad query.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 2, + "msg": "bad request" + } + } + } + }, + "404": { + "description": "Not found.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 3, + "msg": "not found" + } + } + } + }, + "500": { + "description": "Internal server error.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 1, + "msg": "internal error" + } + } + } + } + } + } + }, + "/api/v1/brc20/address/{address}/transferable": { + "get": { + "tags": [ + "brc20" + ], + "summary": "Get the balance of ticker of the address.", + "description": "Get the balance of ticker of the address.\n\nRetrieve the balance of the ticker from the given address.", + "operationId": "brc20_all_transferable", + "parameters": [ + { + "name": "address", + "in": "path", + "description": "Address", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Obtain account all transferable inscriptions.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BRC20Transferable" + } + } + } + }, + "400": { + "description": "Bad query.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 2, + "msg": "bad request" + } + } + } + }, + "404": { + "description": "Not found.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 3, + "msg": "not found" + } + } + } + }, + "500": { + "description": "Internal server error.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 1, + "msg": "internal error" + } + } + } + } + } + } + }, + "/api/v1/brc20/block/{blockhash}/events": { + "get": { + "tags": [ + "brc20" + ], + "summary": "Get block events by blockhash.", + "description": "Get block events by blockhash.\n\nRetrieve all BRC20 events associated with a block.", + "operationId": "brc20_block_events", + "parameters": [ + { + "name": "blockhash", + "in": "path", + "description": "block hash", + "required": true, + 
"schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Obtain block events by block hash", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BRC20BlockEvents" + } + } + } + }, + "400": { + "description": "Bad query.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 2, + "msg": "bad request" + } + } + } + }, + "404": { + "description": "Not found.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 3, + "msg": "not found" + } + } + } + }, + "500": { + "description": "Internal server error.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 1, + "msg": "internal error" + } + } + } + } + } + } + }, + "/api/v1/brc20/tick": { + "get": { + "tags": [ + "brc20" + ], + "summary": "Get all tickers info.", + "description": "Get all tickers info.\n\nRetrieve detailed information about all tickers.", + "operationId": "brc20_all_tick_info", + "responses": { + "200": { + "description": "Obtain matching all BRC20 tickers.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BRC20AllTick" + } + } + } + }, + "400": { + "description": "Bad query.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 2, + "msg": "bad request" + } + } + } + }, + "404": { + "description": "Not found.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 3, + "msg": "not found" + } + } + } + }, + "500": { + "description": "Internal server error.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 1, + "msg": "internal error" + } + } + } + } + } + } + }, + "/api/v1/brc20/tick/{ticker}": { + "get": { + "tags": [ + "brc20" + ], + "summary": "Get the ticker info.", + "description": "Get the ticker info.\n\nRetrieve detailed information about the ticker.", + "operationId": "brc20_tick_info", + "parameters": [ + { + "name": "ticker", + "in": "path", + "description": "Token ticker", + "required": true, + "schema": { + "type": "string", + "maxLength": 4, + "minLength": 4 + } + } + ], + "responses": { + "200": { + "description": "Obtain matching BRC20 ticker by query.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BRC20Tick" + } + } + } + }, + "400": { + "description": "Bad query.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 2, + "msg": "ticker must be 4 bytes length" + } + } + } + }, + "404": { + "description": "Ticker not found.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 3, + "msg": "tick not found" + } + } + } + }, + "500": { + "description": "Internal server error.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 1, + "msg": "internal error" + } + } + } + } + } + } + }, + "/api/v1/brc20/tick/{ticker}/address/{address}/balance": { + "get": { + "tags": [ + "brc20" + ], + "summary": "Get the ticker balance of the address.", + "description": "Get the ticker balance of the address.\n\nRetrieve the asset balance of the 
'ticker' for the address.", + "operationId": "brc20_balance", + "parameters": [ + { + "name": "ticker", + "in": "path", + "description": "Token ticker", + "required": true, + "schema": { + "type": "string", + "maxLength": 4, + "minLength": 4 + } + }, + { + "name": "address", + "in": "path", + "description": "Address", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Obtain account balance by query ticker.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BRC20Balance" + } + } + } + }, + "400": { + "description": "Bad query.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 2, + "msg": "bad request" + } + } + } + }, + "404": { + "description": "Not found.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 3, + "msg": "not found" + } + } + } + }, + "500": { + "description": "Internal server error.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 1, + "msg": "internal error" + } + } + } + } + } + } + }, + "/api/v1/brc20/tick/{ticker}/address/{address}/transferable": { + "get": { + "tags": [ + "brc20" + ], + "summary": "Get the transferable inscriptions of the address.", + "description": "Get the transferable inscriptions of the address.\n\nRetrieve the transferable inscriptions with the ticker from the given address.", + "operationId": "brc20_transferable", + "parameters": [ + { + "name": "ticker", + "in": "path", + "description": "Token ticker", + "required": true, + "schema": { + "type": "string", + "maxLength": 4, + "minLength": 4 + } + }, + { + "name": "address", + "in": "path", + "description": "Address", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Obtain account transferable inscriptions of ticker.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BRC20Transferable" + } + } + } + }, + "400": { + "description": "Bad query.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 2, + "msg": "bad request" + } + } + } + }, + "404": { + "description": "Not found.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 3, + "msg": "not found" + } + } + } + }, + "500": { + "description": "Internal server error.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 1, + "msg": "internal error" + } + } + } + } + } + } + }, + "/api/v1/brc20/tx/{txid}/events": { + "get": { + "tags": [ + "brc20" + ], + "summary": "Get transaction events by txid.", + "description": "Get transaction events by txid.\n\nRetrieve all BRC20 events associated with a transaction.", + "operationId": "brc20_tx_events", + "parameters": [ + { + "name": "txid", + "in": "path", + "description": "transaction ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Obtain transaction events by txid", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BRC20TxEvents" + } + } + } + }, + "400": { + "description": "Bad query.", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/ApiError" + }, + "example": { + "code": 2, + "msg": "bad request" + } + } + } + }, + "404": { + "description": "Not found.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 3, + "msg": "not found" + } + } + } + }, + "500": { + "description": "Internal server error.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 1, + "msg": "internal error" + } + } + } + } + } + } + }, + "/api/v1/node/info": { + "get": { + "tags": [ + "info" + ], + "summary": "Retrieve the indexer status.", + "description": "Retrieve the indexer status.\n\nDisplay indexer synchronization information, including indexer version, blockchain network, indexer height, blockchain network height, and other information.", + "operationId": "node_info", + "parameters": [ + { + "name": "btc", + "in": "path", + "description": "Optional to query the BTC chain status.", + "required": true, + "schema": { + "type": "boolean", + "nullable": true + } + } + ], + "responses": { + "200": { + "description": "Obtain node runtime status.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Node" + } + } + } + }, + "500": { + "description": "Internal server error.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 1, + "msg": "internal error" + } + } + } + } + } + } + }, + "/api/v1/ord/block/{blockhash}/inscriptions": { + "get": { + "tags": [ + "ord" + ], + "summary": "Retrieve the inscription actions from the given block.", + "description": "Retrieve the inscription actions from the given block.", + "operationId": "ord_block_inscriptions", + "parameters": [ + { + "name": "blockhash", + "in": "path", + "description": "block hash", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Obtain inscription actions by blockhash", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OrdBlockInscriptions" + } + } + } + }, + "400": { + "description": "Bad query.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 2, + "msg": "bad request" + } + } + } + }, + "404": { + "description": "Not found.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 3, + "msg": "not found" + } + } + } + }, + "500": { + "description": "Internal server error.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 1, + "msg": "internal error" + } + } + } + } + } + } + }, + "/api/v1/ord/id/{id}/inscription": { + "get": { + "tags": [ + "ord" + ], + "summary": "Retrieve the inscription infomation with the specified inscription id.", + "description": "Retrieve the inscription infomation with the specified inscription id.", + "operationId": "ord_inscription_id", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "inscription ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Obtain inscription infomation.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OrdOrdInscription" + } + } + } + }, + "400": { + "description": "Bad query.", + "content": { + "application/json": { + 
"schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 2, + "msg": "bad request" + } + } + } + }, + "404": { + "description": "Not found.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 3, + "msg": "not found" + } + } + } + }, + "500": { + "description": "Internal server error.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 1, + "msg": "internal error" + } + } + } + } + } + } + }, + "/api/v1/ord/number/{number}/inscription": { + "get": { + "tags": [ + "ord" + ], + "summary": "Retrieve the inscription infomation with the specified inscription number.", + "description": "Retrieve the inscription infomation with the specified inscription number.", + "operationId": "ord_inscription_number", + "parameters": [ + { + "name": "number", + "in": "path", + "description": "inscription number", + "required": true, + "schema": { + "type": "integer", + "format": "int64" + } + } + ], + "responses": { + "200": { + "description": "Obtain inscription infomation.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OrdOrdInscription" + } + } + } + }, + "400": { + "description": "Bad query.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 2, + "msg": "bad request" + } + } + } + }, + "404": { + "description": "Not found.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 3, + "msg": "not found" + } + } + } + }, + "500": { + "description": "Internal server error.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 1, + "msg": "internal error" + } + } + } + } + } + } + }, + "/api/v1/ord/outpoint/{outpoint}/info": { + "get": { + "tags": [ + "ord" + ], + "summary": "Retrieve the outpoint infomation with the specified outpoint.", + "description": "Retrieve the outpoint infomation with the specified outpoint.", + "operationId": "ord_outpoint", + "parameters": [ + { + "name": "outpoint", + "in": "path", + "description": "Outpoint", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Obtain outpoint infomation", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OrdOutPointData" + } + } + } + }, + "400": { + "description": "Bad query.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 2, + "msg": "bad request" + } + } + } + }, + "404": { + "description": "Not found.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 3, + "msg": "not found" + } + } + } + }, + "500": { + "description": "Internal server error.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 1, + "msg": "internal error" + } + } + } + } + } + } + }, + "/api/v1/ord/tx/{txid}/inscriptions": { + "get": { + "tags": [ + "ord" + ], + "summary": "Retrieve the inscription actions from the given transaction.", + "description": "Retrieve the inscription actions from the given transaction.", + "operationId": "ord_txid_inscriptions", + "parameters": [ + { + "name": "txid", + "in": "path", + "description": 
"transaction ID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Obtain inscription actions by txid", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OrdTxInscriptions" + } + } + } + }, + "400": { + "description": "Bad query.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 2, + "msg": "bad request" + } + } + } + }, + "404": { + "description": "Not found.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 3, + "msg": "not found" + } + } + } + }, + "500": { + "description": "Internal server error.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiError" + }, + "example": { + "code": 1, + "msg": "internal error" + } + } + } + } + } + } + } + }, + "components": { + "schemas": { + "ApiError": { + "oneOf": [ + { + "type": "object", + "required": [ + "Internal" + ], + "properties": { + "Internal": { + "type": "string", + "description": "Internal server error." + } + }, + "example": { + "code": 1, + "msg": "internal error" + } + }, + { + "type": "object", + "required": [ + "BadRequest" + ], + "properties": { + "BadRequest": { + "type": "string", + "description": "Bad request." + } + }, + "example": { + "code": 1, + "msg": "bad request" + } + }, + { + "type": "object", + "required": [ + "NotFound" + ], + "properties": { + "NotFound": { + "type": "string", + "description": "Resource not found." + } + }, + "example": { + "code": 1, + "msg": "not found" + } + } + ] + }, + "BRC20AllBalance": { + "type": "object", + "required": [ + "code", + "msg", + "data" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "data": { + "$ref": "#/components/schemas/brc20.AllBalance" + }, + "msg": { + "type": "string", + "description": "ok", + "example": "ok" + } + } + }, + "BRC20AllTick": { + "type": "object", + "required": [ + "code", + "msg", + "data" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "data": { + "$ref": "#/components/schemas/brc20.AllTickInfo" + }, + "msg": { + "type": "string", + "description": "ok", + "example": "ok" + } + } + }, + "BRC20Balance": { + "type": "object", + "required": [ + "code", + "msg", + "data" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "data": { + "$ref": "#/components/schemas/brc20.Balance" + }, + "msg": { + "type": "string", + "description": "ok", + "example": "ok" + } + } + }, + "BRC20BlockEvents": { + "type": "object", + "required": [ + "code", + "msg", + "data" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "data": { + "$ref": "#/components/schemas/brc20.BlockEvents" + }, + "msg": { + "type": "string", + "description": "ok", + "example": "ok" + } + } + }, + "BRC20Tick": { + "type": "object", + "required": [ + "code", + "msg", + "data" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "data": { + "$ref": "#/components/schemas/brc20.TickInfo" + }, + "msg": { + "type": "string", + "description": "ok", + "example": "ok" + } + } + }, + "BRC20Transferable": { + "type": "object", + "required": [ + "code", + "msg", + "data" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "data": { + "$ref": "#/components/schemas/brc20.TransferableInscriptions" + }, + "msg": { + "type": "string", + 
"description": "ok", + "example": "ok" + } + } + }, + "BRC20TxEvents": { + "type": "object", + "required": [ + "code", + "msg", + "data" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "data": { + "$ref": "#/components/schemas/brc20.TxEvents" + }, + "msg": { + "type": "string", + "description": "ok", + "example": "ok" + } + } + }, + "ChainInfo": { + "type": "object", + "properties": { + "chainHeight": { + "type": "integer", + "format": "uint64", + "description": "The height of the blockchain.", + "nullable": true, + "minimum": 0 + }, + "network": { + "type": "string", + "description": "The network of the blockchain.", + "nullable": true + }, + "ordHeight": { + "type": "integer", + "format": "uint64", + "description": "The height of our indexer.", + "nullable": true, + "minimum": 0 + } + } + }, + "Node": { + "type": "object", + "required": [ + "code", + "msg", + "data" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "data": { + "$ref": "#/components/schemas/NodeInfo" + }, + "msg": { + "type": "string", + "description": "ok", + "example": "ok" + } + } + }, + "NodeInfo": { + "type": "object", + "required": [ + "chainInfo" + ], + "properties": { + "branch": { + "type": "string", + "description": "The name of the branch or tag of the API endpoint build.", + "nullable": true + }, + "buildTime": { + "type": "string", + "description": "Build time of the API endpoint.", + "nullable": true + }, + "chainInfo": { + "$ref": "#/components/schemas/ChainInfo" + }, + "commitHash": { + "type": "string", + "description": "Git commit hash of the API endpoint build.", + "nullable": true + }, + "version": { + "type": "string", + "description": "Node version of the API endpoint build.", + "nullable": true + } + } + }, + "OrdBlockInscriptions": { + "type": "object", + "required": [ + "code", + "msg", + "data" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "data": { + "$ref": "#/components/schemas/ord.BlockInscriptions" + }, + "msg": { + "type": "string", + "description": "ok", + "example": "ok" + } + } + }, + "OrdOrdInscription": { + "type": "object", + "required": [ + "code", + "msg", + "data" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "data": { + "$ref": "#/components/schemas/ord.OrdInscription" + }, + "msg": { + "type": "string", + "description": "ok", + "example": "ok" + } + } + }, + "OrdOutPointData": { + "type": "object", + "required": [ + "code", + "msg", + "data" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "data": { + "$ref": "#/components/schemas/ord.OutPointData" + }, + "msg": { + "type": "string", + "description": "ok", + "example": "ok" + } + } + }, + "OrdOutPointResult": { + "type": "object", + "required": [ + "code", + "msg", + "data" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "data": { + "$ref": "#/components/schemas/ord.OutPointResult" + }, + "msg": { + "type": "string", + "description": "ok", + "example": "ok" + } + } + }, + "OrdTxInscriptions": { + "type": "object", + "required": [ + "code", + "msg", + "data" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "data": { + "$ref": "#/components/schemas/ord.TxInscriptions" + }, + "msg": { + "type": "string", + "description": "ok", + "example": "ok" + } + } + }, + "ScriptPubkey": { + "oneOf": [ + { + "type": "object", + "required": [ + "address" + ], + "properties": { + "address": { + "type": 
"string", + "description": "Address." + } + } + }, + { + "type": "object", + "required": [ + "nonStandard" + ], + "properties": { + "nonStandard": { + "type": "string", + "description": "Non-standard script hash." + } + } + } + ] + }, + "brc20.AllBalance": { + "type": "object", + "required": [ + "balance" + ], + "properties": { + "balance": { + "type": "array", + "items": { + "$ref": "#/components/schemas/brc20.Balance" + } + } + } + }, + "brc20.AllTickInfo": { + "type": "object", + "required": [ + "tokens" + ], + "properties": { + "tokens": { + "type": "array", + "items": { + "$ref": "#/components/schemas/brc20.TickInfo" + } + } + } + }, + "brc20.Balance": { + "type": "object", + "required": [ + "tick", + "availableBalance", + "transferableBalance", + "overallBalance" + ], + "properties": { + "availableBalance": { + "type": "string", + "format": "uint64", + "description": "Available balance." + }, + "overallBalance": { + "type": "string", + "format": "uint64", + "description": "Overall balance." + }, + "tick": { + "type": "string", + "description": "Name of the ticker." + }, + "transferableBalance": { + "type": "string", + "format": "uint64", + "description": "Transferable balance." + } + } + }, + "brc20.BlockEvents": { + "type": "object", + "required": [ + "block" + ], + "properties": { + "block": { + "type": "array", + "items": { + "$ref": "#/components/schemas/brc20.TxEvents" + } + } + } + }, + "brc20.DeployEvent": { + "type": "object", + "required": [ + "type", + "tick", + "inscriptionId", + "inscriptionNumber", + "oldSatpoint", + "newSatpoint", + "supply", + "limitPerMint", + "decimal", + "from", + "to", + "valid", + "msg" + ], + "properties": { + "decimal": { + "type": "integer", + "format": "int32", + "description": "The decimal of the deployed ticker.", + "minimum": 0 + }, + "from": { + "$ref": "#/components/schemas/ScriptPubkey" + }, + "inscriptionId": { + "type": "string", + "description": "The inscription id." + }, + "inscriptionNumber": { + "type": "integer", + "format": "int32", + "description": "The inscription number." + }, + "limitPerMint": { + "type": "string", + "description": "The limit per mint of the deployed ticker." + }, + "msg": { + "type": "string", + "description": "Message generated during execution." + }, + "newSatpoint": { + "type": "string", + "description": "The inscription satpoint of the transaction output." + }, + "oldSatpoint": { + "type": "string", + "description": "The inscription satpoint of the transaction input." + }, + "supply": { + "type": "string", + "description": "The total supply of the deployed ticker." + }, + "tick": { + "type": "string", + "description": "The ticker deployed." + }, + "to": { + "$ref": "#/components/schemas/ScriptPubkey" + }, + "type": { + "type": "string", + "description": "Event type." + }, + "valid": { + "type": "boolean", + "description": "Executed state." + } + } + }, + "brc20.ErrorEvent": { + "type": "object", + "required": [ + "type", + "inscriptionId", + "inscriptionNumber", + "oldSatpoint", + "newSatpoint", + "from", + "to", + "valid", + "msg" + ], + "properties": { + "from": { + "$ref": "#/components/schemas/ScriptPubkey" + }, + "inscriptionId": { + "type": "string", + "description": "The inscription id." + }, + "inscriptionNumber": { + "type": "integer", + "format": "int32", + "description": "The inscription number." + }, + "msg": { + "type": "string", + "description": "Error message." + }, + "newSatpoint": { + "type": "string", + "description": "The inscription satpoint of the transaction output." 
+ }, + "oldSatpoint": { + "type": "string", + "description": "The inscription satpoint of the transaction input." + }, + "to": { + "$ref": "#/components/schemas/ScriptPubkey" + }, + "type": { + "type": "string", + "description": "Event type." + }, + "valid": { + "type": "boolean", + "description": "Executed state." + } + } + }, + "brc20.InscribeTransferEvent": { + "type": "object", + "required": [ + "type", + "tick", + "inscriptionId", + "inscriptionNumber", + "oldSatpoint", + "newSatpoint", + "amount", + "from", + "to", + "valid", + "msg" + ], + "properties": { + "amount": { + "type": "string", + "description": "The amount of pretransfer." + }, + "from": { + "$ref": "#/components/schemas/ScriptPubkey" + }, + "inscriptionId": { + "type": "string", + "description": "The inscription id." + }, + "inscriptionNumber": { + "type": "integer", + "format": "int32", + "description": "The inscription number." + }, + "msg": { + "type": "string", + "description": "Message generated during execution." + }, + "newSatpoint": { + "type": "string", + "description": "The inscription satpoint of the transaction output." + }, + "oldSatpoint": { + "type": "string", + "description": "The inscription satpoint of the transaction input." + }, + "tick": { + "type": "string", + "description": "The ticker of pretransfer." + }, + "to": { + "$ref": "#/components/schemas/ScriptPubkey" + }, + "type": { + "type": "string", + "description": "Event type." + }, + "valid": { + "type": "boolean", + "description": "Executed state." + } + } + }, + "brc20.MintEvent": { + "type": "object", + "required": [ + "type", + "tick", + "inscriptionId", + "inscriptionNumber", + "oldSatpoint", + "newSatpoint", + "amount", + "from", + "to", + "valid", + "msg" + ], + "properties": { + "amount": { + "type": "string", + "description": "The amount minted." + }, + "from": { + "$ref": "#/components/schemas/ScriptPubkey" + }, + "inscriptionId": { + "type": "string", + "description": "The inscription id." + }, + "inscriptionNumber": { + "type": "integer", + "format": "int32", + "description": "The inscription number." + }, + "msg": { + "type": "string", + "description": "Message generated during execution." + }, + "newSatpoint": { + "type": "string", + "description": "The inscription satpoint of the transaction output." + }, + "oldSatpoint": { + "type": "string", + "description": "The inscription satpoint of the transaction input." + }, + "tick": { + "type": "string", + "description": "The ticker minted." + }, + "to": { + "$ref": "#/components/schemas/ScriptPubkey" + }, + "type": { + "type": "string", + "description": "Event type." + }, + "valid": { + "type": "boolean", + "description": "Executed state." + } + } + }, + "brc20.TickInfo": { + "type": "object", + "description": "Description of a BRC20 ticker.", + "required": [ + "tick", + "inscriptionId", + "inscriptionNumber", + "supply", + "limitPerMint", + "minted", + "decimal", + "deployBy", + "txid", + "deployHeight", + "deployBlocktime" + ], + "properties": { + "decimal": { + "type": "integer", + "format": "uint8", + "description": "The decimal of the ticker.
\nNumber of decimals cannot exceed 18 (default).", + "default": 18, + "example": 18, + "maximum": 18, + "minimum": 0 + }, + "deployBlocktime": { + "type": "integer", + "format": "uint32", + "description": "The timestamp of the block that the ticker deployed.", + "minimum": 0 + }, + "deployBy": { + "$ref": "#/components/schemas/ScriptPubkey" + }, + "deployHeight": { + "type": "integer", + "format": "uint32", + "description": "The height of the block that the ticker deployed.", + "minimum": 0 + }, + "inscriptionId": { + "type": "string", + "description": "Inscription ID of the ticker deployed." + }, + "inscriptionNumber": { + "type": "integer", + "format": "int32", + "description": "Inscription number of the ticker deployed." + }, + "limitPerMint": { + "type": "string", + "format": "uint64", + "description": "The maximum amount of each mining." + }, + "minted": { + "type": "string", + "format": "uint64", + "description": "The amount of the ticker that has been minted." + }, + "supply": { + "type": "string", + "format": "uint64", + "description": "The total supply of the ticker.
\nMaximum supply cannot exceed uint64_max.\n\nA string containing a 64-bit unsigned integer.
\nWe represent u64 values as a string to ensure compatibility with languages such as JavaScript that do not parse u64s in JSON natively." + }, + "tick": { + "type": "string", + "description": "Name of the ticker." + }, + "txid": { + "type": "string", + "description": "A hex encoded 32 byte transaction ID that the ticker deployed.\n\nThis is represented in a string as adding a prefix 0x to a 64 character hex string." + } + } + }, + "brc20.TransferEvent": { + "type": "object", + "required": [ + "type", + "tick", + "inscriptionId", + "inscriptionNumber", + "oldSatpoint", + "newSatpoint", + "amount", + "from", + "to", + "valid", + "msg" + ], + "properties": { + "amount": { + "type": "string", + "description": "The amount of transfer." + }, + "from": { + "$ref": "#/components/schemas/ScriptPubkey" + }, + "inscriptionId": { + "type": "string", + "description": "The inscription id." + }, + "inscriptionNumber": { + "type": "integer", + "format": "int32", + "description": "The inscription number." + }, + "msg": { + "type": "string", + "description": "Message generated during execution." + }, + "newSatpoint": { + "type": "string", + "description": "The inscription satpoint of the transaction output." + }, + "oldSatpoint": { + "type": "string", + "description": "The inscription satpoint of the transaction input." + }, + "tick": { + "type": "string", + "description": "The ticker of transfer." + }, + "to": { + "$ref": "#/components/schemas/ScriptPubkey" + }, + "type": { + "type": "string", + "description": "Event type." + }, + "valid": { + "type": "boolean", + "description": "Executed state." + } + } + }, + "brc20.TransferableInscription": { + "type": "object", + "required": [ + "inscriptionId", + "inscriptionNumber", + "amount", + "tick", + "owner" + ], + "properties": { + "amount": { + "type": "string", + "format": "uint64", + "description": "The amount of the ticker that will be transferred." + }, + "inscriptionId": { + "type": "string", + "description": "The inscription id." + }, + "inscriptionNumber": { + "type": "integer", + "format": "int32", + "description": "The inscription number." + }, + "owner": { + "type": "string", + "description": "The address to which the transfer will be made." + }, + "tick": { + "type": "string", + "description": "The ticker name that will be transferred." 
+ } + } + }, + "brc20.TransferableInscriptions": { + "type": "object", + "required": [ + "inscriptions" + ], + "properties": { + "inscriptions": { + "type": "array", + "items": { + "$ref": "#/components/schemas/brc20.TransferableInscription" + } + } + } + }, + "brc20.TxEvent": { + "oneOf": [ + { + "$ref": "#/components/schemas/brc20.DeployEvent" + }, + { + "$ref": "#/components/schemas/brc20.MintEvent" + }, + { + "$ref": "#/components/schemas/brc20.InscribeTransferEvent" + }, + { + "$ref": "#/components/schemas/brc20.TransferEvent" + }, + { + "$ref": "#/components/schemas/brc20.ErrorEvent" + } + ] + }, + "brc20.TxEvents": { + "type": "object", + "required": [ + "events", + "txid" + ], + "properties": { + "events": { + "type": "array", + "items": { + "$ref": "#/components/schemas/brc20.TxEvent" + } + }, + "txid": { + "type": "string" + } + } + }, + "ord.BlockInscriptions": { + "type": "object", + "required": [ + "block" + ], + "properties": { + "block": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ord.TxInscriptions" + } + } + } + }, + "ord.InscriptionAction": { + "oneOf": [ + { + "type": "object", + "required": [ + "new" + ], + "properties": { + "new": { + "type": "object", + "description": "New inscription", + "required": [ + "cursed", + "unbound" + ], + "properties": { + "cursed": { + "type": "boolean" + }, + "unbound": { + "type": "boolean" + } + } + } + } + }, + { + "type": "string", + "description": "Transfer inscription", + "enum": [ + "transfer" + ] + } + ] + }, + "ord.InscriptionDigest": { + "type": "object", + "required": [ + "id", + "number", + "location" + ], + "properties": { + "id": { + "type": "string", + "description": "The inscription id." + }, + "location": { + "type": "string", + "description": "The inscription location." + }, + "number": { + "type": "integer", + "format": "int32", + "description": "The inscription number." + } + } + }, + "ord.OrdInscription": { + "type": "object", + "required": [ + "id", + "number", + "genesisHeight", + "location", + "collections" + ], + "properties": { + "collections": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Collections of Inscriptions." + }, + "content": { + "type": "string", + "description": "The inscription content body.", + "nullable": true + }, + "contentType": { + "type": "string", + "description": "The inscription content type.", + "nullable": true + }, + "genesisHeight": { + "type": "integer", + "format": "uint64", + "description": "The inscription genesis block height.", + "minimum": 0 + }, + "id": { + "type": "string", + "description": "The inscription id." + }, + "location": { + "type": "string", + "description": "The inscription location." + }, + "number": { + "type": "integer", + "format": "int32", + "description": "The inscription number." + }, + "owner": { + "allOf": [ + { + "$ref": "#/components/schemas/ScriptPubkey" + } + ], + "nullable": true + }, + "sat": { + "type": "integer", + "format": "int64", + "description": "The inscription sat index.", + "nullable": true, + "minimum": 0 + } + } + }, + "ord.OutPointData": { + "type": "object", + "required": [ + "txid", + "scriptPubKey", + "owner", + "value", + "inscriptionDigest" + ], + "properties": { + "inscriptionDigest": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ord.InscriptionDigest" + }, + "description": "The inscriptions on the transaction output." 
+ }, + "owner": { + "$ref": "#/components/schemas/ScriptPubkey" + }, + "scriptPubKey": { + "type": "string", + "description": "The script pubkey." + }, + "txid": { + "type": "string", + "description": "The transaction id." + }, + "value": { + "type": "integer", + "format": "uint64", + "description": "The value of the transaction output.", + "minimum": 0 + } + } + }, + "ord.OutPointResult": { + "type": "object", + "required": [ + "latestBlockhash", + "latestHeight" + ], + "properties": { + "latestBlockhash": { + "type": "string" + }, + "latestHeight": { + "type": "integer", + "format": "uint64", + "minimum": 0 + }, + "result": { + "allOf": [ + { + "$ref": "#/components/schemas/ord.OutPointData" + } + ], + "nullable": true + } + } + }, + "ord.TxInscription": { + "type": "object", + "required": [ + "action", + "inscriptionId", + "oldSatpoint", + "from" + ], + "properties": { + "action": { + "$ref": "#/components/schemas/ord.InscriptionAction" + }, + "from": { + "$ref": "#/components/schemas/ScriptPubkey" + }, + "inscriptionId": { + "type": "string", + "description": "The inscription id." + }, + "inscriptionNumber": { + "type": "integer", + "format": "int32", + "description": "The inscription number.", + "nullable": true + }, + "newSatpoint": { + "type": "string", + "description": "The inscription satpoint of the transaction output.", + "nullable": true + }, + "oldSatpoint": { + "type": "string", + "description": "The inscription satpoint of the transaction input." + }, + "to": { + "allOf": [ + { + "$ref": "#/components/schemas/ScriptPubkey" + } + ], + "nullable": true + } + } + }, + "ord.TxInscriptions": { + "type": "object", + "required": [ + "inscriptions", + "txid" + ], + "properties": { + "inscriptions": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ord.TxInscription" + } + }, + "txid": { + "type": "string" + } + } + } + } + } +} \ No newline at end of file diff --git a/src/chain.rs b/src/chain.rs index 1186ac63ad..34d3c128ca 100644 --- a/src/chain.rs +++ b/src/chain.rs @@ -47,6 +47,15 @@ impl Chain { } } + pub(crate) fn first_brc20_height(self) -> u32 { + match self { + Self::Mainnet => 779832, + Self::Regtest => 0, + Self::Signet => 0, + Self::Testnet => 2413343, + } + } + pub(crate) fn first_rune_height(self) -> u32 { SUBSIDY_HALVING_INTERVAL * match self { diff --git a/src/index.rs b/src/index.rs index 91fb5e6cb8..29d00ed454 100644 --- a/src/index.rs +++ b/src/index.rs @@ -1,9 +1,17 @@ +use crate::okx::datastore::brc20::redb::table::{ + get_balance, get_balances, get_token_info, get_tokens_info, get_transaction_receipts, + get_transferable_assets_by_account, get_transferable_assets_by_account_ticker, + get_transferable_assets_by_outpoint, +}; +use crate::okx::datastore::ord::redb::table::{ + get_collection_inscription_id, get_collections_of_inscription, get_transaction_operations, + get_txout_by_outpoint, +}; +use crate::okx::datastore::{brc20, ScriptKey}; + use { self::{ - entry::{ - Entry, HeaderValue, InscriptionEntry, InscriptionEntryValue, InscriptionIdValue, - OutPointValue, RuneEntryValue, RuneIdValue, SatPointValue, SatRange, TxidValue, - }, + entry::{Entry, HeaderValue, RuneEntryValue, RuneIdValue, SatPointValue, SatRange}, reorg::*, runes::{Rune, RuneId}, updater::Updater, @@ -18,6 +26,7 @@ use { chrono::SubsecRound, indicatif::{ProgressBar, ProgressStyle}, log::log_enabled, + okx::datastore::ord::{self, bitmap::District, collections::CollectionKind}, redb::{ Database, DatabaseError, MultimapTable, MultimapTableDefinition, MultimapTableHandle, 
ReadOnlyTable, ReadableMultimapTable, ReadableTable, RedbKey, RedbValue, RepairSession, @@ -31,12 +40,18 @@ use { }; pub use self::entry::RuneEntry; +pub(super) use self::entry::{ + InscriptionEntry, InscriptionEntryValue, InscriptionIdValue, OutPointValue, TxidValue, +}; +pub(super) use self::updater::BlockData; pub(crate) mod entry; mod fetcher; mod reorg; -mod rtx; -mod updater; +pub(crate) mod rtx; +pub(crate) mod updater; + +mod extend; #[cfg(test)] pub(crate) mod testing; @@ -45,13 +60,13 @@ const SCHEMA_VERSION: u64 = 16; macro_rules! define_table { ($name:ident, $key:ty, $value:ty) => { - const $name: TableDefinition<$key, $value> = TableDefinition::new(stringify!($name)); + pub const $name: TableDefinition<$key, $value> = TableDefinition::new(stringify!($name)); }; } macro_rules! define_multimap_table { ($name:ident, $key:ty, $value:ty) => { - const $name: MultimapTableDefinition<$key, $value> = + pub const $name: MultimapTableDefinition<$key, $value> = MultimapTableDefinition::new(stringify!($name)); }; } @@ -66,7 +81,7 @@ define_table! { INSCRIPTION_ID_TO_SEQUENCE_NUMBER, InscriptionIdValue, u32 } define_table! { INSCRIPTION_NUMBER_TO_SEQUENCE_NUMBER, i32, u32 } define_table! { OUTPOINT_TO_RUNE_BALANCES, &OutPointValue, &[u8] } define_table! { OUTPOINT_TO_SAT_RANGES, &OutPointValue, &[u8] } -define_table! { OUTPOINT_TO_VALUE, &OutPointValue, u64} +define_table! { OUTPOINT_TO_ENTRY, &OutPointValue, &[u8]} define_table! { RUNE_ID_TO_RUNE_ENTRY, RuneIdValue, RuneEntryValue } define_table! { RUNE_TO_RUNE_ID, u128, RuneIdValue } define_table! { SAT_TO_SATPOINT, u64, &SatPointValue } @@ -78,6 +93,17 @@ define_table! { TRANSACTION_ID_TO_RUNE, &TxidValue, u128 } define_table! { TRANSACTION_ID_TO_TRANSACTION, &TxidValue, &[u8] } define_table! { WRITE_TRANSACTION_STARTING_BLOCK_COUNT_TO_TIMESTAMP, u32, u128 } +// new +define_table! { ORD_TX_TO_OPERATIONS, &TxidValue, &[u8] } +define_table! { COLLECTIONS_KEY_TO_INSCRIPTION_ID, &str, InscriptionIdValue } +define_multimap_table! { COLLECTIONS_INSCRIPTION_ID_TO_KINDS, InscriptionIdValue, &[u8] } + +define_table! { BRC20_BALANCES, &str, &[u8] } +define_table! { BRC20_TOKEN, &str, &[u8] } +define_table! { BRC20_EVENTS, &TxidValue, &[u8] } +define_table! { BRC20_SATPOINT_TO_TRANSFERABLE_ASSETS, &SatPointValue, &[u8] } +define_multimap_table! { BRC20_ADDRESS_TICKER_TO_TRANSFERABLE_ASSETS, &str, &SatPointValue } + #[derive(Debug, PartialEq)] pub enum List { Spent, @@ -245,26 +271,28 @@ impl Index { let once = Once::new(); let progress_bar = Mutex::new(None); - let database = match Database::builder() - .set_cache_size(db_cache_size) - .set_repair_callback(move |progress: &mut RepairSession| { - once.call_once(|| println!("Index file `{}` needs recovery. This can take a long time, especially for the --index-sats index.", index_path.display())); + let repair_callback = move |progress: &mut RepairSession| { + once.call_once(|| println!("Index file `{}` needs recovery. 
This can take a long time, especially for the --index-sats index.", index_path.display())); - if !(cfg!(test) || log_enabled!(log::Level::Info) || integration_test()) { - let mut guard = progress_bar.lock().unwrap(); + if !(cfg!(test) || log_enabled!(log::Level::Info) || integration_test()) { + let mut guard = progress_bar.lock().unwrap(); - let progress_bar = guard.get_or_insert_with(|| { - let progress_bar = ProgressBar::new(100); - progress_bar.set_style( - ProgressStyle::with_template("[repairing database] {wide_bar} {pos}/{len}").unwrap(), - ); - progress_bar - }); + let progress_bar = guard.get_or_insert_with(|| { + let progress_bar = ProgressBar::new(100); + progress_bar.set_style( + ProgressStyle::with_template("[repairing database] {wide_bar} {pos}/{len}").unwrap(), + ); + progress_bar + }); - #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)] - progress_bar.set_position((progress.progress() * 100.0) as u64); - } - }) + #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)] + progress_bar.set_position((progress.progress() * 100.0) as u64); + } + }; + + let database = match Database::builder() + .set_cache_size(db_cache_size) + .set_repair_callback(repair_callback) .open(&path) { Ok(database) => { @@ -292,7 +320,6 @@ impl Index { } } - index_runes = Self::is_statistic_set(&statistics, Statistic::IndexRunes)?; index_sats = Self::is_statistic_set(&statistics, Statistic::IndexSats)?; index_transactions = Self::is_statistic_set(&statistics, Statistic::IndexTransactions)?; @@ -320,7 +347,7 @@ impl Index { tx.open_table(INSCRIPTION_ID_TO_SEQUENCE_NUMBER)?; tx.open_table(INSCRIPTION_NUMBER_TO_SEQUENCE_NUMBER)?; tx.open_table(OUTPOINT_TO_RUNE_BALANCES)?; - tx.open_table(OUTPOINT_TO_VALUE)?; + tx.open_table(OUTPOINT_TO_ENTRY)?; tx.open_table(RUNE_ID_TO_RUNE_ENTRY)?; tx.open_table(RUNE_TO_RUNE_ID)?; tx.open_table(SAT_TO_SATPOINT)?; @@ -330,6 +357,18 @@ impl Index { tx.open_table(TRANSACTION_ID_TO_RUNE)?; tx.open_table(WRITE_TRANSACTION_STARTING_BLOCK_COUNT_TO_TIMESTAMP)?; + // new ord tables + tx.open_table(ORD_TX_TO_OPERATIONS)?; + tx.open_table(COLLECTIONS_KEY_TO_INSCRIPTION_ID)?; + tx.open_multimap_table(COLLECTIONS_INSCRIPTION_ID_TO_KINDS)?; + + // brc20 tables + tx.open_multimap_table(BRC20_ADDRESS_TICKER_TO_TRANSFERABLE_ASSETS)?; + tx.open_table(BRC20_BALANCES)?; + tx.open_table(BRC20_TOKEN)?; + tx.open_table(BRC20_EVENTS)?; + tx.open_table(BRC20_SATPOINT_TO_TRANSFERABLE_ASSETS)?; + { let mut outpoint_to_sat_ranges = tx.open_table(OUTPOINT_TO_SAT_RANGES)?; let mut statistics = tx.open_table(STATISTIC_TO_COUNT)?; @@ -342,9 +381,17 @@ impl Index { index_sats = options.index_sats; index_transactions = options.index_transactions; - Self::set_statistic(&mut statistics, Statistic::IndexRunes, u64::from(index_runes))?; + Self::set_statistic( + &mut statistics, + Statistic::IndexRunes, + u64::from(index_runes), + )?; Self::set_statistic(&mut statistics, Statistic::IndexSats, u64::from(index_sats))?; - Self::set_statistic(&mut statistics, Statistic::IndexTransactions, u64::from(index_transactions))?; + Self::set_statistic( + &mut statistics, + Statistic::IndexTransactions, + u64::from(index_transactions), + )?; Self::set_statistic(&mut statistics, Statistic::Schema, SCHEMA_VERSION)?; } @@ -355,6 +402,8 @@ impl Index { Err(error) => bail!("failed to open index: {error}"), }; + log::info!("Options:\n{:#?}", options); + let genesis_block_coinbase_transaction = options.chain().genesis_block().coinbase().unwrap().clone(); @@ -376,6 +425,10 @@ impl Index { }) } + 
pub(crate) fn get_chain(&self) -> Chain { + self.options.chain() + } + #[cfg(test)] fn set_durability(&mut self, durability: redb::Durability) { self.durability = durability; @@ -383,9 +436,9 @@ impl Index { pub(crate) fn check_sync(&self, utxos: &BTreeMap) -> Result { let rtx = self.database.begin_read()?; - let outpoint_to_value = rtx.open_table(OUTPOINT_TO_VALUE)?; + let outpoint_to_entry = rtx.open_table(OUTPOINT_TO_ENTRY)?; for outpoint in utxos.keys() { - if outpoint_to_value.get(&outpoint.store())?.is_none() { + if outpoint_to_entry.get(&outpoint.store())?.is_none() { return Err(anyhow!( "output in Bitcoin Core wallet but not in ord index: {outpoint}" )); @@ -399,6 +452,10 @@ impl Index { self.index_runes } + pub(crate) fn has_transactions_index(&self) -> bool { + self.index_transactions + } + pub(crate) fn has_sat_index(&self) -> bool { self.index_sats } @@ -547,7 +604,7 @@ impl Index { ); insert_table_info(&mut tables, &wtx, total_bytes, OUTPOINT_TO_RUNE_BALANCES); insert_table_info(&mut tables, &wtx, total_bytes, OUTPOINT_TO_SAT_RANGES); - insert_table_info(&mut tables, &wtx, total_bytes, OUTPOINT_TO_VALUE); + insert_table_info(&mut tables, &wtx, total_bytes, OUTPOINT_TO_ENTRY); insert_table_info(&mut tables, &wtx, total_bytes, RUNE_ID_TO_RUNE_ENTRY); insert_table_info(&mut tables, &wtx, total_bytes, RUNE_TO_RUNE_ID); insert_table_info(&mut tables, &wtx, total_bytes, SAT_TO_SATPOINT); @@ -574,6 +631,35 @@ impl Index { WRITE_TRANSACTION_STARTING_BLOCK_COUNT_TO_TIMESTAMP, ); + insert_table_info(&mut tables, &wtx, total_bytes, ORD_TX_TO_OPERATIONS); + insert_table_info( + &mut tables, + &wtx, + total_bytes, + COLLECTIONS_KEY_TO_INSCRIPTION_ID, + ); + insert_multimap_table_info( + &mut tables, + &wtx, + total_bytes, + COLLECTIONS_INSCRIPTION_ID_TO_KINDS, + ); + insert_table_info(&mut tables, &wtx, total_bytes, BRC20_BALANCES); + insert_table_info(&mut tables, &wtx, total_bytes, BRC20_TOKEN); + insert_table_info(&mut tables, &wtx, total_bytes, BRC20_EVENTS); + insert_table_info( + &mut tables, + &wtx, + total_bytes, + BRC20_SATPOINT_TO_TRANSFERABLE_ASSETS, + ); + insert_multimap_table_info( + &mut tables, + &wtx, + total_bytes, + BRC20_ADDRESS_TICKER_TO_TRANSFERABLE_ASSETS, + ); + for table in wtx.list_tables()? 
{ assert!(tables.contains_key(table.name())); } @@ -639,7 +725,7 @@ impl Index { match updater.update_index() { Ok(ok) => return Ok(ok), Err(err) => { - log::info!("{}", err.to_string()); + log::error!("{}", err.to_string()); match err.downcast_ref() { Some(&ReorgError::Recoverable { height, depth }) => { @@ -728,10 +814,14 @@ impl Index { Ok(()) } - fn begin_read(&self) -> Result { + pub(crate) fn begin_read(&self) -> Result { Ok(rtx::Rtx(self.database.begin_read()?)) } + pub(crate) fn bitcoin_rpc_client(&self) -> Result { + self.options.bitcoin_rpc_client(None) + } + fn begin_write(&self) -> Result { let mut tx = self.database.begin_write()?; tx.set_durability(self.durability); @@ -1314,7 +1404,7 @@ impl Index { .transpose() } - #[cfg(test)] + #[allow(unused)] pub(crate) fn get_inscription_id_by_inscription_number( &self, inscription_number: i32, @@ -2037,11 +2127,11 @@ impl Index { } } - fn inscriptions_on_output<'a: 'tx, 'tx>( + fn full_inscriptions_on_output<'a: 'tx, 'tx>( satpoint_to_sequence_number: &'a impl ReadableMultimapTable<&'static SatPointValue, u32>, sequence_number_to_inscription_entry: &'a impl ReadableTable, outpoint: OutPoint, - ) -> Result> { + ) -> Result> { let start = SatPoint { outpoint, offset: 0, @@ -2073,11 +2163,23 @@ impl Index { inscriptions.sort_by_key(|(sequence_number, _, _)| *sequence_number); + Ok(inscriptions) + } + + fn inscriptions_on_output<'a: 'tx, 'tx>( + satpoint_to_sequence_number: &'a impl ReadableMultimapTable<&'static SatPointValue, u32>, + sequence_number_to_inscription_entry: &'a impl ReadableTable, + outpoint: OutPoint, + ) -> Result> { Ok( - inscriptions - .into_iter() - .map(|(_sequence_number, satpoint, inscription_id)| (satpoint, inscription_id)) - .collect(), + Self::full_inscriptions_on_output( + satpoint_to_sequence_number, + sequence_number_to_inscription_entry, + outpoint, + )? + .into_iter() + .map(|(_sequence_number, satpoint, inscription_id)| (satpoint, inscription_id)) + .collect(), ) } } diff --git a/src/index/entry.rs b/src/index/entry.rs index 116ee448ea..95bae50dc0 100644 --- a/src/index/entry.rs +++ b/src/index/entry.rs @@ -309,7 +309,7 @@ impl Entry for InscriptionId { } } -pub(super) type OutPointValue = [u8; 36]; +pub(crate) type OutPointValue = [u8; 36]; impl Entry for OutPoint { type Value = OutPointValue; @@ -325,7 +325,7 @@ impl Entry for OutPoint { } } -pub(super) type SatPointValue = [u8; 44]; +pub(crate) type SatPointValue = [u8; 44]; impl Entry for SatPoint { type Value = SatPointValue; @@ -368,7 +368,7 @@ impl Entry for SatRange { } } -pub(super) type TxidValue = [u8; 32]; +pub(crate) type TxidValue = [u8; 32]; impl Entry for Txid { type Value = TxidValue; diff --git a/src/index/extend.rs b/src/index/extend.rs new file mode 100644 index 0000000000..105ce474fc --- /dev/null +++ b/src/index/extend.rs @@ -0,0 +1,381 @@ +use super::{rtx::Rtx, *}; + +impl Index { + pub(crate) fn get_inscription_satpoint_by_id_with_rtx( + inscription_id: InscriptionId, + rtx: &Rtx, + ) -> Result> { + let Some(sequence_number) = rtx.inscription_id_to_sequence_number(inscription_id)? else { + return Ok(None); + }; + + rtx.sequence_number_to_satpoint(sequence_number) + } + + pub(crate) fn get_inscription_entry_with_rtx( + inscription_id: InscriptionId, + rtx: &Rtx, + ) -> Result> { + let Some(sequence_number) = rtx.inscription_id_to_sequence_number(inscription_id)? 
else { + return Ok(None); + }; + + rtx.sequence_number_to_inscription_entry(sequence_number) + } + + pub(crate) fn get_inscription_id_by_inscription_number_with_rtx( + inscription_number: i32, + rtx: &Rtx, + ) -> Result> { + let Some(sequence_number) = rtx.inscription_number_to_sequence_number(inscription_number)? + else { + return Ok(None); + }; + + Ok( + rtx + .sequence_number_to_inscription_entry(sequence_number)? + .map(|entry| entry.id), + ) + } + + pub(crate) fn get_transaction_with_rtx( + txid: Txid, + rtx: &Rtx, + client: &Client, + chain: Chain, + index_transactions: bool, + ) -> Result> { + let genesis_block = chain.genesis_block(); + let genesis_block_coinbase_transaction = genesis_block.coinbase().unwrap(); + + if txid == genesis_block_coinbase_transaction.txid() { + return Ok(Some(genesis_block_coinbase_transaction.clone())); + } + + if index_transactions { + if let Some(transaction) = rtx.transaction_id_to_transaction(txid)? { + return Ok(Some(transaction)); + } + } + + client.get_raw_transaction(&txid, None).into_option() + } + + pub(crate) fn get_ord_inscription_operations( + txid: Txid, + rtx: &Rtx, + client: &Client, + ) -> Result>> { + let Some(operations) = rtx.ord_transaction_id_to_inscription_operations(txid)? else { + let raw_tx = client.get_raw_transaction_info(&txid, None)?; + + match raw_tx.blockhash { + Some(tx_blockhash) => { + // Fetch the header of the block containing the transaction so we can check whether the indexer has already processed that block. + let tx_bh = client.get_block_header_info(&tx_blockhash)?; + + // Look up the block hash that the indexer has stored at this height. + // If the height has been indexed, proceed to the consistency check. + let Some(parsed_hash) = rtx.block_hash(Some(u32::try_from(tx_bh.height).unwrap()))? + else { + // The height has not been indexed yet, so there is nothing to report. + return Ok(None); + }; + + // Make sure the indexed block hash at this height matches the block that contains the transaction. + if parsed_hash != tx_blockhash { + // The transaction is in a conflicting (reorged) block. + return Ok(None); + } + // The block is indexed but this transaction produced no inscription operations. + return Ok(Some(Vec::new())); + } + None => { + return Err(anyhow!( + "Can't retrieve pending transaction operations: {txid}" + )) + } + } + }; + Ok(Some(operations)) + } + + pub(crate) fn get_ord_block_inscription_operations( + block_hash: BlockHash, + rtx: &Rtx, + client: &Client, + ) -> Result)>> { + // Fetch the block info from the Bitcoin Core client. + let blockinfo = client.get_block_info(&block_hash)?; + + // Look up the indexed block hash at the same height. + let Some(block_hash) = rtx.block_hash(Some(u32::try_from(blockinfo.height).unwrap()))? else { + return Err(anyhow!( + "Can't retrieve block: {} from the database.", + blockinfo.height + )); + }; + + // Check for a conflicting (reorged) block. + if blockinfo.hash != block_hash { + return Err(anyhow!( + "Conflict with block hash in the database. {} != {}", + block_hash, + blockinfo.hash + )); + } + + let mut result = Vec::new(); + for txid in blockinfo.tx { + let Some(inscriptions) = rtx.ord_transaction_id_to_inscription_operations(txid)? else { + continue; + }; + result.push((txid, inscriptions)); + } + Ok(result) + } + + pub(crate) fn get_brc20_balance_by_tick_and_address( + tick: brc20::Tick, + script_key: ScriptKey, + rtx: &Rtx, + ) -> Result> { + Ok(match rtx.brc20_get_balance_by_address(&tick, script_key)?
{ + Some(balance) => Some(balance), + None if rtx.brc20_get_tick_info(&tick)?.is_some() => Some(brc20::Balance { + tick: tick.clone(), + overall_balance: 0, + transferable_balance: 0, + }), + _ => None, + }) + } + + pub(crate) fn get_brc20_transferable_utxo_by_tick_and_address( + tick: brc20::Tick, + script_key: ScriptKey, + rtx: &Rtx, + ) -> Result>> { + let transferable_utxo_assets = rtx.brc20_get_tick_transferable_by_address(&tick, script_key)?; + + if transferable_utxo_assets.is_empty() { + if rtx.brc20_get_tick_info(&tick)?.is_some() { + return Ok(Some(Vec::new())); + } else { + return Ok(None); + } + } + Ok(Some(transferable_utxo_assets)) + } + + pub(crate) fn get_brc20_transaction_receipts( + txid: Txid, + rtx: &Rtx, + client: &Client, + ) -> Result>> { + let Some(receipts) = rtx.brc20_transaction_id_to_transaction_receipt(txid)? else { + let raw_tx = client.get_raw_transaction_info(&txid, None)?; + + match raw_tx.blockhash { + Some(tx_blockhash) => { + // Fetch the header of the block containing the transaction so we can check whether the indexer has already processed that block. + let tx_bh = client.get_block_header_info(&tx_blockhash)?; + + // Look up the block hash that the indexer has stored at this height. + // If the height has been indexed, proceed to the consistency check. + let Some(parsed_hash) = rtx.block_hash(Some(u32::try_from(tx_bh.height).unwrap()))? + else { + // The height has not been indexed yet, so there is nothing to report. + return Ok(None); + }; + + // Make sure the indexed block hash at this height matches the block that contains the transaction. + if parsed_hash != tx_blockhash { + // The transaction is in a conflicting (reorged) block. + return Ok(None); + } + // The block is indexed but this transaction produced no BRC20 receipts. + return Ok(Some(Vec::new())); + } + None => { + return Err(anyhow!( + "Can't retrieve pending BRC20 transaction receipts: {txid}" + )) + } + } + }; + Ok(Some(receipts)) + } + + pub(crate) fn get_brc20_block_receipts( + block_hash: BlockHash, + rtx: &Rtx, + client: &Client, + ) -> Result)>> { + // Fetch the block info from the Bitcoin Core client. + let blockinfo = client.get_block_info(&block_hash)?; + + // Look up the indexed block hash at the same height. + let Some(block_hash) = rtx.block_hash(Some(u32::try_from(blockinfo.height).unwrap()))? else { + return Err(anyhow!( + "Can't retrieve block: {} from the database.", + blockinfo.height + )); + }; + + // Check for a conflicting (reorged) block. + if blockinfo.hash != block_hash { + return Err(anyhow!( + "Conflict with block hash in the database. {} != {}", + block_hash, + blockinfo.hash + )); + } + + let mut result = Vec::new(); + for txid in blockinfo.tx { + let Some(inscriptions) = rtx.brc20_transaction_id_to_transaction_receipt(txid)? else { + continue; + }; + result.push((txid, inscriptions)); + } + Ok(result) + } + + // Resolve the `TxOut` for an outpoint, preferring locally indexed data over an RPC lookup. + pub(crate) fn fetch_vout( + rtx: &Rtx, + client: &Client, + outpoint: OutPoint, + chain: Chain, + index_transactions: bool, + ) -> Result> { + // Try the indexed OUTPOINT_TO_ENTRY table first. + if let Some(txout) = rtx.outpoint_to_entry(outpoint)? { + Ok(Some(txout)) + } else { + // Otherwise fall back to fetching the full transaction.
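+ // `get_transaction_with_rtx` consults the local transaction table when transaction indexing (`index_transactions`) is enabled, otherwise it falls back to a Bitcoin Core RPC call;
+ // the unwraps below assume `outpoint.vout` is a valid index into the fetched transaction's outputs.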
+ Ok( + Self::get_transaction_with_rtx(outpoint.txid, rtx, client, chain, index_transactions)?.map( + |tx| { + tx.output + .get(usize::try_from(outpoint.vout).unwrap()) + .unwrap() + .to_owned() + }, + ), + ) + } + } + + pub(crate) fn list_sat_range( + rtx: &Rtx, + outpoint: OutPoint, + index_sats: bool, + ) -> Result>> { + if !index_sats || outpoint == unbound_outpoint() { + return Ok(None); + } + + let sat_ranges = rtx.list_sat_range(outpoint.store())?; + + match sat_ranges { + Some(sat_ranges) => Ok(Some( + sat_ranges + .chunks_exact(11) + .map(|chunk| SatRange::load(chunk.try_into().unwrap())) + .collect(), + )), + None => Ok(None), + } + } + + pub(crate) fn calculate_rarity_for_sat_range(sat_range: SatRange) -> Vec<(Sat, Rarity)> { + let start_sat = Sat(sat_range.0); + let end_sat = Sat(sat_range.1); + + let start_height = if start_sat.third() > 0 { + start_sat.height().0 + 1 + } else { + start_sat.height().0 + }; + let end_height = if end_sat.third() > 0 { + end_sat.height().0 + } else { + end_sat.height().0 - 1 + }; + + let mut result = Vec::new(); + for height in start_height..=end_height { + let sat = Height(height).starting_sat(); + let rarity = sat.rarity(); + result.push((sat, rarity)); + } + result + } +} + +#[cfg(test)] +mod tests { + use super::*; + #[test] + fn test_calculate_rarity_for_sat_range_mythic() { + let sat_range: SatRange = (0, 100); + let rarity = Index::calculate_rarity_for_sat_range(sat_range); + assert_eq!(rarity, vec![(Sat(0), Rarity::Mythic)]); + let sat_range: SatRange = (1, 100); + let rarity = Index::calculate_rarity_for_sat_range(sat_range); + assert_eq!(rarity, vec![]); + } + #[test] + fn test_legendary_sat() { + let sat_range: SatRange = ( + Height(SUBSIDY_HALVING_INTERVAL * 6).starting_sat().0, + Height(SUBSIDY_HALVING_INTERVAL * 6).starting_sat().0 + 1, + ); + let rarity = Index::calculate_rarity_for_sat_range(sat_range); + assert_eq!(rarity, vec![(Sat(2067187500000000), Rarity::Legendary)]); + } + #[test] + fn test_epic_sat() { + let sat_range: SatRange = ( + Height(SUBSIDY_HALVING_INTERVAL).starting_sat().0, + Height(SUBSIDY_HALVING_INTERVAL).starting_sat().0 + 1, + ); + let rarity = Index::calculate_rarity_for_sat_range(sat_range); + assert_eq!(rarity, vec![(Sat(1050000000000000), Rarity::Epic)]); + } + + #[test] + fn test_rare_sat() { + let sat_range: SatRange = ( + Height(DIFFCHANGE_INTERVAL).starting_sat().0, + Height(DIFFCHANGE_INTERVAL).starting_sat().0 + 1, + ); + let rarity = Index::calculate_rarity_for_sat_range(sat_range); + assert_eq!(rarity, vec![(Sat(10080000000000), Rarity::Rare)]); + } + + #[test] + fn test_two_rarity_sat() { + let sat_range: SatRange = (0, 4999999999); + let rarity = Index::calculate_rarity_for_sat_range(sat_range); + assert_eq!(rarity, vec![(Sat(0), Rarity::Mythic)]); + let sat_range: SatRange = (0, 5000000000); + let rarity = Index::calculate_rarity_for_sat_range(sat_range); + assert_eq!(rarity, vec![(Sat(0), Rarity::Mythic)]); + let sat_range: SatRange = (0, 5000000001); + let rarity = Index::calculate_rarity_for_sat_range(sat_range); + assert_eq!( + rarity, + vec![ + (Sat(0), Rarity::Mythic), + (Sat(5000000000), Rarity::Uncommon) + ] + ); + let sat_range: SatRange = (1, 5000000001); + let rarity = Index::calculate_rarity_for_sat_range(sat_range); + assert_eq!(rarity, vec![(Sat(5000000000), Rarity::Uncommon)]); + } +} diff --git a/src/index/rtx.rs b/src/index/rtx.rs index de7a712952..1f76f25a79 100644 --- a/src/index/rtx.rs +++ b/src/index/rtx.rs @@ -43,4 +43,218 @@ impl Rtx<'_> { .map(|header| 
Header::load(*header.value()).block_hash()), ) } + + pub(crate) fn latest_block(&self) -> Result> { + Ok( + self + .0 + .open_table(HEIGHT_TO_BLOCK_HEADER)? + .range(0..)? + .next_back() + .and_then(|result| result.ok()) + .map(|(height, hash)| { + ( + Height(height.value()), + Header::load(*hash.value()).block_hash(), + ) + }), + ) + } + + pub(crate) fn inscription_id_to_sequence_number( + &self, + inscription_id: InscriptionId, + ) -> Result> { + Ok( + self + .0 + .open_table(INSCRIPTION_ID_TO_SEQUENCE_NUMBER)? + .get(&inscription_id.store())? + .map(|guard| guard.value()), + ) + } + + pub(crate) fn inscription_number_to_sequence_number( + &self, + inscription_number: i32, + ) -> Result> { + Ok( + self + .0 + .open_table(INSCRIPTION_NUMBER_TO_SEQUENCE_NUMBER)? + .get(inscription_number)? + .map(|guard| guard.value()), + ) + } + + pub(crate) fn sequence_number_to_satpoint( + &self, + sequence_number: u32, + ) -> Result> { + Ok( + self + .0 + .open_table(SEQUENCE_NUMBER_TO_SATPOINT)? + .get(sequence_number)? + .map(|satpoint| Entry::load(*satpoint.value())), + ) + } + + pub(crate) fn inscriptions_on_output_with_satpoints( + &self, + outpoint: OutPoint, + ) -> Result> { + let satpoint_to_sequence_number = self.0.open_multimap_table(SATPOINT_TO_SEQUENCE_NUMBER)?; + let sequence_number_to_inscription_entry = + self.0.open_table(SEQUENCE_NUMBER_TO_INSCRIPTION_ENTRY)?; + + Index::inscriptions_on_output( + &satpoint_to_sequence_number, + &sequence_number_to_inscription_entry, + outpoint, + ) + } + + pub(crate) fn sequence_number_to_inscription_entry( + &self, + sequence_number: u32, + ) -> Result> { + Ok( + self + .0 + .open_table(SEQUENCE_NUMBER_TO_INSCRIPTION_ENTRY)? + .get(sequence_number)? + .map(|value| InscriptionEntry::load(value.value())), + ) + } + + pub(crate) fn transaction_id_to_transaction(&self, txid: Txid) -> Result> { + Ok( + self + .0 + .open_table(TRANSACTION_ID_TO_TRANSACTION)? + .get(&txid.store())? + .map(|transaction| consensus::encode::deserialize(transaction.value())) + .transpose()?, + ) + } + + pub(crate) fn outpoint_to_entry(&self, outpoint: OutPoint) -> Result> { + let table = self.0.open_table(OUTPOINT_TO_ENTRY)?; + get_txout_by_outpoint(&table, &outpoint) + } + + pub(crate) fn get_inscription_entry( + &self, + inscription_id: InscriptionId, + ) -> Result> { + if let Some(sequence_number) = self.inscription_id_to_sequence_number(inscription_id)? 
{ + self.sequence_number_to_inscription_entry(sequence_number) + } else { + Ok(None) + } + } + + pub(crate) fn ord_inscription_id_to_collections( + &self, + inscription_id: InscriptionId, + ) -> Result>> { + let table = self + .0 + .open_multimap_table(COLLECTIONS_INSCRIPTION_ID_TO_KINDS)?; + get_collections_of_inscription(&table, &inscription_id) + } + + pub(crate) fn ord_district_to_inscription_id( + &self, + number: u32, + ) -> Result> { + let district = District { number }; + let table = self.0.open_table(COLLECTIONS_KEY_TO_INSCRIPTION_ID)?; + get_collection_inscription_id(&table, &district.to_collection_key()) + } + + pub(crate) fn ord_transaction_id_to_inscription_operations( + &self, + txid: Txid, + ) -> Result>> { + let table = self.0.open_table(ORD_TX_TO_OPERATIONS)?; + get_transaction_operations(&table, &txid) + } + + pub(crate) fn brc20_get_tick_info(&self, name: &brc20::Tick) -> Result> { + let table = self.0.open_table(BRC20_TOKEN)?; + get_token_info(&table, name) + } + + pub(crate) fn brc20_get_all_tick_info(&self) -> Result> { + let table = self.0.open_table(BRC20_TOKEN)?; + get_tokens_info(&table) + } + + pub(crate) fn brc20_get_balance_by_address( + &self, + tick: &brc20::Tick, + script_key: ScriptKey, + ) -> Result> { + let table = self.0.open_table(BRC20_BALANCES)?; + get_balance(&table, &script_key, tick) + } + + pub(crate) fn brc20_get_all_balance_by_address( + &self, + script_key: ScriptKey, + ) -> Result> { + let table = self.0.open_table(BRC20_BALANCES)?; + get_balances(&table, &script_key) + } + + pub(crate) fn brc20_transaction_id_to_transaction_receipt( + &self, + txid: Txid, + ) -> Result>> { + let table = self.0.open_table(BRC20_EVENTS)?; + get_transaction_receipts(&table, &txid) + } + + pub(crate) fn brc20_get_tick_transferable_by_address( + &self, + tick: &brc20::Tick, + script_key: ScriptKey, + ) -> Result> { + let address_table = self + .0 + .open_multimap_table(BRC20_ADDRESS_TICKER_TO_TRANSFERABLE_ASSETS)?; + let satpoint_table = self.0.open_table(BRC20_SATPOINT_TO_TRANSFERABLE_ASSETS)?; + get_transferable_assets_by_account_ticker(&address_table, &satpoint_table, &script_key, tick) + } + + pub(crate) fn brc20_get_all_transferable_by_address( + &self, + script_key: ScriptKey, + ) -> Result> { + let address_table = self + .0 + .open_multimap_table(BRC20_ADDRESS_TICKER_TO_TRANSFERABLE_ASSETS)?; + let satpoint_table = self.0.open_table(BRC20_SATPOINT_TO_TRANSFERABLE_ASSETS)?; + get_transferable_assets_by_account(&address_table, &satpoint_table, &script_key) + } + + pub(crate) fn brc20_transferable_assets_on_output_with_satpoints( + &self, + outpoint: OutPoint, + ) -> Result> { + let satpoint_to_sequence_number = self.0.open_table(BRC20_SATPOINT_TO_TRANSFERABLE_ASSETS)?; + get_transferable_assets_by_outpoint(&satpoint_to_sequence_number, outpoint) + } + + pub(super) fn list_sat_range(&self, outpoint: OutPointValue) -> Result>> { + Ok( + self + .0 + .open_table(OUTPOINT_TO_SAT_RANGES)? + .get(&outpoint)? 
+ .map(|outpoint| outpoint.value().to_vec()), + ) + } } diff --git a/src/index/updater.rs b/src/index/updater.rs index 7aa89768b8..fd0eb18b73 100644 --- a/src/index/updater.rs +++ b/src/index/updater.rs @@ -1,3 +1,5 @@ +use crate::okx::protocol::{context::Context, ChainContext, ProtocolConfig, ProtocolManager}; +use std::sync::atomic::{AtomicUsize, Ordering}; use { self::{inscription_updater::InscriptionUpdater, rune_updater::RuneUpdater}, super::{fetcher::Fetcher, *}, @@ -6,7 +8,9 @@ use { tokio::sync::mpsc::{error::TryRecvError, Receiver, Sender}, }; -mod inscription_updater; +pub(crate) mod inscription_updater; +use crate::okx::lru::SimpleLru; + mod rune_updater; pub(crate) struct BlockData { @@ -84,18 +88,28 @@ impl<'index> Updater<'_> { let rx = Self::fetch_blocks_from(self.index, self.height, self.index.index_sats)?; - let (mut outpoint_sender, mut value_receiver) = Self::spawn_fetcher(self.index)?; + let (mut outpoint_sender, mut tx_out_receiver) = Self::spawn_fetcher(self.index)?; + + let commit_height_interval = self.index.options.commit_height_interval(); + let commit_persist_interval = self.index.options.commit_persist_interval(); + log::info!( + "commit height interval: {}, commit persist interval: {}", + commit_height_interval, + commit_persist_interval + ); let mut uncommitted = 0; - let mut value_cache = HashMap::new(); + let mut unpersisted = 0; + let mut tx_out_cache = SimpleLru::new(self.index.options.lru_size); while let Ok(block) = rx.recv() { + tx_out_cache.refresh(); self.index_block( self.index, &mut outpoint_sender, - &mut value_receiver, + &mut tx_out_receiver, &mut wtx, block, - &mut value_cache, + &mut tx_out_cache, )?; if let Some(progress_bar) = &mut progress_bar { @@ -112,9 +126,16 @@ impl<'index> Updater<'_> { uncommitted += 1; - if uncommitted == 5000 { - self.commit(wtx, value_cache)?; - value_cache = HashMap::new(); + let should_break = SHUTTING_DOWN.load(atomic::Ordering::Relaxed); + if uncommitted >= commit_height_interval { + unpersisted += 1; + if unpersisted < commit_persist_interval && !should_break { + wtx.set_durability(redb::Durability::None); + log::info!("set wtx durability to none"); + } else { + unpersisted = 0; + } + self.commit(wtx)?; uncommitted = 0; wtx = self.index.begin_write()?; let height = wtx @@ -129,6 +150,9 @@ impl<'index> Updater<'_> { // write transaction break; } + if should_break { + break; + } wtx .open_table(WRITE_TRANSACTION_STARTING_BLOCK_COUNT_TO_TIMESTAMP)? .insert( @@ -138,15 +162,13 @@ impl<'index> Updater<'_> { .map(|duration| duration.as_millis()) .unwrap_or(0), )?; - } - - if SHUTTING_DOWN.load(atomic::Ordering::Relaxed) { + } else if should_break { break; } } - if uncommitted > 0 { - self.commit(wtx, value_cache)?; + if uncommitted > 0 || unpersisted > 0 { + self.commit(wtx)?; } if let Some(progress_bar) = &mut progress_bar { @@ -241,14 +263,14 @@ impl<'index> Updater<'_> { } } - fn spawn_fetcher(index: &Index) -> Result<(Sender, Receiver)> { + fn spawn_fetcher(index: &Index) -> Result<(Sender, Receiver)> { let fetcher = Fetcher::new(&index.options)?; // Not sure if any block has more than 20k inputs, but none so far after first inscription block const CHANNEL_BUFFER_SIZE: usize = 20_000; let (outpoint_sender, mut outpoint_receiver) = tokio::sync::mpsc::channel::(CHANNEL_BUFFER_SIZE); - let (value_sender, value_receiver) = tokio::sync::mpsc::channel::(CHANNEL_BUFFER_SIZE); + let (txout_sender, tx_out_receiver) = tokio::sync::mpsc::channel::(CHANNEL_BUFFER_SIZE); // Batch 2048 missing inputs at a time. 
Arbitrarily chosen for now, maybe higher or lower can be faster? // Did rudimentary benchmarks with 1024 and 4096 and time was roughly the same. @@ -295,8 +317,8 @@ impl<'index> Updater<'_> { }; // Send all tx output values back in order for (i, tx) in txs.iter().flatten().enumerate() { - let Ok(_) = value_sender - .send(tx.output[usize::try_from(outpoints[i].vout).unwrap()].value) + let Ok(_) = txout_sender + .send(tx.output[usize::try_from(outpoints[i].vout).unwrap()].clone()) .await else { log::error!("Value channel closed unexpectedly"); @@ -307,17 +329,17 @@ impl<'index> Updater<'_> { }) }); - Ok((outpoint_sender, value_receiver)) + Ok((outpoint_sender, tx_out_receiver)) } fn index_block( &mut self, index: &Index, outpoint_sender: &mut Sender, - value_receiver: &mut Receiver, + tx_out_receiver: &mut Receiver, wtx: &mut WriteTransaction, block: BlockData, - value_cache: &mut HashMap, + tx_out_cache: &mut SimpleLru, ) -> Result<()> { Reorg::detect_reorg(&block, self.height, self.index)?; @@ -325,24 +347,22 @@ impl<'index> Updater<'_> { let mut sat_ranges_written = 0; let mut outputs_in_block = 0; - log::info!( - "Block {} at {} with {} transactions…", - self.height, - timestamp(block.header.time), - block.txdata.len() - ); - // If value_receiver still has values something went wrong with the last block // Could be an assert, shouldn't recover from this and commit the last block - let Err(TryRecvError::Empty) = value_receiver.try_recv() else { + let Err(TryRecvError::Empty) = tx_out_receiver.try_recv() else { return Err(anyhow!("Previous block did not consume all input values")); }; - let mut outpoint_to_value = wtx.open_table(OUTPOINT_TO_VALUE)?; + let mut outpoint_to_entry = wtx.open_table(OUTPOINT_TO_ENTRY)?; let index_inscriptions = self.height >= index.first_inscription_height && !index.options.no_index_inscriptions; + let fetching_outputs_count = AtomicUsize::new(0); + let total_outputs_count = AtomicUsize::new(0); + let cache_outputs_count = AtomicUsize::new(0); + let miss_outputs_count = AtomicUsize::new(0); + let meet_outputs_count = AtomicUsize::new(0); if index_inscriptions { // Send all missing input outpoints to be fetched right away let txids = block @@ -350,33 +370,58 @@ impl<'index> Updater<'_> { .iter() .map(|(_, txid)| txid) .collect::>(); - for (tx, _) in &block.txdata { - for input in &tx.input { + use rayon::prelude::*; + let tx_outs = block + .txdata + .par_iter() + .flat_map(|(tx, _)| tx.input.par_iter()) + .filter_map(|input| { + total_outputs_count.fetch_add(1, Ordering::Relaxed); let prev_output = input.previous_output; // We don't need coinbase input value if prev_output.is_null() { - continue; - } - // We don't need input values from txs earlier in the block, since they'll be added to value_cache - // when the tx is indexed - if txids.contains(&prev_output.txid) { - continue; - } - // We don't need input values we already have in our value_cache from earlier blocks - if value_cache.contains_key(&prev_output) { - continue; - } - // We don't need input values we already have in our outpoint_to_value table from earlier blocks that - // were committed to db already - if outpoint_to_value.get(&prev_output.store())?.is_some() { - continue; + None + } else if txids.contains(&prev_output.txid) { + meet_outputs_count.fetch_add(1, Ordering::Relaxed); + None + } else if tx_out_cache.contains(&prev_output) { + cache_outputs_count.fetch_add(1, Ordering::Relaxed); + None + } else if let Some(txout) = + get_txout_by_outpoint(&outpoint_to_entry, &prev_output).unwrap() + { + 
miss_outputs_count.fetch_add(1, Ordering::Relaxed); + Some((prev_output, Some(txout))) + } else { + fetching_outputs_count.fetch_add(1, Ordering::Relaxed); + Some((prev_output, None)) } - // We don't know the value of this tx input. Send this outpoint to background thread to be fetched - outpoint_sender.blocking_send(prev_output)?; + }) + .collect::>(); + for (out_point, value) in tx_outs.into_iter() { + if let Some(tx_out) = value { + tx_out_cache.insert(out_point, tx_out); + } else { + outpoint_sender.blocking_send(out_point).unwrap(); } } } + let time = timestamp(block.header.time); + + log::info!( + "Block {} at {} with {} transactions, fetching previous outputs {}/{}…, {},{},{}, cost:{}ms", + self.height, + time, + block.txdata.len(), + fetching_outputs_count.load(Ordering::Relaxed), + total_outputs_count.load(Ordering::Relaxed), + miss_outputs_count.load(Ordering::Relaxed), + meet_outputs_count.load(Ordering::Relaxed), + cache_outputs_count.load(Ordering::Relaxed), + start.elapsed().as_millis(), + ); + let mut height_to_block_header = wtx.open_table(HEIGHT_TO_BLOCK_HEADER)?; let mut height_to_last_sequence_number = wtx.open_table(HEIGHT_TO_LAST_SEQUENCE_NUMBER)?; let mut home_inscriptions = wtx.open_table(HOME_INSCRIPTIONS)?; @@ -420,36 +465,33 @@ impl<'index> Updater<'_> { .map(|(number, _id)| number.value() + 1) .unwrap_or(0); - let home_inscription_count = home_inscriptions.len()?; - - let mut inscription_updater = InscriptionUpdater { + let mut operations = HashMap::new(); + let mut inscription_updater = InscriptionUpdater::new( + &mut operations, blessed_inscription_count, - chain: self.index.options.chain(), + self.index.options.chain(), cursed_inscription_count, - flotsam: Vec::new(), - height: self.height, - home_inscription_count, - home_inscriptions: &mut home_inscriptions, - id_to_sequence_number: &mut inscription_id_to_sequence_number, - index_transactions: self.index.index_transactions, - inscription_number_to_sequence_number: &mut inscription_number_to_sequence_number, - lost_sats, + self.height, + &mut home_inscriptions, + &mut inscription_id_to_sequence_number, + self.index.index_transactions, + &mut inscription_number_to_sequence_number, next_sequence_number, - outpoint_to_value: &mut outpoint_to_value, - reward: Height(self.height).subsidy(), - sat_to_sequence_number: &mut sat_to_sequence_number, - satpoint_to_sequence_number: &mut satpoint_to_sequence_number, - sequence_number_to_children: &mut sequence_number_to_children, - sequence_number_to_entry: &mut sequence_number_to_inscription_entry, - sequence_number_to_satpoint: &mut sequence_number_to_satpoint, - timestamp: block.header.time, - transaction_buffer: Vec::new(), - transaction_id_to_transaction: &mut transaction_id_to_transaction, + lost_sats, + &mut outpoint_to_entry, + &mut transaction_id_to_transaction, + &mut sat_to_sequence_number, + &mut satpoint_to_sequence_number, + &mut sequence_number_to_children, + &mut sequence_number_to_inscription_entry, + &mut sequence_number_to_satpoint, + block.header.time, unbound_inscriptions, - value_cache, - value_receiver, - }; + tx_out_receiver, + tx_out_cache, + )?; + let start_time = Instant::now(); if self.index.index_sats { let mut sat_to_satpoint = wtx.open_table(SAT_TO_SATPOINT)?; let mut outpoint_to_sat_ranges = wtx.open_table(OUTPOINT_TO_SAT_RANGES)?; @@ -545,6 +587,7 @@ impl<'index> Updater<'_> { inscription_updater.index_envelopes(tx, *txid, None)?; } } + let ord_cost = start_time.elapsed().as_millis(); if index_inscriptions { height_to_last_sequence_number 
@@ -575,6 +618,36 @@ impl<'index> Updater<'_> { &inscription_updater.unbound_inscriptions, )?; + inscription_updater.flush_cache()?; + + let mut context = Context { + chain_conf: ChainContext { + chain: self.index.options.chain(), + blockheight: self.height, + blocktime: block.header.time, + }, + tx_out_cache, + hit: 0, + miss: 0, + ORD_TX_TO_OPERATIONS: &mut wtx.open_table(ORD_TX_TO_OPERATIONS)?, + COLLECTIONS_KEY_TO_INSCRIPTION_ID: &mut wtx.open_table(COLLECTIONS_KEY_TO_INSCRIPTION_ID)?, + COLLECTIONS_INSCRIPTION_ID_TO_KINDS: &mut wtx + .open_multimap_table(COLLECTIONS_INSCRIPTION_ID_TO_KINDS)?, + SEQUENCE_NUMBER_TO_INSCRIPTION_ENTRY: &mut sequence_number_to_inscription_entry, + OUTPOINT_TO_ENTRY: &mut outpoint_to_entry, + BRC20_BALANCES: &mut wtx.open_table(BRC20_BALANCES)?, + BRC20_TOKEN: &mut wtx.open_table(BRC20_TOKEN)?, + BRC20_EVENTS: &mut wtx.open_table(BRC20_EVENTS)?, + BRC20_SATPOINT_TO_TRANSFERABLE_ASSETS: &mut wtx + .open_table(BRC20_SATPOINT_TO_TRANSFERABLE_ASSETS)?, + BRC20_ADDRESS_TICKER_TO_TRANSFERABLE_ASSETS: &mut wtx + .open_multimap_table(BRC20_ADDRESS_TICKER_TO_TRANSFERABLE_ASSETS)?, + }; + + // Create a protocol manager to index the block of bitmap data. + let config = ProtocolConfig::new_with_options(&index.options); + ProtocolManager::new(config).index_block(&mut context, &block, operations)?; + if index.index_runes && self.height >= self.index.options.first_rune_height() { let mut outpoint_to_rune_balances = wtx.open_table(OUTPOINT_TO_RUNE_BALANCES)?; let mut rune_id_to_rune_entry = wtx.open_table(RUNE_ID_TO_RUNE_ENTRY)?; @@ -628,8 +701,11 @@ impl<'index> Updater<'_> { self.outputs_traversed += outputs_in_block; log::info!( - "Wrote {sat_ranges_written} sat ranges from {outputs_in_block} outputs in {} ms", + "Wrote {sat_ranges_written} sat ranges from {outputs_in_block} outputs in {}/{} ms, hit miss: {}/{}", + ord_cost, (Instant::now() - start).as_millis(), + context.hit, + context.miss, ); Ok(()) @@ -701,7 +777,7 @@ impl<'index> Updater<'_> { Ok(()) } - fn commit(&mut self, wtx: WriteTransaction, value_cache: HashMap) -> Result { + fn commit(&mut self, wtx: WriteTransaction) -> Result { log::info!( "Committing at block height {}, {} outputs traversed, {} in map, {} cached", self.height, @@ -727,14 +803,6 @@ impl<'index> Updater<'_> { self.outputs_inserted_since_flush = 0; } - { - let mut outpoint_to_value = wtx.open_table(OUTPOINT_TO_VALUE)?; - - for (outpoint, value) in value_cache { - outpoint_to_value.insert(&outpoint.store(), &value)?; - } - } - Index::increment_statistic(&wtx, Statistic::OutputsTraversed, self.outputs_traversed)?; self.outputs_traversed = 0; Index::increment_statistic(&wtx, Statistic::SatRanges, self.sat_ranges_since_flush)?; @@ -747,3 +815,17 @@ impl<'index> Updater<'_> { Ok(()) } } + +#[cfg(test)] +mod tests { + use rayon::prelude::*; + #[test] + fn parallel() { + let mut a: Vec<_> = (0..10000).into_par_iter().map(|x| x + 1).collect(); + + let b = a.clone(); + a.sort(); + assert_eq!(a, b); + println!("{:?}", a); + } +} diff --git a/src/index/updater/inscription_updater.rs b/src/index/updater/inscription_updater.rs index 19f99033cc..6342dd522d 100644 --- a/src/index/updater/inscription_updater.rs +++ b/src/index/updater/inscription_updater.rs @@ -1,4 +1,5 @@ use super::*; +use crate::okx::datastore::ord::operation::{Action, InscriptionOp}; #[derive(Debug, PartialEq, Copy, Clone)] enum Curse { @@ -15,11 +16,14 @@ enum Curse { #[derive(Debug, Clone)] pub(super) struct Flotsam { + txid: Txid, inscription_id: InscriptionId, offset: u64, + 
old_satpoint: SatPoint, origin: Origin, } +#[allow(clippy::large_enum_variant)] #[derive(Debug, Clone)] enum Origin { New { @@ -30,14 +34,14 @@ enum Origin { pointer: Option, reinscription: bool, unbound: bool, + inscription: Inscription, vindicated: bool, }, - Old { - old_satpoint: SatPoint, - }, + Old, } pub(super) struct InscriptionUpdater<'a, 'db, 'tx> { + pub(super) operations: &'a mut HashMap>, pub(super) blessed_inscription_count: u64, pub(super) chain: Chain, pub(super) cursed_inscription_count: u64, @@ -48,9 +52,9 @@ pub(super) struct InscriptionUpdater<'a, 'db, 'tx> { pub(super) id_to_sequence_number: &'a mut Table<'db, 'tx, InscriptionIdValue, u32>, pub(super) index_transactions: bool, pub(super) inscription_number_to_sequence_number: &'a mut Table<'db, 'tx, i32, u32>, - pub(super) lost_sats: u64, pub(super) next_sequence_number: u32, - pub(super) outpoint_to_value: &'a mut Table<'db, 'tx, &'static OutPointValue, u64>, + pub(super) lost_sats: u64, + pub(super) outpoint_to_entry: &'a mut Table<'db, 'tx, &'static OutPointValue, &'static [u8]>, pub(super) reward: u64, pub(super) transaction_buffer: Vec, pub(super) transaction_id_to_transaction: @@ -63,11 +67,66 @@ pub(super) struct InscriptionUpdater<'a, 'db, 'tx> { pub(super) sequence_number_to_satpoint: &'a mut Table<'db, 'tx, u32, &'static SatPointValue>, pub(super) timestamp: u32, pub(super) unbound_inscriptions: u64, - pub(super) value_cache: &'a mut HashMap, - pub(super) value_receiver: &'a mut Receiver, + pub(super) tx_out_receiver: &'a mut Receiver, + pub(super) tx_out_cache: &'a mut SimpleLru, + pub(super) new_outpoints: Vec, } impl<'a, 'db, 'tx> InscriptionUpdater<'a, 'db, 'tx> { + pub(super) fn new( + operations: &'a mut HashMap>, + blessed_inscription_count: u64, + chain: Chain, + cursed_inscription_count: u64, + height: u32, + home_inscriptions: &'a mut Table<'db, 'tx, u32, InscriptionIdValue>, + id_to_sequence_number: &'a mut Table<'db, 'tx, InscriptionIdValue, u32>, + index_transactions: bool, + inscription_number_to_sequence_number: &'a mut Table<'db, 'tx, i32, u32>, + next_sequence_number: u32, + lost_sats: u64, + outpoint_to_entry: &'a mut Table<'db, 'tx, &'static OutPointValue, &'static [u8]>, + transaction_id_to_transaction: &'a mut Table<'db, 'tx, &'static TxidValue, &'static [u8]>, + sat_to_sequence_number: &'a mut MultimapTable<'db, 'tx, u64, u32>, + satpoint_to_sequence_number: &'a mut MultimapTable<'db, 'tx, &'static SatPointValue, u32>, + sequence_number_to_children: &'a mut MultimapTable<'db, 'tx, u32, u32>, + sequence_number_to_entry: &'a mut Table<'db, 'tx, u32, InscriptionEntryValue>, + sequence_number_to_satpoint: &'a mut Table<'db, 'tx, u32, &'static SatPointValue>, + timestamp: u32, + unbound_inscriptions: u64, + tx_out_receiver: &'a mut Receiver, + tx_out_cache: &'a mut SimpleLru, + ) -> Result { + Ok(Self { + operations, + blessed_inscription_count, + chain, + cursed_inscription_count, + flotsam: vec![], + height, + home_inscription_count: home_inscriptions.len()?, + home_inscriptions, + id_to_sequence_number, + index_transactions, + inscription_number_to_sequence_number, + next_sequence_number, + lost_sats, + outpoint_to_entry, + reward: Height(height).subsidy(), + transaction_buffer: vec![], + transaction_id_to_transaction, + sat_to_sequence_number, + satpoint_to_sequence_number, + sequence_number_to_children, + sequence_number_to_entry, + sequence_number_to_satpoint, + timestamp, + unbound_inscriptions, + tx_out_receiver, + tx_out_cache, + new_outpoints: vec![], + }) + } pub(super) fn 
index_envelopes( &mut self, tx: &Transaction, @@ -100,9 +159,11 @@ impl<'a, 'db, 'tx> InscriptionUpdater<'a, 'db, 'tx> { )? { let offset = total_input_value + old_satpoint.offset; floating_inscriptions.push(Flotsam { + txid, offset, inscription_id, - origin: Origin::Old { old_satpoint }, + old_satpoint, + origin: Origin::Old, }); inscribed_offsets @@ -114,21 +175,23 @@ impl<'a, 'db, 'tx> InscriptionUpdater<'a, 'db, 'tx> { let offset = total_input_value; // multi-level cache for UTXO set to get to the input amount - let current_input_value = if let Some(value) = self.value_cache.remove(&tx_in.previous_output) - { - value - } else if let Some(value) = self - .outpoint_to_value - .remove(&tx_in.previous_output.store())? + let current_input_value = if let Some(tx_out) = self.tx_out_cache.get(&tx_in.previous_output) { - value.value() + tx_out.value } else { - self.value_receiver.blocking_recv().ok_or_else(|| { + let tx_out = self.tx_out_receiver.blocking_recv().ok_or_else(|| { anyhow!( "failed to get transaction for {}", tx_in.previous_output.txid ) - })? + })?; + // received new tx out from chain node, add it to new_outpoints first and persist it in db later. + #[cfg(not(feature = "cache"))] + self.new_outpoints.push(tx_in.previous_output); + self + .tx_out_cache + .insert(tx_in.previous_output, tx_out.clone()); + tx_out.value }; total_input_value += current_input_value; @@ -199,8 +262,13 @@ impl<'a, 'db, 'tx> InscriptionUpdater<'a, 'db, 'tx> { .unwrap_or(offset); floating_inscriptions.push(Flotsam { + txid, inscription_id, offset, + old_satpoint: SatPoint { + outpoint: tx_in.previous_output, + offset: 0, + }, origin: Origin::New { cursed: curse.is_some() && !jubilant, fee: 0, @@ -209,6 +277,7 @@ impl<'a, 'db, 'tx> InscriptionUpdater<'a, 'db, 'tx> { pointer: inscription.payload.pointer(), reinscription: inscribed_offsets.get(&offset).is_some(), unbound, + inscription: inscription.payload.clone(), vindicated: curse.is_some() && jubilant, }, }); @@ -303,12 +372,17 @@ impl<'a, 'db, 'tx> InscriptionUpdater<'a, 'db, 'tx> { output_value = end; - self.value_cache.insert( + #[cfg(not(feature = "cache"))] + self.new_outpoints.push(OutPoint { + vout: vout.try_into().unwrap(), + txid, + }); + self.tx_out_cache.insert( OutPoint { vout: vout.try_into().unwrap(), txid, }, - tx_out.value, + tx_out.clone(), ); } @@ -357,6 +431,28 @@ impl<'a, 'db, 'tx> InscriptionUpdater<'a, 'db, 'tx> { } } + // write tx_out to outpoint_to_entry table + pub(super) fn flush_cache(self) -> Result { + let start = Instant::now(); + let persist = self.new_outpoints.len(); + let mut entry = Vec::new(); + for outpoint in self.new_outpoints.into_iter() { + let tx_out = self.tx_out_cache.get(&outpoint).unwrap(); + tx_out.consensus_encode(&mut entry)?; + self + .outpoint_to_entry + .insert(&outpoint.store(), entry.as_slice())?; + entry.clear(); + } + log::info!( + "flush cache, persist:{}, global:{} cost: {}ms", + persist, + self.tx_out_cache.len(), + start.elapsed().as_millis() + ); + Ok(()) + } + fn calculate_sat( input_sat_ranges: Option<&VecDeque<(u64, u64)>>, input_offset: u64, @@ -384,10 +480,10 @@ impl<'a, 'db, 'tx> InscriptionUpdater<'a, 'db, 'tx> { ) -> Result { let inscription_id = flotsam.inscription_id; let (unbound, sequence_number) = match flotsam.origin { - Origin::Old { old_satpoint } => { + Origin::Old => { self .satpoint_to_sequence_number - .remove_all(&old_satpoint.store())?; + .remove_all(&flotsam.old_satpoint.store())?; ( false, @@ -406,6 +502,7 @@ impl<'a, 'db, 'tx> InscriptionUpdater<'a, 'db, 'tx> { pointer: _, 
reinscription, unbound, + inscription: _, vindicated, } => { let inscription_number = if cursed { @@ -541,6 +638,42 @@ impl<'a, 'db, 'tx> InscriptionUpdater<'a, 'db, 'tx> { new_satpoint.store() }; + self + .operations + .entry(flotsam.txid) + .or_default() + .push(InscriptionOp { + txid: flotsam.txid, + sequence_number, + inscription_number: self + .sequence_number_to_entry + .get(sequence_number)? + .map(|entry| InscriptionEntry::load(entry.value()).inscription_number), + inscription_id: flotsam.inscription_id, + action: match flotsam.origin { + Origin::Old => Action::Transfer, + Origin::New { + cursed, + fee: _, + hidden: _, + pointer: _, + reinscription: _, + unbound, + parent, + inscription, + vindicated, + } => Action::New { + cursed, + unbound, + vindicated, + parent, + inscription, + }, + }, + old_satpoint: flotsam.old_satpoint, + new_satpoint: Some(Entry::load(satpoint)), + }); + self .satpoint_to_sequence_number .insert(&satpoint, sequence_number)?; diff --git a/src/lib.rs b/src/lib.rs index 9102594701..18a615efb0 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -126,7 +126,9 @@ mod fee_rate; mod height; mod index; mod inscriptions; +mod logger; mod object; +mod okx; mod options; mod outgoing; pub mod rarity; @@ -213,7 +215,9 @@ fn gracefully_shutdown_indexer() { } pub fn main() { - env_logger::init(); + let args = Arguments::parse(); + let log_dir = args.options.log_dir(); + logger::init(args.options.log_level(), log_dir).expect("initialize logger error:"); ctrlc::set_handler(move || { if SHUTTING_DOWN.fetch_or(true, atomic::Ordering::Relaxed) { diff --git a/src/logger.rs b/src/logger.rs new file mode 100644 index 0000000000..a6cce6bd79 --- /dev/null +++ b/src/logger.rs @@ -0,0 +1,58 @@ +use anyhow::Context; +use log4rs::{ + append::{ + console::ConsoleAppender, + rolling_file::{ + policy::compound::{ + roll::fixed_window::FixedWindowRoller, trigger::size::SizeTrigger, CompoundPolicy, + }, + RollingFileAppender, + }, + }, + config::{Appender, Logger, Root}, + encode::pattern::PatternEncoder, + Config, +}; +use std::fs; +use std::path::Path; + +pub fn init>(level: log::LevelFilter, log_dir: P) -> anyhow::Result { + fs::create_dir_all(&log_dir)?; + let log_file = log_dir.as_ref().join("ord.log"); + + let stdout = ConsoleAppender::builder().build(); + + // using default encoder for now, change it as needed. 
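+ // Rolling policy: roll `ord.log` once it reaches 20 MiB (SizeTrigger) and keep at most 50 gzipped archives named `ord-{}.log.gz` (FixedWindowRoller).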
+ let encoder = PatternEncoder::default(); + let trigger = SizeTrigger::new(1024 * 1024 * 20); + let roller = FixedWindowRoller::builder() + .build( + log_dir + .as_ref() + .join("ord-{}.log.gz") + .to_string_lossy() + .as_ref(), + 50, + ) + .map_err(|e| anyhow::format_err!("build FixedWindowRoller error: {}", e))?; + let policy = CompoundPolicy::new(Box::new(trigger), Box::new(roller)); + let rfile = RollingFileAppender::builder() + .append(true) + .encoder(Box::new(encoder)) + .build(&log_file, Box::new(policy)) + .with_context(|| format!("Failed to create rolling file {}", log_file.display()))?; + + let cfg = Config::builder() + .appender(Appender::builder().build("stdout", Box::new(stdout))) + .appender(Appender::builder().build("rfile", Box::new(rfile))) + .logger(Logger::builder().build("mio", log::LevelFilter::Error)) + .build( + Root::builder() + .appender("stdout") + .appender("rfile") + .build(level), + ) + .context("build log config failed")?; + + log4rs::init_config(cfg).context("log4rs init config error") +} diff --git a/src/okx/datastore/brc20/balance.rs b/src/okx/datastore/brc20/balance.rs new file mode 100644 index 0000000000..f3b9d3df5e --- /dev/null +++ b/src/okx/datastore/brc20/balance.rs @@ -0,0 +1,18 @@ +use super::*; +use serde::{Deserialize, Serialize}; +#[derive(Debug, Deserialize, Serialize, Clone, PartialEq, Eq)] +pub struct Balance { + pub tick: Tick, + pub overall_balance: u128, + pub transferable_balance: u128, +} + +impl Balance { + pub fn new(tick: &Tick) -> Self { + Self { + tick: tick.clone(), + overall_balance: 0u128, + transferable_balance: 0u128, + } + } +} diff --git a/src/okx/datastore/brc20/errors.rs b/src/okx/datastore/brc20/errors.rs new file mode 100644 index 0000000000..db3f81e4fd --- /dev/null +++ b/src/okx/datastore/brc20/errors.rs @@ -0,0 +1,75 @@ +use crate::InscriptionId; +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, PartialEq, thiserror::Error, Deserialize, Serialize)] +pub enum BRC20Error { + #[error("invalid number: {0}")] + InvalidNum(String), + + #[error("invalid tick supply: {0}")] + InvalidSupply(String), + + #[error("tick: {0} already exists")] + DuplicateTick(String), + + #[error("tick: {0} not found")] + TickNotFound(String), + + #[error("illegal tick length '{0}'")] + InvalidTickLen(String), + + #[error("decimals {0} too large")] + DecimalsTooLarge(u8), + + #[error("tick: {0} has been fully minted")] + TickMinted(String), + + #[error("tick: {0} mint limit out of range: {1}")] + MintLimitOutOfRange(String, String), + + #[error("zero amount not allowed")] + InvalidZeroAmount, + + #[error("amount overflow: {0}")] + AmountOverflow(String), + + #[error("insufficient balance: available {0}, required {1}")] + InsufficientBalance(String, String), + + #[error("amount exceeds limit: {0}")] + AmountExceedLimit(String), + + #[error("transferable inscription not found: {0}")] + TransferableNotFound(InscriptionId), + + #[error("invalid inscription to coinbase")] + InscribeToCoinbase, + + #[error("transferable owner does not match: {0}")] + TransferableOwnerNotMatch(InscriptionId), + + #[error("self issuance not activated")] + SelfIssuanceNotActivated, + + #[error("'self_mint' must be set to 'true' when deploying a 5-byte tick")] + SelfIssuanceCheckedFailed, + + #[error("self mint permission denied")] + SelfMintPermissionDenied, + + /// An InternalError is an unexpected error that + /// should not happen under normal circumstances. + #[error("internal error: {0}")] + InternalError(String), + + // num error + #[error("{op} overflow: original:
{org}, other: {other}")] + Overflow { + op: String, + org: String, + other: String, + }, + + #[error("invalid integer {0}")] + InvalidInteger(String), +} diff --git a/src/okx/datastore/brc20/events.rs b/src/okx/datastore/brc20/events.rs new file mode 100644 index 0000000000..7ed57e52a3 --- /dev/null +++ b/src/okx/datastore/brc20/events.rs @@ -0,0 +1,120 @@ +use super::*; +use crate::{InscriptionId, SatPoint}; +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, strum_macros::Display)] +#[strum(serialize_all = "camelCase")] +pub enum OperationType { + Deploy, + Mint, + InscribeTransfer, + Transfer, +} +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +pub struct Receipt { + pub inscription_id: InscriptionId, + pub inscription_number: i32, + pub old_satpoint: SatPoint, + pub new_satpoint: SatPoint, + pub op: OperationType, + pub from: ScriptKey, + pub to: ScriptKey, + pub result: Result, +} + +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +pub enum Event { + Deploy(DeployEvent), + Mint(MintEvent), + InscribeTransfer(InscribeTransferEvent), + Transfer(TransferEvent), +} + +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +pub struct DeployEvent { + pub supply: u128, + pub limit_per_mint: u128, + pub decimal: u8, + pub tick: Tick, + pub self_mint: bool, +} + +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +pub struct MintEvent { + pub tick: Tick, + pub amount: u128, + pub msg: Option, +} + +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +pub struct InscribeTransferEvent { + pub tick: Tick, + pub amount: u128, +} + +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +pub struct TransferEvent { + pub tick: Tick, + pub amount: u128, + pub msg: Option, +} + +#[cfg(test)] +mod tests { + use super::*; + use bitcoin::Address; + use std::str::FromStr; + + #[test] + fn action_receipt_serialize() { + let action_receipt = Receipt { + inscription_id: InscriptionId::from_str( + "9991111111111111111111111111111111111111111111111111111111111111i1", + ) + .unwrap(), + inscription_number: 1, + old_satpoint: SatPoint::from_str( + "1111111111111111111111111111111111111111111111111111111111111111:1:1", + ) + .unwrap(), + new_satpoint: SatPoint::from_str( + "2111111111111111111111111111111111111111111111111111111111111111:1:1", + ) + .unwrap(), + op: OperationType::Deploy, + from: ScriptKey::from_address( + Address::from_str("bc1qhvd6suvqzjcu9pxjhrwhtrlj85ny3n2mqql5w4") + .unwrap() + .assume_checked(), + ), + to: ScriptKey::from_address( + Address::from_str("bc1qhvd6suvqzjcu9pxjhrwhtrlj85ny3n2mqql5w4") + .unwrap() + .assume_checked(), + ), + result: Err(BRC20Error::InvalidTickLen("abcde".to_string())), + }; + println!("{}", serde_json::to_string_pretty(&action_receipt).unwrap()); + assert_eq!( + serde_json::to_string_pretty(&action_receipt).unwrap(), + r#"{ + "inscription_id": "9991111111111111111111111111111111111111111111111111111111111111i1", + "inscription_number": 1, + "old_satpoint": "1111111111111111111111111111111111111111111111111111111111111111:1:1", + "new_satpoint": "2111111111111111111111111111111111111111111111111111111111111111:1:1", + "op": "Deploy", + "from": { + "Address": "bc1qhvd6suvqzjcu9pxjhrwhtrlj85ny3n2mqql5w4" + }, + "to": { + "Address": "bc1qhvd6suvqzjcu9pxjhrwhtrlj85ny3n2mqql5w4" + }, + "result": { + "Err": { + "InvalidTickLen": "abcde" + } + } +}"# + ); + } +} diff --git a/src/okx/datastore/brc20/mod.rs b/src/okx/datastore/brc20/mod.rs new file mode 100644 index 
0000000000..04f9a17097 --- /dev/null +++ b/src/okx/datastore/brc20/mod.rs @@ -0,0 +1,83 @@ +pub(super) mod balance; +pub(super) mod errors; +pub(super) mod events; +pub mod redb; +pub(super) mod tick; +pub(super) mod token_info; +pub(super) mod transferable_log; + +pub use self::{ + balance::Balance, errors::BRC20Error, events::Receipt, events::*, tick::*, token_info::TokenInfo, + transferable_log::TransferableLog, +}; +use super::ScriptKey; +use crate::{Result, SatPoint}; +use bitcoin::{OutPoint, Txid}; +use std::fmt::{Debug, Display}; + +pub trait Brc20Reader { + type Error: Debug + Display; + + fn get_balances(&self, script_key: &ScriptKey) -> Result, Self::Error>; + fn get_balance( + &self, + script_key: &ScriptKey, + tick: &Tick, + ) -> Result, Self::Error>; + + fn get_token_info(&self, tick: &Tick) -> Result, Self::Error>; + fn get_tokens_info(&self) -> Result, Self::Error>; + + fn get_transaction_receipts(&self, txid: &Txid) -> Result>, Self::Error>; + + fn get_transferable_assets_by_satpoint( + &self, + satpoint: &SatPoint, + ) -> Result, Self::Error>; + fn get_transferable_assets_by_account( + &self, + script: &ScriptKey, + ) -> Result, Self::Error>; + fn get_transferable_assets_by_account_ticker( + &self, + script: &ScriptKey, + tick: &Tick, + ) -> Result, Self::Error>; + fn get_transferable_assets_by_outpoint( + &self, + outpoint: OutPoint, + ) -> Result, Self::Error>; +} + +pub trait Brc20ReaderWriter: Brc20Reader { + fn update_token_balance( + &mut self, + script_key: &ScriptKey, + new_balance: Balance, + ) -> Result<(), Self::Error>; + + fn insert_token_info(&mut self, tick: &Tick, new_info: &TokenInfo) -> Result<(), Self::Error>; + + fn update_mint_token_info( + &mut self, + tick: &Tick, + minted_amt: u128, + minted_block_number: u32, + ) -> Result<(), Self::Error>; + + fn update_burned_token_info(&mut self, tick: &Tick, burned_amt: u128) -> Result<(), Self::Error>; + + fn save_transaction_receipts( + &mut self, + txid: &Txid, + receipt: &[Receipt], + ) -> Result<(), Self::Error>; + + fn insert_transferable_asset( + &mut self, + satpoint: SatPoint, + inscription: &TransferableLog, + ) -> Result<(), Self::Error>; + + fn remove_transferable_asset(&mut self, satpoint: SatPoint) -> Result<(), Self::Error>; +} diff --git a/src/okx/datastore/brc20/redb/mod.rs b/src/okx/datastore/brc20/redb/mod.rs new file mode 100644 index 0000000000..48582eb12c --- /dev/null +++ b/src/okx/datastore/brc20/redb/mod.rs @@ -0,0 +1,24 @@ +pub mod table; + +use super::{LowerTick, ScriptKey, Tick}; + +fn min_script_tick_id_key(script: &ScriptKey, tick: &Tick) -> String { + script_tick_key(script, tick) +} + +fn max_script_tick_id_key(script: &ScriptKey, tick: &Tick) -> String { + // because hex format of `InscriptionId` will be 0~f, so `g` is greater than `InscriptionId.to_string()` in bytes order + format!("{}_{}_g", script, tick.to_lowercase().hex()) +} + +fn script_tick_key(script: &ScriptKey, tick: &Tick) -> String { + format!("{}_{}", script, tick.to_lowercase().hex()) +} + +fn min_script_tick_key(script: &ScriptKey) -> String { + format!("{}_{}", script, LowerTick::min_hex()) +} + +fn max_script_tick_key(script: &ScriptKey) -> String { + format!("{}_{}", script, LowerTick::max_hex()) +} diff --git a/src/okx/datastore/brc20/redb/table.rs b/src/okx/datastore/brc20/redb/table.rs new file mode 100644 index 0000000000..c3902d046c --- /dev/null +++ b/src/okx/datastore/brc20/redb/table.rs @@ -0,0 +1,301 @@ +use crate::{ + index::{ + entry::{Entry, SatPointValue}, + TxidValue, + }, + okx::datastore::{ + 
brc20::{ + redb::{ + max_script_tick_id_key, max_script_tick_key, min_script_tick_id_key, min_script_tick_key, + script_tick_key, + }, + Balance, Receipt, Tick, TokenInfo, TransferableLog, + }, + ScriptKey, + }, + Result, SatPoint, +}; +use bitcoin::{OutPoint, Txid}; +use redb::{MultimapTable, ReadableMultimapTable, ReadableTable, Table}; + +// BRC20_BALANCES +pub fn get_balances(table: &T, script_key: &ScriptKey) -> Result> +where + T: ReadableTable<&'static str, &'static [u8]>, +{ + Ok( + table + .range(min_script_tick_key(script_key).as_str()..=max_script_tick_key(script_key).as_str())? + .flat_map(|result| { + result.map(|(_, data)| rmp_serde::from_slice::(data.value()).unwrap()) + }) + .collect(), + ) +} + +// BRC20_BALANCES +pub fn get_balance(table: &T, script_key: &ScriptKey, tick: &Tick) -> Result> +where + T: ReadableTable<&'static str, &'static [u8]>, +{ + Ok( + table + .get(script_tick_key(script_key, tick).as_str())? + .map(|v| rmp_serde::from_slice::(v.value()).unwrap()), + ) +} + +// BRC20_TOKEN +pub fn get_token_info(table: &T, tick: &Tick) -> Result> +where + T: ReadableTable<&'static str, &'static [u8]>, +{ + Ok( + table + .get(tick.to_lowercase().hex().as_str())? + .map(|v| rmp_serde::from_slice::(v.value()).unwrap()), + ) +} + +// BRC20_TOKEN +pub fn get_tokens_info(table: &T) -> Result> +where + T: ReadableTable<&'static str, &'static [u8]>, +{ + Ok( + table + .range::<&str>(..)? + .flat_map(|result| { + result.map(|(_, data)| rmp_serde::from_slice::(data.value()).unwrap()) + }) + .collect(), + ) +} + +// BRC20_EVENTS +pub fn get_transaction_receipts(table: &T, txid: &Txid) -> Result>> +where + T: ReadableTable<&'static TxidValue, &'static [u8]>, +{ + Ok( + table + .get(&txid.store())? + .map(|x| rmp_serde::from_slice::>(x.value()).unwrap()), + ) +} + +// BRC20_SATPOINT_TO_TRANSFERABLE_ASSETS +// BRC20_ADDRESS_TICKER_TO_TRANSFERABLE_ASSETS +pub fn get_transferable_assets_by_account( + address_table: &T, + satpoint_table: &S, + script: &ScriptKey, +) -> Result> +where + T: ReadableMultimapTable<&'static str, &'static SatPointValue>, + S: ReadableTable<&'static SatPointValue, &'static [u8]>, +{ + let mut transferable_assets = Vec::new(); + + for range in address_table + .range(min_script_tick_key(script).as_str()..max_script_tick_key(script).as_str())? + { + let (_, satpoints) = range?; + for satpoint_guard in satpoints { + let satpoint = SatPoint::load(*satpoint_guard?.value()); + let entry = satpoint_table.get(&satpoint.store())?.unwrap(); + transferable_assets.push(( + satpoint, + rmp_serde::from_slice::(entry.value()).unwrap(), + )); + } + } + Ok(transferable_assets) +} + +// BRC20_SATPOINT_TO_TRANSFERABLE_ASSETS +// BRC20_ADDRESS_TICKER_TO_TRANSFERABLE_ASSETS +pub fn get_transferable_assets_by_account_ticker( + address_table: &T, + satpoint_table: &S, + script: &ScriptKey, + tick: &Tick, +) -> Result> +where + T: ReadableMultimapTable<&'static str, &'static SatPointValue>, + S: ReadableTable<&'static SatPointValue, &'static [u8]>, +{ + let mut transferable_assets = Vec::new(); + + for range in address_table.range( + min_script_tick_id_key(script, tick).as_str()..max_script_tick_id_key(script, tick).as_str(), + )? 
{ + let (_, satpoints) = range?; + for satpoint_guard in satpoints { + let satpoint = SatPoint::load(*satpoint_guard?.value()); + let entry = satpoint_table.get(&satpoint.store())?.unwrap(); + transferable_assets.push(( + satpoint, + rmp_serde::from_slice::(entry.value()).unwrap(), + )); + } + } + Ok(transferable_assets) +} + +// BRC20_SATPOINT_TO_TRANSFERABLE_ASSETS +pub fn get_transferable_assets_by_satpoint( + table: &T, + satpoint: &SatPoint, +) -> Result> +where + T: ReadableTable<&'static SatPointValue, &'static [u8]>, +{ + Ok( + table + .get(&satpoint.store())? + .map(|entry| rmp_serde::from_slice::(entry.value()).unwrap()), + ) +} + +// BRC20_SATPOINT_TO_TRANSFERABLE_ASSETS +pub fn get_transferable_assets_by_outpoint( + table: &T, + outpoint: OutPoint, +) -> Result> +where + T: ReadableTable<&'static SatPointValue, &'static [u8]>, +{ + let start = SatPoint { + outpoint, + offset: 0, + } + .store(); + + let end = SatPoint { + outpoint, + offset: u64::MAX, + } + .store(); + + let mut transferable_assets = Vec::new(); + for range in table.range::<&[u8; 44]>(&start..&end)? { + let (satpoint_guard, asset) = range?; + let satpoint = SatPoint::load(*satpoint_guard.value()); + transferable_assets.push(( + satpoint, + rmp_serde::from_slice::(asset.value()).unwrap(), + )); + } + Ok(transferable_assets) +} + +// BRC20_BALANCES +pub fn update_token_balance( + table: &mut Table<'_, '_, &'static str, &'static [u8]>, + script_key: &ScriptKey, + new_balance: Balance, +) -> Result<()> { + table.insert( + script_tick_key(script_key, &new_balance.tick).as_str(), + rmp_serde::to_vec(&new_balance).unwrap().as_slice(), + )?; + Ok(()) +} + +// BRC20_TOKEN +pub fn insert_token_info( + table: &mut Table<'_, '_, &'static str, &'static [u8]>, + tick: &Tick, + new_info: &TokenInfo, +) -> Result<()> { + table.insert( + tick.to_lowercase().hex().as_str(), + rmp_serde::to_vec(new_info).unwrap().as_slice(), + )?; + Ok(()) +} + +// BRC20_TOKEN +pub fn update_mint_token_info( + table: &mut Table<'_, '_, &'static str, &'static [u8]>, + tick: &Tick, + minted_amt: u128, + minted_block_number: u32, +) -> Result<()> { + let mut info = + get_token_info(table, tick)?.unwrap_or_else(|| panic!("token {} not exist", tick.as_str())); + + info.minted = minted_amt; + info.latest_mint_number = minted_block_number; + + table.insert( + tick.to_lowercase().hex().as_str(), + rmp_serde::to_vec(&info).unwrap().as_slice(), + )?; + Ok(()) +} + +pub fn update_burned_token_info( + table: &mut Table<'_, '_, &'static str, &'static [u8]>, + tick: &Tick, + burned_amt: u128, +) -> Result<()> { + let mut info = + get_token_info(table, tick)?.unwrap_or_else(|| panic!("token {} not exist", tick.as_str())); + info.burned_supply = burned_amt; + table.insert( + tick.to_lowercase().hex().as_str(), + rmp_serde::to_vec(&info).unwrap().as_slice(), + )?; + Ok(()) +} + +// BRC20_EVENTS +pub fn save_transaction_receipts( + table: &mut Table<'_, '_, &'static TxidValue, &'static [u8]>, + txid: &Txid, + receipts: &[Receipt], +) -> Result<()> { + table.insert( + &txid.store(), + rmp_serde::to_vec(receipts).unwrap().as_slice(), + )?; + Ok(()) +} + +// BRC20_SATPOINT_TO_TRANSFERABLE_ASSETS +// BRC20_ADDRESS_TICKER_TO_TRANSFERABLE_ASSETS +pub fn insert_transferable_asset( + satpoint_table: &mut Table<'_, '_, &'static SatPointValue, &'static [u8]>, + address_table: &mut MultimapTable<'_, '_, &'static str, &'static SatPointValue>, + satpoint: SatPoint, + transferable_asset: &TransferableLog, +) -> Result<()> { + satpoint_table.insert( + &satpoint.store(), + 
rmp_serde::to_vec(&transferable_asset).unwrap().as_slice(), + )?; + address_table.insert( + script_tick_key(&transferable_asset.owner, &transferable_asset.tick).as_str(), + &satpoint.store(), + )?; + Ok(()) +} + +// BRC20_SATPOINT_TO_TRANSFERABLE_ASSETS +// BRC20_ADDRESS_TICKER_TO_TRANSFERABLE_ASSETS +pub fn remove_transferable_asset( + satpoint_table: &mut Table<'_, '_, &'static SatPointValue, &'static [u8]>, + address_table: &mut MultimapTable<'_, '_, &'static str, &'static SatPointValue>, + satpoint: SatPoint, +) -> Result<()> { + if let Some(guard) = satpoint_table.remove(&satpoint.store())? { + let transferable_asset = rmp_serde::from_slice::(guard.value()).unwrap(); + address_table.remove( + script_tick_key(&transferable_asset.owner, &transferable_asset.tick).as_str(), + &satpoint.store(), + )?; + } + Ok(()) +} diff --git a/src/okx/datastore/brc20/tick.rs b/src/okx/datastore/brc20/tick.rs new file mode 100644 index 0000000000..66a7177ccd --- /dev/null +++ b/src/okx/datastore/brc20/tick.rs @@ -0,0 +1,179 @@ +use super::*; +use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; +use std::{fmt::Formatter, str::FromStr}; + +pub const ORIGINAL_TICK_LENGTH: usize = 4; +pub const SELF_ISSUANCE_TICK_LENGTH: usize = 5; +pub const MAX_TICK_BYTE_COUNT: usize = SELF_ISSUANCE_TICK_LENGTH; + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] +pub struct Tick(Box<[u8]>); + +impl FromStr for Tick { + type Err = BRC20Error; + + fn from_str(s: &str) -> Result { + let bytes = s.as_bytes(); + + if bytes.len() < ORIGINAL_TICK_LENGTH || bytes.len() > SELF_ISSUANCE_TICK_LENGTH { + return Err(BRC20Error::InvalidTickLen(s.to_string())); + } + + Ok(Self(bytes.into())) + } +} + +impl Tick { + pub fn as_str(&self) -> String { + // NOTE: Tick comes from &str by from_str, + // so it could be calling unwrap when convert to str + String::from_utf8(self.0.to_vec()).unwrap() + } + + pub fn to_lowercase(&self) -> LowerTick { + LowerTick::new(&self.as_str().to_lowercase()) + } + + pub fn self_issuance_tick(&self) -> bool { + self.0.len() == SELF_ISSUANCE_TICK_LENGTH + } +} + +impl Serialize for Tick { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + self.as_str().serialize(serializer) + } +} + +impl<'de> Deserialize<'de> for Tick { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + Self::from_str(&String::deserialize(deserializer)?) 
+ .map_err(|e| de::Error::custom(format!("deserialize tick error: {}", e))) + } +} + +impl Display for Tick { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.as_str()) + } +} + +#[derive(Debug, Clone, PartialEq)] +pub struct LowerTick(Box<[u8]>); + +impl LowerTick { + fn new(str: &str) -> Self { + LowerTick(str.as_bytes().to_vec().into_boxed_slice()) + } + + pub fn as_str(&self) -> &str { + std::str::from_utf8(&self.0).unwrap() + } + + pub fn hex(&self) -> String { + let mut data = [0u8; MAX_TICK_BYTE_COUNT * 4]; + data[..self.0.len()].copy_from_slice(&self.0); + hex::encode(data) + } + + pub fn min_hex() -> String { + hex::encode([0u8; MAX_TICK_BYTE_COUNT * 4]) + } + + pub fn max_hex() -> String { + hex::encode([0xffu8; MAX_TICK_BYTE_COUNT * 4]) + } +} + +impl Display for LowerTick { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.as_str()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_tick_length_case() { + assert!(Tick::from_str("XAİ").is_ok()); + assert!(Tick::from_str("XAİİ").is_err()); + assert!("XAİ".parse::().is_ok()); + assert!("XAİİ".parse::().is_err()); + assert!(Tick::from_str("X。").is_ok()); + assert!("X。".parse::().is_ok()); + assert!(Tick::from_str("aBc1").is_ok()); + assert!("aBc1".parse::().is_ok()); + assert!("ατ".parse::().is_ok()); + assert!("∑ii".parse::().is_ok()); // when self issuance is enabled + assert!("∑i".parse::().is_ok()); + assert!("⊢i".parse::().is_ok()); + assert!("⊢ii".parse::().is_ok()); // when self issuance is enabled + assert!("≯a".parse::().is_ok()); + assert!("a≯a".parse::().is_ok()); // when self issuance is enabled + } + #[test] + fn test_tick_hex() { + assert_eq!( + Tick::from_str("XAİ").unwrap().to_lowercase().hex(), + "786169cc87000000000000000000000000000000" + ); + assert_eq!( + Tick::from_str("aBc1").unwrap().to_lowercase().hex(), + "6162633100000000000000000000000000000000" + ); + } + + #[test] + fn test_tick_unicode_lowercase() { + assert_eq!( + Tick::from_str("XAİ").unwrap().to_lowercase().as_str(), + "xai\u{307}" + ); + assert_eq!( + Tick::from_str("aBc1").unwrap().to_lowercase().as_str(), + "abc1", + ); + assert_eq!("ατ".parse::().unwrap().to_lowercase().as_str(), "ατ"); + assert_eq!("∑H".parse::().unwrap().to_lowercase().as_str(), "∑h"); + assert_eq!("⊢I".parse::().unwrap().to_lowercase().as_str(), "⊢i"); + assert_eq!("≯A".parse::().unwrap().to_lowercase().as_str(), "≯a"); + } + + #[test] + fn test_tick_compare_ignore_case() { + assert_ne!(Tick::from_str("aBc1"), Tick::from_str("AbC1")); + + assert_ne!(Tick::from_str("aBc1"), Tick::from_str("aBc2")); + + assert_eq!( + Tick::from_str("aBc1").unwrap().to_lowercase(), + Tick::from_str("AbC1").unwrap().to_lowercase(), + ); + assert_ne!( + Tick::from_str("aBc1").unwrap().to_lowercase(), + Tick::from_str("AbC2").unwrap().to_lowercase(), + ); + } + + #[test] + fn test_tick_serialize() { + let obj = Tick::from_str("Ab1;").unwrap(); + assert_eq!(serde_json::to_string(&obj).unwrap(), r#""Ab1;""#); + } + + #[test] + fn test_tick_deserialize() { + assert_eq!( + serde_json::from_str::(r#""Ab1;""#).unwrap(), + Tick::from_str("Ab1;").unwrap() + ); + } +} diff --git a/src/okx/datastore/brc20/token_info.rs b/src/okx/datastore/brc20/token_info.rs new file mode 100644 index 0000000000..57fe3a0eef --- /dev/null +++ b/src/okx/datastore/brc20/token_info.rs @@ -0,0 +1,19 @@ +use super::*; +use crate::InscriptionId; +use serde::{Deserialize, Serialize}; +#[derive(Debug, Clone, PartialEq, Deserialize, 
Serialize)] +pub struct TokenInfo { + pub tick: Tick, + pub inscription_id: InscriptionId, + pub inscription_number: i32, + pub supply: u128, + pub burned_supply: u128, + pub minted: u128, + pub limit_per_mint: u128, + pub decimal: u8, + pub deploy_by: ScriptKey, + pub is_self_mint: bool, + pub deployed_number: u32, + pub deployed_timestamp: u32, + pub latest_mint_number: u32, +} diff --git a/src/okx/datastore/brc20/transferable_log.rs b/src/okx/datastore/brc20/transferable_log.rs new file mode 100644 index 0000000000..dfaa25742d --- /dev/null +++ b/src/okx/datastore/brc20/transferable_log.rs @@ -0,0 +1,11 @@ +use super::*; +use crate::InscriptionId; +use serde::{Deserialize, Serialize}; +#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] +pub struct TransferableLog { + pub inscription_id: InscriptionId, + pub inscription_number: i32, + pub amount: u128, + pub tick: Tick, + pub owner: ScriptKey, +} diff --git a/src/okx/datastore/mod.rs b/src/okx/datastore/mod.rs new file mode 100644 index 0000000000..d17e469105 --- /dev/null +++ b/src/okx/datastore/mod.rs @@ -0,0 +1,5 @@ +pub mod brc20; +pub mod ord; +mod script_key; + +pub use self::script_key::ScriptKey; diff --git a/src/okx/datastore/ord/bitmap.rs b/src/okx/datastore/ord/bitmap.rs new file mode 100644 index 0000000000..cf03973b02 --- /dev/null +++ b/src/okx/datastore/ord/bitmap.rs @@ -0,0 +1,52 @@ +use {super::*, anyhow::anyhow, regex::Regex}; + +const BITMAP_KEY: &str = r"BITMAP"; + +pub struct District { + pub number: u32, +} + +impl District { + pub fn parse(bytes: &[u8]) -> Result { + let pattern = r"^(0|[1-9][0-9]*)\.bitmap$"; + // the pattern is a constant and always a valid regex, so the unwrap cannot fail + let content = std::str::from_utf8(bytes)?; + let re = Regex::new(pattern).unwrap(); + if let Some(capture) = re.captures(content) { + if let Some(number) = capture.get(1) { + return Ok(Self { + number: number.as_str().parse()?, + }); + } + } + Err(anyhow!("No match found.")) + } + + pub fn to_collection_key(&self) -> String { + format!("{}_{}", BITMAP_KEY, self.number) + } +} + +#[cfg(test)] +mod tests { + use super::District; + + #[test] + fn validate_regex() { + let district = District::parse("0.bitmap".as_bytes()).unwrap(); + assert_eq!(district.number, 0); + + let district = District::parse("40.bitmap".as_bytes()).unwrap(); + assert_eq!(district.number, 40); + } + + #[test] + fn invalidate_regex() { + assert!(District::parse(".bitmap".as_bytes()).is_err()); + assert!(District::parse("bitmap".as_bytes()).is_err()); + assert!(District::parse("c.bitmap".as_bytes()).is_err()); + assert!(District::parse("111".as_bytes()).is_err()); + assert!(District::parse("01.bitmap".as_bytes()).is_err()); + assert!(District::parse((u64::MAX.to_string() + "1.bitmap").as_bytes()).is_err()); + } +} diff --git a/src/okx/datastore/ord/collections.rs b/src/okx/datastore/ord/collections.rs new file mode 100644 index 0000000000..196eceb929 --- /dev/null +++ b/src/okx/datastore/ord/collections.rs @@ -0,0 +1,21 @@ +use serde::{Deserialize, Serialize}; +use std::fmt::Display; + +// the kind of collection an inscription belongs to.
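+// Each kind is serialized with `rmp_serde` and stored per inscription in the `COLLECTIONS_INSCRIPTION_ID_TO_KINDS` multimap (see `src/okx/datastore/ord/redb/table.rs`).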
+#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +pub enum CollectionKind { + BitMap, + BRC20, +} +impl Display for CollectionKind { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "{}", + match self { + CollectionKind::BitMap => String::from("bitmap"), + CollectionKind::BRC20 => String::from("brc20"), + } + ) + } +} diff --git a/src/okx/datastore/ord/mod.rs b/src/okx/datastore/ord/mod.rs new file mode 100644 index 0000000000..3fe3b28afb --- /dev/null +++ b/src/okx/datastore/ord/mod.rs @@ -0,0 +1,62 @@ +pub use self::operation::{Action, InscriptionOp}; + +use { + crate::{okx::datastore::ScriptKey, Chain, InscriptionId, Result, SatPoint}, + bitcoin::Txid, + collections::CollectionKind, + std::fmt::{Debug, Display}, +}; + +pub mod bitmap; +pub mod collections; +pub mod operation; +pub mod redb; + +pub trait OrdReader { + type Error: Debug + Display; + fn get_inscription_number_by_sequence_number( + &self, + sequence_number: u32, + ) -> Result; + + fn get_script_key_on_satpoint( + &mut self, + satpoint: &SatPoint, + chain: Chain, + ) -> Result; + + fn get_transaction_operations( + &self, + txid: &Txid, + ) -> Result>, Self::Error>; + + fn get_collections_of_inscription( + &self, + inscription_id: &InscriptionId, + ) -> Result>, Self::Error>; + + fn get_collection_inscription_id( + &self, + collection_key: &str, + ) -> Result, Self::Error>; +} + +pub trait OrdReaderWriter: OrdReader { + fn save_transaction_operations( + &mut self, + txid: &Txid, + operations: &[InscriptionOp], + ) -> Result<(), Self::Error>; + + fn set_inscription_by_collection_key( + &mut self, + key: &str, + inscription_id: &InscriptionId, + ) -> Result<(), Self::Error>; + + fn add_inscription_attributes( + &mut self, + inscription_id: &InscriptionId, + kind: CollectionKind, + ) -> Result<(), Self::Error>; +} diff --git a/src/okx/datastore/ord/operation.rs b/src/okx/datastore/ord/operation.rs new file mode 100644 index 0000000000..ba06e60f1e --- /dev/null +++ b/src/okx/datastore/ord/operation.rs @@ -0,0 +1,34 @@ +use { + crate::{Inscription, InscriptionId, SatPoint}, + bitcoin::Txid, + serde::{Deserialize, Serialize}, +}; + +// records a single inscription operation within a transaction. +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct InscriptionOp { + pub txid: Txid, + pub action: Action, + pub sequence_number: u32, + pub inscription_number: Option, + pub inscription_id: InscriptionId, + pub old_satpoint: SatPoint, + pub new_satpoint: Option, +} + +// the action performed on an inscription: a new inscribe or a transfer.
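+// The `inscription`, `vindicated`, and `parent` fields of `Action::New` are marked `#[serde(skip)]` below: they only carry indexing context and are not persisted.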
+#[allow(clippy::large_enum_variant)] +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +pub enum Action { + New { + cursed: bool, + unbound: bool, + #[serde(skip)] + inscription: Inscription, + #[serde(skip)] + vindicated: bool, + #[serde(skip)] + parent: Option, + }, + Transfer, +} diff --git a/src/okx/datastore/ord/redb/mod.rs b/src/okx/datastore/ord/redb/mod.rs new file mode 100644 index 0000000000..13971b0a5d --- /dev/null +++ b/src/okx/datastore/ord/redb/mod.rs @@ -0,0 +1 @@ +pub mod table; diff --git a/src/okx/datastore/ord/redb/table.rs b/src/okx/datastore/ord/redb/table.rs new file mode 100644 index 0000000000..ba5e0df347 --- /dev/null +++ b/src/okx/datastore/ord/redb/table.rs @@ -0,0 +1,190 @@ +use crate::index::entry::Entry; +use crate::index::{InscriptionEntryValue, InscriptionIdValue, OutPointValue, TxidValue}; +use crate::inscriptions::InscriptionId; +use crate::okx::datastore::ord::collections::CollectionKind; +use crate::okx::datastore::ord::InscriptionOp; +use bitcoin::consensus::Decodable; +use bitcoin::{OutPoint, TxOut, Txid}; +use redb::{MultimapTable, ReadableMultimapTable, ReadableTable, Table}; +use std::io; + +// COLLECTIONS_INSCRIPTION_ID_TO_KINDS +pub fn get_collections_of_inscription( + table: &T, + inscription_id: &InscriptionId, +) -> crate::Result>> +where + T: ReadableMultimapTable, +{ + let mut values = Vec::new(); + + for v in table.get(&inscription_id.store())? { + values.push(rmp_serde::from_slice::(v?.value()).unwrap()); + } + Ok(Some(values)) +} + +// COLLECTIONS_KEY_TO_INSCRIPTION_ID +pub fn get_collection_inscription_id( + table: &T, + key: &str, +) -> crate::Result> +where + T: ReadableTable<&'static str, InscriptionIdValue>, +{ + Ok(table.get(key)?.map(|v| InscriptionId::load(v.value()))) +} + +// SEQUENCE_NUMBER_TO_INSCRIPTION_ENTRY +pub fn get_inscription_number_by_sequence_number( + table: &T, + sequence_number: u32, +) -> crate::Result> +where + T: ReadableTable, +{ + Ok(table.get(sequence_number)?.map(|value| value.value().4)) +} + +// OUTPOINT_TO_ENTRY +pub fn get_txout_by_outpoint(table: &T, outpoint: &OutPoint) -> crate::Result> +where + T: ReadableTable<&'static OutPointValue, &'static [u8]>, +{ + Ok( + table + .get(&outpoint.store())? + .map(|x| Decodable::consensus_decode(&mut io::Cursor::new(x.value())).unwrap()), + ) +} + +// ORD_TX_TO_OPERATIONS +pub fn get_transaction_operations( + table: &T, + txid: &Txid, +) -> crate::Result>> +where + T: ReadableTable<&'static TxidValue, &'static [u8]>, +{ + Ok( + table + .get(&txid.store())? 
+ .map(|v| rmp_serde::from_slice::>(v.value()).unwrap()), + ) +} + +// ORD_TX_TO_OPERATIONS +pub fn save_transaction_operations( + table: &mut Table<'_, '_, &'static TxidValue, &'static [u8]>, + txid: &Txid, + operations: &[InscriptionOp], +) -> crate::Result<()> { + table.insert(&txid.store(), rmp_serde::to_vec(operations)?.as_slice())?; + Ok(()) +} + +// COLLECTIONS_KEY_TO_INSCRIPTION_ID +pub fn set_inscription_by_collection_key( + table: &mut Table<'_, '_, &'static str, InscriptionIdValue>, + key: &str, + inscription_id: &InscriptionId, +) -> crate::Result<()> { + table.insert(key, inscription_id.store())?; + Ok(()) +} + +// COLLECTIONS_INSCRIPTION_ID_TO_KINDS +pub fn add_inscription_attributes( + table: &mut MultimapTable<'_, '_, InscriptionIdValue, &'static [u8]>, + inscription_id: &InscriptionId, + kind: CollectionKind, +) -> crate::Result<()> { + table.insert( + inscription_id.store(), + rmp_serde::to_vec(&kind).unwrap().as_slice(), + )?; + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::index::{COLLECTIONS_INSCRIPTION_ID_TO_KINDS, ORD_TX_TO_OPERATIONS}; + use crate::okx::datastore::ord::redb::table::{ + get_transaction_operations, save_transaction_operations, + }; + use crate::okx::datastore::ord::InscriptionOp; + use crate::{inscription, okx::datastore::ord::Action, SatPoint}; + use redb::Database; + use std::str::FromStr; + use tempfile::NamedTempFile; + + #[test] + fn test_inscription_attributes() { + let dbfile = NamedTempFile::new().unwrap(); + let db = Database::create(dbfile.path()).unwrap(); + let wtx = db.begin_write().unwrap(); + let mut table = wtx + .open_multimap_table(COLLECTIONS_INSCRIPTION_ID_TO_KINDS) + .unwrap(); + let inscription_id = + InscriptionId::from_str("b61b0172d95e266c18aea0c624db987e971a5d6d4ebc2aaed85da4642d635735i0") + .unwrap(); + + add_inscription_attributes(&mut table, &inscription_id, CollectionKind::BitMap).unwrap(); + assert_eq!( + get_collections_of_inscription(&table, &inscription_id).unwrap(), + Some(vec![CollectionKind::BitMap]) + ); + + add_inscription_attributes(&mut table, &inscription_id, CollectionKind::BRC20).unwrap(); + assert_eq!( + get_collections_of_inscription(&table, &inscription_id).unwrap(), + Some(vec![CollectionKind::BRC20, CollectionKind::BitMap]) + ); + + add_inscription_attributes(&mut table, &inscription_id, CollectionKind::BRC20).unwrap(); + assert_eq!( + get_collections_of_inscription(&table, &inscription_id).unwrap(), + Some(vec![CollectionKind::BRC20, CollectionKind::BitMap]) + ); + } + + #[test] + fn test_transaction_to_operations() { + let dbfile = NamedTempFile::new().unwrap(); + let db = Database::create(dbfile.path()).unwrap(); + let wtx = db.begin_write().unwrap(); + let mut table = wtx.open_table(ORD_TX_TO_OPERATIONS).unwrap(); + let txid = + Txid::from_str("b61b0172d95e266c18aea0c624db987e971a5d6d4ebc2aaed85da4642d635735").unwrap(); + let operation = InscriptionOp { + txid, + action: Action::New { + cursed: false, + unbound: false, + vindicated: false, + parent: None, + inscription: inscription("text/plain;charset=utf-8", "foobar"), + }, + sequence_number: 100, + inscription_number: Some(100), + inscription_id: InscriptionId { txid, index: 0 }, + old_satpoint: SatPoint::from_str( + "1111111111111111111111111111111111111111111111111111111111111111:1:1", + ) + .unwrap(), + new_satpoint: Some(SatPoint { + outpoint: OutPoint { txid, vout: 0 }, + offset: 1, + }), + }; + + save_transaction_operations(&mut table, &txid, &[operation.clone()]).unwrap(); + + assert_eq!( + 
get_transaction_operations(&table, &txid).unwrap(), + Some(vec![operation]) + ); + } +} diff --git a/src/okx/datastore/script_key.rs b/src/okx/datastore/script_key.rs new file mode 100644 index 0000000000..bb771b2d85 --- /dev/null +++ b/src/okx/datastore/script_key.rs @@ -0,0 +1,83 @@ +use crate::Chain; +use bitcoin::{address, Address, Script, ScriptHash}; +use serde::{Deserialize, Serialize}; +use std::fmt::{Display, Formatter}; + +#[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)] +pub enum ScriptKey { + Address(Address), + ScriptHash { + script_hash: ScriptHash, + is_op_return: bool, + }, +} + +impl ScriptKey { + #[allow(unused)] + pub fn from_address(address: Address) -> Self { + ScriptKey::Address(Address::new(address.network, address.payload)) + } + pub fn from_script(script: &Script, chain: Chain) -> Self { + chain + .address_from_script(script) + .map(|address| Self::Address(Address::new(address.network, address.payload))) + .unwrap_or(ScriptKey::ScriptHash { + script_hash: script.script_hash(), + is_op_return: script.is_op_return(), + }) + } +} + +impl Display for ScriptKey { + fn fmt(&self, f: &mut Formatter) -> std::fmt::Result { + write!( + f, + "{}", + match self { + ScriptKey::Address(address) => address.clone().assume_checked().to_string(), + ScriptKey::ScriptHash { script_hash, .. } => script_hash.to_string(), + } + ) + } +} +#[cfg(test)] +mod tests { + use super::*; + use bitcoin::{Address, Script}; + use std::str::FromStr; + + #[test] + fn test_script_key_from_address() { + let address = Address::from_str("132F25rTsvBdp9JzLLBHP5mvGY66i1xdiM") + .unwrap() + .assume_checked(); + assert_eq!( + ScriptKey::from_address(address), + ScriptKey::Address(Address::from_str("132F25rTsvBdp9JzLLBHP5mvGY66i1xdiM").unwrap()) + ); + } + + #[test] + fn test_script_key_from_script() { + let script = Address::from_str("bc1qhvd6suvqzjcu9pxjhrwhtrlj85ny3n2mqql5w4") + .unwrap() + .payload + .script_pubkey(); + assert_eq!( + ScriptKey::from_script(&script, Chain::Mainnet), + ScriptKey::Address(Address::from_str("bc1qhvd6suvqzjcu9pxjhrwhtrlj85ny3n2mqql5w4").unwrap()) + ); + let binding = hex::decode( + "0014017fed86bba5f31f955f8b316c7fb9bd45cb6cbc00000000000000000000000000000000000000", + ) + .unwrap(); + let script = Script::from_bytes(binding.as_slice()); + assert_eq!( + ScriptKey::from_script(script, Chain::Mainnet), + ScriptKey::ScriptHash { + script_hash: ScriptHash::from_str("df65c8a338dce7900824e7bd18c336656ca19e57").unwrap(), + is_op_return: false, + }, + ); + } +} diff --git a/src/okx/lru.rs b/src/okx/lru.rs new file mode 100644 index 0000000000..f2ebac3c89 --- /dev/null +++ b/src/okx/lru.rs @@ -0,0 +1,154 @@ +use std::borrow::Borrow; +use std::collections::HashMap; +use std::hash::Hash; +use std::mem; + +pub struct SimpleLru { + cache_size: usize, + new_cache: HashMap, + old_cache: HashMap, +} + +impl SimpleLru +where + K: Eq + Hash, +{ + pub fn new(cache_size: usize) -> SimpleLru { + Self { + cache_size, + new_cache: HashMap::with_capacity(cache_size), + old_cache: HashMap::new(), + } + } + + pub fn get(&self, key: &Q) -> Option<&V> + where + K: Borrow, + Q: Hash + Eq, + { + if let Some(v) = self.new_cache.get(key) { + Some(v) + } else { + self.old_cache.get(key) + } + } + + pub fn contains(&self, key: &Q) -> bool + where + K: Borrow, + Q: Hash + Eq, + { + if self.new_cache.contains_key(key) { + true + } else { + self.old_cache.contains_key(key) + } + } + + pub fn insert(&mut self, key: K, value: V) -> Option { + self.new_cache.insert(key, value) + } + + pub fn 
refresh(&mut self) { + if self.new_cache.len() >= self.cache_size { + self.old_cache.clear(); + mem::swap(&mut self.new_cache, &mut self.old_cache); + } + } + + pub fn len(&self) -> usize { + self.old_cache.len() + self.new_cache.len() + } +} + +#[cfg(test)] +mod tests { + use crate::okx::lru::SimpleLru; + + #[test] + fn lru_test() { + let mut lru = SimpleLru::new(2); + lru.insert(1, 11); + lru.insert(2, 22); + assert!(lru.get(&1).is_some()); + assert!(lru.get(&2).is_some()); + assert!(lru.contains(&1)); + assert!(lru.contains(&2)); + assert_eq!(2, lru.len()); + lru.refresh(); + + lru.insert(3, 33); + lru.insert(4, 44); + assert!(lru.contains(&1)); + assert!(lru.contains(&2)); + assert!(lru.contains(&3)); + assert!(lru.contains(&4)); + assert!(lru.get(&3).is_some()); + assert!(lru.get(&4).is_some()); + assert_eq!(4, lru.len()); + + lru.refresh(); + lru.insert(5, 55); + assert!(!lru.contains(&1)); + assert!(!lru.contains(&2)); + assert!(lru.contains(&3)); + assert!(lru.contains(&4)); + assert!(lru.contains(&5)); + assert!(lru.get(&1).is_none()); + assert!(lru.get(&2).is_none()); + assert!(lru.get(&3).is_some()); + assert!(lru.get(&4).is_some()); + assert!(lru.get(&5).is_some()); + assert_eq!(3, lru.len()); + + lru.refresh(); + lru.insert(6, 66); + assert!(lru.contains(&3)); + assert!(lru.contains(&4)); + assert!(lru.contains(&5)); + assert!(lru.contains(&6)); + assert!(lru.get(&3).is_some()); + assert!(lru.get(&4).is_some()); + assert!(lru.get(&5).is_some()); + assert!(lru.get(&6).is_some()); + assert_eq!(4, lru.len()); + + lru.refresh(); + lru.insert(7, 77); + assert!(!lru.contains(&3)); + assert!(!lru.contains(&4)); + assert!(lru.contains(&5)); + assert!(lru.contains(&6)); + assert!(lru.contains(&7)); + assert!(lru.get(&3).is_none()); + assert!(lru.get(&4).is_none()); + assert!(lru.get(&5).is_some()); + assert!(lru.get(&6).is_some()); + assert!(lru.get(&7).is_some()); + assert_eq!(3, lru.len()); + + lru.refresh(); + assert_eq!(55, *lru.get(&5).unwrap()); + assert_eq!(66, *lru.get(&6).unwrap()); + assert_eq!(77, *lru.get(&7).unwrap()); + } + + #[test] + fn lru_swap_test() { + const CACHE_SIZE: usize = 10000000; + let mut lru = SimpleLru::new(CACHE_SIZE); + for i in 0..CACHE_SIZE { + lru.insert(i, i); + } + assert_eq!(CACHE_SIZE, lru.len()); + lru.refresh(); + assert_eq!(CACHE_SIZE, lru.len()); + + for i in 0..CACHE_SIZE { + lru.insert(i, i); + } + assert_eq!(2 * CACHE_SIZE, lru.len()); + lru.refresh(); + assert_eq!(CACHE_SIZE, lru.len()); + } +} diff --git a/src/okx/mod.rs b/src/okx/mod.rs new file mode 100644 index 0000000000..16c10e12a5 --- /dev/null +++ b/src/okx/mod.rs @@ -0,0 +1,3 @@ +pub(crate) mod datastore; +pub(crate) mod lru; +pub(crate) mod protocol; diff --git a/src/okx/protocol/brc20/error.rs b/src/okx/protocol/brc20/error.rs new file mode 100644 index 0000000000..11c893c5b4 --- /dev/null +++ b/src/okx/protocol/brc20/error.rs @@ -0,0 +1,45 @@ +use crate::okx::datastore::brc20::BRC20Error; +use redb::TableError; + +#[allow(clippy::enum_variant_names)] +#[derive(Debug, thiserror::Error)] +pub enum Error { + #[error("brc20 error: {0}")] + BRC20Error(BRC20Error), + + #[error("ledger error: {0}")] + LedgerError(anyhow::Error), + + #[error("table error: {0}")] + TableError(TableError), +} + +#[derive(Debug, PartialEq, thiserror::Error)] +pub enum JSONError { + #[error("invalid content type")] + InvalidContentType, + + #[error("unsupport content type")] + UnSupportContentType, + + #[error("invalid json string")] + InvalidJson, + + #[error("not brc20 json")] + NotBRC20Json, + + 
#[error("parse operation json error: {0}")] + ParseOperationJsonError(String), +} + +impl From for Error { + fn from(e: BRC20Error) -> Self { + Self::BRC20Error(e) + } +} + +impl From for Error { + fn from(error: TableError) -> Self { + Self::TableError(error) + } +} diff --git a/src/okx/protocol/brc20/mod.rs b/src/okx/protocol/brc20/mod.rs new file mode 100644 index 0000000000..898c48a332 --- /dev/null +++ b/src/okx/protocol/brc20/mod.rs @@ -0,0 +1,36 @@ +use crate::{ + okx::datastore::{ + brc20::{BRC20Error, OperationType}, + ScriptKey, + }, + InscriptionId, Result, SatPoint, +}; +use bitcoin::Txid; + +mod error; +mod msg_executor; +mod msg_resolver; +mod num; +mod operation; +mod params; +mod policies; + +use self::error::Error; +pub(crate) use self::{ + error::JSONError, + msg_executor::{execute, ExecutionMessage}, + num::Num, + operation::{deserialize_brc20_operation, Deploy, Mint, Operation, Transfer}, +}; + +#[derive(Debug, Clone, PartialEq)] +pub struct Message { + pub txid: Txid, + pub sequence_number: u32, + pub inscription_id: InscriptionId, + pub old_satpoint: SatPoint, + // `new_satpoint` may be none when the transaction is not yet confirmed and the sat has not been bound to the current outputs. + pub new_satpoint: Option, + pub op: Operation, + pub sat_in_outputs: bool, +} diff --git a/src/okx/protocol/brc20/msg_executor.rs b/src/okx/protocol/brc20/msg_executor.rs new file mode 100644 index 0000000000..4096b5197c --- /dev/null +++ b/src/okx/protocol/brc20/msg_executor.rs @@ -0,0 +1,438 @@ +use super::{ + params::{BIGDECIMAL_TEN, MAXIMUM_SUPPLY, MAX_DECIMAL_WIDTH}, + *, +}; +use crate::{ + okx::{ + datastore::{ + brc20::{ + BRC20Error, Balance, Brc20Reader, Brc20ReaderWriter, DeployEvent, Event, + InscribeTransferEvent, MintEvent, Receipt, Tick, TokenInfo, TransferEvent, TransferableLog, + }, + ord::OrdReader, + }, + protocol::{ + brc20::{Message, Mint, Operation}, + context::Context, + }, + }, + Chain, Result, +}; +use anyhow::anyhow; +use bigdecimal::num_bigint::Sign; +use std::str::FromStr; + +#[derive(Debug, Clone, PartialEq)] +pub struct ExecutionMessage { + pub(self) txid: Txid, + pub(self) inscription_id: InscriptionId, + pub(self) inscription_number: i32, + pub(self) old_satpoint: SatPoint, + pub(self) new_satpoint: SatPoint, + pub(self) from: ScriptKey, + pub(self) to: Option, + pub(self) op: Operation, +} + +impl ExecutionMessage { + pub fn from_message(context: &mut Context, msg: &Message, chain: Chain) -> Result { + Ok(Self { + txid: msg.txid, + inscription_id: msg.inscription_id, + inscription_number: context.get_inscription_number_by_sequence_number(msg.sequence_number)?, + old_satpoint: msg.old_satpoint, + new_satpoint: msg + .new_satpoint + .ok_or(anyhow!("new satpoint cannot be None"))?, + from: context.get_script_key_on_satpoint(&msg.old_satpoint, chain)?, + to: if msg.sat_in_outputs { + Some(context.get_script_key_on_satpoint(msg.new_satpoint.as_ref().unwrap(), chain)?) 
{ + } else { + None + }, + op: msg.op.clone(), + }) + } +} + +pub fn execute(context: &mut Context, msg: &ExecutionMessage) -> Result { + log::debug!("BRC20 execute message: {:?}", msg); + let event = match &msg.op { + Operation::Deploy(deploy) => process_deploy(context, msg, deploy.clone()), + Operation::Mint { mint, parent } => process_mint(context, msg, mint.clone(), *parent), + Operation::InscribeTransfer(transfer) => { + process_inscribe_transfer(context, msg, transfer.clone()) + } + Operation::Transfer(_) => process_transfer(context, msg), + }; + + let receipt = Receipt { + inscription_id: msg.inscription_id, + inscription_number: msg.inscription_number, + old_satpoint: msg.old_satpoint, + new_satpoint: msg.new_satpoint, + from: msg.from.clone(), + // redirect the receiver to the sender if the transfer goes to the coinbase. + to: msg.to.clone().map_or(msg.from.clone(), |v| v), + op: msg.op.op_type(), + result: match event { + Ok(event) => Ok(event), + Err(Error::BRC20Error(e)) => Err(e), + Err(e) => return Err(anyhow!("BRC20 execute exception: {e}")), + }, + }; + + log::debug!("BRC20 message receipt: {:?}", receipt); + Ok(receipt) +} + +fn process_deploy( + context: &mut Context, + msg: &ExecutionMessage, + deploy: Deploy, +) -> Result { + // inscribing to the coinbase is not allowed. + let to_script_key = msg.to.clone().ok_or(BRC20Error::InscribeToCoinbase)?; + + let tick = deploy.tick.parse::()?; + let mut max_supply = deploy.max_supply.clone(); + let mut is_self_mint = false; + + // proposal for issuing self-mint tokens (5-byte ticks): + // https://l1f.discourse.group/t/brc-20-proposal-for-issuance-and-burn-enhancements-brc20-ip-1/621 + if tick.self_issuance_tick() { + if context.chain_conf.blockheight + < policies::HardForks::self_issuance_activation_height(context.chain_conf.chain) + { + return Err(Error::BRC20Error(BRC20Error::SelfIssuanceNotActivated)); + } + if !deploy.self_mint.unwrap_or_default() { + return Err(Error::BRC20Error(BRC20Error::SelfIssuanceCheckedFailed)); + } + if deploy.max_supply == u64::MIN.to_string() { + max_supply = u64::MAX.to_string(); + } + is_self_mint = true; + } + + if let Some(stored_tick_info) = context.get_token_info(&tick).map_err(Error::LedgerError)? { + return Err(Error::BRC20Error(BRC20Error::DuplicateTick( + stored_tick_info.tick.to_string(), + ))); + } + + let dec = Num::from_str(&deploy.decimals.map_or(MAX_DECIMAL_WIDTH.to_string(), |v| v))?
+ .checked_to_u8()?; + if dec > MAX_DECIMAL_WIDTH { + return Err(Error::BRC20Error(BRC20Error::DecimalsTooLarge(dec))); + } + let base = BIGDECIMAL_TEN.checked_powu(u64::from(dec))?; + + let supply = Num::from_str(&max_supply)?; + + if supply.sign() == Sign::NoSign + || supply > MAXIMUM_SUPPLY.to_owned() + || supply.scale() > i64::from(dec) + { + return Err(Error::BRC20Error(BRC20Error::InvalidSupply( + supply.to_string(), + ))); + } + + let limit = Num::from_str(&deploy.mint_limit.map_or(max_supply, |v| v))?; + + if limit.sign() == Sign::NoSign + || limit > MAXIMUM_SUPPLY.to_owned() + || limit.scale() > i64::from(dec) + { + return Err(Error::BRC20Error(BRC20Error::MintLimitOutOfRange( + tick.to_lowercase().to_string(), + limit.to_string(), + ))); + } + + let supply = supply.checked_mul(&base)?.checked_to_u128()?; + let limit = limit.checked_mul(&base)?.checked_to_u128()?; + + let new_info = TokenInfo { + inscription_id: msg.inscription_id, + inscription_number: msg.inscription_number, + tick: tick.clone(), + decimal: dec, + supply, + burned_supply: 0u128, + limit_per_mint: limit, + minted: 0u128, + deploy_by: to_script_key, + is_self_mint, + deployed_number: context.chain_conf.blockheight, + latest_mint_number: context.chain_conf.blockheight, + deployed_timestamp: context.chain_conf.blocktime, + }; + context + .insert_token_info(&tick, &new_info) + .map_err(Error::LedgerError)?; + + Ok(Event::Deploy(DeployEvent { + supply, + limit_per_mint: limit, + decimal: dec, + tick: new_info.tick, + self_mint: is_self_mint, + })) +} + +fn process_mint( + context: &mut Context, + msg: &ExecutionMessage, + mint: Mint, + parent: Option, +) -> Result { + // ignore inscribe inscription to coinbase. + let to_script_key = msg.to.clone().ok_or(BRC20Error::InscribeToCoinbase)?; + + let tick = mint.tick.parse::()?; + + let tick_info = context + .get_token_info(&tick) + .map_err(Error::LedgerError)? + .ok_or(BRC20Error::TickNotFound(tick.to_string()))?; + + // check if self mint is allowed. + if tick_info.is_self_mint && !parent.is_some_and(|parent| parent == tick_info.inscription_id) { + return Err(Error::BRC20Error(BRC20Error::SelfMintPermissionDenied)); + } + + let base = BIGDECIMAL_TEN.checked_powu(u64::from(tick_info.decimal))?; + + let mut amt = Num::from_str(&mint.amount)?; + + if amt.scale() > i64::from(tick_info.decimal) { + return Err(Error::BRC20Error(BRC20Error::AmountOverflow( + amt.to_string(), + ))); + } + + amt = amt.checked_mul(&base)?; + if amt.sign() == Sign::NoSign { + return Err(Error::BRC20Error(BRC20Error::InvalidZeroAmount)); + } + if amt > Into::::into(tick_info.limit_per_mint) { + return Err(Error::BRC20Error(BRC20Error::AmountExceedLimit( + amt.to_string(), + ))); + } + let minted = Into::::into(tick_info.minted); + let supply = Into::::into(tick_info.supply); + + if minted >= supply { + return Err(Error::BRC20Error(BRC20Error::TickMinted( + tick_info.tick.to_string(), + ))); + } + + // cut off any excess. + let mut out_msg = None; + amt = if amt.checked_add(&minted)? > supply { + let new = supply.checked_sub(&minted)?; + out_msg = Some(format!( + "amt has been cut off to fit the supply! origin: {}, now: {}", + amt, new + )); + new + } else { + amt + }; + + // get or initialize user balance. + let mut balance = context + .get_balance(&to_script_key, &tick) + .map_err(Error::LedgerError)? + .map_or(Balance::new(&tick), |v| v); + + // add amount to available balance. + balance.overall_balance = Into::::into(balance.overall_balance) + .checked_add(&amt)? 
+ .checked_to_u128()?; + + // store to database. + context + .update_token_balance(&to_script_key, balance) + .map_err(Error::LedgerError)?; + + // update token minted. + let minted = minted.checked_add(&amt)?.checked_to_u128()?; + context + .update_mint_token_info(&tick, minted, context.chain_conf.blockheight) + .map_err(Error::LedgerError)?; + + Ok(Event::Mint(MintEvent { + tick: tick_info.tick, + amount: amt.checked_to_u128()?, + msg: out_msg, + })) +} + +fn process_inscribe_transfer( + context: &mut Context, + msg: &ExecutionMessage, + transfer: Transfer, +) -> Result { + // ignore inscribe inscription to coinbase. + let to_script_key = msg.to.clone().ok_or(BRC20Error::InscribeToCoinbase)?; + + let tick = transfer.tick.parse::()?; + + let token_info = context + .get_token_info(&tick) + .map_err(Error::LedgerError)? + .ok_or(BRC20Error::TickNotFound(tick.to_string()))?; + + let base = BIGDECIMAL_TEN.checked_powu(u64::from(token_info.decimal))?; + + let mut amt = Num::from_str(&transfer.amount)?; + + if amt.scale() > i64::from(token_info.decimal) { + return Err(Error::BRC20Error(BRC20Error::AmountOverflow( + amt.to_string(), + ))); + } + + amt = amt.checked_mul(&base)?; + if amt.sign() == Sign::NoSign || amt > Into::::into(token_info.supply) { + return Err(Error::BRC20Error(BRC20Error::AmountOverflow( + amt.to_string(), + ))); + } + + let mut balance = context + .get_balance(&to_script_key, &tick) + .map_err(Error::LedgerError)? + .map_or(Balance::new(&tick), |v| v); + + let overall = Into::::into(balance.overall_balance); + let transferable = Into::::into(balance.transferable_balance); + let available = overall.checked_sub(&transferable)?; + if available < amt { + return Err(Error::BRC20Error(BRC20Error::InsufficientBalance( + available.to_string(), + amt.to_string(), + ))); + } + + balance.transferable_balance = transferable.checked_add(&amt)?.checked_to_u128()?; + + let amt = amt.checked_to_u128()?; + context + .update_token_balance(&to_script_key, balance) + .map_err(Error::LedgerError)?; + + let transferable_asset = TransferableLog { + inscription_id: msg.inscription_id, + inscription_number: msg.inscription_number, + amount: amt, + tick: token_info.tick.clone(), + owner: to_script_key, + }; + + context + .insert_transferable_asset(msg.new_satpoint, &transferable_asset) + .map_err(Error::LedgerError)?; + + Ok(Event::InscribeTransfer(InscribeTransferEvent { + tick: transferable_asset.tick, + amount: amt, + })) +} + +fn process_transfer(context: &mut Context, msg: &ExecutionMessage) -> Result { + let transferable = context + .get_transferable_assets_by_satpoint(&msg.old_satpoint) + .map_err(Error::LedgerError)? + .ok_or(BRC20Error::TransferableNotFound(msg.inscription_id))?; + + let amt = Into::::into(transferable.amount); + + if transferable.owner != msg.from { + return Err(Error::BRC20Error(BRC20Error::TransferableOwnerNotMatch( + msg.inscription_id, + ))); + } + + let tick = transferable.tick; + + let token_info = context + .get_token_info(&tick) + .map_err(Error::LedgerError)? + .ok_or(BRC20Error::TickNotFound(tick.to_string()))?; + + // update from key balance. + let mut from_balance = context + .get_balance(&msg.from, &tick) + .map_err(Error::LedgerError)? 
+ .map_or(Balance::new(&tick), |v| v); + + let from_overall = Into::::into(from_balance.overall_balance); + let from_transferable = Into::::into(from_balance.transferable_balance); + + let from_overall = from_overall.checked_sub(&amt)?.checked_to_u128()?; + let from_transferable = from_transferable.checked_sub(&amt)?.checked_to_u128()?; + + from_balance.overall_balance = from_overall; + from_balance.transferable_balance = from_transferable; + + context + .update_token_balance(&msg.from, from_balance) + .map_err(Error::LedgerError)?; + + // redirect the receiver to the sender if the transfer goes to the coinbase. + let mut out_msg = None; + + let to_script_key = if msg.to.clone().is_none() { + out_msg = + Some("redirect receiver to sender, reason: transfer inscription to coinbase".to_string()); + msg.from.clone() + } else { + msg.to.clone().unwrap() + }; + + // update to key balance. + let mut to_balance = context + .get_balance(&to_script_key, &tick) + .map_err(Error::LedgerError)? + .map_or(Balance::new(&tick), |v| v); + + let to_overall = Into::::into(to_balance.overall_balance); + to_balance.overall_balance = to_overall.checked_add(&amt)?.checked_to_u128()?; + + context + .update_token_balance(&to_script_key, to_balance) + .map_err(Error::LedgerError)?; + + context + .remove_transferable_asset(msg.old_satpoint) + .map_err(Error::LedgerError)?; + + // update burned supply if transfer to op_return. + match to_script_key { + ScriptKey::ScriptHash { is_op_return, .. } if is_op_return => { + let burned_amt = Into::::into(token_info.burned_supply) + .checked_add(&amt)? + .checked_to_u128()?; + context + .update_burned_token_info(&tick, burned_amt) + .map_err(Error::LedgerError)?; + out_msg = Some(format!( + "transfer to op_return, burned supply increased: {}", + amt + )); + } + _ => (), + } + + Ok(Event::Transfer(TransferEvent { + msg: out_msg, + tick: token_info.tick, + amount: amt.checked_to_u128()?, + })) +} diff --git a/src/okx/protocol/brc20/msg_resolver.rs b/src/okx/protocol/brc20/msg_resolver.rs new file mode 100644 index 0000000000..a2fd4dd192 --- /dev/null +++ b/src/okx/protocol/brc20/msg_resolver.rs @@ -0,0 +1,292 @@ +use super::*; +use crate::{ + index::entry::{Entry, SatPointValue}, + okx::{ + datastore::{ + brc20::TransferableLog, + ord::{Action, InscriptionOp}, + }, + protocol::brc20::{deserialize_brc20_operation, Operation}, + }, + Result, +}; +use std::collections::HashMap; + +impl Message { + pub(crate) fn resolve( + op: &InscriptionOp, + transfer_assets_cache: HashMap, + ) -> Result> { + log::debug!("BRC20 resolving the message from {:?}", op); + let sat_in_outputs = op + .new_satpoint + .map(|satpoint| satpoint.outpoint.txid == op.txid) + .unwrap_or(false); + + let brc20_operation = match &op.action { + // A new inscription that is not `cursed`, `unbound`, or `vindicated`. + Action::New { + cursed: false, + unbound: false, + vindicated: false, + inscription, + .. + } if sat_in_outputs => { + let Ok(brc20_operation) = deserialize_brc20_operation(inscription, &op.action) else { + return Ok(None); + }; + brc20_operation + } + // Transferred inscription operation. + // Attempt to retrieve the `InscribeTransfer` inscription information from the BRC20 transferable-assets cache. + Action::Transfer => { + let Some(transfer_info) = transfer_assets_cache.get(&op.old_satpoint.store()) else { + return Ok(None); + }; + // If the inscription_id of the transfer operation is different from the inscription_id of the transferable log, it is invalid.
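+ // Only the inscription that originally created the transferable asset can trigger the transfer; any other inscription moved from the same satpoint is ignored.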
+ if transfer_info.inscription_id != op.inscription_id { + return Ok(None); + } + Operation::Transfer(Transfer { + tick: transfer_info.tick.as_str().to_string(), + amount: transfer_info.amount.to_string(), + }) + } + _ => return Ok(None), + }; + Ok(Some(Self { + txid: op.txid, + sequence_number: op.sequence_number, + inscription_id: op.inscription_id, + old_satpoint: op.old_satpoint, + new_satpoint: op.new_satpoint, + op: brc20_operation, + sat_in_outputs, + })) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{ + okx::datastore::brc20::{Tick, TransferableLog}, + Inscription, + }; + use bitcoin::{Address, OutPoint}; + use std::str::FromStr; + + fn create_inscription(str: &str) -> Inscription { + Inscription::new( + Some("text/plain;charset=utf-8".as_bytes().to_vec()), + Some(str.as_bytes().to_vec()), + ) + } + + fn create_inscribe_operation(str: &str) -> (Vec, InscriptionOp) { + let inscriptions = vec![create_inscription(str)]; + let txid = + Txid::from_str("b61b0172d95e266c18aea0c624db987e971a5d6d4ebc2aaed85da4642d635735").unwrap(); + let op = InscriptionOp { + txid, + action: Action::New { + cursed: false, + unbound: false, + inscription: inscriptions.first().unwrap().clone(), + parent: None, + vindicated: false, + }, + sequence_number: 1, + inscription_number: Some(1), + inscription_id: InscriptionId { txid, index: 0 }, + old_satpoint: SatPoint { + outpoint: OutPoint { + txid: Txid::from_str("2111111111111111111111111111111111111111111111111111111111111111") + .unwrap(), + vout: 0, + }, + offset: 0, + }, + new_satpoint: Some(SatPoint { + outpoint: OutPoint { txid, vout: 0 }, + offset: 0, + }), + }; + (inscriptions, op) + } + + fn create_transfer_operation() -> InscriptionOp { + let txid = + Txid::from_str("b61b0172d95e266c18aea0c624db987e971a5d6d4ebc2aaed85da4642d635735").unwrap(); + + let inscription_id = InscriptionId { + txid: Txid::from_str("2111111111111111111111111111111111111111111111111111111111111111") + .unwrap(), + index: 0, + }; + + InscriptionOp { + txid, + action: Action::Transfer, + sequence_number: 1, + inscription_number: Some(1), + inscription_id, + old_satpoint: SatPoint { + outpoint: OutPoint { + txid: inscription_id.txid, + vout: 0, + }, + offset: 0, + }, + new_satpoint: Some(SatPoint { + outpoint: OutPoint { txid, vout: 0 }, + offset: 0, + }), + } + } + + #[test] + fn test_invalid_protocol() { + let transfer_assets_cache = HashMap::new(); + let (_inscriptions, op) = create_inscribe_operation( + r#"{ "p": "brc-20s","op": "deploy", "tick": "ordi", "max": "1000", "lim": "10" }"#, + ); + assert_matches!(Message::resolve(&op, transfer_assets_cache), Ok(None)); + } + + #[test] + fn test_cursed_or_unbound_inscription() { + let transfer_assets_cache = HashMap::new(); + + let (inscriptions, op) = create_inscribe_operation( + r#"{ "p": "brc-20","op": "deploy", "tick": "ordi", "max": "1000", "lim": "10" }"#, + ); + let op = InscriptionOp { + action: Action::New { + cursed: true, + unbound: false, + inscription: inscriptions.first().unwrap().clone(), + parent: None, + vindicated: false, + }, + ..op + }; + assert_matches!( + Message::resolve(&op, transfer_assets_cache.clone()), + Ok(None) + ); + + let op2 = InscriptionOp { + action: Action::New { + cursed: false, + unbound: true, + inscription: inscriptions.first().unwrap().clone(), + parent: None, + vindicated: false, + }, + ..op + }; + assert_matches!( + Message::resolve(&op2, transfer_assets_cache.clone()), + Ok(None) + ); + let op3 = InscriptionOp { + action: Action::New { + cursed: true, + unbound: true, + 
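+        // both cursed and unbound: resolution must still yield Ok(None).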
inscription: inscriptions.first().unwrap().clone(), + parent: None, + vindicated: false, + }, + ..op + }; + assert_matches!(Message::resolve(&op3, transfer_assets_cache), Ok(None)); + } + + #[test] + fn test_valid_inscribe_operation() { + let transfer_assets_cache = HashMap::new(); + let (_inscriptions, op) = create_inscribe_operation( + r#"{ "p": "brc-20","op": "deploy", "tick": "ordi", "max": "1000", "lim": "10" }"#, + ); + let _result_msg = Message { + txid: op.txid, + sequence_number: op.sequence_number, + inscription_id: op.inscription_id, + old_satpoint: op.old_satpoint, + new_satpoint: op.new_satpoint, + op: Operation::Deploy(Deploy { + tick: "ordi".to_string(), + max_supply: "1000".to_string(), + mint_limit: Some("10".to_string()), + decimals: None, + self_mint: None, + }), + sat_in_outputs: true, + }; + assert_matches!( + Message::resolve(&op, transfer_assets_cache), + Ok(Some(_result_msg)) + ); + } + + #[test] + fn test_invalid_transfer() { + let transfer_assets_cache = HashMap::new(); + + // inscribe transfer not found + let op = create_transfer_operation(); + assert_matches!( + Message::resolve(&op, transfer_assets_cache.clone()), + Ok(None) + ); + + // non-first transfer operations. + let op1 = InscriptionOp { + old_satpoint: SatPoint { + outpoint: OutPoint { + txid: Txid::from_str("3111111111111111111111111111111111111111111111111111111111111111") + .unwrap(), + vout: 0, + }, + offset: 0, + }, + ..op + }; + assert_matches!(Message::resolve(&op1, transfer_assets_cache), Ok(None)); + } + + #[test] + fn test_valid_transfer() { + let mut transfer_assets_cache = HashMap::new(); + // inscribe transfer not found + let op = create_transfer_operation(); + transfer_assets_cache.insert( + op.old_satpoint.store(), + TransferableLog { + tick: Tick::from_str("ordi").unwrap(), + amount: 100, + inscription_id: op.inscription_id, + inscription_number: op.inscription_number.unwrap(), + owner: ScriptKey::Address( + Address::from_str("bc1qhvd6suvqzjcu9pxjhrwhtrlj85ny3n2mqql5w4").unwrap(), + ), + }, + ); + + let _msg = Message { + txid: op.txid, + sequence_number: op.sequence_number, + inscription_id: op.inscription_id, + old_satpoint: op.old_satpoint, + new_satpoint: op.new_satpoint, + op: Operation::Transfer(Transfer { + tick: "ordi".to_string(), + amount: "100".to_string(), + }), + sat_in_outputs: true, + }; + + assert_matches!(Message::resolve(&op, transfer_assets_cache), Ok(Some(_msg))); + } +} diff --git a/src/okx/protocol/brc20/num.rs b/src/okx/protocol/brc20/num.rs new file mode 100644 index 0000000000..4ba88d619a --- /dev/null +++ b/src/okx/protocol/brc20/num.rs @@ -0,0 +1,435 @@ +use super::{params::MAX_DECIMAL_WIDTH, BRC20Error}; +use bigdecimal::{ + num_bigint::{BigInt, Sign, ToBigInt}, + BigDecimal, One, ToPrimitive, +}; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use std::{ + fmt::{Display, Formatter}, + str::FromStr, +}; + +#[derive(PartialEq, PartialOrd, Debug, Clone)] +pub struct Num(BigDecimal); + +impl Num { + // TODO check overflow + pub fn checked_add(&self, other: &Num) -> Result { + Ok(Self(self.0.clone() + &other.0)) + } + + pub fn checked_sub(&self, other: &Num) -> Result { + if self.0 < other.0 { + return Err(BRC20Error::Overflow { + op: String::from("checked_sub"), + org: self.clone().to_string(), + other: other.clone().to_string(), + }); + } + + Ok(Self(self.0.clone() - &other.0)) + } + + // TODO check overflow + pub fn checked_mul(&self, other: &Num) -> Result { + Ok(Self(self.0.clone() * &other.0)) + } + + pub fn checked_powu(&self, exp: u64) -> 
Result { + match exp { + 0 => Ok(Self(BigDecimal::one())), + 1 => Ok(Self(self.0.clone())), + exp => { + let mut result = self.0.clone(); + for _ in 1..exp { + result *= &self.0; + } + + Ok(Self(result)) + } + } + } + + pub fn checked_to_u8(&self) -> Result { + if !self.0.is_integer() { + return Err(BRC20Error::InvalidInteger(self.clone().to_string())); + } + self.0.clone().to_u8().ok_or(BRC20Error::Overflow { + op: String::from("to_u8"), + org: self.clone().to_string(), + other: Self(BigDecimal::from(u8::MAX)).to_string(), + }) + } + + pub fn sign(&self) -> Sign { + self.0.sign() + } + + pub fn scale(&self) -> i64 { + let (_, scale) = self.0.as_bigint_and_exponent(); + scale + } + + pub fn checked_to_u128(&self) -> Result { + if !self.0.is_integer() { + return Err(BRC20Error::InvalidInteger(self.clone().to_string())); + } + self + .0 + .to_bigint() + .ok_or(BRC20Error::InternalError(format!( + "convert {} to bigint failed", + self.0 + )))? + .to_u128() + .ok_or(BRC20Error::Overflow { + op: String::from("to_u128"), + org: self.clone().to_string(), + other: Self(BigDecimal::from(BigInt::from(u128::MAX))).to_string(), // TODO: change overflow error to others + }) + } +} + +impl From for Num { + fn from(n: u64) -> Self { + Self(BigDecimal::from(n)) + } +} + +impl From for Num { + fn from(n: u128) -> Self { + Self(BigDecimal::from(BigInt::from(n))) + } +} + +impl FromStr for Num { + type Err = BRC20Error; + fn from_str(s: &str) -> Result { + if s.starts_with('.') || s.ends_with('.') || s.find(['e', 'E', '+', '-']).is_some() { + return Err(BRC20Error::InvalidNum(s.to_string())); + } + let num = BigDecimal::from_str(s).map_err(|_| BRC20Error::InvalidNum(s.to_string()))?; + + let (_, scale) = num.as_bigint_and_exponent(); + if scale > i64::from(MAX_DECIMAL_WIDTH) { + return Err(BRC20Error::InvalidNum(s.to_string())); + } + + Ok(Self(num)) + } +} + +impl Display for Num { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + self.0.fmt(f) + } +} + +impl Serialize for Num { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + let s = self.to_string(); + serializer.serialize_str(&s) + } +} + +impl<'de> Deserialize<'de> for Num { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + Ok(Self( + BigDecimal::from_str(&s).map_err(serde::de::Error::custom)?, + )) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use bigdecimal::FromPrimitive; + #[test] + fn test_num_from_str2() { + assert_eq!( + Num::from_str("001").unwrap(), + Num(BigDecimal::new(BigInt::from(1), 0)), + ); + assert_eq!( + Num::from_str("00.1").unwrap(), + Num(BigDecimal::new(BigInt::from(1), 1)), + ); + assert_eq!( + Num::from_str("0.0").unwrap(), + Num(BigDecimal::new(BigInt::from(0), 0)), + ); + assert_eq!( + Num::from_str("0.100").unwrap(), + Num(BigDecimal::new(BigInt::from(1), 1)), + ); + assert_eq!( + Num::from_str("0").unwrap(), + Num(BigDecimal::new(BigInt::from(0), 0)), + ); + assert_eq!( + Num::from_str("00.00100").unwrap(), + Num(BigDecimal::new(BigInt::from(1), 3)), + ); + } + + #[test] + fn test_num_from_str() { + assert!(Num::from_str(".1").is_err()); + assert_eq!( + Num(BigDecimal::new(BigInt::from(0), 0)), + Num::from_str("0").unwrap() + ); + assert_eq!( + Num(BigDecimal::new(BigInt::from(1), 0)), + Num::from_str("001").unwrap() + ); + assert_eq!( + Num(BigDecimal::new(BigInt::from(1), 1)), + Num::from_str("00.1").unwrap() + ); + + assert_eq!( + Num(BigDecimal::new(BigInt::from(0), 0)), + 
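+      // "0.0" compares equal to integer zero under BigDecimal's value-based equality.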
Num::from_str("0.0").unwrap() + ); + assert_eq!( + Num(BigDecimal::new(BigInt::from(1), 1)), + Num::from_str("0.100").unwrap() + ); + assert_eq!( + Num(BigDecimal::new(BigInt::from(1), 3)), + Num::from_str("00.00100").unwrap() + ); + assert_eq!( + Num(BigDecimal::new(BigInt::from(11), 1)), + Num::from_str("1.1").unwrap() + ); + assert_eq!( + Num(BigDecimal::new(BigInt::from(11), 1)), + Num::from_str("1.1000").unwrap() + ); + assert_eq!( + Num(BigDecimal::new(BigInt::from(101), 2)), + Num::from_str("1.01").unwrap() + ); + + // can not be negative + assert!(Num::from_str("-1.1").is_err()); + + // number of decimal fractional can not exceed 18 + assert_eq!( + Num(BigDecimal::new( + BigInt::from(1_000_000_000_000_000_001_u64), + 18 + )), + Num::from_str("1.000000000000000001").unwrap() + ); + assert!(Num::from_str("1.0000000000000000001").is_err()); + } + + #[test] + fn test_invalid_num() { + assert!(Num::from_str("").is_err()); + assert!(Num::from_str(" ").is_err()); + assert!(Num::from_str(".").is_err()); + assert!(Num::from_str(" 123.456").is_err()); + assert!(Num::from_str(".456").is_err()); + assert!(Num::from_str(".456 ").is_err()); + assert!(Num::from_str(" .456 ").is_err()); + assert!(Num::from_str(" 456").is_err()); + assert!(Num::from_str("456 ").is_err()); + assert!(Num::from_str("45 6").is_err()); + assert!(Num::from_str("123. 456").is_err()); + assert!(Num::from_str("123.-456").is_err()); + assert!(Num::from_str("123.+456").is_err()); + assert!(Num::from_str("+123.456").is_err()); + assert!(Num::from_str("123.456.789").is_err()); + assert!(Num::from_str("123456789.").is_err()); + assert!(Num::from_str("123456789.12345678901234567891").is_err()); + } + + #[test] + fn test_num_serialize() { + let num = Num::from_str("1.01").unwrap(); + let json = serde_json::to_string(&num).unwrap(); + assert_eq!(json.as_str(), "\"1.01\""); + } + + #[test] + fn test_num_deserialize() { + let num = serde_json::from_str::("\"1.11\"").unwrap(); + assert_eq!(Num::from_str("1.11").unwrap(), num); + } + + #[test] + fn test_num_checked_add() { + assert_eq!( + Num::from_str("2"), + Num::from_str("1") + .unwrap() + .checked_add(&Num::from_str("1").unwrap()) + ); + assert_eq!( + Num::from_str("2.1"), + Num::from_str("1") + .unwrap() + .checked_add(&Num::from_str("1.1").unwrap()) + ); + assert_eq!( + Num::from_str("2.1"), + Num::from_str("1.1") + .unwrap() + .checked_add(&Num::from_str("1").unwrap()) + ); + assert_eq!( + Num::from_str("2.222"), + Num::from_str("1.101") + .unwrap() + .checked_add(&Num::from_str("1.121").unwrap()) + ); + } + + #[test] + fn test_num_checked_sub() { + assert_eq!( + Num::from_str("2"), + Num::from_str("3") + .unwrap() + .checked_sub(&Num::from_str("1").unwrap()) + ); + assert_eq!( + Num::from_str("2.1"), + Num::from_str("3") + .unwrap() + .checked_sub(&Num::from_str("0.9").unwrap()) + ); + assert_eq!( + Num::from_str("2.1"), + Num::from_str("3.1") + .unwrap() + .checked_sub(&Num::from_str("1").unwrap()) + ); + assert_eq!( + Num::from_str("2.222"), + Num::from_str("3.303") + .unwrap() + .checked_sub(&Num::from_str("1.081").unwrap()) + ); + } + + #[test] + fn test_to_u8() { + assert_eq!(Num::from_str("2").unwrap().checked_to_u8().unwrap(), 2); + assert_eq!(Num::from_str("255").unwrap().checked_to_u8().unwrap(), 255); + assert_eq!( + Num::from_str("256").unwrap().checked_to_u8().unwrap_err(), + BRC20Error::Overflow { + op: String::from("to_u8"), + org: Num::from_str("256").unwrap().to_string(), + other: Num(BigDecimal::from_u8(u8::MAX).unwrap()).to_string(), + } + ); + + let n = 
Num::from_str("15.00").unwrap(); + assert_eq!(n.checked_to_u8().unwrap(), 15u8); + } + + #[test] + fn test_max_value() { + // brc20 protocol stipulate that a max integer value is 64 bit, and decimal has 18 numbers at most. + let max = format!("{}.999999999999999999", u64::MAX); + + BigDecimal::from_str(&max).unwrap(); + } + + #[test] + fn test_checked_powu_floatpoint() { + let n = Num::from_str("3.7").unwrap(); + assert_eq!(n.checked_powu(0).unwrap(), Num::from_str("1").unwrap()); + assert_eq!(n.checked_powu(1).unwrap(), n); + assert_eq!(n.checked_powu(2).unwrap(), Num::from_str("13.69").unwrap()); + assert_eq!(n.checked_powu(3).unwrap(), Num::from_str("50.653").unwrap()); + assert_eq!( + n.checked_powu(5).unwrap(), + Num::from_str("693.43957").unwrap() + ); + assert_eq!( + n.checked_powu(18).unwrap(), + Num::from_str("16890053810.563300749953435929").unwrap() + ); + } + + #[test] + fn test_checked_powu_integer() { + let n = Num::from_str("10").unwrap(); + assert_eq!(n.checked_powu(0).unwrap(), Num::from_str("1").unwrap()); + assert_eq!(n.checked_powu(1).unwrap(), n); + assert_eq!(n.checked_powu(2).unwrap(), Num::from_str("100").unwrap()); + assert_eq!(n.checked_powu(3).unwrap(), Num::from_str("1000").unwrap()); + assert_eq!(n.checked_powu(5).unwrap(), Num::from_str("100000").unwrap()); + assert_eq!( + n.checked_powu(18).unwrap(), + Num::from_str("1000000000000000000").unwrap() + ); + } + + #[test] + fn test_checked_to_u128() { + let n = Num::from_str(&format!("{}", u128::MAX)).unwrap(); + assert_eq!(n.checked_to_u128().unwrap(), u128::MAX); + + let n = Num::from_str("0").unwrap(); + assert_eq!(n.checked_to_u128().unwrap(), 0); + + let n = Num::from_str(&format!("{}{}", u128::MAX, 1)).unwrap(); + assert_eq!( + n.checked_to_u128().unwrap_err(), + BRC20Error::Overflow { + op: String::from("to_u128"), + org: n.to_string(), + other: Num::from(u128::MAX).to_string(), + } + ); + + let n = Num::from_str(&format!("{}.{}", u128::MAX - 1, "33333")).unwrap(); + assert_eq!( + n.checked_to_u128().unwrap_err(), + BRC20Error::InvalidInteger(n.to_string()) + ); + + let n = Num::from_str(&format!("{}.{}", 0, "33333")).unwrap(); + assert_eq!( + n.checked_to_u128().unwrap_err(), + BRC20Error::InvalidInteger(n.to_string()) + ); + let a = BigDecimal::from_str("0.333").unwrap().to_bigint().unwrap(); + + assert_eq!(a.to_u128().unwrap(), 0_u128); + + let n = Num::from_str("3140000000000000000.00").unwrap(); + assert_eq!(n.checked_to_u128().unwrap(), 3140000000000000000u128); + + let n = Num::from_str(&format!("{}.{}", u128::MAX - 1, "33333")).unwrap(); + assert_eq!(n.scale(), 5_i64); + assert_eq!( + Num::from_str("1e2").unwrap_err(), + BRC20Error::InvalidNum("1e2".to_string()) + ); + assert_eq!( + Num::from_str("0e2").unwrap_err(), + BRC20Error::InvalidNum("0e2".to_string()) + ); + + assert_eq!( + Num::from_str("100E2").unwrap_err(), + BRC20Error::InvalidNum("100E2".to_string()) + ); + } +} diff --git a/src/okx/protocol/brc20/operation/deploy.rs b/src/okx/protocol/brc20/operation/deploy.rs new file mode 100644 index 0000000000..df574132d4 --- /dev/null +++ b/src/okx/protocol/brc20/operation/deploy.rs @@ -0,0 +1,356 @@ +use crate::okx::datastore::brc20::SELF_ISSUANCE_TICK_LENGTH; +use serde::{de, Deserialize, Deserializer, Serialize}; +use serde_json::Value; +use std::str::FromStr; + +#[derive(Debug, PartialEq, Clone, Serialize)] +pub struct Deploy { + #[serde(rename = "tick")] + pub tick: String, + #[serde(rename = "max")] + pub max_supply: String, + #[serde(rename = "lim", skip_serializing_if = 
"Option::is_none")] + pub mint_limit: Option, + #[serde(rename = "dec", skip_serializing_if = "Option::is_none")] + pub decimals: Option, + #[serde( + default, + rename = "self_mint", + skip_serializing_if = "Option::is_none", + serialize_with = "ser_to_str" + )] + pub self_mint: Option, +} + +impl<'de> Deserialize<'de> for Deploy { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + #[derive(Deserialize)] + struct DeployFields { + tick: String, + max: String, + lim: Option, + dec: Option, + self_mint: Option, + } + let deploy = DeployFields::deserialize(deserializer)?; + let self_mint = if deploy.tick.len() == SELF_ISSUANCE_TICK_LENGTH { + match deploy.self_mint { + Some(v) => Some( + bool::from_str( + serde_json::from_value::(v) + .map_err(de::Error::custom)? + .as_str(), + ) + .map_err(de::Error::custom)?, + ), + None => return Err(de::Error::missing_field("self_mint")), + } + } else { + None + }; + Ok(Deploy { + tick: deploy.tick.clone(), + max_supply: deploy.max, + mint_limit: deploy.lim, + decimals: deploy.dec, + self_mint, + }) + } +} + +fn ser_to_str(v: &Option, serializer: S) -> Result +where + S: serde::Serializer, +{ + match v { + Some(v) => serializer.serialize_str(&v.to_string()), + None => serializer.serialize_none(), + } +} + +#[cfg(test)] +mod tests { + use super::super::*; + use super::*; + + #[test] + fn test_five_bytes_ticker_self_mint_deserialize() { + let json_str = r#"{"p":"brc-20","op":"deploy","tick":"abcde","max":"100","lim":"10","dec":"10","self_mint":"true"}"#; + assert_eq!( + deserialize_brc20(json_str).unwrap(), + RawOperation::Deploy(Deploy { + tick: "abcde".to_string(), + max_supply: "100".to_string(), + mint_limit: Some("10".to_string()), + decimals: Some("10".to_string()), + self_mint: Some(true), + }) + ); + + let json_str = r#"{"self_mint":"true","p":"brc-20","op":"deploy","tick":"abcde","max":"100","lim":"10","dec":"10"}"#; + assert_eq!( + deserialize_brc20(json_str).unwrap(), + RawOperation::Deploy(Deploy { + tick: "abcde".to_string(), + max_supply: "100".to_string(), + mint_limit: Some("10".to_string()), + decimals: Some("10".to_string()), + self_mint: Some(true), + }) + ); + } + + #[test] + fn test_self_mint_deserialize_with_error_value() { + assert_eq!( + deserialize_brc20( + r#"{"p":"brc-20","op":"deploy","tick":"abcde","max":"12000","lim":"12","dec":"11","self_mint":"True"}"# + ) + .unwrap_err(), + JSONError::ParseOperationJsonError("provided string was not `true` or `false`".to_string()) + ); + assert_eq!( + deserialize_brc20( + r#"{"p":"brc-20","op":"deploy","tick":"abcde","max":"12000","lim":"12","dec":"11","self_mint":"t"}"# + ) + .unwrap_err(), + JSONError::ParseOperationJsonError("provided string was not `true` or `false`".to_string()) + ); + assert_eq!( + deserialize_brc20( + r#"{"p":"brc-20","op":"deploy","tick":"abcde","max":"12000","lim":"12","dec":"11","self_mint":true}"# + ) + .unwrap_err(), + JSONError::ParseOperationJsonError("invalid type: boolean `true`, expected a string".to_string()) + ); + } + + #[test] + fn test_loss_self_mint() { + assert_eq!( + deserialize_brc20( + r#"{"p":"brc-20","op":"deploy","tick":"1234","max":"100","lim":"22","dec":"11"}"# + ) + .unwrap(), + RawOperation::Deploy(Deploy { + tick: "1234".to_string(), + max_supply: "100".to_string(), + mint_limit: Some("22".to_string()), + decimals: Some("11".to_string()), + self_mint: None, + }) + ); + assert_eq!( + deserialize_brc20( + r#"{"p":"brc-20","op":"deploy","tick":"12345","max":"100","lim":"22","dec":"11"}"# + ) + .unwrap_err(), + 
JSONError::ParseOperationJsonError("missing field `self_mint`".to_string()) + ); + } + + #[test] + fn test_ignore_self_mint() { + assert_eq!( + deserialize_brc20( + r#"{"p":"brc-20","op":"deploy","tick":"1234","max":"100","lim":"22","dec":"11","self_mint":"true"}"# + ) + .unwrap(), + RawOperation::Deploy(Deploy { + tick: "1234".to_string(), + max_supply: "100".to_string(), + mint_limit: Some("22".to_string()), + decimals: Some("11".to_string()), + self_mint: None, + }) + ); + assert_eq!( + deserialize_brc20( + r#"{"p":"brc-20","op":"deploy","tick":"1234","max":"100","lim":"22","dec":"11","self_mint":true}"# + ) + .unwrap(), + RawOperation::Deploy(Deploy { + tick: "1234".to_string(), + max_supply: "100".to_string(), + mint_limit: Some("22".to_string()), + decimals: Some("11".to_string()), + self_mint: None, + }) + ); + assert_eq!( + deserialize_brc20( + r#"{"p":"brc-20","op":"deploy","tick":"1234","max":"100","lim":"22","dec":"11","self_mint":"True"}"# + ) + .unwrap(), + RawOperation::Deploy(Deploy { + tick: "1234".to_string(), + max_supply: "100".to_string(), + mint_limit: Some("22".to_string()), + decimals: Some("11".to_string()), + self_mint: None, + }) + ); + } + + #[test] + fn test_serialize() { + let obj = Deploy { + tick: "abcd".to_string(), + max_supply: "12000".to_string(), + mint_limit: Some("12".to_string()), + decimals: Some("11".to_string()), + self_mint: None, + }; + + assert_eq!( + serde_json::to_string(&obj).unwrap(), + format!( + r##"{{"tick":"{}","max":"{}","lim":"{}","dec":"{}"}}"##, + obj.tick, + obj.max_supply, + obj.mint_limit.unwrap(), + obj.decimals.unwrap() + ) + ) + } + + #[test] + fn test_deserialize() { + assert_eq!( + deserialize_brc20( + r#"{"p":"brc-20","op":"deploy","tick":"abcd","max":"12000","lim":"12","dec":"11"}"# + ) + .unwrap(), + RawOperation::Deploy(Deploy { + tick: "abcd".to_string(), + max_supply: "12000".to_string(), + mint_limit: Some("12".to_string()), + decimals: Some("11".to_string()), + self_mint: None, + }) + ); + } + + #[test] + fn test_self_mint_serialize() { + let obj = Deploy { + tick: "abcd".to_string(), + max_supply: "12000".to_string(), + mint_limit: Some("12".to_string()), + decimals: Some("11".to_string()), + self_mint: None, + }; + + assert_eq!( + serde_json::to_string(&obj).unwrap(), + format!( + r##"{{"tick":"{}","max":"{}","lim":"{}","dec":"{}"}}"##, + obj.tick, + obj.max_supply, + obj.mint_limit.as_ref().unwrap(), + obj.decimals.as_ref().unwrap(), + ) + ); + + let obj = Deploy { + self_mint: Some(true), + ..obj + }; + + assert_eq!( + serde_json::to_string(&obj).unwrap(), + format!( + r##"{{"tick":"{}","max":"{}","lim":"{}","dec":"{}","self_mint":"{}"}}"##, + obj.tick, + obj.max_supply, + obj.mint_limit.as_ref().unwrap(), + obj.decimals.as_ref().unwrap(), + obj.self_mint.as_ref().unwrap() + ) + ); + + let obj = Deploy { + self_mint: Some(false), + ..obj + }; + assert_eq!( + serde_json::to_string(&obj).unwrap(), + format!( + r##"{{"tick":"{}","max":"{}","lim":"{}","dec":"{}","self_mint":"{}"}}"##, + obj.tick, + obj.max_supply, + obj.mint_limit.as_ref().unwrap(), + obj.decimals.as_ref().unwrap(), + obj.self_mint.as_ref().unwrap() + ) + ) + } + + #[test] + fn test_loss_require_key() { + assert_eq!( + deserialize_brc20(r#"{"p":"brc-20","op":"deploy","tick":"11","lim":"22","dec":"11"}"#) + .unwrap_err(), + JSONError::ParseOperationJsonError("missing field `max`".to_string()) + ); + } + + #[test] + fn test_loss_option_key() { + // loss lim + assert_eq!( + 
deserialize_brc20(r#"{"p":"brc-20","op":"deploy","tick":"smol","max":"100","dec":"10"}"#) + .unwrap(), + RawOperation::Deploy(Deploy { + tick: "smol".to_string(), + max_supply: "100".to_string(), + mint_limit: None, + decimals: Some("10".to_string()), + self_mint: None, + }) + ); + + // loss dec + assert_eq!( + deserialize_brc20(r#"{"p":"brc-20","op":"deploy","tick":"smol","max":"100","lim":"10"}"#) + .unwrap(), + RawOperation::Deploy(Deploy { + tick: "smol".to_string(), + max_supply: "100".to_string(), + mint_limit: Some("10".to_string()), + decimals: None, + self_mint: None, + }) + ); + + // loss all option + assert_eq!( + deserialize_brc20(r#"{"p":"brc-20","op":"deploy","tick":"smol","max":"100"}"#).unwrap(), + RawOperation::Deploy(Deploy { + tick: "smol".to_string(), + max_supply: "100".to_string(), + mint_limit: None, + decimals: None, + self_mint: None, + }) + ); + } + + #[test] + fn test_duplicate_key() { + let json_str = r#"{"p":"brc-20","op":"deploy","tick":"smol","max":"100","lim":"10","dec":"17","max":"200","lim":"20","max":"300"}"#; + assert_eq!( + deserialize_brc20(json_str).unwrap(), + RawOperation::Deploy(Deploy { + tick: "smol".to_string(), + max_supply: "300".to_string(), + mint_limit: Some("20".to_string()), + decimals: Some("17".to_string()), + self_mint: None, + }) + ); + } +} diff --git a/src/okx/protocol/brc20/operation/mint.rs b/src/okx/protocol/brc20/operation/mint.rs new file mode 100644 index 0000000000..ae0d9cb8f3 --- /dev/null +++ b/src/okx/protocol/brc20/operation/mint.rs @@ -0,0 +1,58 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +pub struct Mint { + #[serde(rename = "tick")] + pub tick: String, + #[serde(rename = "amt")] + pub amount: String, +} + +#[cfg(test)] +mod tests { + use super::super::*; + use super::*; + + #[test] + fn test_serialize() { + let obj = Mint { + tick: "abcd".to_string(), + amount: "22".to_string(), + }; + assert_eq!( + serde_json::to_string(&obj).unwrap(), + r#"{"tick":"abcd","amt":"22"}"# + ); + } + + #[test] + fn test_deserialize() { + assert_eq!( + deserialize_brc20(r#"{"p":"brc-20","op":"mint","tick":"abcd","amt":"12000"}"#).unwrap(), + RawOperation::Mint(Mint { + tick: "abcd".to_string(), + amount: "12000".to_string() + }) + ); + } + + #[test] + fn test_loss_require_key() { + assert_eq!( + deserialize_brc20(r#"{"p":"brc-20","op":"mint","tick":"abcd"}"#).unwrap_err(), + JSONError::ParseOperationJsonError("missing field `amt`".to_string()) + ); + } + + #[test] + fn test_duplicate_key() { + let json_str = r#"{"p":"brc-20","op":"mint","tick":"smol","amt":"100","tick":"hhaa","amt":"200","tick":"actt"}"#; + assert_eq!( + deserialize_brc20(json_str).unwrap(), + RawOperation::Mint(Mint { + tick: "actt".to_string(), + amount: "200".to_string(), + }) + ); + } +} diff --git a/src/okx/protocol/brc20/operation/mod.rs b/src/okx/protocol/brc20/operation/mod.rs new file mode 100644 index 0000000000..755d5deee4 --- /dev/null +++ b/src/okx/protocol/brc20/operation/mod.rs @@ -0,0 +1,306 @@ +mod deploy; +mod mint; +mod transfer; + +use super::{params::*, *}; +use crate::{okx::datastore::ord::Action, Inscription}; +use serde::{Deserialize, Serialize}; +use serde_json::{json, Value}; + +pub use self::{deploy::Deploy, mint::Mint, transfer::Transfer}; + +#[derive(Debug, Clone, PartialEq)] +pub enum Operation { + Deploy(Deploy), + Mint { + mint: Mint, + parent: Option, + }, + InscribeTransfer(Transfer), + Transfer(Transfer), +} + +impl Operation { + pub fn op_type(&self) -> OperationType { + match 
self { + Operation::Deploy(_) => OperationType::Deploy, + Operation::Mint { .. } => OperationType::Mint, + Operation::InscribeTransfer(_) => OperationType::InscribeTransfer, + Operation::Transfer(_) => OperationType::Transfer, + } + } +} + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +#[serde(tag = "op")] +enum RawOperation { + #[serde(rename = "deploy")] + Deploy(Deploy), + #[serde(rename = "mint")] + Mint(Mint), + #[serde(rename = "transfer")] + Transfer(Transfer), +} + +pub(crate) fn deserialize_brc20_operation( + inscription: &Inscription, + action: &Action, +) -> Result { + let content_body = std::str::from_utf8(inscription.body().ok_or(JSONError::InvalidJson)?)?; + if content_body.len() < 40 { + return Err(JSONError::NotBRC20Json.into()); + } + + let content_type = inscription + .content_type() + .ok_or(JSONError::InvalidContentType)?; + + if content_type != "text/plain" + && content_type != "text/plain;charset=utf-8" + && content_type != "text/plain;charset=UTF-8" + && content_type != "application/json" + && !content_type.starts_with("text/plain;") + { + return Err(JSONError::UnSupportContentType.into()); + } + let raw_operation = match deserialize_brc20(content_body) { + Ok(op) => op, + Err(e) => { + return Err(e.into()); + } + }; + + match action { + Action::New { parent, .. } => match raw_operation { + RawOperation::Deploy(deploy) => Ok(Operation::Deploy(deploy)), + RawOperation::Mint(mint) => Ok(Operation::Mint { + mint, + parent: *parent, + }), + RawOperation::Transfer(transfer) => Ok(Operation::InscribeTransfer(transfer)), + }, + Action::Transfer => match raw_operation { + RawOperation::Transfer(transfer) => Ok(Operation::Transfer(transfer)), + _ => Err(JSONError::NotBRC20Json.into()), + }, + } +} + +fn deserialize_brc20(s: &str) -> Result { + let value: Value = serde_json::from_str(s).map_err(|_| JSONError::InvalidJson)?; + if value.get("p") != Some(&json!(PROTOCOL_LITERAL)) { + return Err(JSONError::NotBRC20Json); + } + + serde_json::from_value(value).map_err(|e| JSONError::ParseOperationJsonError(e.to_string())) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::okx::datastore::ord::Action; + + #[test] + fn test_deploy_deserialize() { + let max_supply = "21000000".to_string(); + let mint_limit = "1000".to_string(); + + let json_str = format!( + r##"{{ + "p": "brc-20", + "op": "deploy", + "tick": "ordi", + "max": "{max_supply}", + "lim": "{mint_limit}" +}}"## + ); + + assert_eq!( + deserialize_brc20(&json_str).unwrap(), + RawOperation::Deploy(Deploy { + tick: "ordi".to_string(), + max_supply, + mint_limit: Some(mint_limit), + decimals: None, + self_mint: None, + }) + ); + } + + #[test] + fn test_mint_deserialize() { + let amount = "1000".to_string(); + + let json_str = format!( + r##"{{ + "p": "brc-20", + "op": "mint", + "tick": "ordi", + "amt": "{amount}" +}}"## + ); + + assert_eq!( + deserialize_brc20(&json_str).unwrap(), + RawOperation::Mint(Mint { + tick: "ordi".to_string(), + amount, + }) + ); + } + + #[test] + fn test_transfer_deserialize() { + let amount = "100".to_string(); + + let json_str = format!( + r##"{{ + "p": "brc-20", + "op": "transfer", + "tick": "ordi", + "amt": "{amount}" +}}"## + ); + + assert_eq!( + deserialize_brc20(&json_str).unwrap(), + RawOperation::Transfer(Transfer { + tick: "ordi".to_string(), + amount, + }) + ); + } + #[test] + fn test_json_duplicate_field() { + let json_str = r#"{"p":"brc-20","op":"mint","tick":"smol","amt":"333","amt":"33"}"#; + assert_eq!( + deserialize_brc20(json_str).unwrap(), + 
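+      // serde_json keeps the last value of a duplicated key, so "33" wins here.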
RawOperation::Mint(Mint { + tick: String::from("smol"), + amount: String::from("33"), + }) + ) + } + + #[test] + fn test_json_non_string() { + let json_str = r#"{"p":"brc-20","op":"mint","tick":"smol","amt":33}"#; + assert!(deserialize_brc20(json_str).is_err()) + } + + #[test] + fn test_deserialize_case_insensitive() { + let max_supply = "21000000".to_string(); + let mint_limit = "1000".to_string(); + + let json_str = format!( + r##"{{ + "P": "brc-20", + "Op": "deploy", + "Tick": "ordi", + "mAx": "{max_supply}", + "Lim": "{mint_limit}" +}}"## + ); + + assert_eq!(deserialize_brc20(&json_str), Err(JSONError::NotBRC20Json)); + } + #[test] + fn test_ignore_non_transfer_brc20() { + let content_type = "text/plain;charset=utf-8"; + let inscription = crate::inscription( + content_type, + r#"{"p":"brc-20","op":"deploy","tick":"abcd","max":"12000","lim":"12","dec":"11"}"#, + ); + assert_eq!( + deserialize_brc20_operation( + &inscription, + &Action::New { + cursed: false, + unbound: false, + vindicated: false, + parent: None, + inscription: inscription.clone() + }, + ) + .unwrap(), + Operation::Deploy(Deploy { + tick: "abcd".to_string(), + max_supply: "12000".to_string(), + mint_limit: Some("12".to_string()), + decimals: Some("11".to_string()), + self_mint: None, + }), + ); + let inscription = crate::inscription( + content_type, + r#"{"p":"brc-20","op":"mint","tick":"abcd","amt":"12000"}"#, + ); + + assert_eq!( + deserialize_brc20_operation( + &inscription, + &Action::New { + cursed: false, + unbound: false, + vindicated: false, + parent: None, + inscription: inscription.clone() + }, + ) + .unwrap(), + Operation::Mint { + mint: Mint { + tick: "abcd".to_string(), + amount: "12000".to_string() + }, + parent: None + } + ); + let inscription = crate::inscription( + content_type, + r#"{"p":"brc-20","op":"transfer","tick":"abcd","amt":"12000"}"#, + ); + + assert_eq!( + deserialize_brc20_operation( + &inscription, + &Action::New { + cursed: false, + unbound: false, + vindicated: false, + parent: None, + inscription: inscription.clone() + }, + ) + .unwrap(), + Operation::InscribeTransfer(Transfer { + tick: "abcd".to_string(), + amount: "12000".to_string() + }) + ); + + let inscription = crate::inscription( + content_type, + r#"{"p":"brc-20","op":"deploy","tick":"abcd","max":"12000","lim":"12","dec":"11"}"#, + ); + assert!(deserialize_brc20_operation(&inscription, &Action::Transfer).is_err()); + + let inscription = crate::inscription( + content_type, + r#"{"p":"brc-20","op":"mint","tick":"abcd","amt":"12000"}"#, + ); + assert!(deserialize_brc20_operation(&inscription, &Action::Transfer).is_err()); + let inscription = crate::inscription( + content_type, + r#"{"p":"brc-20","op":"transfer","tick":"abcd","amt":"12000"}"#, + ); + assert_eq!( + deserialize_brc20_operation(&inscription, &Action::Transfer).unwrap(), + Operation::Transfer(Transfer { + tick: "abcd".to_string(), + amount: "12000".to_string() + }) + ); + } +} diff --git a/src/okx/protocol/brc20/operation/transfer.rs b/src/okx/protocol/brc20/operation/transfer.rs new file mode 100644 index 0000000000..2026e31837 --- /dev/null +++ b/src/okx/protocol/brc20/operation/transfer.rs @@ -0,0 +1,58 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] +pub struct Transfer { + #[serde(rename = "tick")] + pub tick: String, + #[serde(rename = "amt")] + pub amount: String, +} + +#[cfg(test)] +mod tests { + use super::super::*; + use super::*; + + #[test] + fn test_serialize() { + let obj = Transfer { + tick: 
"abcd".to_string(), + amount: "333".to_string(), + }; + assert_eq!( + serde_json::to_string(&obj).unwrap(), + r#"{"tick":"abcd","amt":"333"}"# + ); + } + + #[test] + fn test_deserialize() { + assert_eq!( + deserialize_brc20(r#"{"p":"brc-20","op":"transfer","tick":"abcd","amt":"12000"}"#).unwrap(), + RawOperation::Transfer(Transfer { + tick: "abcd".to_string(), + amount: "12000".to_string() + }) + ); + } + + #[test] + fn test_loss_require_key() { + assert_eq!( + deserialize_brc20(r#"{"p":"brc-20","op":"transfer","tick":"abcd"}"#).unwrap_err(), + JSONError::ParseOperationJsonError("missing field `amt`".to_string()) + ); + } + + #[test] + fn test_duplicate_key() { + let json_str = r#"{"p":"brc-20","op":"transfer","tick":"smol","amt":"100","tick":"hhaa","amt":"200","tick":"actt"}"#; + assert_eq!( + deserialize_brc20(json_str).unwrap(), + RawOperation::Transfer(Transfer { + tick: "actt".to_string(), + amount: "200".to_string(), + }) + ); + } +} diff --git a/src/okx/protocol/brc20/params.rs b/src/okx/protocol/brc20/params.rs new file mode 100644 index 0000000000..5bd326c99c --- /dev/null +++ b/src/okx/protocol/brc20/params.rs @@ -0,0 +1,14 @@ +use super::num::Num; +use once_cell::sync::Lazy; + +pub const PROTOCOL_LITERAL: &str = "brc-20"; +pub const MAX_DECIMAL_WIDTH: u8 = 18; + +pub static MAXIMUM_SUPPLY: Lazy = Lazy::new(|| Num::from(u64::MAX)); + +pub static BIGDECIMAL_TEN: Lazy = Lazy::new(|| Num::from(10u64)); + +#[allow(dead_code)] +pub const fn default_decimals() -> u8 { + MAX_DECIMAL_WIDTH +} diff --git a/src/okx/protocol/brc20/policies.rs b/src/okx/protocol/brc20/policies.rs new file mode 100644 index 0000000000..8b160c30f8 --- /dev/null +++ b/src/okx/protocol/brc20/policies.rs @@ -0,0 +1,16 @@ +use crate::Chain; + +pub struct HardForks; + +impl HardForks { + /// Proposed block activation height for issuance and burn enhancements. 
+ /// Proposal content: https://l1f.discourse.group/t/brc-20-proposal-for-issuance-and-burn-enhancements-brc20-ip-1/621 + pub fn self_issuance_activation_height(chain: Chain) -> u32 { + match chain { + Chain::Mainnet => 837090, // decided by community + Chain::Testnet => 2413343, // decided by the ourselves + Chain::Regtest => 0, + Chain::Signet => 0, + } + } +} diff --git a/src/okx/protocol/context.rs b/src/okx/protocol/context.rs new file mode 100644 index 0000000000..bdf0d7406b --- /dev/null +++ b/src/okx/protocol/context.rs @@ -0,0 +1,288 @@ +use crate::{ + index::{ + entry::SatPointValue, InscriptionEntryValue, InscriptionIdValue, OutPointValue, TxidValue, + }, + inscriptions::InscriptionId, + okx::{ + datastore::{ + brc20::{ + redb::table::{ + get_balance, get_balances, get_token_info, get_tokens_info, get_transaction_receipts, + get_transferable_assets_by_account, get_transferable_assets_by_account_ticker, + get_transferable_assets_by_outpoint, get_transferable_assets_by_satpoint, + insert_token_info, insert_transferable_asset, remove_transferable_asset, + save_transaction_receipts, update_burned_token_info, update_mint_token_info, + update_token_balance, + }, + Balance, Brc20Reader, Brc20ReaderWriter, Receipt, Tick, TokenInfo, TransferableLog, + }, + ord::{ + collections::CollectionKind, + redb::table::{ + add_inscription_attributes, get_collection_inscription_id, + get_collections_of_inscription, get_inscription_number_by_sequence_number, + get_transaction_operations, get_txout_by_outpoint, save_transaction_operations, + set_inscription_by_collection_key, + }, + InscriptionOp, OrdReader, OrdReaderWriter, + }, + ScriptKey, + }, + lru::SimpleLru, + protocol::ChainContext, + }, + Chain, SatPoint, +}; +use anyhow::anyhow; +use bitcoin::{OutPoint, TxOut, Txid}; +use redb::{MultimapTable, Table}; + +#[allow(non_snake_case)] +pub struct Context<'a, 'db, 'txn> { + pub(crate) chain_conf: ChainContext, + pub(crate) tx_out_cache: &'a mut SimpleLru, + pub(crate) hit: u64, + pub(crate) miss: u64, + + // ord tables + pub(crate) ORD_TX_TO_OPERATIONS: &'a mut Table<'db, 'txn, &'static TxidValue, &'static [u8]>, + pub(crate) COLLECTIONS_KEY_TO_INSCRIPTION_ID: + &'a mut Table<'db, 'txn, &'static str, InscriptionIdValue>, + pub(crate) COLLECTIONS_INSCRIPTION_ID_TO_KINDS: + &'a mut MultimapTable<'db, 'txn, InscriptionIdValue, &'static [u8]>, + pub(crate) SEQUENCE_NUMBER_TO_INSCRIPTION_ENTRY: + &'a mut Table<'db, 'txn, u32, InscriptionEntryValue>, + pub(crate) OUTPOINT_TO_ENTRY: &'a mut Table<'db, 'txn, &'static OutPointValue, &'static [u8]>, + + // BRC20 tables + pub(crate) BRC20_BALANCES: &'a mut Table<'db, 'txn, &'static str, &'static [u8]>, + pub(crate) BRC20_TOKEN: &'a mut Table<'db, 'txn, &'static str, &'static [u8]>, + pub(crate) BRC20_EVENTS: &'a mut Table<'db, 'txn, &'static TxidValue, &'static [u8]>, + pub(crate) BRC20_SATPOINT_TO_TRANSFERABLE_ASSETS: + &'a mut Table<'db, 'txn, &'static SatPointValue, &'static [u8]>, + pub(crate) BRC20_ADDRESS_TICKER_TO_TRANSFERABLE_ASSETS: + &'a mut MultimapTable<'db, 'txn, &'static str, &'static SatPointValue>, +} + +impl<'a, 'db, 'txn> OrdReader for Context<'a, 'db, 'txn> { + type Error = anyhow::Error; + + fn get_inscription_number_by_sequence_number( + &self, + sequence_number: u32, + ) -> crate::Result { + get_inscription_number_by_sequence_number( + self.SEQUENCE_NUMBER_TO_INSCRIPTION_ENTRY, + sequence_number, + ) + .map_err(|e| anyhow!("failed to get inscription number from state! error: {e}"))? + .ok_or(anyhow!( + "failed to get inscription number! 
error: sequence number {} not found", + sequence_number + )) + } + + fn get_script_key_on_satpoint( + &mut self, + satpoint: &SatPoint, + chain: Chain, + ) -> crate::Result { + if let Some(tx_out) = self.tx_out_cache.get(&satpoint.outpoint) { + self.hit += 1; + Ok(ScriptKey::from_script(&tx_out.script_pubkey, chain)) + } else if let Some(tx_out) = get_txout_by_outpoint(self.OUTPOINT_TO_ENTRY, &satpoint.outpoint)? + { + self.miss += 1; + Ok(ScriptKey::from_script(&tx_out.script_pubkey, chain)) + } else { + Err(anyhow!( + "failed to get tx out! error: outpoint {} not found", + &satpoint.outpoint + )) + } + } + + fn get_transaction_operations( + &self, + txid: &Txid, + ) -> crate::Result>, Self::Error> { + get_transaction_operations(self.ORD_TX_TO_OPERATIONS, txid) + } + + fn get_collections_of_inscription( + &self, + inscription_id: &InscriptionId, + ) -> crate::Result>, Self::Error> { + get_collections_of_inscription(self.COLLECTIONS_INSCRIPTION_ID_TO_KINDS, inscription_id) + } + + fn get_collection_inscription_id( + &self, + collection_key: &str, + ) -> crate::Result, Self::Error> { + get_collection_inscription_id(self.COLLECTIONS_KEY_TO_INSCRIPTION_ID, collection_key) + } +} + +impl<'a, 'db, 'txn> OrdReaderWriter for Context<'a, 'db, 'txn> { + fn save_transaction_operations( + &mut self, + txid: &Txid, + operations: &[InscriptionOp], + ) -> crate::Result<(), Self::Error> { + save_transaction_operations(self.ORD_TX_TO_OPERATIONS, txid, operations) + } + + fn set_inscription_by_collection_key( + &mut self, + key: &str, + inscription_id: &InscriptionId, + ) -> crate::Result<(), Self::Error> { + set_inscription_by_collection_key(self.COLLECTIONS_KEY_TO_INSCRIPTION_ID, key, inscription_id) + } + + fn add_inscription_attributes( + &mut self, + inscription_id: &InscriptionId, + kind: CollectionKind, + ) -> crate::Result<(), Self::Error> { + add_inscription_attributes( + self.COLLECTIONS_INSCRIPTION_ID_TO_KINDS, + inscription_id, + kind, + ) + } +} + +impl<'a, 'db, 'txn> Brc20Reader for Context<'a, 'db, 'txn> { + type Error = anyhow::Error; + + fn get_balances(&self, script_key: &ScriptKey) -> crate::Result, Self::Error> { + get_balances(self.BRC20_BALANCES, script_key) + } + + fn get_balance( + &self, + script_key: &ScriptKey, + tick: &Tick, + ) -> crate::Result, Self::Error> { + get_balance(self.BRC20_BALANCES, script_key, tick) + } + + fn get_token_info(&self, tick: &Tick) -> crate::Result, Self::Error> { + get_token_info(self.BRC20_TOKEN, tick) + } + + fn get_tokens_info(&self) -> crate::Result, Self::Error> { + get_tokens_info(self.BRC20_TOKEN) + } + + fn get_transaction_receipts( + &self, + txid: &Txid, + ) -> crate::Result>, Self::Error> { + get_transaction_receipts(self.BRC20_EVENTS, txid) + } + + fn get_transferable_assets_by_account( + &self, + script: &ScriptKey, + ) -> crate::Result, Self::Error> { + get_transferable_assets_by_account( + self.BRC20_ADDRESS_TICKER_TO_TRANSFERABLE_ASSETS, + self.BRC20_SATPOINT_TO_TRANSFERABLE_ASSETS, + script, + ) + } + + fn get_transferable_assets_by_account_ticker( + &self, + script: &ScriptKey, + tick: &Tick, + ) -> crate::Result, Self::Error> { + get_transferable_assets_by_account_ticker( + self.BRC20_ADDRESS_TICKER_TO_TRANSFERABLE_ASSETS, + self.BRC20_SATPOINT_TO_TRANSFERABLE_ASSETS, + script, + tick, + ) + } + + fn get_transferable_assets_by_satpoint( + &self, + satpoint: &SatPoint, + ) -> crate::Result, Self::Error> { + get_transferable_assets_by_satpoint(self.BRC20_SATPOINT_TO_TRANSFERABLE_ASSETS, satpoint) + } + + fn 
get_transferable_assets_by_outpoint( + &self, + outpoint: OutPoint, + ) -> crate::Result, Self::Error> { + get_transferable_assets_by_outpoint(self.BRC20_SATPOINT_TO_TRANSFERABLE_ASSETS, outpoint) + } +} + +impl<'a, 'db, 'txn> Brc20ReaderWriter for Context<'a, 'db, 'txn> { + fn update_token_balance( + &mut self, + script_key: &ScriptKey, + new_balance: Balance, + ) -> crate::Result<(), Self::Error> { + update_token_balance(self.BRC20_BALANCES, script_key, new_balance) + } + + fn insert_token_info( + &mut self, + tick: &Tick, + new_info: &TokenInfo, + ) -> crate::Result<(), Self::Error> { + insert_token_info(self.BRC20_TOKEN, tick, new_info) + } + + fn update_mint_token_info( + &mut self, + tick: &Tick, + minted_amt: u128, + minted_block_number: u32, + ) -> crate::Result<(), Self::Error> { + update_mint_token_info(self.BRC20_TOKEN, tick, minted_amt, minted_block_number) + } + + fn update_burned_token_info( + &mut self, + tick: &Tick, + burned_amt: u128, + ) -> crate::Result<(), Self::Error> { + update_burned_token_info(self.BRC20_TOKEN, tick, burned_amt) + } + + fn save_transaction_receipts( + &mut self, + txid: &Txid, + receipt: &[Receipt], + ) -> crate::Result<(), Self::Error> { + save_transaction_receipts(self.BRC20_EVENTS, txid, receipt) + } + + fn insert_transferable_asset( + &mut self, + satpoint: SatPoint, + transferable_asset: &TransferableLog, + ) -> crate::Result<(), Self::Error> { + insert_transferable_asset( + self.BRC20_SATPOINT_TO_TRANSFERABLE_ASSETS, + self.BRC20_ADDRESS_TICKER_TO_TRANSFERABLE_ASSETS, + satpoint, + transferable_asset, + ) + } + + fn remove_transferable_asset(&mut self, satpoint: SatPoint) -> crate::Result<(), Self::Error> { + remove_transferable_asset( + self.BRC20_SATPOINT_TO_TRANSFERABLE_ASSETS, + self.BRC20_ADDRESS_TICKER_TO_TRANSFERABLE_ASSETS, + satpoint, + ) + } +} diff --git a/src/okx/protocol/execute_manager.rs b/src/okx/protocol/execute_manager.rs new file mode 100644 index 0000000000..a1944e5a20 --- /dev/null +++ b/src/okx/protocol/execute_manager.rs @@ -0,0 +1,56 @@ +use super::*; +use crate::{ + okx::{ + datastore::{ + brc20::Brc20ReaderWriter, + ord::{collections::CollectionKind, OrdReaderWriter}, + }, + protocol::{brc20 as brc20_proto, context::Context}, + }, + Result, +}; +use anyhow::anyhow; +use bitcoin::Txid; +use std::collections::HashSet; + +pub struct CallManager {} + +impl CallManager { + pub fn new() -> Self { + Self {} + } + + pub fn execute_message(&self, context: &mut Context, txid: &Txid, msgs: &[Message]) -> Result { + let mut receipts = vec![]; + // execute message + for msg in msgs { + match msg { + Message::BRC20(brc_msg) => { + let msg = brc20_proto::ExecutionMessage::from_message( + context, + brc_msg, + context.chain_conf.chain, + )?; + let receipt = brc20_proto::execute(context, &msg)?; + receipts.push(receipt); + } + }; + } + + context + .save_transaction_receipts(txid, &receipts) + .map_err(|e| anyhow!("failed to add transaction receipt to state! error: {e}"))?; + + let brc20_inscriptions = receipts + .into_iter() + .map(|receipt| receipt.inscription_id) + .collect::>(); + + for inscription_id in brc20_inscriptions { + context + .add_inscription_attributes(&inscription_id, CollectionKind::BRC20) + .map_err(|e| anyhow!("failed to add inscription attributes to state! 
error: {e}"))?; + } + Ok(()) + } +} diff --git a/src/okx/protocol/message.rs b/src/okx/protocol/message.rs new file mode 100644 index 0000000000..4806a6ebce --- /dev/null +++ b/src/okx/protocol/message.rs @@ -0,0 +1,13 @@ +use crate::okx::datastore::brc20 as brc20_store; +use crate::okx::protocol::brc20 as brc20_proto; + +#[allow(clippy::upper_case_acronyms)] +pub enum Message { + BRC20(brc20_proto::Message), +} + +#[allow(clippy::upper_case_acronyms)] +#[allow(unused)] +pub enum Receipt { + BRC20(brc20_store::Receipt), +} diff --git a/src/okx/protocol/mod.rs b/src/okx/protocol/mod.rs new file mode 100644 index 0000000000..947ef633a9 --- /dev/null +++ b/src/okx/protocol/mod.rs @@ -0,0 +1,43 @@ +pub(crate) mod brc20; +pub(crate) mod context; +pub(crate) mod execute_manager; +pub(crate) mod message; +pub(crate) mod ord; +pub(crate) mod protocol_manager; +pub(crate) mod resolve_manager; + +pub use self::protocol_manager::ProtocolManager; + +use { + self::{execute_manager::CallManager, message::Message, resolve_manager::MsgResolveManager}, + crate::{Chain, Options}, +}; + +#[derive(Debug, Copy, Clone, PartialEq)] +pub struct ChainContext { + pub chain: Chain, + pub blockheight: u32, + pub blocktime: u32, +} +#[derive(Debug, Clone, Copy)] +pub struct ProtocolConfig { + first_inscription_height: u32, + first_brc20_height: Option, + enable_ord_receipts: bool, + enable_index_bitmap: bool, +} + +impl ProtocolConfig { + pub(crate) fn new_with_options(options: &Options) -> Self { + Self { + first_inscription_height: options.first_inscription_height(), + first_brc20_height: if options.enable_index_brc20 { + Some(options.first_brc20_height()) + } else { + None + }, + enable_ord_receipts: options.enable_save_ord_receipts, + enable_index_bitmap: options.enable_index_bitmap, + } + } +} diff --git a/src/okx/protocol/ord/bitmap.rs b/src/okx/protocol/ord/bitmap.rs new file mode 100644 index 0000000000..fa102bb9af --- /dev/null +++ b/src/okx/protocol/ord/bitmap.rs @@ -0,0 +1,91 @@ +use crate::okx::datastore::ord::{OrdReader, OrdReaderWriter}; +use crate::okx::protocol::context::Context; +use { + crate::{ + okx::datastore::ord::{ + bitmap::District, + collections::CollectionKind, + operation::{Action, InscriptionOp}, + }, + Inscription, InscriptionId, Result, + }, + anyhow::anyhow, + bitcoin::Txid, + std::collections::HashMap, +}; + +pub fn index_bitmap( + context: &mut Context, + operations: &HashMap>, +) -> Result { + let mut count = 0; + + // ignore transferred or cursed inscriptions. + let mut positive_inscriptions = operations + .values() + .flatten() + .filter(|op| { + !op.inscription_number.unwrap().is_negative() && matches!(op.action, Action::New { .. }) + }) + .cloned() + .collect::>(); + + // sort by inscription number. + positive_inscriptions.sort_by_key(|op| op.inscription_number.unwrap()); + + for op in positive_inscriptions.into_iter() { + match op.action { + Action::New { inscription, .. } => { + if let Some((inscription_id, district)) = + index_district(context, inscription, op.inscription_id)? 
+ { + let key = district.to_collection_key(); + context.set_inscription_by_collection_key(&key, &inscription_id)?; + context.add_inscription_attributes(&inscription_id, CollectionKind::BitMap)?; + + count += 1; + } + } + _ => unreachable!(), + } + } + Ok(count) +} + +fn index_district( + context: &mut Context, + inscription: Inscription, + inscription_id: InscriptionId, +) -> Result> { + if let Some(content) = inscription.body() { + if let Ok(district) = District::parse(content) { + if district.number > context.chain_conf.blockheight { + return Ok(None); + } + let collection_key = district.to_collection_key(); + + if context + .get_collection_inscription_id(&collection_key) + .map_err(|e| { + anyhow!("failed to get collection inscription! key: {collection_key} error: {e}") + })? + .is_none() + { + log::info!( + "found valid district! number: {} content: {} inscription_id {}", + district.number, + std::str::from_utf8(content).unwrap(), + inscription_id, + ); + return Ok(Some((inscription_id, district))); + } + log::info!( + "duplicate district! number: {} content: {} inscription_id {}", + district.number, + std::str::from_utf8(content).unwrap(), + inscription_id, + ); + } + } + Ok(None) +} diff --git a/src/okx/protocol/ord/mod.rs b/src/okx/protocol/ord/mod.rs new file mode 100644 index 0000000000..163f8968d3 --- /dev/null +++ b/src/okx/protocol/ord/mod.rs @@ -0,0 +1 @@ +pub mod bitmap; diff --git a/src/okx/protocol/protocol_manager.rs b/src/okx/protocol/protocol_manager.rs new file mode 100644 index 0000000000..e3a4d0ab66 --- /dev/null +++ b/src/okx/protocol/protocol_manager.rs @@ -0,0 +1,100 @@ +use crate::okx::datastore::ord::OrdReaderWriter; +use crate::okx::protocol::context::Context; +use { + super::*, + crate::{ + index::BlockData, + okx::{datastore::ord::operation::InscriptionOp, protocol::ord as ord_proto}, + Instant, Result, + }, + bitcoin::Txid, + std::collections::HashMap, +}; + +pub struct ProtocolManager { + config: ProtocolConfig, + call_man: CallManager, + resolve_man: MsgResolveManager, +} + +impl ProtocolManager { + // Need three datastore, and they're all in the same write transaction. + pub fn new(config: ProtocolConfig) -> Self { + Self { + config, + call_man: CallManager::new(), + resolve_man: MsgResolveManager::new(config), + } + } + + pub(crate) fn index_block( + &self, + context: &mut Context, + block: &BlockData, + operations: HashMap>, + ) -> Result { + let start = Instant::now(); + let mut inscriptions_size = 0; + let mut messages_size = 0; + let mut cost1 = 0u128; + let mut cost2 = 0u128; + let mut cost3 = 0u128; + // skip the coinbase transaction. + for (tx, txid) in block.txdata.iter() { + // skip coinbase transaction. + if tx + .input + .first() + .is_some_and(|tx_in| tx_in.previous_output.is_null()) + { + continue; + } + + // index inscription operations. + if let Some(tx_operations) = operations.get(txid) { + // save all transaction operations to ord database. + if self.config.enable_ord_receipts + && context.chain_conf.blockheight >= self.config.first_inscription_height + { + let start = Instant::now(); + context.save_transaction_operations(txid, tx_operations)?; + inscriptions_size += tx_operations.len(); + cost1 += start.elapsed().as_micros(); + } + + let start = Instant::now(); + // Resolve and execute messages. 
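+        // resolution turns inscription operations into protocol messages (BRC-20 only for now); execution applies them to the database tables and collects receipts.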
+ let messages = self + .resolve_man + .resolve_message(context, tx, tx_operations)?; + cost2 += start.elapsed().as_micros(); + + let start = Instant::now(); + self.call_man.execute_message(context, txid, &messages)?; + cost3 += start.elapsed().as_micros(); + messages_size += messages.len(); + } + } + + let bitmap_start = Instant::now(); + let mut bitmap_count = 0; + if self.config.enable_index_bitmap { + bitmap_count = ord_proto::bitmap::index_bitmap(context, &operations)?; + } + let cost4 = bitmap_start.elapsed().as_millis(); + + log::info!( + "Protocol Manager indexed block {} with ord inscriptions {}, messages {}, bitmap {} in {} ms, {}/{}/{}/{}", + context.chain_conf.blockheight, + inscriptions_size, + messages_size, + bitmap_count, + start.elapsed().as_millis(), + cost1/1000, + cost2/1000, + cost3/1000, + cost4, + ); + Ok(()) + } +} diff --git a/src/okx/protocol/resolve_manager.rs b/src/okx/protocol/resolve_manager.rs new file mode 100644 index 0000000000..dc62a88410 --- /dev/null +++ b/src/okx/protocol/resolve_manager.rs @@ -0,0 +1,81 @@ +use { + super::*, + crate::{ + index::entry::{Entry, SatPointValue}, + okx::{ + datastore::{ + brc20::{redb::table::get_transferable_assets_by_outpoint, TransferableLog}, + ord::operation::InscriptionOp, + }, + protocol::{context::Context, Message}, + }, + Result, + }, + bitcoin::Transaction, + std::collections::HashMap, +}; + +pub struct MsgResolveManager { + config: ProtocolConfig, +} + +impl MsgResolveManager { + pub fn new(config: ProtocolConfig) -> Self { + Self { config } + } + + pub fn resolve_message( + &self, + context: &Context, + tx: &Transaction, + operations: &[InscriptionOp], + ) -> Result> { + log::debug!( + "Resolve Manager indexed transaction {}, operations size: {}, data: {:?}", + tx.txid(), + operations.len(), + operations + ); + let mut messages = Vec::new(); + let mut operation_iter = operations.iter().peekable(); + + for input in &tx.input { + // "operations" is a list of all the operations in the current block, and they are ordered. + // We just need to find the operation corresponding to the current transaction here. + while let Some(operation) = operation_iter.peek() { + if operation.old_satpoint.outpoint != input.previous_output { + break; + } + let operation = operation_iter.next().unwrap(); + + // Parse BRC20 message through inscription operation. + if self + .config + .first_brc20_height + .map(|height| context.chain_conf.blockheight >= height) + .unwrap_or(false) + { + let satpoint_to_transfer_assets: HashMap = + get_transferable_assets_by_outpoint( + context.BRC20_SATPOINT_TO_TRANSFERABLE_ASSETS, + input.previous_output, + )? + .into_iter() + .map(|(satpoint, asset)| (satpoint.store(), asset)) + .collect(); + + if let Some(msg) = brc20::Message::resolve(operation, satpoint_to_transfer_assets)? 
{ + log::debug!( + "BRC20 resolved the message from {:?}, msg {:?}", + operation, + msg + ); + messages.push(Message::BRC20(msg)); + continue; + } + } + } + } + Ok(messages) + } +} diff --git a/src/options.rs b/src/options.rs index ec6261f0f8..a7854ccfca 100644 --- a/src/options.rs +++ b/src/options.rs @@ -6,7 +6,12 @@ use {super::*, bitcoincore_rpc::Auth}; .required(false) .args(&["chain_argument", "signet", "regtest", "testnet"]), ))] + pub struct Options { + #[arg(long, default_value_t=LogLevel::default(), help = "log level")] + pub(crate) log_level: LogLevel, + #[arg(long, help = "write log in directory ")] + pub(crate) log_dir: Option, #[arg(long, help = "Load Bitcoin Core data dir from .")] pub(crate) bitcoin_data_dir: Option, #[arg(long, help = "Authenticate to Bitcoin Core RPC with .")] @@ -33,6 +38,12 @@ pub struct Options { help = "Set index cache to bytes. By default takes 1/4 of available RAM." )] pub(crate) db_cache_size: Option, + #[arg( + long, + default_value = "10000000", + help = "Set lru cache to . By default 10000000" + )] + pub(crate) lru_size: usize, #[arg( long, help = "Don't look for inscriptions below ." @@ -66,6 +77,48 @@ pub struct Options { pub(crate) signet: bool, #[arg(long, short, help = "Use testnet. Equivalent to `--chain testnet`.")] pub(crate) testnet: bool, + #[arg(long, help = "Enable Save Ord Receipts.")] + pub(crate) enable_save_ord_receipts: bool, + #[arg(long, help = "Enable Index Bitmap Collection.")] + pub(crate) enable_index_bitmap: bool, + // OKX defined options. + #[arg(long, help = "Enable Index all of BRC20 Protocol")] + pub(crate) enable_index_brc20: bool, + #[arg( + long, + help = "Don't look for BRC20 messages below ." + )] + pub(crate) first_brc20_height: Option, + #[clap(long, default_value = "200", help = "DB commit interval.")] + pub(crate) commit_height_interval: u64, + #[clap( + long, + default_value = "0", + help = "(experimental) DB commit persist interval." 
+ )] + pub(crate) commit_persist_interval: u64, +} + +#[derive(Debug, Clone)] +pub(crate) struct LogLevel(pub log::LevelFilter); + +impl Default for LogLevel { + fn default() -> Self { + Self(log::LevelFilter::Error) + } +} + +impl Display for LogLevel { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + Display::fmt(&self.0, f) + } +} + +impl FromStr for LogLevel { + type Err = ::Err; + fn from_str(level: &str) -> Result { + Ok(Self(log::LevelFilter::from_str(level)?)) + } } impl Options { @@ -91,6 +144,18 @@ impl Options { } } + pub(crate) fn first_brc20_height(&self) -> u32 { + if self.chain() == Chain::Regtest { + self.first_brc20_height.unwrap_or(0) + } else if integration_test() { + 0 + } else { + self + .first_brc20_height + .unwrap_or_else(|| self.chain().first_brc20_height()) + } + } + pub(crate) fn first_rune_height(&self) -> u32 { if integration_test() { 0 @@ -147,6 +212,17 @@ impl Options { self.chain().join_with_data_dir(&self.data_dir) } + pub(crate) fn log_level(&self) -> log::LevelFilter { + self.log_level.0 + } + + pub(crate) fn log_dir(&self) -> PathBuf { + self + .log_dir + .as_ref() + .map_or_else(|| self.data_dir().join("logs"), |path| path.clone()) + } + pub(crate) fn load_config(&self) -> Result { match &self.config { Some(path) => Ok(serde_yaml::from_reader(File::open(path)?)?), @@ -247,6 +323,22 @@ impl Options { Ok(client) } + + pub(crate) fn commit_height_interval(&self) -> u64 { + if self.commit_height_interval == 0 { + 1 + } else { + self.commit_height_interval + } + } + + pub(crate) fn commit_persist_interval(&self) -> u64 { + if self.commit_persist_interval == 0 { + 1 + } else { + self.commit_persist_interval + } + } } #[cfg(test)] diff --git a/src/subcommand/server.rs b/src/subcommand/server.rs index 925451d1c7..8705e410c5 100644 --- a/src/subcommand/server.rs +++ b/src/subcommand/server.rs @@ -3,7 +3,7 @@ use { accept_encoding::AcceptEncoding, accept_json::AcceptJson, deserialize_from_str::DeserializeFromStr, - error::{OptionExt, ServerError, ServerResult}, + error::{ApiError, OptionExt, ServerError, ServerResult}, }, super::*, crate::{ @@ -43,11 +43,23 @@ use { cors::{Any, CorsLayer}, set_header::SetResponseHeaderLayer, }, + utoipa::OpenApi, }; mod accept_encoding; mod accept_json; +mod api; +mod brc20; mod error; +mod info; +mod ord; +mod response; +mod sat; +mod types; +mod utils; + +use self::api::*; +use self::response::ApiResponse; #[derive(Copy, Clone)] pub(crate) enum InscriptionQuery { @@ -188,6 +200,79 @@ impl Server { }); INDEXER.lock().unwrap().replace(index_thread); + #[derive(OpenApi)] + #[openapi( + paths( + brc20::brc20_balance, + brc20::brc20_all_balance, + brc20::brc20_tick_info, + brc20::brc20_all_tick_info, + brc20::brc20_tx_events, + brc20::brc20_block_events, + brc20::brc20_transferable, + brc20::brc20_all_transferable, + + ord::ord_inscription_id, + ord::ord_inscription_number, + ord::ord_outpoint, + ord::ord_txid_inscriptions, + ord::ord_block_inscriptions, + + info::node_info, + ), + components(schemas( + // BRC20 schemas + brc20::ApiTickInfo, + brc20::ApiTickInfos, + brc20::ApiBalance, + brc20::ApiBalances, + brc20::ApiTxEvent, + brc20::ApiDeployEvent, + brc20::ApiMintEvent, + brc20::ApiInscribeTransferEvent, + brc20::ApiTransferEvent, + brc20::ApiErrorEvent, + brc20::ApiTxEvents, + brc20::ApiBlockEvents, + brc20::ApiTransferableAsset, + brc20::ApiTransferableAssets, + + // BRC20 responses schemas + response::ApiBRC20Tick, + response::ApiBRC20AllTick, + response::ApiBRC20Balance, + response::ApiBRC20AllBalance, + 
response::ApiBRC20TxEvents, + response::ApiBRC20BlockEvents, + response::ApiBRC20Transferable, + + // Ord schemas + ord::ApiInscription, + ord::ApiContentEncoding, + ord::ApiInscriptionDigest, + ord::ApiOutpointInscriptions, + ord::ApiOutPointResult, + ord::ApiInscriptionAction, + ord::ApiTxInscription, + ord::ApiTxInscriptions, + ord::ApiBlockInscriptions, + + // Ord responses schemas + response::ApiOrdInscription, + response::ApiOrdTxInscriptions, + response::ApiOrdBlockInscriptions, + response::ApiOrdOutPointResult, + + // Node Info schemas + info::NodeInfo, + info::ChainInfo, + types::ScriptPubkey, + response::Node, + ApiError + )) + )] + struct ApiDoc; + let config = Arc::new(options.load_config()?); let acme_domains = self.acme_domains()?; @@ -200,6 +285,68 @@ impl Server { decompress: self.decompress, }); + let api_v1_router = Router::new() + .route( + "/api-docs/openapi.json", + get(|| async { ApiDoc::openapi().to_pretty_json().unwrap() }), + ) + .route("/node/info", get(info::node_info)) + .route("/ord/id/:id/inscription", get(ord::ord_inscription_id)) + .route( + "/ord/number/:number/inscription", + get(ord::ord_inscription_number), + ) + .route("/ord/outpoint/:outpoint/info", get(ord::ord_outpoint)) + .route( + "/ord/tx/:txid/inscriptions", + get(ord::ord_txid_inscriptions), + ) + .route( + "/ord/block/:blockhash/inscriptions", + get(ord::ord_block_inscriptions), + ) + .route( + "/ord/debug/bitmap/district/:number", + get(ord::ord_debug_bitmap_district), + ) + .route("/brc20/tick/:tick", get(brc20::brc20_tick_info)) + .route("/brc20/tick", get(brc20::brc20_all_tick_info)) + .route( + "/brc20/tick/:tick/address/:address/balance", + get(brc20::brc20_balance), + ) + .route( + "/brc20/address/:address/balance", + get(brc20::brc20_all_balance), + ) + .route( + "/brc20/tick/:tick/address/:address/transferable", + get(brc20::brc20_transferable), + ) + .route( + "/brc20/address/:address/transferable", + get(brc20::brc20_all_transferable), + ) + .route( + "/brc20/outpoint/:outpoint/transferable", + get(brc20::brc20_outpoint), + ) + .route("/brc20/tx/:txid/events", get(brc20::brc20_tx_events)) + .route( + "/brc20/block/:block_hash/events", + get(brc20::brc20_block_events), + ) + .route( + "/sat/outpoint/:outpoint/info", + get(sat::sat_range_by_outpoint), + ) + .route( + "/sat/outpoint/:outpoint/rarity", + get(sat::sat_range_with_rarity_by_outpoint), + ); + + let api_router = Router::new().nest("/v1", api_v1_router); + let router = Router::new() .route("/", get(Self::home)) .route("/block/:query", get(Self::block)) @@ -270,6 +417,7 @@ impl Server { .route("/static/*path", get(Self::static_asset)) .route("/status", get(Self::status)) .route("/tx/:txid", get(Self::transaction)) + .nest("/api", api_router) .layer(Extension(index)) .layer(Extension(server_config.clone())) .layer(Extension(config)) @@ -3915,6 +4063,7 @@ mod tests { } #[test] + #[ignore] fn collections_page_prev_and_next() { let server = TestServer::new_with_regtest_with_index_sats(); diff --git a/src/subcommand/server/api.rs b/src/subcommand/server/api.rs new file mode 100644 index 0000000000..a5079a6026 --- /dev/null +++ b/src/subcommand/server/api.rs @@ -0,0 +1,31 @@ +use {super::*, utoipa::IntoParams}; + +#[derive(Deserialize, IntoParams)] +pub struct Pagination { + /// Start index of the result. + pub start: Option, + /// Limit of the result. 
+ pub limit: Option, +} + +pub(crate) type ApiResult = Result>, ApiError>; + +pub(super) trait ApiOptionExt { + fn ok_or_api_err ApiError>(self, f: F) -> Result; + fn ok_or_api_not_found(self, s: S) -> Result; +} + +impl ApiOptionExt for Option { + fn ok_or_api_err ApiError>(self, f: F) -> Result { + match self { + Some(value) => Ok(value), + None => Err(f()), + } + } + fn ok_or_api_not_found(self, s: S) -> Result { + match self { + Some(value) => Ok(value), + None => Err(ApiError::not_found(s)), + } + } +} diff --git a/src/subcommand/server/brc20/balance.rs b/src/subcommand/server/brc20/balance.rs new file mode 100644 index 0000000000..de20d292af --- /dev/null +++ b/src/subcommand/server/brc20/balance.rs @@ -0,0 +1,115 @@ +use {super::*, crate::okx::datastore::brc20::Tick, axum::Json, utoipa::ToSchema}; + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +#[schema(as = brc20::Balance)] +pub struct ApiBalance { + /// Name of the ticker. + pub tick: String, + /// Available balance. + #[schema(format = "uint64")] + pub available_balance: String, + /// Transferable balance. + #[schema(format = "uint64")] + pub transferable_balance: String, + /// Overall balance. + #[schema(format = "uint64")] + pub overall_balance: String, +} + +/// Get the ticker balance of the address. +/// +/// Retrieve the asset balance of the 'ticker' for the address. +#[utoipa::path( + get, + path = "/api/v1/brc20/tick/{ticker}/address/{address}/balance", + params( + ("ticker" = String, Path, description = "Token ticker", min_length = 4, max_length = 4), + ("address" = String, Path, description = "Address") + ), + responses( + (status = 200, description = "Obtain account balance by query ticker.", body = BRC20Balance), + (status = 400, description = "Bad query.", body = ApiError, example = json!(&ApiError::bad_request("bad request"))), + (status = 404, description = "Not found.", body = ApiError, example = json!(&ApiError::not_found("not found"))), + (status = 500, description = "Internal server error.", body = ApiError, example = json!(&ApiError::internal("internal error"))), + ) + )] +pub(crate) async fn brc20_balance( + Extension(index): Extension>, + Path((tick, address)): Path<(String, String)>, +) -> ApiResult { + log::debug!("rpc: get brc20_balance: {} {}", tick, address); + + let rtx = index.begin_read()?; + let chain = index.get_chain(); + + let ticker = Tick::from_str(&tick).map_err(|_| BRC20ApiError::InvalidTicker(tick.clone()))?; + let script_key = utils::parse_and_validate_script_key_with_chain(&address, chain) + .map_err(ApiError::bad_request)?; + + let balance = Index::get_brc20_balance_by_tick_and_address(ticker, script_key, &rtx)? + .ok_or(BRC20ApiError::UnknownTicker(tick.clone()))?; + + let available_balance = balance.overall_balance - balance.transferable_balance; + + log::debug!("rpc: get brc20_balance: {} {} {:?}", tick, address, balance); + + Ok(Json(ApiResponse::ok(ApiBalance { + tick: balance.tick.to_string(), + available_balance: available_balance.to_string(), + transferable_balance: balance.transferable_balance.to_string(), + overall_balance: balance.overall_balance.to_string(), + }))) +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +#[schema(as = brc20::AllBalance)] +pub struct ApiBalances { + #[schema(value_type = Vec)] + pub balance: Vec, +} + +/// Get all ticker balances of the address. +/// +/// Retrieve all BRC20 protocol asset balances associated with a address. 
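// The balance handlers above and below derive the available balance rather than reading a stored
// field; a worked example with made-up numbers, following the `available = overall - transferable`
// rule used in both places:
//
//   overall_balance      = 1000
//   transferable_balance =  250
//   available_balance    = 1000 - 250 = 750
//
// All three values are serialized as strings, matching the uint64-as-string convention used by the
// other BRC20 schemas in this change.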
+#[utoipa::path( + get, + path = "/api/v1/brc20/address/{address}/balance", + params( + ("address" = String, Path, description = "Address") + ), + responses( + (status = 200, description = "Obtain account balances by query address.", body = BRC20AllBalance), + (status = 400, description = "Bad query.", body = ApiError, example = json!(&ApiError::bad_request("bad request"))), + (status = 404, description = "Not found.", body = ApiError, example = json!(&ApiError::not_found("not found"))), + (status = 500, description = "Internal server error.", body = ApiError, example = json!(&ApiError::internal("internal error"))), + ) + )] +pub(crate) async fn brc20_all_balance( + Extension(index): Extension>, + Path(account): Path, +) -> ApiResult { + log::debug!("rpc: get brc20_all_balance: {}", account); + + let rtx = index.begin_read()?; + let chain = index.get_chain(); + + let script_key = utils::parse_and_validate_script_key_with_chain(&account, chain) + .map_err(ApiError::bad_request)?; + + let all_balance = rtx.brc20_get_all_balance_by_address(script_key)?; + log::debug!("rpc: get brc20_all_balance: {} {:?}", account, all_balance); + + Ok(Json(ApiResponse::ok(ApiBalances { + balance: all_balance + .into_iter() + .map(|bal| ApiBalance { + tick: bal.tick.to_string(), + available_balance: (bal.overall_balance - bal.transferable_balance).to_string(), + transferable_balance: bal.transferable_balance.to_string(), + overall_balance: bal.overall_balance.to_string(), + }) + .collect(), + }))) +} diff --git a/src/subcommand/server/brc20/mod.rs b/src/subcommand/server/brc20/mod.rs new file mode 100644 index 0000000000..8d5887ce48 --- /dev/null +++ b/src/subcommand/server/brc20/mod.rs @@ -0,0 +1,33 @@ +use super::{types::ScriptPubkey, *}; +mod balance; +mod outpoint; +mod receipt; +mod ticker; +mod transferable; + +pub(super) use {balance::*, outpoint::*, receipt::*, ticker::*, transferable::*}; + +#[derive(Debug, thiserror::Error)] +pub(super) enum BRC20ApiError { + #[error("invalid ticker {0}, must be 4 characters long")] + InvalidTicker(String), + #[error("failed to retrieve ticker {0} in the database")] + UnknownTicker(String), + /// Thrown when a transaction receipt was requested but not matching transaction receipt exists + #[error("transaction receipt {0} not found")] + TransactionReceiptNotFound(Txid), + /// Thrown when an internal error occurs + #[error("internal error: {0}")] + Internal(String), +} + +impl From for ApiError { + fn from(error: BRC20ApiError) -> Self { + match error { + BRC20ApiError::InvalidTicker(_) => Self::bad_request(error.to_string()), + BRC20ApiError::UnknownTicker(_) => Self::not_found(error.to_string()), + BRC20ApiError::TransactionReceiptNotFound(_) => Self::not_found(error.to_string()), + BRC20ApiError::Internal(_) => Self::internal(error.to_string()), + } + } +} diff --git a/src/subcommand/server/brc20/outpoint.rs b/src/subcommand/server/brc20/outpoint.rs new file mode 100644 index 0000000000..d1fa2ceff6 --- /dev/null +++ b/src/subcommand/server/brc20/outpoint.rs @@ -0,0 +1,71 @@ +use {super::*, utoipa::ToSchema}; + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, ToSchema)] +#[schema(as = ord::ApiOutPointResult)] +#[serde(rename_all = "camelCase")] +pub struct ApiOutPointResult { + #[schema(value_type = Option)] + pub result: Option>, + pub latest_blockhash: String, + #[schema(format = "uint64")] + pub latest_height: u32, +} + +// /brc20/outpoint/:outpoint/transferable +/// Retrieve the outpoint brc20 transferable assets with the specified outpoint. 
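// A usage sketch for this route, under assumptions: the host and port below are illustrative and
// the outpoint is a placeholder, not a value taken from this change:
//
//   GET http://localhost:80/api/v1/brc20/outpoint/<txid>:<vout>/transferable
//
// When the outpoint holds transferable assets, the handler returns them inside the shared
// `ApiResponse` envelope; otherwise it returns `result: null` together with the latest indexed
// block height and hash, as implemented below.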
+#[utoipa::path( + get, + path = "/api/v1/brc20/outpoint/{outpoint}/transferable", + params( + ("outpoint" = String, Path, description = "Outpoint") +), + responses( + (status = 200, description = "Obtain outpoint information", body = OrdOutPointData), + (status = 400, description = "Bad query.", body = ApiError, example = json!(&ApiError::bad_request("bad request"))), + (status = 404, description = "Not found.", body = ApiError, example = json!(&ApiError::not_found("not found"))), + (status = 500, description = "Internal server error.", body = ApiError, example = json!(&ApiError::internal("internal error"))), + ) +)] +pub(crate) async fn brc20_outpoint( + Extension(index): Extension>, + Path(outpoint): Path, +) -> ApiResult { + log::debug!("rpc: get brc20_outpoint: {outpoint}"); + + let rtx = index.begin_read()?; + + let (latest_height, latest_blockhash) = rtx.latest_block()?.ok_or_api_err(|| { + BRC20ApiError::Internal("Failed to retrieve the latest block from the database.".to_string()) + .into() + })?; + + let transferable_assets_with_satpoints = + rtx.brc20_transferable_assets_on_output_with_satpoints(outpoint)?; + + // If there are no transferable assets on the output, return None along with the latest block state. + if transferable_assets_with_satpoints.is_empty() { + return Ok(Json(ApiResponse::ok(ApiOutPointResult { + result: None, + latest_height: latest_height.n(), + latest_blockhash: latest_blockhash.to_string(), + }))); + } + + Ok(Json(ApiResponse::ok(ApiOutPointResult { + result: Some( + transferable_assets_with_satpoints + .into_iter() + .map(|(satpoint, asset)| ApiTransferableAsset { + inscription_id: asset.inscription_id.to_string(), + inscription_number: asset.inscription_number, + amount: asset.amount.to_string(), + tick: asset.tick.as_str().to_string(), + owner: asset.owner.to_string(), + location: satpoint, + }) + .collect(), + ), + latest_height: latest_height.n(), + latest_blockhash: latest_blockhash.to_string(), + }))) +} diff --git a/src/subcommand/server/brc20/receipt.rs b/src/subcommand/server/brc20/receipt.rs new file mode 100644 index 0000000000..05bf0f2c8b --- /dev/null +++ b/src/subcommand/server/brc20/receipt.rs @@ -0,0 +1,388 @@ +use { + self::okx::datastore::brc20::OperationType, super::*, + crate::okx::datastore::brc20 as brc20_store, axum::Json, utoipa::ToSchema, +}; + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[schema(as = brc20::TxEvent)] +#[serde(untagged)] +#[serde(rename_all = "camelCase")] +pub enum ApiTxEvent { + /// Event generated by deploying a ticker. + #[schema(value_type = brc20::ApiDeployEvent)] + Deploy(ApiDeployEvent), + /// Event generated by minting. + #[schema(value_type = brc20::ApiMintEvent)] + Mint(ApiMintEvent), + /// Event generated by pretransfer. + #[schema(value_type = brc20::ApiInscribeTransferEvent)] + InscribeTransfer(ApiInscribeTransferEvent), + #[schema(value_type = brc20::ApiTransferEvent)] + /// Event generated by transfer. + Transfer(ApiTransferEvent), + /// Event generated when the execution has failed. 
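// A note on the wire format of these events, as a sketch rather than a guarantee: because
// `ApiTxEvent` is `#[serde(untagged)]`, each variant serializes as its inner struct, and the
// event kind travels in that struct's renamed `type` field instead of an enum tag. With
// placeholder values, a deploy receipt could look like:
//
//   { "type": "deploy", "tick": "ordi", "inscriptionId": "...", "supply": "21000000", ... }
//
// Field names are camelCase because of the `rename_all` attribute shared by these schemas.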
+ #[schema(value_type = brc20::ApiErrorEvent)] + Error(ApiErrorEvent), +} + +impl From for ApiTxEvent { + fn from(event: brc20_store::Receipt) -> Self { + match event.result.as_ref() { + Ok(brc20_store::Event::Deploy(deploy_event)) => { + Self::Deploy(ApiDeployEvent::parse(&event, deploy_event)) + } + Ok(brc20_store::Event::Mint(mint_event)) => { + Self::Mint(ApiMintEvent::parse(&event, mint_event)) + } + Ok(brc20_store::Event::InscribeTransfer(inscribe_transfer_event)) => Self::InscribeTransfer( + ApiInscribeTransferEvent::parse(&event, inscribe_transfer_event), + ), + Ok(brc20_store::Event::Transfer(transfer_event)) => { + Self::Transfer(ApiTransferEvent::parse(&event, transfer_event)) + } + Err(err) => Self::Error(ApiErrorEvent::parse(&event, err)), + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[schema(as = brc20::ApiErrorEvent)] +#[serde(rename_all = "camelCase")] +pub struct ApiErrorEvent { + /// Event type. + #[serde(rename = "type")] + pub event: String, + /// The inscription id. + pub inscription_id: String, + /// The inscription number. + pub inscription_number: i32, + /// The inscription satpoint of the transaction input. + pub old_satpoint: String, + /// The inscription satpoint of the transaction output. + pub new_satpoint: String, + /// The message sender which is an address or script pubkey hash. + pub from: ScriptPubkey, + /// The message receiver which is an address or script pubkey hash. + pub to: ScriptPubkey, + /// Executed state. + pub valid: bool, + /// Error message. + pub msg: String, +} + +impl ApiErrorEvent { + fn parse(event: &brc20_store::Receipt, error: &brc20_store::BRC20Error) -> Self { + Self { + inscription_id: event.inscription_id.to_string(), + inscription_number: event.inscription_number, + old_satpoint: event.old_satpoint.to_string(), + new_satpoint: event.new_satpoint.to_string(), + from: event.from.clone().into(), + to: event.to.clone().into(), + valid: false, + msg: error.to_string(), + event: event.op.to_string(), + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[schema(as = brc20::ApiDeployEvent)] +#[serde(rename_all = "camelCase")] +pub struct ApiDeployEvent { + /// Event type. + #[serde(rename = "type")] + pub event: String, + /// The ticker deployed. + pub tick: String, + /// The inscription id. + pub inscription_id: String, + /// The inscription number. + pub inscription_number: i32, + /// The inscription satpoint of the transaction input. + pub old_satpoint: String, + /// The inscription satpoint of the transaction output. + pub new_satpoint: String, + /// The total supply of the deployed ticker. + pub supply: String, + /// The limit per mint of the deployed ticker. + pub limit_per_mint: String, + /// The decimal of the deployed ticker. + pub decimal: u8, + /// Whether the ticker is self minted. + pub self_mint: bool, + /// The message sender which is an address or script pubkey hash. + pub from: ScriptPubkey, + /// The message receiver which is an address or script pubkey hash. + pub to: ScriptPubkey, + /// Executed state. + pub valid: bool, + /// Message generated during execution. 
+ pub msg: String, +} + +impl ApiDeployEvent { + fn parse(event: &brc20_store::Receipt, deploy_event: &brc20_store::DeployEvent) -> Self { + Self { + tick: deploy_event.tick.to_string(), + inscription_id: event.inscription_id.to_string(), + inscription_number: event.inscription_number, + old_satpoint: event.old_satpoint.to_string(), + new_satpoint: event.new_satpoint.to_string(), + supply: deploy_event.supply.to_string(), + limit_per_mint: deploy_event.limit_per_mint.to_string(), + decimal: deploy_event.decimal, + self_mint: deploy_event.self_mint, + from: event.from.clone().into(), + to: event.to.clone().into(), + valid: true, + msg: "ok".to_string(), + event: OperationType::Deploy.to_string(), + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[schema(as = brc20::ApiMintEvent)] +#[serde(rename_all = "camelCase")] +pub struct ApiMintEvent { + #[serde(rename = "type")] + /// Event type. + pub event: String, + /// The ticker minted. + pub tick: String, + /// The inscription id. + pub inscription_id: String, + /// The inscription number. + pub inscription_number: i32, + /// The inscription satpoint of the transaction input. + pub old_satpoint: String, + /// The inscription satpoint of the transaction output. + pub new_satpoint: String, + /// The amount minted. + pub amount: String, + /// The message sender which is an address or script pubkey hash. + pub from: ScriptPubkey, + /// The message receiver which is an address or script pubkey hash. + pub to: ScriptPubkey, + /// Executed state. + pub valid: bool, + /// Message generated during execution. + pub msg: String, +} + +impl ApiMintEvent { + fn parse(event: &brc20_store::Receipt, mint_event: &brc20_store::MintEvent) -> Self { + Self { + tick: mint_event.tick.to_string(), + inscription_id: event.inscription_id.to_string(), + inscription_number: event.inscription_number, + old_satpoint: event.old_satpoint.to_string(), + new_satpoint: event.new_satpoint.to_string(), + amount: mint_event.amount.to_string(), + from: event.from.clone().into(), + to: event.to.clone().into(), + valid: true, + msg: mint_event.msg.clone().unwrap_or("ok".to_string()), + event: OperationType::Mint.to_string(), + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[schema(as = brc20::ApiInscribeTransferEvent)] +#[serde(rename_all = "camelCase")] +pub struct ApiInscribeTransferEvent { + /// Event type. + #[serde(rename = "type")] + pub event: String, + /// The ticker of pretransfer. + pub tick: String, + /// The inscription id. + pub inscription_id: String, + /// The inscription number. + pub inscription_number: i32, + /// The inscription satpoint of the transaction input. + pub old_satpoint: String, + /// The inscription satpoint of the transaction output. + pub new_satpoint: String, + /// The amount of pretransfer. + pub amount: String, + /// The message sender which is an address or script pubkey hash. + pub from: ScriptPubkey, + /// The message receiver which is an address or script pubkey hash. + pub to: ScriptPubkey, + /// Executed state. + pub valid: bool, + /// Message generated during execution. 
+ pub msg: String, +} + +impl ApiInscribeTransferEvent { + fn parse( + event: &brc20_store::Receipt, + transfer_event: &brc20_store::InscribeTransferEvent, + ) -> Self { + Self { + tick: transfer_event.tick.to_string(), + inscription_id: event.inscription_id.to_string(), + inscription_number: event.inscription_number, + old_satpoint: event.old_satpoint.to_string(), + new_satpoint: event.new_satpoint.to_string(), + amount: transfer_event.amount.to_string(), + from: event.from.clone().into(), + to: event.to.clone().into(), + valid: true, + msg: "ok".to_string(), + event: OperationType::InscribeTransfer.to_string(), + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[schema(as = brc20::ApiTransferEvent)] +#[serde(rename_all = "camelCase")] +pub struct ApiTransferEvent { + /// Event type. + #[serde(rename = "type")] + pub event: String, + /// The ticker of transfer. + pub tick: String, + /// The inscription id. + pub inscription_id: String, + /// The inscription number. + pub inscription_number: i32, + /// The inscription satpoint of the transaction input. + pub old_satpoint: String, + /// The inscription satpoint of the transaction output. + pub new_satpoint: String, + /// The amount of transfer. + pub amount: String, + /// The message sender which is an address or script pubkey hash. + pub from: ScriptPubkey, + /// The message receiver which is an address or script pubkey hash. + pub to: ScriptPubkey, + /// Executed state. + pub valid: bool, + /// Message generated during execution. + pub msg: String, +} + +impl ApiTransferEvent { + fn parse(event: &brc20_store::Receipt, transfer_event: &brc20_store::TransferEvent) -> Self { + Self { + tick: transfer_event.tick.to_string(), + inscription_id: event.inscription_id.to_string(), + inscription_number: event.inscription_number, + old_satpoint: event.old_satpoint.to_string(), + new_satpoint: event.new_satpoint.to_string(), + amount: transfer_event.amount.to_string(), + from: event.from.clone().into(), + to: event.to.clone().into(), + valid: true, + msg: transfer_event.msg.clone().unwrap_or("ok".to_string()), + event: OperationType::Transfer.to_string(), + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[schema(as = brc20::TxEvents)] +#[serde(rename_all = "camelCase")] +pub struct ApiTxEvents { + #[schema(value_type = Vec)] + pub events: Vec, + pub txid: String, +} + +/// Get transaction events by txid. +/// +/// Retrieve all BRC20 events associated with a transaction. +#[utoipa::path( + get, + path = "/api/v1/brc20/tx/{txid}/events", + params( + ("txid" = String, Path, description = "transaction ID") + ), + responses( + (status = 200, description = "Obtain transaction events by txid", body = BRC20TxEvents), + (status = 400, description = "Bad query.", body = ApiError, example = json!(&ApiError::bad_request("bad request"))), + (status = 404, description = "Not found.", body = ApiError, example = json!(&ApiError::not_found("not found"))), + (status = 500, description = "Internal server error.", body = ApiError, example = json!(&ApiError::internal("internal error"))), + ) + )] +pub(crate) async fn brc20_tx_events( + Extension(index): Extension>, + Path(txid): Path, +) -> ApiResult { + log::debug!("rpc: get brc20_tx_events: {}", txid); + + let txid = bitcoin::Txid::from_str(&txid).map_err(ApiError::bad_request)?; + let rtx = index.begin_read()?; + let client = index.bitcoin_rpc_client()?; + + let tx_events = Index::get_brc20_transaction_receipts(txid, &rtx, &client)? 
+ .ok_or(BRC20ApiError::TransactionReceiptNotFound(txid))?; + + log::debug!("rpc: get brc20_tx_events: {} {:?}", txid, tx_events); + + Ok(Json(ApiResponse::ok(ApiTxEvents { + txid: txid.to_string(), + events: tx_events.into_iter().map(|e| e.into()).collect(), + }))) +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[schema(as = brc20::BlockEvents)] +#[serde(rename_all = "camelCase")] +pub struct ApiBlockEvents { + #[schema(value_type = Vec)] + pub block: Vec, +} + +/// Get block events by blockhash. +/// +/// Retrieve all BRC20 events associated with a block. +#[utoipa::path( + get, + path = "/api/v1/brc20/block/{blockhash}/events", + params( + ("blockhash" = String, Path, description = "block hash") + ), + responses( + (status = 200, description = "Obtain block events by block hash", body = BRC20BlockEvents), + (status = 400, description = "Bad query.", body = ApiError, example = json!(&ApiError::bad_request("bad request"))), + (status = 404, description = "Not found.", body = ApiError, example = json!(&ApiError::not_found("not found"))), + (status = 500, description = "Internal server error.", body = ApiError, example = json!(&ApiError::internal("internal error"))), + ) + )] +pub(crate) async fn brc20_block_events( + Extension(index): Extension>, + Path(blockhash): Path, +) -> ApiResult { + log::debug!("rpc: get brc20_block_events: {}", blockhash); + + let blockhash = bitcoin::BlockHash::from_str(&blockhash).map_err(ApiError::bad_request)?; + + let rtx = index.begin_read()?; + let client = index.bitcoin_rpc_client()?; + + let block_events = Index::get_brc20_block_receipts(blockhash, &rtx, &client)?; + + log::debug!( + "rpc: get brc20_block_events: {} {:?}", + blockhash, + block_events + ); + + Ok(Json(ApiResponse::ok(ApiBlockEvents { + block: block_events + .into_iter() + .map(|(txid, events)| ApiTxEvents { + txid: txid.to_string(), + events: events.into_iter().map(|e| e.into()).collect(), + }) + .filter(|e| !e.events.is_empty()) + .collect(), + }))) +} diff --git a/src/subcommand/server/brc20/ticker.rs b/src/subcommand/server/brc20/ticker.rs new file mode 100644 index 0000000000..6ab93a1966 --- /dev/null +++ b/src/subcommand/server/brc20/ticker.rs @@ -0,0 +1,147 @@ +use { + super::*, + crate::okx::datastore::brc20::{Tick, TokenInfo}, + axum::Json, + utoipa::ToSchema, +}; + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[schema(as = brc20::TickInfo)] +#[serde(rename_all = "camelCase")] +/// Description of a BRC20 ticker. +pub struct ApiTickInfo { + /// Name of the ticker. + pub tick: String, + /// Inscription ID of the ticker deployed. + pub inscription_id: String, + /// Inscription number of the ticker deployed. + pub inscription_number: i32, + /// The total supply of the ticker.
+ /// Maximum supply cannot exceed uint64_max. + /// + /// A string containing a 64-bit unsigned integer.
+ /// We represent u64 values as a string to ensure compatibility with languages such as JavaScript that do not parse u64s in JSON natively. + #[schema(format = "uint64")] + pub supply: String, + /// The amount of the ticker that has been burned. + #[schema(format = "uint64")] + pub burned_supply: String, + /// Whether the ticker is self minted. + pub self_mint: bool, + /// The maximum amount of each mint. + #[schema(format = "uint64")] + pub limit_per_mint: String, + /// The amount of the ticker that has been minted. + #[schema(format = "uint64")] + pub minted: String, + /// The decimal of the ticker.
+ /// Number of decimals cannot exceed 18 (default). + #[schema( + example = 18, + default = 18, + maximum = 18, + minimum = 0, + format = "uint8" + )] + pub decimal: u8, + pub deploy_by: ScriptPubkey, + /// A hex encoded 32 byte transaction ID that the ticker deployed. + /// + /// This is represented in a string as adding a prefix 0x to a 64 character hex string. + pub txid: String, + /// The height of the block that the ticker deployed. + #[schema(format = "uint32")] + pub deploy_height: u32, + /// The timestamp of the block that the ticker deployed. + #[schema(format = "uint32")] + pub deploy_blocktime: u32, +} + +impl From for ApiTickInfo { + fn from(tick_info: TokenInfo) -> Self { + Self { + tick: tick_info.tick.to_string(), + inscription_id: tick_info.inscription_id.to_string(), + inscription_number: tick_info.inscription_number, + supply: tick_info.supply.to_string(), + burned_supply: tick_info.burned_supply.to_string(), + limit_per_mint: tick_info.limit_per_mint.to_string(), + minted: tick_info.minted.to_string(), + decimal: tick_info.decimal, + self_mint: tick_info.is_self_mint, + deploy_by: tick_info.deploy_by.clone().into(), + txid: tick_info.inscription_id.txid.to_string(), + deploy_height: tick_info.deployed_number, + deploy_blocktime: tick_info.deployed_timestamp, + } + } +} + +/// Get the ticker info. +/// +/// Retrieve detailed information about the ticker. +#[utoipa::path( + get, + path = "/api/v1/brc20/tick/{ticker}", + params( + ("ticker" = String, Path, description = "Token ticker", min_length = 4, max_length = 4) + ), + responses( + (status = 200, description = "Obtain matching BRC20 ticker by query.", body = BRC20Tick), + (status = 400, description = "Bad query.", body = ApiError, example = json!(&ApiError::bad_request("bad request"))), + (status = 404, description = "Ticker not found.", body = ApiError, example = json!(&ApiError::not_found("not found"))), + (status = 500, description = "Internal server error.", body = ApiError, example = json!(&ApiError::internal("internal error"))), + ) + )] +pub(crate) async fn brc20_tick_info( + Extension(index): Extension>, + Path(tick): Path, +) -> ApiResult { + log::debug!("rpc: get brc20_tick_info: {}", tick); + + let rtx = index.begin_read()?; + let ticker = Tick::from_str(&tick).map_err(|_| BRC20ApiError::InvalidTicker(tick.clone()))?; + + let tick_info = rtx + .brc20_get_tick_info(&ticker)? + .ok_or(BRC20ApiError::UnknownTicker(tick.clone()))?; + + log::debug!("rpc: get brc20_tick_info: {:?} {:?}", tick, tick_info); + + Ok(Json(ApiResponse::ok(tick_info.into()))) +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[schema(as = brc20::AllTickInfo)] +#[serde(rename_all = "camelCase")] +pub struct ApiTickInfos { + #[schema(value_type = Vec)] + pub tokens: Vec, +} + +/// Get all tickers info. +/// +/// Retrieve detailed information about all tickers. 
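// As a usage sketch (the paths mirror the routes added in server.rs; the ticker value is only an
// example):
//
//   GET /api/v1/brc20/tick        -> every deployed ticker, wrapped in `ApiTickInfos`
//   GET /api/v1/brc20/tick/ordi   -> a single `ApiTickInfo`, or a not-found error when the ticker
//                                    has never been deployed
//
// A ticker that fails `Tick::from_str` is rejected as an invalid-ticker bad request before the
// ticker is looked up.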
+#[utoipa::path( + get, + path = "/api/v1/brc20/tick", + responses( + (status = 200, description = "Obtain matching all BRC20 tickers.", body = BRC20AllTick), + (status = 400, description = "Bad query.", body = ApiError, example = json!(&ApiError::bad_request("bad request"))), + (status = 404, description = "Not found.", body = ApiError, example = json!(&ApiError::not_found("not found"))), + (status = 500, description = "Internal server error.", body = ApiError, example = json!(&ApiError::internal("internal error"))), + ) + )] +pub(crate) async fn brc20_all_tick_info( + Extension(index): Extension>, +) -> ApiResult { + log::debug!("rpc: get brc20_all_tick_info"); + + let rtx = index.begin_read()?; + let all_tick_info = rtx.brc20_get_all_tick_info()?; + log::debug!("rpc: get brc20_all_tick_info: {:?}", all_tick_info); + + Ok(Json(ApiResponse::ok(ApiTickInfos { + tokens: all_tick_info.into_iter().map(|t| t.into()).collect(), + }))) +} diff --git a/src/subcommand/server/brc20/transferable.rs b/src/subcommand/server/brc20/transferable.rs new file mode 100644 index 0000000000..77be213dcb --- /dev/null +++ b/src/subcommand/server/brc20/transferable.rs @@ -0,0 +1,139 @@ +use {super::*, crate::okx::datastore::brc20::Tick, axum::Json, utoipa::ToSchema}; + +#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize, ToSchema)] +#[schema(as = brc20::TransferableAsset)] +#[serde(rename_all = "camelCase")] +pub struct ApiTransferableAsset { + /// The inscription id. + pub inscription_id: String, + /// The inscription number. + pub inscription_number: i32, + /// The amount of the ticker that will be transferred. + #[schema(format = "uint64")] + pub amount: String, + /// The ticker name that will be transferred. + pub tick: String, + /// The address to which the transfer will be made. + pub owner: String, + /// The inscription location. + pub location: SatPoint, +} + +/// Get the transferable inscriptions of the address. +/// +/// Retrieve the transferable inscriptions with the ticker from the given address. +#[utoipa::path( + get, + path = "/api/v1/brc20/tick/{ticker}/address/{address}/transferable", + params( + ("ticker" = String, Path, description = "Token ticker", min_length = 4, max_length = 4), + ("address" = String, Path, description = "Address") +), + responses( + (status = 200, description = "Obtain account transferable inscriptions of ticker.", body = BRC20Transferable), + (status = 400, description = "Bad query.", body = ApiError, example = json!(&ApiError::bad_request("bad request"))), + (status = 404, description = "Not found.", body = ApiError, example = json!(&ApiError::not_found("not found"))), + (status = 500, description = "Internal server error.", body = ApiError, example = json!(&ApiError::internal("internal error"))), + ) +)] +pub(crate) async fn brc20_transferable( + Extension(index): Extension>, + Path((tick, address)): Path<(String, String)>, +) -> ApiResult { + log::debug!("rpc: get brc20_transferable: {tick} {address}"); + + let rtx = index.begin_read()?; + let chain = index.get_chain(); + + let ticker = Tick::from_str(&tick).map_err(|_| BRC20ApiError::InvalidTicker(tick.clone()))?; + let script_key = utils::parse_and_validate_script_key_with_chain(&address, chain) + .map_err(ApiError::bad_request)?; + + let brc20_transferable_assets = + Index::get_brc20_transferable_utxo_by_tick_and_address(ticker, script_key, &rtx)? 
+ .ok_or(BRC20ApiError::UnknownTicker(tick.clone()))?; + + log::debug!( + "rpc: get brc20_transferable: {tick} {address} {:?}", + brc20_transferable_assets + ); + + let mut api_transferable_assets = Vec::new(); + for (satpoint, transferable_asset) in brc20_transferable_assets { + api_transferable_assets.push(ApiTransferableAsset { + inscription_id: transferable_asset.inscription_id.to_string(), + inscription_number: transferable_asset.inscription_number, + amount: transferable_asset.amount.to_string(), + tick: transferable_asset.tick.as_str().to_string(), + owner: transferable_asset.owner.to_string(), + location: satpoint, + }); + } + + api_transferable_assets.sort_by(|a, b| a.inscription_number.cmp(&b.inscription_number)); + + Ok(Json(ApiResponse::ok(ApiTransferableAssets { + inscriptions: api_transferable_assets, + }))) +} + +#[derive(Default, Debug, Clone, Serialize, Deserialize, ToSchema)] +#[schema(as = brc20::ApiTransferableAssets)] +#[serde(rename_all = "camelCase")] +pub struct ApiTransferableAssets { + #[schema(value_type = Vec)] + pub inscriptions: Vec, +} + +/// Get the balance of ticker of the address. +/// +/// Retrieve the balance of the ticker from the given address. +#[utoipa::path( + get, + path = "/api/v1/brc20/address/{address}/transferable", + params( + ("address" = String, Path, description = "Address") +), + responses( + (status = 200, description = "Obtain account all transferable inscriptions.", body = BRC20Transferable), + (status = 400, description = "Bad query.", body = ApiError, example = json!(&ApiError::bad_request("bad request"))), + (status = 404, description = "Not found.", body = ApiError, example = json!(&ApiError::not_found("not found"))), + (status = 500, description = "Internal server error.", body = ApiError, example = json!(&ApiError::internal("internal error"))), + ) +)] +pub(crate) async fn brc20_all_transferable( + Extension(index): Extension>, + Path(account): Path, +) -> ApiResult { + log::debug!("rpc: get brc20_all_transferable: {account}"); + + let rtx = index.begin_read()?; + let chain = index.get_chain(); + + let script_key = utils::parse_and_validate_script_key_with_chain(&account, chain) + .map_err(ApiError::bad_request)?; + + let brc20_transferable_assets = rtx.brc20_get_all_transferable_by_address(script_key)?; + log::debug!( + "rpc: get brc20_all_transferable: {account} {:?}", + brc20_transferable_assets + ); + + let mut api_transferable_assets = Vec::new(); + for (satpoint, transferable_asset) in brc20_transferable_assets { + api_transferable_assets.push(ApiTransferableAsset { + inscription_id: transferable_asset.inscription_id.to_string(), + inscription_number: transferable_asset.inscription_number, + amount: transferable_asset.amount.to_string(), + tick: transferable_asset.tick.as_str().to_string(), + owner: transferable_asset.owner.to_string(), + location: satpoint, + }); + } + + api_transferable_assets.sort_by(|a, b| a.inscription_number.cmp(&b.inscription_number)); + + Ok(Json(ApiResponse::ok(ApiTransferableAssets { + inscriptions: api_transferable_assets, + }))) +} diff --git a/src/subcommand/server/error.rs b/src/subcommand/server/error.rs index ccbff8bc62..fa931f124d 100644 --- a/src/subcommand/server/error.rs +++ b/src/subcommand/server/error.rs @@ -1,3 +1,5 @@ +use serde::ser::SerializeStruct; +use utoipa::ToSchema; use {super::*, std::fmt::Write}; #[derive(Debug)] @@ -72,3 +74,91 @@ impl From for ServerError { Self::Internal(error) } } + +#[repr(i32)] +#[derive(ToSchema)] +pub(crate) enum ApiError { + /// Internal server 
error. + #[schema(example = json!(&ApiError::internal("internal error")))] + Internal(String) = 1, + + /// Bad request. + #[schema(example = json!(&ApiError::internal("bad request")))] + BadRequest(String) = 2, + + /// Resource not found. + #[schema(example = json!(&ApiError::internal("not found")))] + NotFound(String) = 3, +} + +impl ApiError { + pub(crate) fn code(&self) -> i32 { + match self { + Self::Internal(_) => 1, + Self::BadRequest(_) => 2, + Self::NotFound(_) => 3, + } + } + + pub(crate) fn not_found(message: S) -> Self { + Self::NotFound(message.to_string()) + } + + pub(crate) fn internal(message: S) -> Self { + Self::Internal(message.to_string()) + } + + pub(crate) fn bad_request(message: S) -> Self { + Self::BadRequest(message.to_string()) + } +} +impl Serialize for ApiError { + fn serialize(&self, serializer: S) -> Result { + let mut state = serializer.serialize_struct("ApiError", 2)?; + match self { + ApiError::Internal(msg) | ApiError::BadRequest(msg) | ApiError::NotFound(msg) => { + state.serialize_field("code", &self.code())?; + state.serialize_field("msg", &msg)?; + state.end() + } + } + } +} + +impl IntoResponse for ApiError { + fn into_response(self) -> Response { + let status_code = match &self { + Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR, + Self::BadRequest(_) => StatusCode::BAD_REQUEST, + Self::NotFound(_) => StatusCode::NOT_FOUND, + }; + + (status_code, axum::Json(self)).into_response() + } +} + +impl From for ApiError { + fn from(error: anyhow::Error) -> Self { + Self::internal(error) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_serialize_api_error() { + let api_error = ApiError::internal("internal error"); + let json = serde_json::to_string(&api_error).unwrap(); + assert_eq!(json, r#"{"code":1,"msg":"internal error"}"#); + + let api_error = ApiError::bad_request("bad request"); + let json = serde_json::to_string(&api_error).unwrap(); + assert_eq!(json, r#"{"code":2,"msg":"bad request"}"#); + + let api_error = ApiError::not_found("not found"); + let json = serde_json::to_string(&api_error).unwrap(); + assert_eq!(json, r#"{"code":3,"msg":"not found"}"#); + } +} diff --git a/src/subcommand/server/info.rs b/src/subcommand/server/info.rs new file mode 100644 index 0000000000..b17eeeddce --- /dev/null +++ b/src/subcommand/server/info.rs @@ -0,0 +1,95 @@ +use super::*; +use axum::Json; +use shadow_rs::shadow; +use utoipa::{IntoParams, ToSchema}; +shadow!(build); + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct NodeInfo { + /// Node version of the API endpoint build. + pub version: Option, + /// The name of the branch or tag of the API endpoint build. + pub branch: Option, + /// Git commit hash of the API endpoint build. + pub commit_hash: Option, + /// Build time of the API endpoint. + pub build_time: Option, + /// Chain information of the blockchain. + pub chain_info: ChainInfo, +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ChainInfo { + /// The network of the blockchain. + pub network: Option, + /// The block height of our indexer. + #[schema(format = "uint32")] + pub ord_block_height: u32, + /// The block hash of our indexer. + pub ord_block_hash: String, + /// The chain block height of the blockchain. + #[schema(format = "uint64")] + pub chain_block_height: Option, + /// The chain block hash of the blockchain. 
+ pub chain_block_hash: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, IntoParams)] +pub struct NodeInfoQuery { + /// Optional to query the BTC chain status. + btc: Option, +} + +/// Retrieve the indexer status. +/// +/// Display indexer synchronization information, including indexer version, blockchain network, indexer height, blockchain network height, and other information. +#[utoipa::path( + get, + path = "/api/v1/node/info", + params( + NodeInfoQuery + ), + responses( + (status = 200, description = "Obtain node runtime status.", body = Node), + (status = 500, description = "Internal server error.", body = ApiError, example = json!(&ApiError::internal("internal error"))), + ) + )] +pub(crate) async fn node_info( + Extension(index): Extension>, + Query(query): Query, +) -> ApiResult { + log::debug!("rpc: get node_info"); + let rtx = index.begin_read()?; + let client = index.bitcoin_rpc_client()?; + + let (latest_height, latest_blockhash) = rtx.latest_block()?.ok_or_api_err(|| { + ApiError::Internal("Failed to retrieve the latest block from the database.".to_string()) + })?; + + let (chain_block_height, chain_block_hash) = match query.btc.unwrap_or_default() { + true => { + let chain_blockchain_info = client.get_blockchain_info().map_err(ApiError::internal)?; + ( + Some(u32::try_from(chain_blockchain_info.blocks).unwrap()), + Some(chain_blockchain_info.best_block_hash), + ) + } + false => (None, None), + }; + + Ok(Json(ApiResponse::ok(NodeInfo { + version: Some(build::PKG_VERSION.into()), + branch: Some(build::BRANCH.into()), + commit_hash: Some(build::SHORT_COMMIT.into()), + build_time: Some(build::BUILD_TIME.into()), + chain_info: ChainInfo { + network: Some(index.get_chain().to_string()), + ord_block_height: latest_height.0, + ord_block_hash: latest_blockhash.to_string(), + chain_block_height, + chain_block_hash: chain_block_hash.map(|hash| hash.to_string()), + }, + }))) +} diff --git a/src/subcommand/server/ord/inscription.rs b/src/subcommand/server/ord/inscription.rs new file mode 100644 index 0000000000..0ac551596a --- /dev/null +++ b/src/subcommand/server/ord/inscription.rs @@ -0,0 +1,441 @@ +use { + super::{error::ApiError, types::ScriptPubkey, *}, + crate::{index::rtx::Rtx, okx::datastore::ScriptKey}, + axum::Json, + utoipa::ToSchema, +}; + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, ToSchema)] +#[schema(as = ord::ApiContentEncoding)] +#[serde(rename_all = "camelCase")] +#[serde(tag = "type")] +pub enum ApiContentEncoding { + Br { decode: String }, + Unknown, +} + +#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] +#[schema(as = ord::ApiInscription)] +#[serde(rename_all = "camelCase")] +pub struct ApiInscription { + /// The inscription id. + pub id: String, + /// The inscription number. + pub number: i32, + /// The inscription content type. + pub content_type: Option, + /// The inscription content body. + pub content: Option, + /// The inscription content body length. + pub content_length: Option, + /// Decode the content encoding if the message has a content encoding tag. + pub content_encoding: Option, + /// The inscription metadata. + pub metadata: Option, + /// The inscription metaprotocol. + pub metaprotocol: Option, + /// The inscription parent inscription id. + pub parent: Option, + /// The delegate inscription id of the inscription. + pub delegate: Option, + /// The inscription pointer. + pub pointer: Option, + /// The inscription owner. + pub owner: Option, + /// The inscription genesis block height. 
+ #[schema(format = "uint32")] + pub genesis_height: u32, + /// The inscription genesis timestamp. + #[schema(format = "uint32")] + pub genesis_timestamp: u32, + /// The inscription location. + pub location: String, + /// Collections of Inscriptions. + pub collections: Vec, + /// Charms of Inscriptions. + pub charms: Vec, + /// The inscription sat index. + pub sat: Option, +} + +// /ord/id/:id/inscription +/// Retrieve the inscription infomation with the specified inscription id. +#[utoipa::path( + get, + path = "/api/v1/ord/id/{id}/inscription", + params( + ("id" = String, Path, description = "inscription ID") +), + responses( + (status = 200, description = "Obtain inscription infomation.", body = OrdOrdInscription), + (status = 400, description = "Bad query.", body = ApiError, example = json!(&ApiError::bad_request("bad request"))), + (status = 404, description = "Not found.", body = ApiError, example = json!(&ApiError::not_found("not found"))), + (status = 500, description = "Internal server error.", body = ApiError, example = json!(&ApiError::internal("internal error"))), + ) +)] +pub(crate) async fn ord_inscription_id( + Extension(index): Extension>, + Path(id): Path, +) -> ApiResult { + log::debug!("rpc: get ord_inscription_id: {id}"); + + let rtx = index.begin_read()?; + let chain = index.get_chain(); + let client = index.bitcoin_rpc_client()?; + let index_transactions = index.has_transactions_index(); + + let id = InscriptionId::from_str(&id).map_err(ApiError::bad_request)?; + + ord_get_inscription_by_id(id, &rtx, client, chain, index_transactions) +} + +// /ord/number/:number/inscription +/// Retrieve the inscription infomation with the specified inscription number. +#[utoipa::path( + get, + path = "/api/v1/ord/number/{number}/inscription", + params( + ("number" = i64, Path, description = "inscription number") +), + responses( + (status = 200, description = "Obtain inscription infomation.", body = OrdOrdInscription), + (status = 400, description = "Bad query.", body = ApiError, example = json!(&ApiError::bad_request("bad request"))), + (status = 404, description = "Not found.", body = ApiError, example = json!(&ApiError::not_found("not found"))), + (status = 500, description = "Internal server error.", body = ApiError, example = json!(&ApiError::internal("internal error"))), + ) +)] +pub(crate) async fn ord_inscription_number( + Extension(index): Extension>, + Path(number): Path, +) -> ApiResult { + log::debug!("rpc: get ord_inscription_number: {number}"); + + let rtx = index.begin_read()?; + let chain = index.get_chain(); + let client = index.bitcoin_rpc_client()?; + let index_transactions = index.has_transactions_index(); + + let inscription_id = Index::get_inscription_id_by_inscription_number_with_rtx(number, &rtx)? + .ok_or(OrdApiError::UnknownInscriptionNumber(number))?; + + ord_get_inscription_by_id(inscription_id, &rtx, client, chain, index_transactions) +} + +fn ord_get_inscription_by_id( + inscription_id: InscriptionId, + rtx: &Rtx, + client: Client, + chain: Chain, + index_transactions: bool, +) -> ApiResult { + let inscription_entry = Index::get_inscription_entry_with_rtx(inscription_id, rtx)? + .ok_or(OrdApiError::UnknownInscriptionId(inscription_id))?; + + let tx = + Index::get_transaction_with_rtx(inscription_id.txid, rtx, &client, chain, index_transactions)? 
+ .ok_or(OrdApiError::TransactionNotFound(inscription_id.txid))?; + + let inscription = ParsedEnvelope::from_transaction(&tx) + .get(usize::try_from(inscription_id.index).unwrap()) + .map(|envelope: &ParsedEnvelope| envelope.payload.clone()) + .ok_or(OrdApiError::InvalidInscription(inscription_id))?; + + let sat_point = Index::get_inscription_satpoint_by_id_with_rtx(inscription_id, rtx)? + .ok_or(OrdApiError::SatPointNotFound(inscription_id))?; + + let collections = rtx + .ord_inscription_id_to_collections(inscription_id)? + .unwrap_or_default(); + + let parent_inscription_id = match inscription_entry.parent { + Some(parent) => rtx + .sequence_number_to_inscription_entry(parent)? + .map(|entry| entry.id), + None => None, + }; + + let charms: Vec = Charm::ALL + .iter() + .filter(|charm| charm.is_set(inscription_entry.charms)) + .cloned() + .collect(); + + let location_outpoint = sat_point.outpoint; + + let output = if location_outpoint == unbound_outpoint() { + None + } else { + let location_transaction = if tx.txid() != location_outpoint.txid { + Index::get_transaction_with_rtx( + location_outpoint.txid, + rtx, + &client, + chain, + index_transactions, + )? + .ok_or(OrdApiError::TransactionNotFound(location_outpoint.txid))? + } else { + tx.clone() + }; + location_transaction + .output + .into_iter() + .nth(location_outpoint.vout.try_into().unwrap()) + }; + + Ok(Json(ApiResponse::ok(ApiInscription { + id: inscription_id.to_string(), + number: inscription_entry.inscription_number, + content_type: inscription.content_type().map(str::to_string), + content: inscription.body().map(hex::encode), + content_length: inscription.content_length(), + content_encoding: decompress_encoding_body(&inscription), + metaprotocol: inscription.metaprotocol().map(str::to_string), + metadata: inscription + .metadata() + .and_then(|_| inscription.metadata.as_deref().map(hex::encode)), + parent: parent_inscription_id, + pointer: inscription.pointer(), + delegate: inscription.delegate(), + owner: output.map(|vout| ScriptKey::from_script(&vout.script_pubkey, chain).into()), + genesis_height: inscription_entry.height, + genesis_timestamp: inscription_entry.timestamp, + location: sat_point.to_string(), + collections: collections.iter().map(|c| c.to_string()).collect(), + charms: charms.iter().map(|c| c.title().into()).collect(), + sat: inscription_entry.sat.map(|s| s.0), + }))) +} + +fn decompress_encoding_body(inscription: &Inscription) -> Option { + if let Some(header_value) = inscription.content_encoding() { + if header_value == "br" { + if let Some(body) = inscription.body() { + let mut decompressed = Vec::new(); + if Decompressor::new(body, 4096) + .read_to_end(&mut decompressed) + .is_ok() + { + return Some(ApiContentEncoding::Br { + decode: hex::encode(decompressed), + }); + } + } + } + return Some(ApiContentEncoding::Unknown); + } + None +} + +// ord/debug/bitmap/district/:number +pub(crate) async fn ord_debug_bitmap_district( + Extension(index): Extension>, + Path(number): Path, +) -> ApiResult { + log::debug!("rpc: get ord_debug_bitmap_district: number:{}", number); + + let rtx = index.begin_read()?; + let inscription_id = rtx + .ord_district_to_inscription_id(number)? 
+ .ok_or_api_not_found(format!("district {number} not found."))?; + + log::debug!( + "rpc: get ord_debug_bitmap_district: {:?} {:?}", + number, + inscription_id + ); + + Ok(Json(ApiResponse::ok(inscription_id))) +} + +#[cfg(test)] +mod tests { + use super::*; + use brotli::{ + enc::{backward_references::BrotliEncoderMode, BrotliEncoderParams}, + CompressorWriter, + }; + use std::io::Write; + + #[test] + fn test_serialize_ord_inscription() { + let mut ord_inscription = ApiInscription { + id: InscriptionId { + txid: txid(1), + index: 0xFFFFFFFF, + } + .to_string(), + number: -100, + content_type: Some("content_type".to_string()), + content: Some("content".to_string()), + content_length: Some("content".to_string().len()), + content_encoding: Some(ApiContentEncoding::Br { + decode: "content_encoding".to_string(), + }), + metaprotocol: Some("mata_protocol".to_string()), + metadata: Some("0123456789abcdef".to_string()), + parent: Some(InscriptionId { + txid: txid(1), + index: 0xFFFFFFFE, + }), + delegate: Some(InscriptionId { + txid: txid(1), + index: 0xFFFFFFFD, + }), + pointer: Some(0), + owner: Some( + ScriptKey::from_script( + &Address::from_str("bc1qhvd6suvqzjcu9pxjhrwhtrlj85ny3n2mqql5w4") + .unwrap() + .assume_checked() + .script_pubkey(), + Chain::Mainnet, + ) + .into(), + ), + genesis_height: 1, + genesis_timestamp: 100, + location: SatPoint::from_str( + "5660d06bd69326c18ec63127b37fb3b32ea763c3846b3334c51beb6a800c57d3:1:3000", + ) + .unwrap() + .to_string(), + collections: Vec::new(), + charms: [Charm::Vindicated] + .iter() + .map(|c| c.title().into()) + .collect(), + sat: None, + }; + assert_eq!( + serde_json::to_string_pretty(&ord_inscription).unwrap(), + r#"{ + "id": "1111111111111111111111111111111111111111111111111111111111111111i4294967295", + "number": -100, + "contentType": "content_type", + "content": "content", + "contentLength": 7, + "contentEncoding": { + "type": "br", + "decode": "content_encoding" + }, + "metadata": "0123456789abcdef", + "metaprotocol": "mata_protocol", + "parent": "1111111111111111111111111111111111111111111111111111111111111111i4294967294", + "delegate": "1111111111111111111111111111111111111111111111111111111111111111i4294967293", + "pointer": 0, + "owner": { + "address": "bc1qhvd6suvqzjcu9pxjhrwhtrlj85ny3n2mqql5w4" + }, + "genesisHeight": 1, + "genesisTimestamp": 100, + "location": "5660d06bd69326c18ec63127b37fb3b32ea763c3846b3334c51beb6a800c57d3:1:3000", + "collections": [], + "charms": [ + "vindicated" + ], + "sat": null +}"#, + ); + ord_inscription.owner = None; + assert_eq!( + serde_json::to_string_pretty(&ord_inscription).unwrap(), + r#"{ + "id": "1111111111111111111111111111111111111111111111111111111111111111i4294967295", + "number": -100, + "contentType": "content_type", + "content": "content", + "contentLength": 7, + "contentEncoding": { + "type": "br", + "decode": "content_encoding" + }, + "metadata": "0123456789abcdef", + "metaprotocol": "mata_protocol", + "parent": "1111111111111111111111111111111111111111111111111111111111111111i4294967294", + "delegate": "1111111111111111111111111111111111111111111111111111111111111111i4294967293", + "pointer": 0, + "owner": null, + "genesisHeight": 1, + "genesisTimestamp": 100, + "location": "5660d06bd69326c18ec63127b37fb3b32ea763c3846b3334c51beb6a800c57d3:1:3000", + "collections": [], + "charms": [ + "vindicated" + ], + "sat": null +}"#, + ); + } + + #[test] + fn test_decompress_encoding_body() { + let mut compressed = Vec::new(); + let body = "ord".as_bytes(); + + CompressorWriter::with_params( + &mut 
compressed, + body.len(), + &BrotliEncoderParams { + lgblock: 24, + lgwin: 24, + mode: BrotliEncoderMode::BROTLI_MODE_TEXT, + quality: 11, + size_hint: body.len(), + ..Default::default() + }, + ) + .write_all(body) + .unwrap(); + + let inscription = Inscription { + content_encoding: Some("br".as_bytes().to_vec()), + ..inscription("text/plain;charset=utf-8", compressed) + }; + assert_eq!( + decompress_encoding_body(&inscription), + Some(ApiContentEncoding::Br { + decode: hex::encode(body) + }) + ); + } + + #[test] + fn test_except_decompress_encoding_body() { + let body = "ord".as_bytes(); + + let inscription1 = Inscription { + content_encoding: Some("br".as_bytes().to_vec()), + ..inscription("text/plain;charset=utf-8", body) + }; + assert_eq!( + decompress_encoding_body(&inscription1), + Some(ApiContentEncoding::Unknown) + ); + let body = Vec::new(); + + let inscription2 = Inscription { + content_encoding: Some("br".as_bytes().to_vec()), + ..inscription("text/plain;charset=utf-8", body) + }; + assert_eq!( + decompress_encoding_body(&inscription2), + Some(ApiContentEncoding::Unknown) + ); + } + + #[test] + fn test_serialize_content_encoding() { + assert_eq!( + serde_json::to_string(&ApiContentEncoding::Br { + decode: "content_encoding".to_string(), + }) + .unwrap(), + r#"{"type":"br","decode":"content_encoding"}"# + ); + assert_eq!( + serde_json::to_string(&ApiContentEncoding::Unknown).unwrap(), + r#"{"type":"unknown"}"# + ); + } +} diff --git a/src/subcommand/server/ord/mod.rs b/src/subcommand/server/ord/mod.rs new file mode 100644 index 0000000000..acac86617c --- /dev/null +++ b/src/subcommand/server/ord/mod.rs @@ -0,0 +1,46 @@ +use super::*; + +mod inscription; +mod outpoint; +mod transaction; + +pub(super) use {inscription::*, outpoint::*, transaction::*}; + +#[derive(Debug, thiserror::Error)] +pub enum OrdApiError { + /// Thrown when an inscription id was requested but no matching inscription exists + #[error("unknown inscription id {0}")] + UnknownInscriptionId(InscriptionId), + /// Thrown when an inscription number was requested but no matching inscription exists + #[error("unknown inscription number {0}")] + UnknownInscriptionNumber(i32), + /// Thrown when a transaction was requested but no matching transaction exists + #[error("transaction {0} not found")] + TransactionNotFound(Txid), + /// Thrown when a transaction receipt was requested but no matching transaction receipt exists + #[error("transaction receipt {0} not found")] + TransactionReceiptNotFound(Txid), + /// Thrown when parsing the inscription from the transaction fails + #[error("invalid inscription {0}")] + InvalidInscription(InscriptionId), + /// Thrown when the satpoint for the inscription cannot be found + #[error("satpoint not found for inscription {0}")] + SatPointNotFound(InscriptionId), + /// Thrown when an internal error occurs + #[error("internal error: {0}")] + Internal(String), +} + +impl From<OrdApiError> for ApiError { + fn from(error: OrdApiError) -> Self { + match error { + OrdApiError::UnknownInscriptionId(_) => Self::not_found(error.to_string()), + OrdApiError::UnknownInscriptionNumber(_) => Self::not_found(error.to_string()), + OrdApiError::TransactionReceiptNotFound(_) => Self::not_found(error.to_string()), + OrdApiError::TransactionNotFound(_) => Self::not_found(error.to_string()), + OrdApiError::InvalidInscription(_) => Self::internal(error.to_string()), + OrdApiError::SatPointNotFound(_) => Self::internal(error.to_string()), + OrdApiError::Internal(_) => Self::internal(error.to_string()), + } + } +} diff
--git a/src/subcommand/server/ord/outpoint.rs b/src/subcommand/server/ord/outpoint.rs new file mode 100644 index 0000000000..8d4f438731 --- /dev/null +++ b/src/subcommand/server/ord/outpoint.rs @@ -0,0 +1,122 @@ +use { + super::{error::ApiError, types::ScriptPubkey, *}, + crate::okx::datastore::ScriptKey, + axum::Json, + utoipa::ToSchema, +}; + +#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize, ToSchema)] +#[schema(as = ord::ApiInscriptionDigest)] +#[serde(rename_all = "camelCase")] +pub struct ApiInscriptionDigest { + /// The inscription id. + pub id: String, + /// The inscription number. + pub number: i32, + /// The inscription location. + pub location: String, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, ToSchema)] +#[schema(as = ord::ApiOutPointResult)] +#[serde(rename_all = "camelCase")] +pub struct ApiOutPointResult { + #[schema(value_type = Option<ord::ApiOutpointInscriptions>)] + pub result: Option<ApiOutpointInscriptions>, + pub latest_blockhash: String, + #[schema(format = "uint64")] + pub latest_height: u32, +} + +#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize, ToSchema)] +#[schema(as = ord::ApiOutpointInscriptions)] +#[serde(rename_all = "camelCase")] +pub struct ApiOutpointInscriptions { + /// The transaction id. + pub txid: String, + /// The script pubkey. + pub script_pub_key: String, + /// The owner of the script pubkey. + pub owner: ScriptPubkey, + /// The value of the transaction output. + #[schema(format = "uint64")] + pub value: u64, + #[schema(value_type = Vec<ord::ApiInscriptionDigest>)] + /// The inscriptions on the transaction output. + pub inscription_digest: Vec<ApiInscriptionDigest>, +} + +// /ord/outpoint/:outpoint/info +/// Retrieve the outpoint information for the specified outpoint. +#[utoipa::path( + get, + path = "/api/v1/ord/outpoint/{outpoint}/info", + params( + ("outpoint" = String, Path, description = "Outpoint") +), + responses( + (status = 200, description = "Obtain outpoint information", body = OrdOutPointData), + (status = 400, description = "Bad query.", body = ApiError, example = json!(&ApiError::bad_request("bad request"))), + (status = 404, description = "Not found.", body = ApiError, example = json!(&ApiError::not_found("not found"))), + (status = 500, description = "Internal server error.", body = ApiError, example = json!(&ApiError::internal("internal error"))), + ) +)] +pub(crate) async fn ord_outpoint( + Extension(index): Extension<Arc<Index>>, + Path(outpoint): Path<OutPoint>, +) -> ApiResult<ApiOutPointResult> { + log::debug!("rpc: get ord_outpoint: {outpoint}"); + + let rtx = index.begin_read()?; + + let (latest_height, latest_blockhash) = rtx.latest_block()?.ok_or_api_err(|| { + OrdApiError::Internal("Failed to retrieve the latest block from the database.".to_string()) + .into() + })?; + + let inscriptions_with_satpoints = rtx.inscriptions_on_output_with_satpoints(outpoint)?; + let chain = index.get_chain(); + + // If there are no inscriptions on the output, return a `None` result along with the latest block state. + if inscriptions_with_satpoints.is_empty() { + return Ok(Json(ApiResponse::ok(ApiOutPointResult { + result: None, + latest_height: latest_height.n(), + latest_blockhash: latest_blockhash.to_string(), + }))); + } + + let mut inscription_digests = Vec::with_capacity(inscriptions_with_satpoints.len()); + for (satpoint, inscription_id) in inscriptions_with_satpoints { + inscription_digests.push(ApiInscriptionDigest { + id: inscription_id.to_string(), + number: rtx + .get_inscription_entry(inscription_id)?
+ .map(|inscription_entry| inscription_entry.inscription_number) + .ok_or(OrdApiError::UnknownInscriptionId(inscription_id))?, + location: satpoint.to_string(), + }); + } + + // Get the txout from the database store or from an RPC request. + let vout = Index::fetch_vout( + &rtx, + &index.bitcoin_rpc_client()?, + outpoint, + chain, + index.has_transactions_index(), + )? + .ok_or(OrdApiError::TransactionNotFound(outpoint.txid))?; + + Ok(Json(ApiResponse::ok(ApiOutPointResult { + result: Some(ApiOutpointInscriptions { + txid: outpoint.txid.to_string(), + script_pub_key: vout.script_pubkey.to_asm_string(), + owner: ScriptKey::from_script(&vout.script_pubkey, chain).into(), + value: vout.value, + inscription_digest: inscription_digests, + }), + latest_height: latest_height.n(), + latest_blockhash: latest_blockhash.to_string(), + }))) +} diff --git a/src/subcommand/server/ord/transaction.rs b/src/subcommand/server/ord/transaction.rs new file mode 100644 index 0000000000..2eee3bcd76 --- /dev/null +++ b/src/subcommand/server/ord/transaction.rs @@ -0,0 +1,308 @@ +use { + super::{error::ApiError, types::ScriptPubkey, *}, + crate::{ + index::rtx::Rtx, + okx::datastore::{ + ord::{Action, InscriptionOp}, + ScriptKey, + }, + }, + axum::Json, + utoipa::ToSchema, +}; + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, ToSchema)] +#[schema(as = ord::ApiInscriptionAction)] +#[serde(rename_all = "camelCase")] +pub enum ApiInscriptionAction { + /// New inscription + New { cursed: bool, unbound: bool }, + /// Transfer inscription + Transfer, +} + +impl From<Action> for ApiInscriptionAction { + fn from(action: Action) -> Self { + match action { + Action::New { + cursed, unbound, .. + } => ApiInscriptionAction::New { cursed, unbound }, + Action::Transfer => ApiInscriptionAction::Transfer, + } + } +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, ToSchema)] +#[schema(as = ord::ApiTxInscription)] +#[serde(rename_all = "camelCase")] +pub struct ApiTxInscription { + /// The action of the inscription. + #[schema(value_type = ord::ApiInscriptionAction)] + pub action: ApiInscriptionAction, + /// The inscription number. + pub inscription_number: Option<i32>, + /// The inscription id. + pub inscription_id: String, + /// The inscription satpoint of the transaction input. + pub old_satpoint: String, + /// The inscription satpoint of the transaction output. + pub new_satpoint: Option<String>, + /// The message sender which is an address or script pubkey hash. + pub from: ScriptPubkey, + /// The message receiver which is an address or script pubkey hash. + pub to: Option<ScriptPubkey>, +} + +impl ApiTxInscription { + pub(super) fn parse_from_operation( + operation: InscriptionOp, + rtx: &Rtx, + client: &Client, + chain: Chain, + index_transactions: bool, + ) -> Result<Self> { + let prevout = Index::fetch_vout( + rtx, + client, + operation.old_satpoint.outpoint, + chain, + index_transactions, + )?
+ .ok_or(OrdApiError::Internal(format!( + "Failed to get inscription prevout: {}", + operation.old_satpoint.outpoint + )))?; + + let output = match operation.new_satpoint { + Some(new_satpoint) if new_satpoint.outpoint != unbound_outpoint() => Index::fetch_vout( + rtx, + client, + new_satpoint.outpoint, + chain, + index_transactions, + )?, + _ => None, + }; + + Ok(ApiTxInscription { + from: ScriptKey::from_script(&prevout.script_pubkey, chain).into(), + to: output.map(|v| ScriptKey::from_script(&v.script_pubkey, chain).into()), + action: operation.action.into(), + inscription_number: operation.inscription_number, + inscription_id: operation.inscription_id.to_string(), + old_satpoint: operation.old_satpoint.to_string(), + new_satpoint: operation.new_satpoint.map(|v| v.to_string()), + }) + } +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, ToSchema)] +#[schema(as = ord::ApiTxInscriptions)] +#[serde(rename_all = "camelCase")] +pub struct ApiTxInscriptions { + #[schema(value_type = Vec<ord::ApiTxInscription>)] + pub inscriptions: Vec<ApiTxInscription>, + pub txid: String, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, ToSchema)] +#[schema(as = ord::ApiBlockInscriptions)] +#[serde(rename_all = "camelCase")] +pub struct ApiBlockInscriptions { + #[schema(value_type = Vec<ord::ApiTxInscriptions>)] + pub block: Vec<ApiTxInscriptions>, +} + +// ord/tx/:txid/inscriptions +/// Retrieve the inscription actions from the given transaction. +#[utoipa::path( + get, + path = "/api/v1/ord/tx/{txid}/inscriptions", + params( + ("txid" = String, Path, description = "transaction ID") +), + responses( + (status = 200, description = "Obtain inscription actions by txid", body = OrdTxInscriptions), + (status = 400, description = "Bad query.", body = ApiError, example = json!(&ApiError::bad_request("bad request"))), + (status = 404, description = "Not found.", body = ApiError, example = json!(&ApiError::not_found("not found"))), + (status = 500, description = "Internal server error.", body = ApiError, example = json!(&ApiError::internal("internal error"))), + ) +)] +pub(crate) async fn ord_txid_inscriptions( + Extension(index): Extension<Arc<Index>>, + Path(txid): Path<String>, +) -> ApiResult<ApiTxInscriptions> { + log::debug!("rpc: get ord_txid_inscriptions: {}", txid); + let txid = Txid::from_str(&txid).map_err(ApiError::bad_request)?; + let rtx = index.begin_read()?; + let client = index.bitcoin_rpc_client()?; + let index_transactions = index.has_transactions_index(); + + let operations = Index::get_ord_inscription_operations(txid, &rtx, &client)? + .ok_or(OrdApiError::TransactionReceiptNotFound(txid))?; + log::debug!("rpc: get ord_txid_inscriptions: {:?}", operations); + + let mut api_tx_inscriptions = Vec::new(); + for operation in operations.into_iter() { + let tx_inscription = ApiTxInscription::parse_from_operation( + operation, + &rtx, + &client, + index.get_chain(), + index_transactions, + )?; + api_tx_inscriptions.push(tx_inscription); + } + + Ok(Json(ApiResponse::ok(ApiTxInscriptions { + inscriptions: api_tx_inscriptions, + txid: txid.to_string(), + }))) +} + +// ord/block/:blockhash/inscriptions +/// Retrieve the inscription actions from the given block.
+#[utoipa::path( + get, + path = "/api/v1/ord/block/{blockhash}/inscriptions", + params( + ("blockhash" = String, Path, description = "block hash") +), + responses( + (status = 200, description = "Obtain inscription actions by blockhash", body = OrdBlockInscriptions), + (status = 400, description = "Bad query.", body = ApiError, example = json!(&ApiError::bad_request("bad request"))), + (status = 404, description = "Not found.", body = ApiError, example = json!(&ApiError::not_found("not found"))), + (status = 500, description = "Internal server error.", body = ApiError, example = json!(&ApiError::internal("internal error"))), + ) +)] +pub(crate) async fn ord_block_inscriptions( + Extension(index): Extension<Arc<Index>>, + Path(blockhash): Path<String>, +) -> ApiResult<ApiBlockInscriptions> { + log::debug!("rpc: get ord_block_inscriptions: {}", blockhash); + + let blockhash = bitcoin::BlockHash::from_str(&blockhash).map_err(ApiError::bad_request)?; + let rtx = index.begin_read()?; + let client = index.bitcoin_rpc_client()?; + let index_transactions = index.has_transactions_index(); + + let block_operations = Index::get_ord_block_inscription_operations(blockhash, &rtx, &client)?; + log::debug!("rpc: get ord_block_inscriptions: {:?}", block_operations); + + let mut api_block_operations = Vec::new(); + for (txid, tx_operations) in block_operations.into_iter() { + let mut api_tx_operations = Vec::new(); + for operation in tx_operations.into_iter() { + let tx_inscription = ApiTxInscription::parse_from_operation( + operation, + &rtx, + &client, + index.get_chain(), + index_transactions, + )?; + api_tx_operations.push(tx_inscription); + } + if !api_tx_operations.is_empty() { + api_block_operations.push(ApiTxInscriptions { + inscriptions: api_tx_operations, + txid: txid.to_string(), + }); + } + } + + Ok(Json(ApiResponse::ok(ApiBlockInscriptions { + block: api_block_operations, + }))) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{txid, InscriptionId, SatPoint}; + use std::str::FromStr; + + #[test] + fn serialize_ord_inscriptions() { + let mut tx_inscription = ApiTxInscription { + from: ScriptKey::from_script( + &Address::from_str("bc1qhvd6suvqzjcu9pxjhrwhtrlj85ny3n2mqql5w4") + .unwrap() + .assume_checked() + .script_pubkey(), + Chain::Mainnet, + ) + .into(), + to: Some( + ScriptKey::from_script( + &Address::from_str("bc1qhvd6suvqzjcu9pxjhrwhtrlj85ny3n2mqql5w4") + .unwrap() + .assume_checked() + .script_pubkey(), + Chain::Mainnet, + ) + .into(), + ), + action: ApiInscriptionAction::New { + cursed: false, + unbound: false, + }, + inscription_number: Some(100), + inscription_id: InscriptionId { + txid: txid(1), + index: 0xFFFFFFFF, + } + .to_string(), + old_satpoint: SatPoint::from_str( + "5660d06bd69326c18ec63127b37fb3b32ea763c3846b3334c51beb6a800c57d3:1:3000", + ) + .unwrap() + .to_string(), + + new_satpoint: Some( + SatPoint::from_str( + "5660d06bd69326c18ec63127b37fb3b32ea763c3846b3334c51beb6a800c57d3:1:3000", + ) + .unwrap() + .to_string(), + ), + }; + assert_eq!( + serde_json::to_string_pretty(&tx_inscription).unwrap(), + r#"{ + "action": { + "new": { + "cursed": false, + "unbound": false + } + }, + "inscriptionNumber": 100, + "inscriptionId": "1111111111111111111111111111111111111111111111111111111111111111i4294967295", + "oldSatpoint": "5660d06bd69326c18ec63127b37fb3b32ea763c3846b3334c51beb6a800c57d3:1:3000", + "newSatpoint": "5660d06bd69326c18ec63127b37fb3b32ea763c3846b3334c51beb6a800c57d3:1:3000", + "from": { + "address": "bc1qhvd6suvqzjcu9pxjhrwhtrlj85ny3n2mqql5w4" + }, + "to": { + "address": 
"bc1qhvd6suvqzjcu9pxjhrwhtrlj85ny3n2mqql5w4" + } +}"#, + ); + tx_inscription.action = ApiInscriptionAction::Transfer; + assert_eq!( + serde_json::to_string_pretty(&tx_inscription).unwrap(), + r#"{ + "action": "transfer", + "inscriptionNumber": 100, + "inscriptionId": "1111111111111111111111111111111111111111111111111111111111111111i4294967295", + "oldSatpoint": "5660d06bd69326c18ec63127b37fb3b32ea763c3846b3334c51beb6a800c57d3:1:3000", + "newSatpoint": "5660d06bd69326c18ec63127b37fb3b32ea763c3846b3334c51beb6a800c57d3:1:3000", + "from": { + "address": "bc1qhvd6suvqzjcu9pxjhrwhtrlj85ny3n2mqql5w4" + }, + "to": { + "address": "bc1qhvd6suvqzjcu9pxjhrwhtrlj85ny3n2mqql5w4" + } +}"#, + ); + } +} diff --git a/src/subcommand/server/response.rs b/src/subcommand/server/response.rs new file mode 100644 index 0000000000..957e8e4d51 --- /dev/null +++ b/src/subcommand/server/response.rs @@ -0,0 +1,42 @@ +use { + super::{info::NodeInfo, *}, + utoipa::ToSchema, +}; +#[derive(Default, Debug, Clone, Serialize, Deserialize, ToSchema)] +#[aliases( + ApiBRC20Tick = ApiResponse, + ApiBRC20AllTick = ApiResponse, + ApiBRC20Balance = ApiResponse, + ApiBRC20AllBalance = ApiResponse, + ApiBRC20TxEvents = ApiResponse, + ApiBRC20BlockEvents = ApiResponse, + ApiBRC20Transferable = ApiResponse, + + ApiOrdInscription = ApiResponse, + ApiOrdOutPointData = ApiResponse, + ApiOrdOutPointResult = ApiResponse, + ApiOrdTxInscriptions = ApiResponse, + ApiOrdBlockInscriptions = ApiResponse, + + Node = ApiResponse +)] +pub(crate) struct ApiResponse { + pub code: i32, + /// ok + #[schema(example = "ok")] + pub msg: String, + pub data: T, +} + +impl ApiResponse +where + T: Serialize, +{ + fn new(code: i32, msg: String, data: T) -> Self { + Self { code, msg, data } + } + + pub fn ok(data: T) -> Self { + Self::new(0, "ok".to_string(), data) + } +} diff --git a/src/subcommand/server/sat.rs b/src/subcommand/server/sat.rs new file mode 100644 index 0000000000..adf218aa1b --- /dev/null +++ b/src/subcommand/server/sat.rs @@ -0,0 +1,203 @@ +use { + super::{error::ApiError, *}, + axum::Json, + utoipa::ToSchema, +}; + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ApiOutPointResult { + pub result: Option, + pub latest_blockhash: String, + #[schema(format = "uint64")] + pub latest_height: u32, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct ApiSatRanges { + /// The transaction id. + pub outpoint: OutPoint, + /// The script pubkey. + pub sat_ranges: Vec, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +#[serde(untagged)] +pub enum ApiSatRange { + Sketchy((u64, u64)), + #[serde(rename_all = "camelCase")] + ExactWithRarity { + first: u64, + last: u64, + rarity_sats: Vec, + }, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub struct RaritySat { + pub sat: Sat, + pub offset: u64, + pub rarity: Rarity, +} + +// /sat/outpoint/:outpoint/info +/// Retrieve the sat range of the outpoint. 
+#[utoipa::path( + get, + path = "/api/v1/sat/outpoint/{outpoint}/info", + params( + ("outpoint" = String, Path, description = "Outpoint") + ), + responses( + (status = 200, description = "Obtain outpoint information", body = OrdOutPointData), + (status = 400, description = "Bad query.", body = ApiError, example = json!(&ApiError::bad_request("bad request"))), + (status = 404, description = "Not found.", body = ApiError, example = json!(&ApiError::not_found("not found"))), + (status = 500, description = "Internal server error.", body = ApiError, example = json!(&ApiError::internal("internal error"))), + ) + )] + +pub(crate) async fn sat_range_by_outpoint( + Extension(index): Extension<Arc<Index>>, + Path(outpoint): Path<OutPoint>, +) -> ApiResult<ApiOutPointResult> { + log::debug!("rpc: get sat_outpoint_sat_range: {outpoint}"); + + let rtx = index.begin_read()?; + + let (latest_height, latest_blockhash) = rtx.latest_block()?.ok_or_api_err(|| { + ApiError::internal("Failed to retrieve the latest block from the database.".to_string()) + })?; + + let sat_ranges = Index::list_sat_range(&rtx, outpoint, index.has_sat_index())?; + + Ok(Json(ApiResponse::ok(ApiOutPointResult { + result: sat_ranges.map(|ranges| ApiSatRanges { + outpoint, + sat_ranges: ranges.into_iter().map(ApiSatRange::Sketchy).collect(), + }), + latest_height: latest_height.n(), + latest_blockhash: latest_blockhash.to_string(), + }))) +} + +pub(crate) async fn sat_range_with_rarity_by_outpoint( + Extension(index): Extension<Arc<Index>>, + Path(outpoint): Path<OutPoint>, +) -> ApiResult<ApiOutPointResult> { + log::debug!("rpc: get sat_outpoint_sat_range: {outpoint}"); + + let rtx = index.begin_read()?; + + let (latest_height, latest_blockhash) = rtx.latest_block()?.ok_or_api_err(|| { + ApiError::internal("Failed to retrieve the latest block from the database.".to_string()) + })?; + + let Some(sat_ranges) = Index::list_sat_range(&rtx, outpoint, index.has_sat_index())?
else { + return Ok(Json(ApiResponse::ok(ApiOutPointResult { + result: None, + latest_height: latest_height.n(), + latest_blockhash: latest_blockhash.to_string(), + }))); + }; + + let mut exact_sat_ranges = Vec::new(); + let mut value = 0; + for sat_range in sat_ranges { + let rarity_sats = Index::calculate_rarity_for_sat_range(sat_range) + .into_iter() + .map(|(sat, rarity)| RaritySat { + sat, + offset: sat.0 - sat_range.0 + value, + rarity, + }) + .collect(); + exact_sat_ranges.push(ApiSatRange::ExactWithRarity { + first: sat_range.0, + last: sat_range.1, + rarity_sats, + }); + value += sat_range.1 - sat_range.0; + } + + Ok(Json(ApiResponse::ok(ApiOutPointResult { + result: Some(ApiSatRanges { + outpoint, + sat_ranges: exact_sat_ranges, + }), + latest_height: latest_height.n(), + latest_blockhash: latest_blockhash.to_string(), + }))) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_outpoint_sat_range_json_serialization() { + let outpoint = unbound_outpoint(); + let sat_ranges = vec![(0, 100), (100, 200)]; + let api_outpoint_sat_ranges = ApiSatRanges { + outpoint, + sat_ranges: sat_ranges.into_iter().map(ApiSatRange::Sketchy).collect(), + }; + let json = serde_json::to_string(&api_outpoint_sat_ranges).unwrap(); + assert_eq!( + json, + r#"{"outpoint":"0000000000000000000000000000000000000000000000000000000000000000:0","satRanges":[[0,100],[100,200]]}"# + ); + } + + #[test] + fn test_outpoint_sat_range_with_rarity_json_serialization() { + let outpoint = unbound_outpoint(); + let rarity_sats = vec![ + RaritySat { + sat: Sat(0), + offset: 0, + rarity: Rarity::Uncommon, + }, + RaritySat { + sat: Sat(1), + offset: 1, + rarity: Rarity::Epic, + }, + ]; + let api_outpoint_sat_ranges = ApiSatRanges { + outpoint, + sat_ranges: vec![ApiSatRange::ExactWithRarity { + first: 0, + last: 100, + rarity_sats, + }], + }; + let json = serde_json::to_string_pretty(&api_outpoint_sat_ranges).unwrap(); + assert_eq!( + json, + r##"{ + "outpoint": "0000000000000000000000000000000000000000000000000000000000000000:0", + "satRanges": [ + { + "first": 0, + "last": 100, + "raritySats": [ + { + "sat": 0, + "offset": 0, + "rarity": "uncommon" + }, + { + "sat": 1, + "offset": 1, + "rarity": "epic" + } + ] + } + ] +}"## + ); + } +} diff --git a/src/subcommand/server/types.rs b/src/subcommand/server/types.rs new file mode 100644 index 0000000000..49971f120b --- /dev/null +++ b/src/subcommand/server/types.rs @@ -0,0 +1,63 @@ +use super::*; +use crate::okx::datastore::ScriptKey; +use utoipa::ToSchema; + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, ToSchema)] +#[serde(rename_all = "camelCase")] +pub enum ScriptPubkey { + /// Address. + Address(String), + /// Non-standard script hash. + NonStandard(String), +} +impl Default for ScriptPubkey { + fn default() -> Self { + ScriptPubkey::NonStandard(String::new()) + } +} + +impl From<ScriptKey> for ScriptPubkey { + fn from(script_key: ScriptKey) -> Self { + match script_key { + ScriptKey::Address(address) => ScriptPubkey::Address(address.assume_checked().to_string()), + ScriptKey::ScriptHash { script_hash, ..
} => { + ScriptPubkey::NonStandard(script_hash.to_string()) + } + } + } +} +#[cfg(test)] +mod tests { + use super::*; + #[test] + fn serialize_script_pubkey() { + let script_pubkey: ScriptPubkey = ScriptKey::from_script( + &Address::from_str("bc1qhvd6suvqzjcu9pxjhrwhtrlj85ny3n2mqql5w4") + .unwrap() + .assume_checked() + .script_pubkey(), + Chain::Mainnet, + ) + .into(); + assert_eq!( + serde_json::to_string(&script_pubkey).unwrap(), + r#"{"address":"bc1qhvd6suvqzjcu9pxjhrwhtrlj85ny3n2mqql5w4"}"# + ); + let script_pubkey: ScriptPubkey = ScriptKey::from_script( + Script::from_bytes( + hex::decode( + "0014017fed86bba5f31f955f8b316c7fb9bd45cb6cbc00000000000000000000000000000000000000", + ) + .unwrap() + .as_slice(), + ), + Chain::Mainnet, + ) + .into(); + + assert_eq!( + serde_json::to_string(&script_pubkey).unwrap(), + r#"{"nonStandard":"df65c8a338dce7900824e7bd18c336656ca19e57"}"# + ); + } +} diff --git a/src/subcommand/server/utils.rs b/src/subcommand/server/utils.rs new file mode 100644 index 0000000000..e358334e27 --- /dev/null +++ b/src/subcommand/server/utils.rs @@ -0,0 +1,22 @@ +use self::okx::datastore::ScriptKey; +use super::*; +use bitcoin::ScriptHash; + +pub(crate) fn parse_and_validate_script_key_with_chain( + key: &str, + chain: Chain, +) -> Result<ScriptKey> { + if let Ok(address) = Address::from_str(key) { + match address.clone().require_network(chain.network()) { + Ok(_) => Ok(ScriptKey::Address(address)), + Err(_) => Err(anyhow!("invalid chain: {} for address: {}", chain, key)), + } + } else if let Ok(script_hash) = ScriptHash::from_str(key) { + Ok(ScriptKey::ScriptHash { + script_hash, + is_op_return: false, + }) + } else { + Err(anyhow!("invalid script key: {}", key)) + } +}
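Reviewer note (illustrative, not part of the patch): the handlers above expose JSON routes such as /api/v1/ord/outpoint/{outpoint}/info and /api/v1/ord/tx/{txid}/inscriptions, and every payload is wrapped in the ApiResponse envelope { "code": 0, "msg": "ok", "data": ... }. A minimal client sketch for exercising one route locally follows; the host/port and the reqwest (blocking feature) and serde_json dependencies are assumptions, while the path and envelope come from this diff.

// Hypothetical client sketch: query the new outpoint info route and print its `data` field.
// Assumes a node built from this branch is serving HTTP on 127.0.0.1:8080 (adjust as needed).
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let outpoint = "5660d06bd69326c18ec63127b37fb3b32ea763c3846b3334c51beb6a800c57d3:1";
    let url = format!("http://127.0.0.1:8080/api/v1/ord/outpoint/{outpoint}/info");
    // The response body is the ApiResponse envelope: { "code": 0, "msg": "ok", "data": ... }.
    let body: serde_json::Value = reqwest::blocking::get(url)?.json()?;
    assert_eq!(body["code"].as_i64(), Some(0));
    println!("{}", serde_json::to_string_pretty(&body["data"])?);
    Ok(())
}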