diff --git a/.github/workflows/push-data-observatory.yaml b/.github/workflows/push-data-observatory.yaml index 201708d3bd8..eb937a04d65 100644 --- a/.github/workflows/push-data-observatory.yaml +++ b/.github/workflows/push-data-observatory.yaml @@ -8,7 +8,7 @@ env: jobs: build-container: - runs-on: arc-ubuntu-22.04-dind + runs-on: arc-linux-latest-dind steps: - name: Login to Harbor uses: docker/login-action@v3 diff --git a/.gitignore b/.gitignore index 3c441754c6a..37bc8865732 100644 --- a/.gitignore +++ b/.gitignore @@ -63,3 +63,5 @@ nym-api/redocly/formatted-openapi.json **/settings.sql **/enter_db.sh + +*.profraw \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index 62d01b3e6da..e5659b47ac8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -11,7 +11,7 @@ dependencies = [ "macroific", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -24,15 +24,6 @@ dependencies = [ "psl-types", ] -[[package]] -name = "addr2line" -version = "0.24.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" -dependencies = [ - "gimli", -] - [[package]] name = "adler2" version = "2.0.1" @@ -133,9 +124,9 @@ checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "ammonia" -version = "4.1.2" +version = "4.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17e913097e1a2124b46746c980134e8c954bc17a6a59bb3fde96f088d126dde6" +checksum = "d6b346764dd0814805de8abf899fe03065bcee69bb1a4771c785817e39f3978f" dependencies = [ "cssparser", "html5ever", @@ -217,9 +208,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.98" +version = "1.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" +checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" [[package]] name = 
"arbitrary" @@ -408,7 +399,7 @@ dependencies = [ "rustc-hash", "serde", "serde_derive", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -490,7 +481,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -501,7 +492,7 @@ checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -655,7 +646,7 @@ checksum = "57d123550fa8d071b7255cb0cc04dc302baa6c8c4a79f55701552684d8399bce" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -688,21 +679,6 @@ dependencies = [ "url", ] -[[package]] -name = "backtrace" -version = "0.3.75" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002" -dependencies = [ - "addr2line", - "cfg-if", - "libc", - "miniz_oxide", - "object", - "rustc-demangle", - "windows-targets 0.52.6", -] - [[package]] name = "base16ct" version = "0.2.0" @@ -1065,7 +1041,7 @@ dependencies = [ "semver 1.0.26", "serde", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -1159,7 +1135,7 @@ dependencies = [ "num-traits", "serde", "wasm-bindgen", - "windows-link", + "windows-link 0.1.3", ] [[package]] @@ -1250,7 +1226,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -1328,7 +1304,7 @@ dependencies = [ "futures-core", "prost", "prost-types", - "tonic", + "tonic 0.12.3", "tracing-core", ] @@ -1352,7 +1328,7 @@ dependencies = [ "thread_local", "tokio", "tokio-stream", - "tonic", + "tonic 0.12.3", "tracing", "tracing-core", "tracing-subscriber", @@ -1434,19 +1410,22 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cosmos-sdk-proto" -version = "0.26.1" +version = "0.27.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "462e1f6a8e005acc8835d32d60cbd7973ed65ea2a8d8473830e675f050956427" +checksum = "95ac39be7373404accccaede7cc1ec942ccef14f0ca18d209967a756bf1dbb1f" dependencies = [ + "informalsystems-pbjson", "prost", + "serde", "tendermint-proto", + "tonic 0.13.1", ] [[package]] name = "cosmrs" -version = "0.21.1" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1394c263335da09e8ba8c4b2c675d804e3e0deb44cce0866a5f838d3ddd43d02" +checksum = "34e74fa7a22930fe0579bef560f2d64b78415d4c47b9dd976c0635136809471d" dependencies = [ "bip32", "cosmos-sdk-proto", @@ -1501,7 +1480,7 @@ checksum = "a782b93fae93e57ca8ad3e9e994e784583f5933aeaaa5c80a545c4b437be2047" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -1525,7 +1504,7 @@ checksum = "e01c9214319017f6ebd8e299036e1f717fa9bb6724e758f7d6fb2477599d1a29" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -1769,7 +1748,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331" dependencies = [ "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -1873,7 +1852,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -1921,7 +1900,7 @@ dependencies = [ "schemars 0.8.22", "serde", "sha2 0.10.9", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -2025,7 +2004,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -2036,7 +2015,7 @@ checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" dependencies = [ "darling_core", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -2089,7 +2068,7 @@ dependencies = [ "macroific", "proc-macro2", "quote", - "syn 
2.0.106", + "syn 2.0.104", ] [[package]] @@ -2132,7 +2111,7 @@ checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -2153,7 +2132,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -2163,7 +2142,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" dependencies = [ "derive_builder_core", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -2192,7 +2171,7 @@ checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", "unicode-xid", ] @@ -2204,7 +2183,7 @@ checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", "unicode-xid", ] @@ -2262,7 +2241,7 @@ dependencies = [ "libc", "option-ext", "redox_users", - "windows-sys 0.60.2", + "windows-sys 0.61.2", ] [[package]] @@ -2273,7 +2252,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -2326,7 +2305,7 @@ version = "0.1.0" dependencies = [ "cosmwasm-std", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -2488,7 +2467,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -2585,7 +2564,7 @@ dependencies = [ "console_error_panic_hook", "js-sys", "serde-wasm-bindgen 0.6.5", - "thiserror 2.0.12", + "thiserror 2.0.17", "wasm-bindgen", "wasm-bindgen-futures", "wasm-storage", @@ -2618,7 +2597,7 @@ dependencies = [ "macroific", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -2848,7 +2827,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" 
dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -2959,17 +2938,11 @@ dependencies = [ "polyval", ] -[[package]] -name = "gimli" -version = "0.31.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" - [[package]] name = "glob" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" +checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" [[package]] name = "gloo-net" @@ -3254,7 +3227,7 @@ dependencies = [ "rand 0.9.2", "ring", "rustls 0.23.29", - "thiserror 2.0.12", + "thiserror 2.0.17", "tinyvec", "tokio", "tokio-rustls 0.26.2", @@ -3280,7 +3253,7 @@ dependencies = [ "resolv-conf", "rustls 0.23.29", "smallvec", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tokio-rustls 0.26.2", "tracing", @@ -3558,9 +3531,9 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.15" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f66d5bd4c6f02bf0542fad85d626775bab9258cf795a4256dcaf3161114d1df" +checksum = "8d9b05277c7e8da2c93a568989bb6207bef0112e8d17df7a6eda4a3cf143bc5e" dependencies = [ "base64 0.22.1", "bytes", @@ -3574,7 +3547,7 @@ dependencies = [ "libc", "percent-encoding", "pin-project-lite", - "socket2 0.5.10", + "socket2 0.6.0", "tokio", "tower-service", "tracing", @@ -3604,6 +3577,39 @@ dependencies = [ "cc", ] +[[package]] +name = "ibc-proto" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a650b51e384e54264b53974feb38e95e37aac70f7f2f9c07eb8022fe15eb8e20" +dependencies = [ + "base64 0.22.1", + "bytes", + "cosmos-sdk-proto", + "flex-error", + "ics23", + "informalsystems-pbjson", + "prost", + "serde", + "subtle-encoding", + "tendermint-proto", + "tonic 0.13.1", +] + 
+[[package]] +name = "ics23" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73b17f1a5bd7d12ad30a21445cfa5f52fd7651cb3243ba866f9916b1ec112f12" +dependencies = [ + "anyhow", + "bytes", + "hex", + "informalsystems-pbjson", + "prost", + "serde", +] + [[package]] name = "icu_collections" version = "2.0.0" @@ -3725,7 +3731,7 @@ checksum = "0ab604ee7085efba6efc65e4ebca0e9533e3aff6cb501d7d77b211e3a781c6d5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -3778,7 +3784,7 @@ dependencies = [ "js-sys", "sealed", "smallvec", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "wasm-bindgen", "wasm-bindgen-futures", @@ -3794,7 +3800,7 @@ dependencies = [ "macroific", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -3833,6 +3839,16 @@ dependencies = [ "web-time", ] +[[package]] +name = "informalsystems-pbjson" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aa4a0980c8379295100d70854354e78df2ee1c6ca0f96ffe89afeb3140e3a3d" +dependencies = [ + "base64 0.21.7", + "serde", +] + [[package]] name = "inotify" version = "0.9.6" @@ -3903,17 +3919,6 @@ dependencies = [ "rustversion", ] -[[package]] -name = "io-uring" -version = "0.7.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b86e202f00093dcba4275d4636b93ef9dd75d025ae560d2521b45ea28ab49013" -dependencies = [ - "bitflags 2.9.1", - "cfg-if", - "libc", -] - [[package]] name = "ip_network" version = "0.4.1" @@ -4044,7 +4049,7 @@ checksum = "03343451ff899767262ec32146f6d559dd759fdadf42ff0e227c7c48f72594b4" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -4069,9 +4074,9 @@ dependencies = [ [[package]] name = "jwt-simple" -version = "0.12.12" +version = "0.12.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"731011e9647a71ff4f8474176ff6ce6e0d2de87a0173f15613af3a84c3e3401a" +checksum = "6ad8761f175784dfbb83709f322fc4daf6b27afd5bf375492f2876f9e925ef5a" dependencies = [ "anyhow", "binstring", @@ -4089,7 +4094,7 @@ dependencies = [ "serde", "serde_json", "superboring", - "thiserror 2.0.12", + "thiserror 2.0.17", "zeroize", ] @@ -4316,7 +4321,7 @@ dependencies = [ "proc-macro2", "quote", "sealed", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -4328,7 +4333,7 @@ dependencies = [ "proc-macro2", "quote", "sealed", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -4341,7 +4346,7 @@ dependencies = [ "macroific_core", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -4369,7 +4374,7 @@ checksum = "ac84fd3f360fcc43dc5f5d186f02a94192761a080e8bc58621ad4d12296a58cf" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -4492,7 +4497,7 @@ dependencies = [ "rand 0.8.5", "serde", "serde-wasm-bindgen 0.6.5", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tsify", "url", @@ -4900,7 +4905,7 @@ dependencies = [ "tempfile", "tendermint", "test-with", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tokio-stream", @@ -4946,7 +4951,7 @@ dependencies = [ "sha2 0.10.9", "tendermint", "tendermint-rpc", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tracing", "ts-rs", @@ -4979,7 +4984,7 @@ dependencies = [ "nym-validator-client", "nym-wireguard-types", "semver 1.0.26", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tokio-util", "tracing", @@ -5004,7 +5009,7 @@ dependencies = [ "serde", "sha2 0.10.9", "strum_macros", - "thiserror 2.0.12", + "thiserror 2.0.17", "tracing", "x25519-dalek", ] @@ -5023,7 +5028,7 @@ dependencies = [ "nym-task", "nym-validator-client", "rand 0.8.5", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -5130,7 +5135,7 @@ dependencies = [ "serde_json", "tap", "tempfile", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "toml 0.8.23", @@ -5167,7 +5172,7 
@@ dependencies = [ "serde", "serde_json", "tap", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tokio-tungstenite", @@ -5220,7 +5225,7 @@ dependencies = [ "sha2 0.10.9", "si-scale", "tempfile", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tokio-stream", @@ -5247,7 +5252,7 @@ dependencies = [ "nym-sphinx-params", "nym-statistics-common", "serde", - "thiserror 2.0.12", + "thiserror 2.0.17", "url", ] @@ -5262,7 +5267,7 @@ dependencies = [ "nym-gateway-requests", "serde", "sqlx", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tracing", @@ -5282,7 +5287,7 @@ dependencies = [ "nym-task", "sqlx", "sqlx-pool-guard", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tracing", @@ -5305,7 +5310,7 @@ dependencies = [ "serde", "serde-wasm-bindgen 0.6.5", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio_with_wasm", "tsify", "wasm-bindgen", @@ -5366,7 +5371,7 @@ dependencies = [ "serde", "sha2 0.10.9", "subtle 2.6.1", - "thiserror 2.0.12", + "thiserror 2.0.17", "zeroize", ] @@ -5379,7 +5384,7 @@ dependencies = [ "log", "nym-network-defaults", "serde", - "thiserror 2.0.12", + "thiserror 2.0.17", "toml 0.8.23", "url", ] @@ -5396,7 +5401,7 @@ dependencies = [ "nym-ip-packet-requests", "nym-sdk", "pnet_packet", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tokio-util", "tracing", @@ -5414,7 +5419,7 @@ dependencies = [ "schemars 0.8.22", "serde", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", "utoipa", "vergen 8.3.1", ] @@ -5483,7 +5488,7 @@ dependencies = [ "strum", "strum_macros", "tempfile", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tokio-util", @@ -5524,7 +5529,7 @@ dependencies = [ "strum", "strum_macros", "tempfile", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tokio-util", @@ -5575,7 +5580,7 @@ dependencies = [ "serde", "sqlx", "sqlx-pool-guard", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "zeroize", @@ -5594,7 +5599,7 @@ 
dependencies = [ "nym-credentials-interface", "nym-ecash-time", "nym-validator-client", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", ] @@ -5620,7 +5625,7 @@ dependencies = [ "nym-upgrade-mode-check", "nym-validator-client", "si-scale", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tracing", @@ -5645,7 +5650,7 @@ dependencies = [ "nym-validator-client", "rand 0.8.5", "serde", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "zeroize", ] @@ -5663,7 +5668,7 @@ dependencies = [ "serde", "strum", "strum_macros", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "utoipa", ] @@ -5697,11 +5702,48 @@ dependencies = [ "serde_json", "sha2 0.10.9", "subtle-encoding", - "thiserror 2.0.12", + "thiserror 2.0.17", "x25519-dalek", "zeroize", ] +[[package]] +name = "nym-data-observatory" +version = "1.0.0" +dependencies = [ + "anyhow", + "async-trait", + "axum", + "blake3", + "chrono", + "clap", + "cosmrs", + "glob", + "nym-bin-common", + "nym-config", + "nym-network-defaults", + "nym-task", + "nym-validator-client", + "nyxd-scraper-psql", + "nyxd-scraper-shared", + "reqwest 0.12.22", + "schemars 0.8.22", + "serde", + "serde_json", + "sqlx", + "thiserror 2.0.17", + "time", + "tokio", + "tokio-util", + "tower-http", + "tracing", + "tracing-subscriber", + "url", + "utoipa", + "utoipa-swagger-ui", + "utoipauto", +] + [[package]] name = "nym-dkg" version = "0.1.0" @@ -5720,7 +5762,7 @@ dependencies = [ "serde", "serde_derive", "sha2 0.10.9", - "thiserror 2.0.12", + "thiserror 2.0.17", "zeroize", ] @@ -5735,7 +5777,7 @@ dependencies = [ "cw-utils", "cw2", "nym-multisig-contract-common", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -5748,7 +5790,7 @@ dependencies = [ "nym-network-defaults", "nym-validator-client", "semver 1.0.26", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tracing", "url", @@ -5762,7 +5804,7 @@ dependencies = [ "nym-crypto", "semver 1.0.26", "serde", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", 
"tracing", "url", @@ -5784,7 +5826,7 @@ dependencies = [ "reqwest 0.12.22", "serde", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", "tracing", "utoipa", ] @@ -5847,7 +5889,7 @@ dependencies = [ "nym-wireguard-types", "rand 0.8.5", "serde", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tokio-stream", @@ -5880,7 +5922,7 @@ dependencies = [ "rand 0.8.5", "serde", "si-scale", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tokio-stream", @@ -5930,7 +5972,7 @@ dependencies = [ "rand 0.8.5", "serde", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tokio-util", "tracing", @@ -5963,7 +6005,7 @@ dependencies = [ "serde_json", "strum", "subtle 2.6.1", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tracing", @@ -5982,7 +6024,7 @@ dependencies = [ "nym-statistics-common", "sqlx", "strum", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tracing", @@ -6001,7 +6043,7 @@ dependencies = [ "nym-gateway-requests", "nym-sphinx", "sqlx", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tracing", @@ -6018,7 +6060,7 @@ dependencies = [ "nym-ffi-shared", "nym-sdk", "nym-sphinx-anonymous-replies", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "uniffi", "uniffi_build", @@ -6059,7 +6101,7 @@ dependencies = [ "serde_json", "serde_plain", "serde_yaml", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tracing", "tracing-subscriber", @@ -6076,7 +6118,7 @@ dependencies = [ "proc-macro2", "quote", "reqwest 0.12.22", - "syn 2.0.106", + "syn 2.0.104", "uuid", ] @@ -6108,7 +6150,7 @@ version = "0.1.0" dependencies = [ "nym-credential-storage", "nym-credentials", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tracing", "zeroize", @@ -6134,7 +6176,7 @@ version = "0.1.0" dependencies = [ "log", "rand 0.8.5", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -6146,7 +6188,7 @@ dependencies = [ "futures", "nym-ip-packet-requests", "nym-sdk", - "thiserror 2.0.12", + "thiserror 
2.0.17", "tokio", "tokio-util", "tracing", @@ -6164,7 +6206,7 @@ dependencies = [ "nym-sphinx", "rand 0.8.5", "serde", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tokio-util", @@ -6205,7 +6247,7 @@ dependencies = [ "reqwest 0.12.22", "serde", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tokio-tun", @@ -6221,7 +6263,7 @@ dependencies = [ "k256", "ledger-transport", "ledger-transport-hid", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -6268,7 +6310,7 @@ dependencies = [ "semver 1.0.26", "serde", "serde_repr", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "ts-rs", "utoipa", @@ -6294,7 +6336,7 @@ dependencies = [ "nym-task", "rand 0.8.5", "serde", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tokio-util", @@ -6313,7 +6355,7 @@ dependencies = [ "cw4", "schemars 0.8.22", "serde", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -6407,7 +6449,7 @@ dependencies = [ "sqlx", "tap", "tempfile", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tokio-tungstenite", @@ -6485,7 +6527,7 @@ dependencies = [ "serde_json", "sha2 0.10.9", "sysinfo", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tokio-stream", @@ -6535,7 +6577,7 @@ dependencies = [ "serde_json", "strum", "strum_macros", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "url", @@ -6604,7 +6646,7 @@ dependencies = [ "sqlx", "strum", "strum_macros", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tokio-stream", @@ -6650,7 +6692,7 @@ dependencies = [ "rand_chacha 0.3.1", "serde", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "wasm-utils", ] @@ -6665,7 +6707,7 @@ dependencies = [ "rand 0.8.5", "serde", "serde-wasm-bindgen 0.6.5", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tsify", "wasm-bindgen", @@ -6692,7 +6734,7 @@ dependencies = [ "snow", "strum", "strum_macros", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tokio-util", "tracing", 
@@ -6739,7 +6781,7 @@ name = "nym-ordered-buffer" version = "0.1.0" dependencies = [ "log", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -6756,7 +6798,7 @@ dependencies = [ "rand 0.8.5", "rayon", "sphinx-packet", - "thiserror 2.0.12", + "thiserror 2.0.17", "x25519-dalek", "zeroize", ] @@ -6780,7 +6822,7 @@ dependencies = [ "nym-contracts-common", "schemars 0.8.22", "serde", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -6792,7 +6834,7 @@ dependencies = [ "cw-controllers", "schemars 0.8.22", "serde", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", ] @@ -6809,7 +6851,7 @@ dependencies = [ "nym-registration-common", "nym-sdk", "nym-validator-client", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tokio-util", "tracing", @@ -6874,7 +6916,7 @@ dependencies = [ "serde", "tap", "tempfile", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tokio-stream", @@ -6904,7 +6946,7 @@ version = "0.1.0" dependencies = [ "bincode", "serde", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -6920,7 +6962,7 @@ dependencies = [ "nym-sphinx-anonymous-replies", "serde", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", ] @@ -6970,7 +7012,7 @@ dependencies = [ "serde", "serde_json", "tap", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "url", @@ -7004,7 +7046,7 @@ dependencies = [ "schemars 0.8.22", "serde", "tap", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "url", ] @@ -7036,7 +7078,7 @@ dependencies = [ "serde", "serde_json", "tap", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -7060,7 +7102,7 @@ dependencies = [ "rand 0.8.5", "rand_chacha 0.3.1", "rand_distr", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tracing", ] @@ -7079,7 +7121,7 @@ dependencies = [ "nym-topology", "rand 0.8.5", "serde", - "thiserror 2.0.12", + "thiserror 2.0.17", "zeroize", ] @@ -7093,7 +7135,7 @@ dependencies = [ "rand 0.8.5", "serde", "serde_json", - "thiserror 2.0.12", + "thiserror 
2.0.17", ] [[package]] @@ -7109,7 +7151,7 @@ dependencies = [ "nym-topology", "rand 0.8.5", "rand_chacha 0.3.1", - "thiserror 2.0.12", + "thiserror 2.0.17", "tracing", "wasm-bindgen", ] @@ -7127,7 +7169,7 @@ dependencies = [ "nym-sphinx-types", "rand 0.8.5", "serde", - "thiserror 2.0.12", + "thiserror 2.0.17", "utoipa", "wasmtimer", ] @@ -7146,7 +7188,7 @@ dependencies = [ "nym-sphinx-types", "nym-topology", "rand 0.8.5", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -7157,7 +7199,7 @@ dependencies = [ "nym-sphinx-anonymous-replies", "nym-sphinx-params", "nym-sphinx-types", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -7170,7 +7212,7 @@ dependencies = [ "nym-sphinx-forwarding", "nym-sphinx-params", "nym-sphinx-types", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tokio-util", "tracing", @@ -7183,7 +7225,7 @@ dependencies = [ "nym-crypto", "nym-sphinx-types", "serde", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -7192,7 +7234,7 @@ version = "0.1.0" dependencies = [ "nym-sphinx-addressing", "nym-sphinx-types", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -7201,7 +7243,7 @@ version = "0.2.0" dependencies = [ "nym-outfox", "sphinx-packet", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -7252,7 +7294,7 @@ dependencies = [ "strum", "strum_macros", "sysinfo", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "utoipa", @@ -7270,7 +7312,7 @@ dependencies = [ "rand 0.8.5", "serde", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", "zeroize", ] @@ -7283,7 +7325,7 @@ dependencies = [ "futures", "log", "nym-test-utils", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tokio-util", "tracing", @@ -7333,7 +7375,7 @@ dependencies = [ "reqwest 0.12.22", "serde", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tracing", "tsify", @@ -7348,7 +7390,7 @@ dependencies = [ "etherparse", "log", "nym-wireguard-types", - "thiserror 2.0.12", + "thiserror 2.0.17", 
"tokio", "tokio-tun", ] @@ -7377,7 +7419,7 @@ dependencies = [ "strum", "strum_macros", "tempfile", - "thiserror 2.0.12", + "thiserror 2.0.17", "ts-rs", "url", "utoipa", @@ -7396,7 +7438,7 @@ dependencies = [ "reqwest 0.12.22", "serde", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tracing", "utoipa", @@ -7443,7 +7485,7 @@ dependencies = [ "serde_json", "sha2 0.10.9", "tendermint-rpc", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tracing", @@ -7480,7 +7522,7 @@ dependencies = [ "nym-task", "nym-ticketbooks-merkle", "nym-validator-client", - "nyxd-scraper", + "nyxd-scraper-sqlite", "rand 0.8.5", "rand_chacha 0.3.1", "serde", @@ -7488,7 +7530,7 @@ dependencies = [ "serde_with", "sha2 0.10.9", "sqlx", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tracing", @@ -7509,7 +7551,7 @@ dependencies = [ "nym-task", "nym-validator-client", "rand 0.8.5", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tokio-util", @@ -7527,7 +7569,7 @@ dependencies = [ "nym-contracts-common", "nym-mixnet-contract-common", "serde", - "thiserror 2.0.12", + "thiserror 2.0.17", "ts-rs", ] @@ -7548,7 +7590,7 @@ dependencies = [ "serde", "serde-wasm-bindgen 0.6.5", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tsify", "wasm-bindgen", @@ -7593,7 +7635,7 @@ dependencies = [ "nym-node-metrics", "nym-task", "nym-wireguard-types", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tokio-stream", "tracing", @@ -7638,7 +7680,7 @@ dependencies = [ "nym-credentials-interface", "schemars 0.8.22", "serde", - "thiserror 2.0.12", + "thiserror 2.0.17", "utoipa", ] @@ -7674,7 +7716,7 @@ dependencies = [ "nym-crypto", "rand 0.8.5", "serde", - "thiserror 2.0.12", + "thiserror 2.0.17", "x25519-dalek", ] @@ -7701,7 +7743,7 @@ dependencies = [ "serde_json", "sha2 0.10.9", "tar", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tracing", @@ -7722,12 +7764,12 @@ dependencies = [ "nym-network-defaults", "nym-task", 
"nym-validator-client", - "nyxd-scraper", + "nyxd-scraper-sqlite", "reqwest 0.12.22", "schemars 0.8.22", "serde", "sqlx", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tokio-util", @@ -7740,22 +7782,42 @@ dependencies = [ ] [[package]] -name = "nyxd-scraper" +name = "nyxd-scraper-psql" version = "0.1.0" dependencies = [ - "anyhow", "async-trait", + "base64 0.22.1", + "cosmrs", + "itertools 0.14.0", + "nyxd-scraper-shared", + "serde", + "serde_json", + "sqlx", + "thiserror 2.0.17", + "tokio", + "tracing", +] + +[[package]] +name = "nyxd-scraper-shared" +version = "0.1.0" +dependencies = [ + "async-trait", + "base64 0.22.1", "const_format", + "cosmos-sdk-proto", "cosmrs", "eyre", "futures", "humantime", + "ibc-proto", + "prost", "serde", + "serde_json", "sha2 0.10.9", - "sqlx", "tendermint", "tendermint-rpc", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tokio-stream", @@ -7764,34 +7826,38 @@ dependencies = [ "url", ] +[[package]] +name = "nyxd-scraper-sqlite" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "nyxd-scraper-shared", + "sqlx", + "thiserror 2.0.17", + "tokio", + "tracing", +] + [[package]] name = "objc2-core-foundation" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c10c2894a6fed806ade6027bcd50662746363a9589d3ec9d9bef30a4e4bc166" +checksum = "2a180dd8642fa45cdb7dd721cd4c11b1cadd4929ce112ebd8b9f5803cc79d536" dependencies = [ "bitflags 2.9.1", ] [[package]] name = "objc2-io-kit" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71c1c64d6120e51cd86033f67176b1cb66780c2efe34dec55176f77befd93c0a" +checksum = "33fafba39597d6dc1fb709123dfa8289d39406734be322956a69f0931c73bb15" dependencies = [ "libc", "objc2-core-foundation", ] -[[package]] -name = "object" -version = "0.36.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" -dependencies = [ - "memchr", -] - [[package]] name = "once_cell" version = "1.21.3" @@ -8094,7 +8160,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1db05f56d34358a8b1066f67cbb203ee3e7ed2ba674a6263a1d5ec6db2204323" dependencies = [ "memchr", - "thiserror 2.0.12", + "thiserror 2.0.17", "ucd-trie", ] @@ -8118,7 +8184,7 @@ dependencies = [ "pest_meta", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -8181,7 +8247,7 @@ dependencies = [ "phf_shared", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -8210,7 +8276,7 @@ checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -8304,7 +8370,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -8462,11 +8528,11 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "3.3.0" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edce586971a4dfaa28950c6f18ed55e0406c1ab88bbce2c6f6293a7aaba73d35" +checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983" dependencies = [ - "toml_edit", + "toml_edit 0.23.5", ] [[package]] @@ -8488,7 +8554,7 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -8521,7 +8587,7 @@ dependencies = [ "memchr", "parking_lot", "protobuf", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -8544,7 +8610,7 @@ dependencies = [ "itertools 0.14.0", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -8621,7 +8687,7 @@ dependencies = [ "rustc-hash", "rustls 0.23.29", "socket2 0.5.10", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tracing", "web-time", @@ -8642,7 +8708,7 @@ dependencies = [ "rustls 0.23.29", 
"rustls-pki-types", "slab", - "thiserror 2.0.12", + "thiserror 2.0.17", "tinyvec", "tracing", "web-time", @@ -8789,7 +8855,7 @@ checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac" dependencies = [ "getrandom 0.2.16", "libredox", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -8809,7 +8875,7 @@ checksum = "1165225c21bff1f3bbce98f5a1f889949bc902d3575308cc7b0de30b4f6d27c7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -8930,7 +8996,7 @@ version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21918d6644020c6f6ef1993242989bf6d4952d2e025617744f184c02df51c356" dependencies = [ - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -9044,7 +9110,7 @@ dependencies = [ "proc-macro2", "quote", "rust-embed-utils", - "syn 2.0.106", + "syn 2.0.104", "walkdir", ] @@ -9074,12 +9140,6 @@ dependencies = [ "thiserror 1.0.69", ] -[[package]] -name = "rustc-demangle" -version = "0.1.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "989e6739f80c4ad5b13e0fd7fe89531180375b18520cc8c82080e4dc4035b84f" - [[package]] name = "rustc-hash" version = "2.1.1" @@ -9333,7 +9393,7 @@ dependencies = [ "proc-macro2", "quote", "serde_derive_internals 0.29.1", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -9365,7 +9425,7 @@ checksum = "1783eabc414609e28a5ba76aee5ddd52199f7107a0b24c2e9746a1ecc34a683d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -9386,7 +9446,7 @@ checksum = "22f968c5ea23d555e670b449c1c5e7b2fc399fdaec1d304a17cd48e288abc107" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -9471,10 +9531,11 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.219" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" dependencies = [ + "serde_core", "serde_derive", ] @@ -9518,15 +9579,24 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + [[package]] name = "serde_derive" -version = "1.0.219" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -9537,7 +9607,7 @@ checksum = "e578a843d40b4189a4d66bba51d7684f57da5bd7c304c64e14bd63efbef49509" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -9548,19 +9618,20 @@ checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] name = "serde_json" -version = "1.0.141" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30b9eff21ebe718216c6ec64e1d9ac57087aad11efc64e32002bce4a0d4c03d3" +checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" dependencies = [ "itoa", "memchr", "ryu", "serde", + "serde_core", ] [[package]] @@ -9576,7 +9647,7 @@ dependencies = [ "serde_json", "serde_json_path_core", "serde_json_path_macros", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -9588,7 +9659,7 @@ dependencies = [ "inventory", "serde", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -9610,7 +9681,7 @@ checksum = 
"aafbefbe175fa9bf03ca83ef89beecff7d2a95aaacd5732325b90ac8c3bd7b90" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -9640,7 +9711,7 @@ checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -9693,7 +9764,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -9845,9 +9916,9 @@ checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" [[package]] name = "slab" -version = "0.4.11" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" +checksum = "04dc19736151f35336d325007ac991178d504a119863a2fcb3758cdb5e52c50d" [[package]] name = "sluice" @@ -10020,7 +10091,7 @@ dependencies = [ "serde_json", "sha2 0.10.9", "smallvec", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "tokio-stream", @@ -10039,7 +10110,7 @@ dependencies = [ "quote", "sqlx-core", "sqlx-macros-core", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -10062,7 +10133,7 @@ dependencies = [ "sqlx-mysql", "sqlx-postgres", "sqlx-sqlite", - "syn 2.0.106", + "syn 2.0.104", "tokio", "url", ] @@ -10105,7 +10176,7 @@ dependencies = [ "smallvec", "sqlx-core", "stringprep", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tracing", "whoami", @@ -10157,7 +10228,7 @@ dependencies = [ "smallvec", "sqlx-core", "stringprep", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tracing", "whoami", @@ -10183,7 +10254,7 @@ dependencies = [ "serde", "serde_urlencoded", "sqlx-core", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tracing", "url", @@ -10271,7 +10342,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -10327,9 +10398,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.106" +version = 
"2.0.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" +checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40" dependencies = [ "proc-macro2", "quote", @@ -10359,14 +10430,14 @@ checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] name = "sysinfo" -version = "0.37.0" +version = "0.37.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07cec4dc2d2e357ca1e610cfb07de2fa7a10fc3e9fe89f72545f3d244ea87753" +checksum = "16607d5caffd1c07ce073528f9ed972d88db15dd44023fa57142963be3feb11f" dependencies = [ "libc", "memchr", @@ -10556,7 +10627,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -10592,7 +10663,7 @@ dependencies = [ "serde_json", "sqlx", "tempfile", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tokio", "toml 0.8.23", @@ -10621,11 +10692,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.12" +version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" +checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" dependencies = [ - "thiserror-impl 2.0.12", + "thiserror-impl 2.0.17", ] [[package]] @@ -10636,18 +10707,18 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] name = "thiserror-impl" -version = "2.0.12" +version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" +checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" dependencies = [ "proc-macro2", "quote", - 
"syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -10752,34 +10823,31 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.47.1" +version = "1.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89e49afdadebb872d3145a5638b59eb0691ea23e46ca484037cfab3b76b95038" +checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408" dependencies = [ - "backtrace", "bytes", - "io-uring", "libc", "mio 1.0.4", "parking_lot", "pin-project-lite", "signal-hook-registry", - "slab", "socket2 0.6.0", "tokio-macros", "tracing", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] name = "tokio-macros" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" +checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -10928,7 +10996,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37e04c1865c281139e5ccf633cb9f76ffdaabeebfe53b703984cf82878e2aabb" dependencies = [ "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -10948,8 +11016,8 @@ checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" dependencies = [ "serde", "serde_spanned", - "toml_datetime", - "toml_edit", + "toml_datetime 0.6.11", + "toml_edit 0.22.27", ] [[package]] @@ -10961,6 +11029,15 @@ dependencies = [ "serde", ] +[[package]] +name = "toml_datetime" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2cdb639ebbc97961c51720f858597f7f24c4fc295327923af55b74c3c724533" +dependencies = [ + "serde_core", +] + [[package]] name = "toml_edit" version = "0.22.27" @@ -10970,11 +11047,32 @@ dependencies = [ "indexmap 2.10.0", "serde", "serde_spanned", - "toml_datetime", + 
"toml_datetime 0.6.11", "toml_write", "winnow", ] +[[package]] +name = "toml_edit" +version = "0.23.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2ad0b7ae9cfeef5605163839cb9221f453399f15cfb5c10be9885fcf56611f9" +dependencies = [ + "indexmap 2.10.0", + "toml_datetime 0.7.3", + "toml_parser", + "winnow", +] + +[[package]] +name = "toml_parser" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b551886f449aa90d4fe2bdaa9f4a2577ad2dde302c61ecf262d80b116db95c10" +dependencies = [ + "winnow", +] + [[package]] name = "toml_write" version = "0.1.2" @@ -11011,6 +11109,34 @@ dependencies = [ "tracing", ] +[[package]] +name = "tonic" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e581ba15a835f4d9ea06c55ab1bd4dce26fc53752c69a04aac00703bfb49ba9" +dependencies = [ + "async-trait", + "base64 0.22.1", + "bytes", + "h2 0.4.11", + "http 1.3.1", + "http-body 1.0.1", + "http-body-util", + "hyper 1.6.0", + "hyper-timeout", + "hyper-util", + "percent-encoding", + "pin-project", + "prost", + "socket2 0.5.10", + "tokio", + "tokio-stream", + "tower 0.5.2", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "tower" version = "0.4.13" @@ -11039,9 +11165,12 @@ checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" dependencies = [ "futures-core", "futures-util", + "indexmap 2.10.0", "pin-project-lite", + "slab", "sync_wrapper 1.0.2", "tokio", + "tokio-util", "tower-layer", "tower-service", "tracing", @@ -11108,7 +11237,7 @@ checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -11215,7 +11344,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04659ddb06c87d233c566112c1c9c5b9e98256d9af50ec3bc9c8327f873a7568" dependencies = [ "quote", - "syn 2.0.106", + "syn 2.0.104", ] 
[[package]] @@ -11272,7 +11401,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e640d9b0964e9d39df633548591090ab92f7a4567bc31d3891af23471a3365c6" dependencies = [ "lazy_static", - "thiserror 2.0.12", + "thiserror 2.0.17", "ts-rs-macros", ] @@ -11299,7 +11428,7 @@ checksum = "0e9d8656589772eeec2cf7a8264d9cda40fb28b9bc53118ceb9e8c07f8f38730" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", "termcolor", ] @@ -11326,7 +11455,7 @@ dependencies = [ "proc-macro2", "quote", "serde_derive_internals 0.28.0", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -11388,7 +11517,7 @@ checksum = "016c26257f448222014296978b2c8456e2cad4de308c35bdb1e383acd569ef5b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -11536,7 +11665,7 @@ dependencies = [ "indexmap 2.10.0", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -11551,7 +11680,7 @@ dependencies = [ "proc-macro2", "quote", "serde", - "syn 2.0.106", + "syn 2.0.104", "toml 0.5.11", "uniffi_meta", ] @@ -11678,7 +11807,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "syn 2.0.106", + "syn 2.0.104", "uuid", ] @@ -11717,7 +11846,7 @@ checksum = "268d76aaebb80eba79240b805972e52d7d410d4bcc52321b951318b0f440cd60" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -11728,7 +11857,7 @@ checksum = "382673bda1d05c85b4550d32fd4192ccd4cffe9a908543a0795d1e7682b36246" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", "utoipauto-core", ] @@ -11947,7 +12076,7 @@ dependencies = [ "log", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", "wasm-bindgen-shared", ] @@ -11982,7 +12111,7 @@ checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -12017,7 +12146,7 @@ checksum = 
"17d5042cc5fa009658f9a7333ef24291b1291a25b6382dd68862a7f3b969f69b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -12043,7 +12172,7 @@ dependencies = [ "rand 0.8.5", "serde", "serde-wasm-bindgen 0.6.5", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", "tsify", "url", @@ -12065,7 +12194,7 @@ dependencies = [ "nym-store-cipher", "serde", "serde-wasm-bindgen 0.6.5", - "thiserror 2.0.12", + "thiserror 2.0.17", "wasm-bindgen", "wasm-utils", ] @@ -12244,7 +12373,7 @@ dependencies = [ "windows-collections", "windows-core", "windows-future", - "windows-link", + "windows-link 0.1.3", "windows-numerics", ] @@ -12265,7 +12394,7 @@ checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" dependencies = [ "windows-implement", "windows-interface", - "windows-link", + "windows-link 0.1.3", "windows-result", "windows-strings", ] @@ -12277,7 +12406,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e" dependencies = [ "windows-core", - "windows-link", + "windows-link 0.1.3", "windows-threading", ] @@ -12289,7 +12418,7 @@ checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -12300,7 +12429,7 @@ checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -12309,6 +12438,12 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + [[package]] name = "windows-numerics" version = "0.2.0" @@ -12316,7 
+12451,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1" dependencies = [ "windows-core", - "windows-link", + "windows-link 0.1.3", ] [[package]] @@ -12325,7 +12460,7 @@ version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" dependencies = [ - "windows-link", + "windows-link 0.1.3", ] [[package]] @@ -12334,7 +12469,7 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" dependencies = [ - "windows-link", + "windows-link 0.1.3", ] [[package]] @@ -12382,6 +12517,15 @@ dependencies = [ "windows-targets 0.53.2", ] +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link 0.2.1", +] + [[package]] name = "windows-targets" version = "0.42.2" @@ -12450,7 +12594,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6" dependencies = [ - "windows-link", + "windows-link 0.1.3", ] [[package]] @@ -12724,7 +12868,7 @@ checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", "synstructure", ] @@ -12745,7 +12889,7 @@ checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -12765,15 +12909,15 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", "synstructure", ] [[package]] name = 
"zeroize" -version = "1.8.1" +version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" dependencies = [ "zeroize_derive", ] @@ -12786,7 +12930,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -12819,7 +12963,7 @@ checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.104", ] [[package]] @@ -12835,7 +12979,7 @@ dependencies = [ "flate2", "indexmap 2.10.0", "memchr", - "thiserror 2.0.12", + "thiserror 2.0.17", "zopfli", ] @@ -12856,7 +13000,7 @@ dependencies = [ "rand 0.8.5", "reqwest 0.12.22", "serde", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tsify", "uuid", @@ -12916,7 +13060,7 @@ dependencies = [ "reqwest 0.12.22", "serde", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tracing", "url", diff --git a/Cargo.toml b/Cargo.toml index eaad10bbc4f..85e43d76e2b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -87,7 +87,9 @@ members = [ "common/nymsphinx/params", "common/nymsphinx/routing", "common/nymsphinx/types", - "common/nyxd-scraper", + "common/nyxd-scraper-sqlite", + "common/nyxd-scraper-psql", + "common/nyxd-scraper-shared", "common/pemstore", "common/registration", "common/serde-helpers", @@ -124,6 +126,7 @@ members = [ "nym-credential-proxy/nym-credential-proxy", "nym-credential-proxy/nym-credential-proxy-requests", "nym-credential-proxy/vpn-api-lib-wasm", + "nym-data-observatory", "nym-ip-packet-client", "nym-network-monitor", "nym-node", @@ -263,6 +266,7 @@ futures = "0.3.31" futures-util = "0.3" generic-array = "0.14.7" getrandom = "0.2.10" +glob = "0.3" handlebars = "3.5.5" hex = "0.4.3" hickory-resolver = "0.25" @@ -398,7 +402,9 @@ 
cw-multi-test = "=2.3.2" bip32 = { version = "0.5.3", default-features = false } -cosmrs = { version = "0.21.1" } +cosmrs = { version = "0.22.0" } +cosmos-sdk-proto = { version = "0.27.0" } +ibc-proto = { version = "0.52.0" } tendermint = "0.40.4" tendermint-rpc = "0.40.4" prost = { version = "0.13", default-features = false } diff --git a/common/nyxd-scraper-psql/.sqlx/query-08f4e54ac24fccd54f4208797b3749e457f8cd4ba3d7d906a7ab3bf5b4e7dc9c.json b/common/nyxd-scraper-psql/.sqlx/query-08f4e54ac24fccd54f4208797b3749e457f8cd4ba3d7d906a7ab3bf5b4e7dc9c.json new file mode 100644 index 00000000000..cc5863fd0ea --- /dev/null +++ b/common/nyxd-scraper-psql/.sqlx/query-08f4e54ac24fccd54f4208797b3749e457f8cd4ba3d7d906a7ab3bf5b4e7dc9c.json @@ -0,0 +1,27 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO transaction\n (hash, height, index, success, messages, memo, signatures, signer_infos, fee, gas_wanted, gas_used, raw_log, logs, events)\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)\n ON CONFLICT (hash) DO UPDATE\n SET height = excluded.height,\n index = excluded.index,\n success = excluded.success,\n messages = excluded.messages,\n memo = excluded.memo,\n signatures = excluded.signatures,\n signer_infos = excluded.signer_infos,\n fee = excluded.fee,\n gas_wanted = excluded.gas_wanted,\n gas_used = excluded.gas_used,\n raw_log = excluded.raw_log,\n logs = excluded.logs,\n events = excluded.events\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Int8", + "Int4", + "Bool", + "Jsonb", + "Text", + "TextArray", + "Jsonb", + "Jsonb", + "Int8", + "Int8", + "Text", + "Jsonb", + "Jsonb" + ] + }, + "nullable": [] + }, + "hash": "08f4e54ac24fccd54f4208797b3749e457f8cd4ba3d7d906a7ab3bf5b4e7dc9c" +} diff --git a/common/nyxd-scraper-psql/.sqlx/query-0d3709efacf763b06bf14803bb803b5ee5b27879b0026bb0480b3f2722318a75.json b/common/nyxd-scraper-psql/.sqlx/query-0d3709efacf763b06bf14803bb803b5ee5b27879b0026bb0480b3f2722318a75.json 
new file mode 100644 index 00000000000..36ba8bb96b3 --- /dev/null +++ b/common/nyxd-scraper-psql/.sqlx/query-0d3709efacf763b06bf14803bb803b5ee5b27879b0026bb0480b3f2722318a75.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO validator (consensus_address, consensus_pubkey)\n VALUES ($1, $2)\n ON CONFLICT DO NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Text" + ] + }, + "nullable": [] + }, + "hash": "0d3709efacf763b06bf14803bb803b5ee5b27879b0026bb0480b3f2722318a75" +} diff --git a/common/nyxd-scraper-psql/.sqlx/query-1c2fb0e9ffceca21ef8dbea19b116422b1f723d0a316314b50c43c8b29f8891d.json b/common/nyxd-scraper-psql/.sqlx/query-1c2fb0e9ffceca21ef8dbea19b116422b1f723d0a316314b50c43c8b29f8891d.json new file mode 100644 index 00000000000..2e10a89220b --- /dev/null +++ b/common/nyxd-scraper-psql/.sqlx/query-1c2fb0e9ffceca21ef8dbea19b116422b1f723d0a316314b50c43c8b29f8891d.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM pre_commit WHERE height < $1", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "1c2fb0e9ffceca21ef8dbea19b116422b1f723d0a316314b50c43c8b29f8891d" +} diff --git a/common/nyxd-scraper-psql/.sqlx/query-2561fb016951ea4cd29e43fb9a4a93e944b0d44ed1f7c1036f306e34372da11c.json b/common/nyxd-scraper-psql/.sqlx/query-2561fb016951ea4cd29e43fb9a4a93e944b0d44ed1f7c1036f306e34372da11c.json new file mode 100644 index 00000000000..0d1b70f8cce --- /dev/null +++ b/common/nyxd-scraper-psql/.sqlx/query-2561fb016951ea4cd29e43fb9a4a93e944b0d44ed1f7c1036f306e34372da11c.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT height\n FROM block\n ORDER BY height ASC\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "height", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false + ] + }, + "hash": 
"2561fb016951ea4cd29e43fb9a4a93e944b0d44ed1f7c1036f306e34372da11c" +} diff --git a/common/nyxd-scraper-psql/.sqlx/query-2679cdf11fa66c7920678cde860c57402119ec7c3aae731b0da831327301466f.json b/common/nyxd-scraper-psql/.sqlx/query-2679cdf11fa66c7920678cde860c57402119ec7c3aae731b0da831327301466f.json new file mode 100644 index 00000000000..b97ea34d16a --- /dev/null +++ b/common/nyxd-scraper-psql/.sqlx/query-2679cdf11fa66c7920678cde860c57402119ec7c3aae731b0da831327301466f.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE metadata SET last_processed_height = GREATEST(last_processed_height, $1)", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "2679cdf11fa66c7920678cde860c57402119ec7c3aae731b0da831327301466f" +} diff --git a/common/nyxd-scraper-psql/.sqlx/query-36ba5941aca6e7b604a10b8b0aba70635028f392fe794d6131827b083e1755e1.json b/common/nyxd-scraper-psql/.sqlx/query-36ba5941aca6e7b604a10b8b0aba70635028f392fe794d6131827b083e1755e1.json new file mode 100644 index 00000000000..dede45475e4 --- /dev/null +++ b/common/nyxd-scraper-psql/.sqlx/query-36ba5941aca6e7b604a10b8b0aba70635028f392fe794d6131827b083e1755e1.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE pruning SET last_pruned_height = $1", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "36ba5941aca6e7b604a10b8b0aba70635028f392fe794d6131827b083e1755e1" +} diff --git a/common/nyxd-scraper-psql/.sqlx/query-3bdf81a9db6075f6f77224c30553f419a849d4ec45af40b052a4cbf09b44f3ec.json b/common/nyxd-scraper-psql/.sqlx/query-3bdf81a9db6075f6f77224c30553f419a849d4ec45af40b052a4cbf09b44f3ec.json new file mode 100644 index 00000000000..e638bce9220 --- /dev/null +++ b/common/nyxd-scraper-psql/.sqlx/query-3bdf81a9db6075f6f77224c30553f419a849d4ec45af40b052a4cbf09b44f3ec.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT last_pruned_height FROM 
pruning\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "last_pruned_height", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false + ] + }, + "hash": "3bdf81a9db6075f6f77224c30553f419a849d4ec45af40b052a4cbf09b44f3ec" +} diff --git a/common/nyxd-scraper-psql/.sqlx/query-52c27143720ddfdfd0f5644b60f5b67fd9281ce1de0653efa53b9d9b93cf335d.json b/common/nyxd-scraper-psql/.sqlx/query-52c27143720ddfdfd0f5644b60f5b67fd9281ce1de0653efa53b9d9b93cf335d.json new file mode 100644 index 00000000000..58af4f89c42 --- /dev/null +++ b/common/nyxd-scraper-psql/.sqlx/query-52c27143720ddfdfd0f5644b60f5b67fd9281ce1de0653efa53b9d9b93cf335d.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM message WHERE height < $1", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "52c27143720ddfdfd0f5644b60f5b67fd9281ce1de0653efa53b9d9b93cf335d" +} diff --git a/common/nyxd-scraper-psql/.sqlx/query-62e14613f5ffe692346a79086857a22f0444fbc679db1c06b651fb8b5538b278.json b/common/nyxd-scraper-psql/.sqlx/query-62e14613f5ffe692346a79086857a22f0444fbc679db1c06b651fb8b5538b278.json new file mode 100644 index 00000000000..a7c102469df --- /dev/null +++ b/common/nyxd-scraper-psql/.sqlx/query-62e14613f5ffe692346a79086857a22f0444fbc679db1c06b651fb8b5538b278.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO pre_commit (validator_address, height, timestamp, voting_power, proposer_priority)\n VALUES ($1, $2, $3, $4, $5)\n ON CONFLICT (validator_address, timestamp) DO NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Int8", + "Timestamp", + "Int8", + "Int8" + ] + }, + "nullable": [] + }, + "hash": "62e14613f5ffe692346a79086857a22f0444fbc679db1c06b651fb8b5538b278" +} diff --git a/common/nyxd-scraper-psql/.sqlx/query-64a484fd46d8ec46797f944a4cced56b6e270ce186f0e49528865d1924343b78.json 
b/common/nyxd-scraper-psql/.sqlx/query-64a484fd46d8ec46797f944a4cced56b6e270ce186f0e49528865d1924343b78.json new file mode 100644 index 00000000000..08983f2af9f --- /dev/null +++ b/common/nyxd-scraper-psql/.sqlx/query-64a484fd46d8ec46797f944a4cced56b6e270ce186f0e49528865d1924343b78.json @@ -0,0 +1,19 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO block (height, hash, num_txs, total_gas, proposer_address, timestamp)\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT DO NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Text", + "Int4", + "Int8", + "Text", + "Timestamp" + ] + }, + "nullable": [] + }, + "hash": "64a484fd46d8ec46797f944a4cced56b6e270ce186f0e49528865d1924343b78" +} diff --git a/common/nyxd-scraper-psql/.sqlx/query-7e82426f5dbcadf1631ba1a806e19cc462d04222fb20ad76de2a40f3f4f8fe15.json b/common/nyxd-scraper-psql/.sqlx/query-7e82426f5dbcadf1631ba1a806e19cc462d04222fb20ad76de2a40f3f4f8fe15.json new file mode 100644 index 00000000000..3a60c573ed8 --- /dev/null +++ b/common/nyxd-scraper-psql/.sqlx/query-7e82426f5dbcadf1631ba1a806e19cc462d04222fb20ad76de2a40f3f4f8fe15.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT height\n FROM block\n WHERE timestamp < $1\n ORDER BY timestamp DESC\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "height", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Timestamp" + ] + }, + "nullable": [ + false + ] + }, + "hash": "7e82426f5dbcadf1631ba1a806e19cc462d04222fb20ad76de2a40f3f4f8fe15" +} diff --git a/common/nyxd-scraper-psql/.sqlx/query-9455331f9be5a3be28e2bd399a36b2e2d6a9ad4b225c4c883aafc4e9f0428008.json b/common/nyxd-scraper-psql/.sqlx/query-9455331f9be5a3be28e2bd399a36b2e2d6a9ad4b225c4c883aafc4e9f0428008.json new file mode 100644 index 00000000000..309aa81d9c7 --- /dev/null +++ b/common/nyxd-scraper-psql/.sqlx/query-9455331f9be5a3be28e2bd399a36b2e2d6a9ad4b225c4c883aafc4e9f0428008.json @@ -0,0 +1,22 @@ +{ + 
"db_name": "PostgreSQL", + "query": "\n SELECT height\n FROM block\n WHERE timestamp > $1\n ORDER BY timestamp\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "height", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Timestamp" + ] + }, + "nullable": [ + false + ] + }, + "hash": "9455331f9be5a3be28e2bd399a36b2e2d6a9ad4b225c4c883aafc4e9f0428008" +} diff --git a/common/nyxd-scraper-psql/.sqlx/query-bc7795e58ce71893c3f32a19db8e77b7bc0a1af315ffd42c3e68156d6e4ace70.json b/common/nyxd-scraper-psql/.sqlx/query-bc7795e58ce71893c3f32a19db8e77b7bc0a1af315ffd42c3e68156d6e4ace70.json new file mode 100644 index 00000000000..caca484b94d --- /dev/null +++ b/common/nyxd-scraper-psql/.sqlx/query-bc7795e58ce71893c3f32a19db8e77b7bc0a1af315ffd42c3e68156d6e4ace70.json @@ -0,0 +1,24 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT COUNT(*) as count FROM pre_commit\n WHERE\n validator_address = $1\n AND height >= $2\n AND height <= $3\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Text", + "Int8", + "Int8" + ] + }, + "nullable": [ + null + ] + }, + "hash": "bc7795e58ce71893c3f32a19db8e77b7bc0a1af315ffd42c3e68156d6e4ace70" +} diff --git a/common/nyxd-scraper-psql/.sqlx/query-be43d4873911deca784b7be0531ab7bd82ecd68041aa932a56c8ce09623251e4.json b/common/nyxd-scraper-psql/.sqlx/query-be43d4873911deca784b7be0531ab7bd82ecd68041aa932a56c8ce09623251e4.json new file mode 100644 index 00000000000..f1df706371b --- /dev/null +++ b/common/nyxd-scraper-psql/.sqlx/query-be43d4873911deca784b7be0531ab7bd82ecd68041aa932a56c8ce09623251e4.json @@ -0,0 +1,28 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT * FROM validator\n WHERE EXISTS (\n SELECT 1 FROM pre_commit\n WHERE height = $1\n AND pre_commit.validator_address = validator.consensus_address\n )\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "consensus_address", + "type_info": "Text" + 
}, + { + "ordinal": 1, + "name": "consensus_pubkey", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false + ] + }, + "hash": "be43d4873911deca784b7be0531ab7bd82ecd68041aa932a56c8ce09623251e4" +} diff --git a/common/nyxd-scraper-psql/.sqlx/query-c88d07fecc3f33deaa6e93db1469ce71582635df47f52dcf3fd1df4e7be6b96d.json b/common/nyxd-scraper-psql/.sqlx/query-c88d07fecc3f33deaa6e93db1469ce71582635df47f52dcf3fd1df4e7be6b96d.json new file mode 100644 index 00000000000..9bf3eaf97be --- /dev/null +++ b/common/nyxd-scraper-psql/.sqlx/query-c88d07fecc3f33deaa6e93db1469ce71582635df47f52dcf3fd1df4e7be6b96d.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT last_processed_height FROM metadata\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "last_processed_height", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false + ] + }, + "hash": "c88d07fecc3f33deaa6e93db1469ce71582635df47f52dcf3fd1df4e7be6b96d" +} diff --git a/common/nyxd-scraper-psql/.sqlx/query-cc0ae74082d7d8a89f2d3364676890bbf6150ab394c72783114340d4def5f9ef.json b/common/nyxd-scraper-psql/.sqlx/query-cc0ae74082d7d8a89f2d3364676890bbf6150ab394c72783114340d4def5f9ef.json new file mode 100644 index 00000000000..5c0da1448a3 --- /dev/null +++ b/common/nyxd-scraper-psql/.sqlx/query-cc0ae74082d7d8a89f2d3364676890bbf6150ab394c72783114340d4def5f9ef.json @@ -0,0 +1,19 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO message(transaction_hash, index, type, value, involved_accounts_addresses, height)\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (transaction_hash, index) DO UPDATE\n SET height = excluded.height,\n type = excluded.type,\n value = excluded.value,\n involved_accounts_addresses = excluded.involved_accounts_addresses\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Int8", + "Text", + "Jsonb", + "TextArray", + "Int8" + ] + }, + "nullable": [] + 
}, + "hash": "cc0ae74082d7d8a89f2d3364676890bbf6150ab394c72783114340d4def5f9ef" +} diff --git a/common/nyxd-scraper-psql/.sqlx/query-cdba9b267f143c8a8c6c3d6ed713cf00236490b86779559d84740ec18bcfa3a9.json b/common/nyxd-scraper-psql/.sqlx/query-cdba9b267f143c8a8c6c3d6ed713cf00236490b86779559d84740ec18bcfa3a9.json new file mode 100644 index 00000000000..2ae11a8fbb4 --- /dev/null +++ b/common/nyxd-scraper-psql/.sqlx/query-cdba9b267f143c8a8c6c3d6ed713cf00236490b86779559d84740ec18bcfa3a9.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM block WHERE height < $1", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "cdba9b267f143c8a8c6c3d6ed713cf00236490b86779559d84740ec18bcfa3a9" +} diff --git a/common/nyxd-scraper-psql/.sqlx/query-d89558c37c51e8e6b1e6a9d5a2b13d0598fd856aa019a0cbbae12d7cafb4672f.json b/common/nyxd-scraper-psql/.sqlx/query-d89558c37c51e8e6b1e6a9d5a2b13d0598fd856aa019a0cbbae12d7cafb4672f.json new file mode 100644 index 00000000000..1970629169b --- /dev/null +++ b/common/nyxd-scraper-psql/.sqlx/query-d89558c37c51e8e6b1e6a9d5a2b13d0598fd856aa019a0cbbae12d7cafb4672f.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM transaction WHERE height < $1", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "d89558c37c51e8e6b1e6a9d5a2b13d0598fd856aa019a0cbbae12d7cafb4672f" +} diff --git a/common/nyxd-scraper-psql/Cargo.toml b/common/nyxd-scraper-psql/Cargo.toml new file mode 100644 index 00000000000..0c2253cf9ba --- /dev/null +++ b/common/nyxd-scraper-psql/Cargo.toml @@ -0,0 +1,34 @@ +[package] +name = "nyxd-scraper-psql" +version = "0.1.0" +authors.workspace = true +repository.workspace = true +homepage.workspace = true +documentation.workspace = true +edition.workspace = true +license.workspace = true +rust-version.workspace = true +readme.workspace = true + +[dependencies] +async-trait = { 
workspace = true } +base64 = { workspace = true } +itertools = { workspace = true } +serde = { workspace = true, features = ["derive"] } +serde_json = { workspace = true } +sqlx = { workspace = true, features = ["runtime-tokio-rustls", "postgres", "macros", "migrate", "time"] } +thiserror = { workspace = true } +tokio = { workspace = true, features = ["full"] } +tracing.workspace = true + +nyxd-scraper-shared = { path = "../nyxd-scraper-shared" } + +# temp due to cosmrs redefinitions for serde +cosmrs = { workspace = true } + +[build-dependencies] +sqlx = { workspace = true, features = ["runtime-tokio-rustls", "postgres", "macros", "migrate"] } +tokio = { workspace = true, features = ["rt-multi-thread", "macros"] } + +[lints] +workspace = true diff --git a/common/nyxd-scraper-psql/Makefile b/common/nyxd-scraper-psql/Makefile new file mode 100644 index 00000000000..408dfb2fbcf --- /dev/null +++ b/common/nyxd-scraper-psql/Makefile @@ -0,0 +1,105 @@ +# Makefile for nyxd-scraper-psql database management + +# --- Configuration --- +TEST_DATABASE_URL := postgres://testuser:testpass@localhost:5433/nyxd_scraper_test + +# Docker compose service names +DB_SERVICE_NAME := postgres-test +DB_CONTAINER_NAME := nyxd_scraper_psql_test + +# Default target +.PHONY: default +default: help + +# --- Main Targets --- +.PHONY: prepare-pg +prepare-pg: test-db-up test-db-wait test-db-migrate test-db-prepare test-db-down ## Setup PostgreSQL and prepare SQLx offline cache + +.PHONY: test-db +test-db: test-db-up test-db-wait test-db-migrate test-db-run test-db-down ## Run tests with PostgreSQL database + +.PHONY: dev-db +dev-db: test-db-up test-db-wait test-db-migrate ## Start PostgreSQL for development (keeps running) + @echo "PostgreSQL is running on port 5433" + @echo "Connection string: $(TEST_DATABASE_URL)" + +.PHONY: dev-db-restart +dev-db-restart: clean-db dev-db + +# --- Docker Compose Targets --- +.PHONY: test-db-up +test-db-up: ## Start the PostgreSQL test database in the background 
+ @echo "Starting PostgreSQL test database..." + docker compose up -d $(DB_SERVICE_NAME) + +.PHONY: test-db-wait +test-db-wait: ## Wait for the PostgreSQL database to be healthy + @echo "Waiting for PostgreSQL database..." + @while ! docker inspect --format='{{.State.Health.Status}}' $(DB_CONTAINER_NAME) 2>/dev/null | grep -q 'healthy'; do \ + echo -n "."; \ + sleep 1; \ + done; \ + echo " Database is healthy!" + +.PHONY: test-db-down +test-db-down: ## Stop and remove the test database + @echo "Stopping PostgreSQL test database..." + docker compose down + +# --- SQLx Targets --- +.PHONY: test-db-migrate +test-db-migrate: ## Run database migrations against PostgreSQL + @echo "Running PostgreSQL migrations..." + DATABASE_URL="$(TEST_DATABASE_URL)" sqlx migrate run --source sql_migrations + +.PHONY: test-db-prepare +test-db-prepare: ## Run sqlx prepare for compile-time query verification + @echo "Running sqlx prepare for PostgreSQL..." + DATABASE_URL="$(TEST_DATABASE_URL)" cargo sqlx prepare + +# --- Build and Test Targets --- +.PHONY: test-db-run +test-db-run: ## Run tests with PostgreSQL feature + @echo "Running tests with PostgreSQL..." + DATABASE_URL="$(TEST_DATABASE_URL)" cargo test --features pg --no-default-features + +.PHONY: build-pg +build-pg: ## Build with PostgreSQL feature + @echo "Building with PostgreSQL feature..." + cargo build + +.PHONY: check-pg +check-pg: ## Check code with PostgreSQL feature + @echo "Checking code with PostgreSQL feature..." + cargo check + +.PHONY: clippy +clippy: clippy-pg + +.PHONY: clippy-pg +clippy-pg: ## Run clippy with PostgreSQL feature + @echo "Running clippy with PostgreSQL feature..." 
+ cargo clippy -- -D warnings + +# --- Cleanup Targets --- +.PHONY: clean +clean: ## Clean build artifacts and SQLx cache + cargo clean + rm -rf .sqlx + +.PHONY: clean-db +clean-db: test-db-down ## Stop database and clean volumes + docker volume rm -f nym-node-status-api_postgres_test_data 2>/dev/null || true + +# --- Utility Targets --- +.PHONY: sqlx-cli +sqlx-cli: ## Install sqlx-cli if not already installed + @command -v sqlx >/dev/null 2>&1 || cargo install sqlx-cli --features postgres + +.PHONY: psql +psql: ## Connect to the running PostgreSQL database with psql + @docker exec -it $(DB_CONTAINER_NAME) psql -U testuser -d nyxd_scraper_test + +.PHONY: help +help: ## Show help for Makefile targets + @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' \ No newline at end of file diff --git a/common/nyxd-scraper-psql/README.md b/common/nyxd-scraper-psql/README.md new file mode 100644 index 00000000000..d2946ec4161 --- /dev/null +++ b/common/nyxd-scraper-psql/README.md @@ -0,0 +1,80 @@ +## Quick Start with PostgreSQL + +### 1. Install Prerequisites + +```bash +# Install sqlx-cli if not already installed +make sqlx-cli +``` + +### 2. Prepare PostgreSQL for Development + +```bash +# This will: +# - Start PostgreSQL in Docker +# - Run migrations +# - Generate SQLx offline query cache +# - Stop the database +make prepare-pg +``` + +### 3. Build with PostgreSQL + +```bash +# Build with PostgreSQL feature +make build-pg + +# Or manually: +cargo build +``` + +### 4. 
Run with PostgreSQL + +```bash +# Start PostgreSQL for development (keeps running) +make dev-db + +# In another terminal, run the application +DATABASE_URL=postgres://testuser:testpass@localhost:5433/nym_node_status_api_test \ +cargo run +``` + +## Makefile Targets + +```bash +make help # Show all available targets +make prepare-pg # Setup PostgreSQL and prepare SQLx cache +make dev-db # Start PostgreSQL for development +make test-db # Run tests with PostgreSQL +make build-pg # Build with PostgreSQL +make psql # Connect to running PostgreSQL +make clean # Clean build artifacts +make clean-db # Stop database and clean volumes +make dev-db-restart # Stop database, clean volumes, rebuild test database and restart +``` + +## Environment Variables + +See `.env.example` for all configuration options. Key variable: + +```bash +# For PostgreSQL: +DATABASE_URL=postgres://testuser:testpass@localhost:5433/nym_node_status_api_test +``` + +## Troubleshooting + +### SQLx Offline Mode + +If you see "no cached data for this query" errors: + +1. Ensure PostgreSQL is running: `make dev-db` +2. Run: `make test-db-prepare` + +### Connection Refused + +If you see "Connection refused" errors: + +1. Check Docker is running: `docker ps` +2. Check PostgreSQL container: `docker ps | grep nym_node_status_api_postgres_test` +3. 
Restart database: `make test-db-down && make dev-db` \ No newline at end of file diff --git a/common/nyxd-scraper-psql/build.rs b/common/nyxd-scraper-psql/build.rs new file mode 100644 index 00000000000..2903970a552 --- /dev/null +++ b/common/nyxd-scraper-psql/build.rs @@ -0,0 +1,8 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +fn main() { + if let Ok(database_url) = std::env::var("DATABASE_URL") { + println!("cargo::rustc-env=DATABASE_URL={database_url}"); + } +} diff --git a/common/nyxd-scraper-psql/docker-compose.yml b/common/nyxd-scraper-psql/docker-compose.yml new file mode 100644 index 00000000000..3965792b45e --- /dev/null +++ b/common/nyxd-scraper-psql/docker-compose.yml @@ -0,0 +1,21 @@ +services: + postgres-test: + image: postgres:16-alpine + container_name: nyxd_scraper_psql_test + environment: + POSTGRES_DB: nyxd_scraper_test + POSTGRES_USER: testuser + POSTGRES_PASSWORD: testpass + ports: + - '5433:5432' # Map to 5433 to avoid conflicts with default PostgreSQL + healthcheck: + test: [ 'CMD-SHELL', 'pg_isready -U testuser -d nyxd_scraper_test' ] + interval: 5s + timeout: 5s + retries: 5 + # Optional: Add volume for persistent data during development + # volumes: + # - postgres_test_data:/var/lib/postgresql/data + +# volumes: +# postgres_test_data: \ No newline at end of file diff --git a/common/nyxd-scraper-psql/sql_migrations/0001_metadata.sql b/common/nyxd-scraper-psql/sql_migrations/0001_metadata.sql new file mode 100644 index 00000000000..43070210c4a --- /dev/null +++ b/common/nyxd-scraper-psql/sql_migrations/0001_metadata.sql @@ -0,0 +1,10 @@ +/* + * Copyright 2023 - Nym Technologies SA + * SPDX-License-Identifier: Apache-2.0 + */ + +CREATE TABLE METADATA +( + id INTEGER PRIMARY KEY CHECK (id = 0), + last_processed_height BIGINT NOT NULL +); \ No newline at end of file diff --git a/common/nyxd-scraper-psql/sql_migrations/0002_cosmos.sql b/common/nyxd-scraper-psql/sql_migrations/0002_cosmos.sql new file mode 
100644 index 00000000000..00440004ada --- /dev/null +++ b/common/nyxd-scraper-psql/sql_migrations/0002_cosmos.sql @@ -0,0 +1,127 @@ +CREATE TABLE validator +( + consensus_address TEXT NOT NULL PRIMARY KEY, /* Validator consensus address */ + consensus_pubkey TEXT NOT NULL UNIQUE /* Validator consensus public key */ +); + +CREATE TABLE pre_commit +( + validator_address TEXT NOT NULL REFERENCES validator (consensus_address), + height BIGINT NOT NULL, + timestamp TIMESTAMP WITHOUT TIME ZONE NOT NULL, + voting_power BIGINT NOT NULL, + proposer_priority BIGINT NOT NULL, + UNIQUE (validator_address, timestamp) +); +CREATE INDEX pre_commit_validator_address_index ON pre_commit (validator_address); +CREATE INDEX pre_commit_height_index ON pre_commit (height); + +CREATE TABLE block +( + height BIGINT UNIQUE PRIMARY KEY, + hash TEXT NOT NULL UNIQUE, + num_txs INTEGER DEFAULT 0, + total_gas BIGINT DEFAULT 0, + proposer_address TEXT REFERENCES validator (consensus_address), + timestamp TIMESTAMP WITHOUT TIME ZONE NOT NULL +); +CREATE INDEX block_height_index ON block (height); +CREATE INDEX block_hash_index ON block (hash); +CREATE INDEX block_proposer_address_index ON block (proposer_address); +ALTER TABLE block + SET ( + autovacuum_vacuum_scale_factor = 0, + autovacuum_analyze_scale_factor = 0, + autovacuum_vacuum_threshold = 10000, + autovacuum_analyze_threshold = 10000 + ); + +CREATE TABLE transaction +( + hash TEXT NOT NULL, + height BIGINT NOT NULL REFERENCES block (height), + "index" INTEGER NOT NULL, -- <<<=== not present in original bdjuno table, but it's quite useful + success BOOLEAN NOT NULL, + + /* Body */ + messages JSONB NOT NULL DEFAULT '[]'::JSONB, + memo TEXT, + signatures TEXT[] NOT NULL, + + /* AuthInfo */ + signer_infos JSONB NOT NULL DEFAULT '[]'::JSONB, + fee JSONB NOT NULL DEFAULT '{}'::JSONB, + + /* Tx response */ + gas_wanted BIGINT DEFAULT 0, + gas_used BIGINT DEFAULT 0, + raw_log TEXT, + logs JSONB, + events JSONB, + + CONSTRAINT unique_tx UNIQUE 
(hash) +); +CREATE INDEX transaction_hash_index ON transaction (hash); +CREATE INDEX transaction_height_index ON transaction (height); + +CREATE TYPE COIN AS +( + denom TEXT, + amount TEXT +); + +CREATE TABLE message +( + transaction_hash TEXT NOT NULL, + index BIGINT NOT NULL, + type TEXT NOT NULL, + value JSONB NOT NULL, + involved_accounts_addresses TEXT[] NOT NULL, + height BIGINT NOT NULL, + + funds COIN[] DEFAULT '{}', + + FOREIGN KEY (transaction_hash) REFERENCES transaction (hash), + CONSTRAINT unique_message_per_tx UNIQUE (transaction_hash, index) +); +CREATE INDEX message_transaction_hash_index ON message (transaction_hash); +CREATE INDEX message_type_index ON message (type); +CREATE INDEX message_involved_accounts_index ON message USING GIN (involved_accounts_addresses); + +/** + * This function is used to find all the utils that involve any of the given addresses and have + * type that is one of the specified types. + */ +CREATE FUNCTION messages_by_address( + addresses TEXT[], + types TEXT[], + "limit" BIGINT = 100, + "offset" BIGINT = 0) + RETURNS SETOF message AS +$$ +SELECT * +FROM message +WHERE (cardinality(types) = 0 OR type = ANY (types)) + AND addresses && involved_accounts_addresses +ORDER BY height DESC +LIMIT "limit" OFFSET "offset" +$$ LANGUAGE sql STABLE; + +CREATE FUNCTION messages_by_type( + types text[], + "limit" bigint DEFAULT 100, + "offset" bigint DEFAULT 0) + RETURNS SETOF message AS +$$ +SELECT * +FROM message +WHERE (cardinality(types) = 0 OR type = ANY (types)) +ORDER BY height DESC +LIMIT "limit" OFFSET "offset" +$$ LANGUAGE sql STABLE; + +CREATE TABLE pruning +( + last_pruned_height BIGINT NOT NULL +); + diff --git a/common/nyxd-scraper-psql/src/error.rs b/common/nyxd-scraper-psql/src/error.rs new file mode 100644 index 00000000000..9e94af39edd --- /dev/null +++ b/common/nyxd-scraper-psql/src/error.rs @@ -0,0 +1,43 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use 
nyxd_scraper_shared::helpers::MalformedDataError; +use nyxd_scraper_shared::storage::NyxdScraperStorageError; +use thiserror::Error; + +#[derive(Error, Debug)] +pub enum PostgresScraperError { + #[error("experienced internal database error: {0}")] + InternalDatabaseError(#[from] sqlx::error::Error), + + #[error("failed to perform startup SQL migration: {0}")] + StartupMigrationFailure(#[from] sqlx::migrate::MigrateError), + + #[error("failed to begin storage tx: {source}")] + StorageTxBeginFailure { + #[source] + source: sqlx::error::Error, + }, + + #[error("failed to commit storage tx: {source}")] + StorageTxCommitFailure { + #[source] + source: sqlx::error::Error, + }, + + #[error(transparent)] + MalformedData(#[from] MalformedDataError), + + // TOOD: add struct name + #[error("json serialisation failure: {source}")] + SerialisationFailure { + #[from] + source: serde_json::Error, + }, +} + +impl From for NyxdScraperStorageError { + fn from(err: PostgresScraperError) -> Self { + NyxdScraperStorageError::new(err) + } +} diff --git a/common/nyxd-scraper-psql/src/lib.rs b/common/nyxd-scraper-psql/src/lib.rs new file mode 100644 index 00000000000..85fcbb46efa --- /dev/null +++ b/common/nyxd-scraper-psql/src/lib.rs @@ -0,0 +1,21 @@ +// Copyright 2023 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::storage::block_storage::PostgresScraperStorage; +use nyxd_scraper_shared::NyxdScraper; + +pub use nyxd_scraper_shared::constants; +pub use nyxd_scraper_shared::error::ScraperError; +pub use nyxd_scraper_shared::{ + BlockModule, MsgModule, NyxdScraperTransaction, ParsedTransactionResponse, PruningOptions, + PruningStrategy, StartingBlockOpts, TxModule, +}; +pub use storage::models; + +pub mod error; +pub mod storage; + +pub type PostgresNyxdScraper = NyxdScraper; + +// TODO: for now just use exactly the same config +pub use nyxd_scraper_shared::Config; diff --git a/common/nyxd-scraper-psql/src/storage/block_storage.rs 
b/common/nyxd-scraper-psql/src/storage/block_storage.rs new file mode 100644 index 00000000000..49f2bab6220 --- /dev/null +++ b/common/nyxd-scraper-psql/src/storage/block_storage.rs @@ -0,0 +1,236 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::error::PostgresScraperError; +use crate::models::{CommitSignature, Validator}; +use crate::storage::manager::{ + StorageManager, prune_blocks, prune_messages, prune_pre_commits, prune_transactions, + update_last_pruned, +}; +use crate::storage::transaction::PostgresStorageTransaction; +use async_trait::async_trait; +use nyxd_scraper_shared::storage::helpers::log_db_operation_time; +use nyxd_scraper_shared::storage::{NyxdScraperStorage, NyxdScraperStorageError}; +use sqlx::types::time::{OffsetDateTime, PrimitiveDateTime}; +use tokio::time::Instant; +use tracing::{debug, error, info, instrument, warn}; + +#[derive(Clone)] +pub struct PostgresScraperStorage { + pub(crate) manager: StorageManager, +} + +impl PostgresScraperStorage { + #[instrument] + pub async fn init(connection_string: &str) -> Result { + debug!("initialising scraper database with '{connection_string}'",); + + let connection_pool = match sqlx::PgPool::connect(connection_string).await { + Ok(db) => db, + Err(err) => { + error!("Failed to connect to SQLx database: {err}"); + return Err(err.into()); + } + }; + + if let Err(err) = sqlx::migrate!("./sql_migrations") + .run(&connection_pool) + .await + { + warn!("Failed to initialize SQLx database: {err}"); + // return Err(err.into()); + } + + info!("Database migration finished!"); + + let manager = StorageManager { connection_pool }; + manager.set_initial_metadata().await?; + + let storage = PostgresScraperStorage { manager }; + + Ok(storage) + } + + #[instrument(skip(self))] + pub async fn prune_storage( + &self, + oldest_to_keep: u32, + current_height: u32, + ) -> Result<(), PostgresScraperError> { + let start = Instant::now(); + + let mut tx = 
self.begin_processing_tx().await?; + + prune_messages(oldest_to_keep.into(), &mut **tx).await?; + prune_transactions(oldest_to_keep.into(), &mut **tx).await?; + prune_pre_commits(oldest_to_keep.into(), &mut **tx).await?; + prune_blocks(oldest_to_keep.into(), &mut **tx).await?; + update_last_pruned(current_height.into(), &mut **tx).await?; + + let commit_start = Instant::now(); + tx.inner + .commit() + .await + .map_err(|source| PostgresScraperError::StorageTxCommitFailure { source })?; + log_db_operation_time("committing pruning tx", commit_start); + + log_db_operation_time("pruning storage", start); + Ok(()) + } + + #[instrument(skip_all)] + pub async fn begin_processing_tx( + &self, + ) -> Result { + debug!("starting storage tx"); + self.manager + .connection_pool + .begin() + .await + .map(|inner| PostgresStorageTransaction { inner }) + .map_err(|source| PostgresScraperError::StorageTxBeginFailure { source }) + } + + pub async fn lowest_block_height(&self) -> Result, PostgresScraperError> { + Ok(self.manager.get_lowest_block().await?) + } + + pub async fn get_first_block_height_after( + &self, + time: OffsetDateTime, + ) -> Result, PostgresScraperError> { + let time = PrimitiveDateTime::new(time.date(), time.time()); + + Ok(self.manager.get_first_block_height_after(time).await?) + } + + pub async fn get_last_block_height_before( + &self, + time: OffsetDateTime, + ) -> Result, PostgresScraperError> { + let time = PrimitiveDateTime::new(time.date(), time.time()); + + Ok(self.manager.get_last_block_height_before(time).await?) + } + + pub async fn get_blocks_between( + &self, + start_time: OffsetDateTime, + end_time: OffsetDateTime, + ) -> Result { + let Some(block_start) = self.get_first_block_height_after(start_time).await? else { + return Ok(0); + }; + let Some(block_end) = self.get_last_block_height_before(end_time).await? 
else { + return Ok(0); + }; + + Ok(block_end - block_start) + } + + pub async fn get_signed_between( + &self, + consensus_address: &str, + start_height: i64, + end_height: i64, + ) -> Result { + Ok(self + .manager + .get_signed_between(consensus_address, start_height, end_height) + .await?) + } + + pub async fn get_signed_between_times( + &self, + consensus_address: &str, + start_time: OffsetDateTime, + end_time: OffsetDateTime, + ) -> Result { + let Some(block_start) = self.get_first_block_height_after(start_time).await? else { + return Ok(0); + }; + let Some(block_end) = self.get_last_block_height_before(end_time).await? else { + return Ok(0); + }; + + self.get_signed_between(consensus_address, block_start, block_end) + .await + } + + pub async fn get_precommit( + &self, + consensus_address: &str, + height: i64, + ) -> Result, PostgresScraperError> { + Ok(self + .manager + .get_precommit(consensus_address, height) + .await?) + } + + pub async fn get_block_signers( + &self, + height: i64, + ) -> Result, PostgresScraperError> { + Ok(self.manager.get_block_validators(height).await?) + } + + pub async fn get_all_known_validators(&self) -> Result, PostgresScraperError> { + Ok(self.manager.get_validators().await?) + } + + pub async fn get_last_processed_height(&self) -> Result { + Ok(self.manager.get_last_processed_height().await?) + } + + pub async fn get_pruned_height(&self) -> Result { + Ok(self.manager.get_pruned_height().await?) 
+ } +} + +#[async_trait] +impl NyxdScraperStorage for PostgresScraperStorage { + type StorageTransaction = PostgresStorageTransaction; + + async fn initialise(storage: &str) -> Result { + PostgresScraperStorage::init(storage) + .await + .map_err(NyxdScraperStorageError::from) + } + + async fn begin_processing_tx( + &self, + ) -> Result { + self.begin_processing_tx() + .await + .map_err(NyxdScraperStorageError::from) + } + + async fn get_last_processed_height(&self) -> Result { + self.get_last_processed_height() + .await + .map_err(NyxdScraperStorageError::from) + } + + async fn get_pruned_height(&self) -> Result { + self.get_pruned_height() + .await + .map_err(NyxdScraperStorageError::from) + } + + async fn lowest_block_height(&self) -> Result, NyxdScraperStorageError> { + self.lowest_block_height() + .await + .map_err(NyxdScraperStorageError::from) + } + + async fn prune_storage( + &self, + oldest_to_keep: u32, + current_height: u32, + ) -> Result<(), NyxdScraperStorageError> { + self.prune_storage(oldest_to_keep, current_height) + .await + .map_err(NyxdScraperStorageError::from) + } +} diff --git a/common/nyxd-scraper-psql/src/storage/helpers.rs b/common/nyxd-scraper-psql/src/storage/helpers.rs new file mode 100644 index 00000000000..8d56ee72f78 --- /dev/null +++ b/common/nyxd-scraper-psql/src/storage/helpers.rs @@ -0,0 +1,25 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use cosmrs::AccountId; +use itertools::Itertools; +use nyxd_scraper_shared::ParsedTransactionResponse; +use std::str::FromStr; + +// replicate behaviour of `CosmosMessageAddressesParser` from juno +pub(crate) fn parse_addresses_from_events(tx: &ParsedTransactionResponse) -> Vec { + let mut addresses: Vec = Vec::new(); + for event in &tx.tx_result.events { + for attribute in &event.attributes { + let Ok(value) = attribute.value_str() else { + continue; + }; + + // Try parsing the address as an account address + if let Ok(address) = 
AccountId::from_str(value) { + addresses.push(address.to_string()); + } + } + } + addresses.into_iter().unique().collect() +} diff --git a/common/nyxd-scraper-psql/src/storage/manager.rs b/common/nyxd-scraper-psql/src/storage/manager.rs new file mode 100644 index 00000000000..b0bf11f2f5a --- /dev/null +++ b/common/nyxd-scraper-psql/src/storage/manager.rs @@ -0,0 +1,543 @@ +// Copyright 2023 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::storage::models::{CommitSignature, Validator}; +use nyxd_scraper_shared::storage::helpers::log_db_operation_time; +use sqlx::types::JsonValue; +use sqlx::types::time::PrimitiveDateTime; +use sqlx::{Executor, Postgres}; +use tokio::time::Instant; +use tracing::{instrument, trace}; + +#[derive(Clone)] +pub(crate) struct StorageManager { + pub(crate) connection_pool: sqlx::Pool, +} + +impl StorageManager { + pub(crate) async fn set_initial_metadata(&self) -> Result<(), sqlx::Error> { + if sqlx::query("SELECT * from metadata") + .fetch_optional(&self.connection_pool) + .await? 
+ .is_none() + { + sqlx::query("INSERT INTO metadata (id, last_processed_height) VALUES (0, 0)") + .execute(&self.connection_pool) + .await?; + } + Ok(()) + } + + pub(crate) async fn get_lowest_block(&self) -> Result, sqlx::Error> { + trace!("get_lowest_block"); + let start = Instant::now(); + + let maybe_record = sqlx::query!( + r#" + SELECT height + FROM block + ORDER BY height ASC + LIMIT 1 + "#, + ) + .fetch_optional(&self.connection_pool) + .await?; + log_db_operation_time("get_lowest_block", start); + + Ok(maybe_record.map(|x| x.height)) + } + + pub(crate) async fn get_first_block_height_after( + &self, + time: PrimitiveDateTime, + ) -> Result, sqlx::Error> { + trace!("get_first_block_height_after"); + let start = Instant::now(); + + let maybe_record = sqlx::query!( + r#" + SELECT height + FROM block + WHERE timestamp > $1 + ORDER BY timestamp + LIMIT 1 + "#, + time + ) + .fetch_optional(&self.connection_pool) + .await?; + log_db_operation_time("get_first_block_height_after", start); + + Ok(maybe_record.map(|x| x.height)) + } + + pub(crate) async fn get_last_block_height_before( + &self, + time: PrimitiveDateTime, + ) -> Result, sqlx::Error> { + trace!("get_last_block_height_before"); + let start = Instant::now(); + + let maybe_record = sqlx::query!( + r#" + SELECT height + FROM block + WHERE timestamp < $1 + ORDER BY timestamp DESC + LIMIT 1 + "#, + time + ) + .fetch_optional(&self.connection_pool) + .await?; + log_db_operation_time("get_last_block_height_before", start); + + Ok(maybe_record.map(|x| x.height)) + } + + pub(crate) async fn get_signed_between( + &self, + consensus_address: &str, + start_height: i64, + end_height: i64, + ) -> Result { + trace!("get_signed_between"); + let start = Instant::now(); + + let count = sqlx::query!( + r#" + SELECT COUNT(*) as count FROM pre_commit + WHERE + validator_address = $1 + AND height >= $2 + AND height <= $3 + "#, + consensus_address, + start_height, + end_height + ) + .fetch_one(&self.connection_pool) + 
.await? + .count; + log_db_operation_time("get_signed_between", start); + + Ok(count.unwrap_or(0)) + } + + pub(crate) async fn get_precommit( + &self, + consensus_address: &str, + height: i64, + ) -> Result, sqlx::Error> { + trace!("get_precommit"); + let start = Instant::now(); + + let res = sqlx::query_as( + r#" + SELECT * FROM pre_commit + WHERE validator_address = $1 + AND height = $2 + "#, + ) + .bind(consensus_address) + .bind(height) + .fetch_optional(&self.connection_pool) + .await?; + log_db_operation_time("get_precommit", start); + + Ok(res) + } + + pub(crate) async fn get_block_validators( + &self, + height: i64, + ) -> Result, sqlx::Error> { + trace!("get_block_validators"); + let start = Instant::now(); + + let res = sqlx::query_as!( + Validator, + r#" + SELECT * FROM validator + WHERE EXISTS ( + SELECT 1 FROM pre_commit + WHERE height = $1 + AND pre_commit.validator_address = validator.consensus_address + ) + "#, + height + ) + .fetch_all(&self.connection_pool) + .await?; + log_db_operation_time("get_block_validators", start); + + Ok(res) + } + + pub(crate) async fn get_validators(&self) -> Result, sqlx::Error> { + trace!("get_validators"); + let start = Instant::now(); + + let res = sqlx::query_as("SELECT * FROM validator") + .fetch_all(&self.connection_pool) + .await?; + log_db_operation_time("get_validators", start); + + Ok(res) + } + + pub(crate) async fn get_last_processed_height(&self) -> Result { + trace!("get_last_processed_height"); + let start = Instant::now(); + + let maybe_record = sqlx::query!( + r#" + SELECT last_processed_height FROM metadata + "# + ) + .fetch_optional(&self.connection_pool) + .await?; + log_db_operation_time("get_last_processed_height", start); + + if let Some(row) = maybe_record { + #[allow(clippy::useless_conversion)] + Ok(row.last_processed_height.into()) + } else { + Ok(-1) + } + } + + pub(crate) async fn get_pruned_height(&self) -> Result { + trace!("get_pruned_height"); + let start = Instant::now(); + + let 
maybe_record = sqlx::query!( + r#" + SELECT last_pruned_height FROM pruning + "# + ) + .fetch_optional(&self.connection_pool) + .await?; + + log_db_operation_time("get_pruned_height", start); + + if let Some(row) = maybe_record { + Ok(row.last_pruned_height) + } else { + Ok(-1) + } + } +} + +// make those generic over executor so that they could be performed over connection pool and a tx + +#[instrument(skip(executor))] +pub(crate) async fn insert_validator<'a, E>( + consensus_address: String, + consensus_pubkey: String, + executor: E, +) -> Result<(), sqlx::Error> +where + E: Executor<'a, Database = Postgres>, +{ + trace!("insert_validator"); + let start = Instant::now(); + + sqlx::query!( + r#" + INSERT INTO validator (consensus_address, consensus_pubkey) + VALUES ($1, $2) + ON CONFLICT DO NOTHING + "#, + consensus_address, + consensus_pubkey + ) + .execute(executor) + .await?; + log_db_operation_time("insert_validator", start); + + Ok(()) +} + +#[instrument(skip(executor))] +pub(crate) async fn insert_block<'a, E>( + height: i64, + hash: String, + num_txs: i32, + total_gas: i64, + proposer_address: String, + timestamp: PrimitiveDateTime, + executor: E, +) -> Result<(), sqlx::Error> +where + E: Executor<'a, Database = Postgres>, +{ + trace!("insert_block"); + let start = Instant::now(); + + sqlx::query!( + r#" + INSERT INTO block (height, hash, num_txs, total_gas, proposer_address, timestamp) + VALUES ($1, $2, $3, $4, $5, $6) + ON CONFLICT DO NOTHING + "#, + height, + hash, + num_txs, + total_gas, + proposer_address, + timestamp + ) + .execute(executor) + .await?; + log_db_operation_time("insert_block", start); + + Ok(()) +} + +#[instrument(skip(executor))] +pub(crate) async fn insert_precommit<'a, E>( + validator_address: String, + height: i64, + timestamp: PrimitiveDateTime, + voting_power: i64, + proposer_priority: i64, + executor: E, +) -> Result<(), sqlx::Error> +where + E: Executor<'a, Database = Postgres>, +{ + trace!("insert_precommit"); + let start = 
Instant::now(); + + sqlx::query!( + r#" + INSERT INTO pre_commit (validator_address, height, timestamp, voting_power, proposer_priority) + VALUES ($1, $2, $3, $4, $5) + ON CONFLICT (validator_address, timestamp) DO NOTHING + "#, + validator_address, + height, + timestamp, + voting_power, + proposer_priority + ) + .execute(executor) + .await?; + log_db_operation_time("insert_precommit", start); + + Ok(()) +} + +#[instrument(skip(executor))] +#[allow(clippy::too_many_arguments)] +pub(crate) async fn insert_transaction<'a, E>( + hash: String, + height: i64, + index: i32, + success: bool, + messages: JsonValue, + memo: String, + signatures: Vec, + signer_infos: JsonValue, + fee: JsonValue, + gas_wanted: i64, + gas_used: i64, + raw_log: String, + logs: JsonValue, + events: JsonValue, + executor: E, +) -> Result<(), sqlx::Error> +where + E: Executor<'a, Database = Postgres>, +{ + trace!("insert_transaction"); + let start = Instant::now(); + + sqlx::query!( + r#" + INSERT INTO transaction + (hash, height, index, success, messages, memo, signatures, signer_infos, fee, gas_wanted, gas_used, raw_log, logs, events) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14) + ON CONFLICT (hash) DO UPDATE + SET height = excluded.height, + index = excluded.index, + success = excluded.success, + messages = excluded.messages, + memo = excluded.memo, + signatures = excluded.signatures, + signer_infos = excluded.signer_infos, + fee = excluded.fee, + gas_wanted = excluded.gas_wanted, + gas_used = excluded.gas_used, + raw_log = excluded.raw_log, + logs = excluded.logs, + events = excluded.events + "#, + hash, + height, + index, + success, + messages, + memo, + &signatures, + signer_infos, + fee, + gas_wanted, + gas_used, + raw_log, + logs, + events, + ) + .execute(executor) + .await?; + log_db_operation_time("insert_transaction", start); + + Ok(()) +} + +#[allow(clippy::too_many_arguments)] +#[instrument(skip(executor))] +pub(crate) async fn insert_message<'a, E>( + 
transaction_hash: String, + index: i64, + typ: String, + value: JsonValue, + involved_account_addresses: Vec, + height: i64, + executor: E, +) -> Result<(), sqlx::Error> +where + E: Executor<'a, Database = Postgres>, +{ + trace!("insert_message"); + let start = Instant::now(); + + sqlx::query!( + r#" + INSERT INTO message(transaction_hash, index, type, value, involved_accounts_addresses, height) + VALUES ($1, $2, $3, $4, $5, $6) + ON CONFLICT (transaction_hash, index) DO UPDATE + SET height = excluded.height, + type = excluded.type, + value = excluded.value, + involved_accounts_addresses = excluded.involved_accounts_addresses + "#, + transaction_hash, + index, + typ, + value, + &involved_account_addresses, + height, + ) + .execute(executor) + .await?; + log_db_operation_time("insert_message", start); + + Ok(()) +} + +#[instrument(skip(executor))] +pub(crate) async fn update_last_processed<'a, E>( + height: i64, + executor: E, +) -> Result<(), sqlx::Error> +where + E: Executor<'a, Database = Postgres>, +{ + trace!("update_last_processed"); + let start = Instant::now(); + + sqlx::query!( + "UPDATE metadata SET last_processed_height = GREATEST(last_processed_height, $1)", + height as i32 + ) + .execute(executor) + .await?; + log_db_operation_time("update_last_processed", start); + + Ok(()) +} + +#[instrument(skip(executor))] +pub(crate) async fn update_last_pruned<'a, E>(height: i64, executor: E) -> Result<(), sqlx::Error> +where + E: Executor<'a, Database = Postgres>, +{ + trace!("update_last_pruned"); + let start = Instant::now(); + + sqlx::query!("UPDATE pruning SET last_pruned_height = $1", height) + .execute(executor) + .await?; + log_db_operation_time("update_last_pruned", start); + + Ok(()) +} + +pub(crate) async fn prune_blocks<'a, E>(oldest_to_keep: i64, executor: E) -> Result<(), sqlx::Error> +where + E: Executor<'a, Database = Postgres>, +{ + trace!("prune_blocks"); + let start = Instant::now(); + + sqlx::query!("DELETE FROM block WHERE height < $1", 
oldest_to_keep) + .execute(executor) + .await?; + log_db_operation_time("prune_blocks", start); + + Ok(()) +} + +pub(crate) async fn prune_pre_commits<'a, E>( + oldest_to_keep: i64, + executor: E, +) -> Result<(), sqlx::Error> +where + E: Executor<'a, Database = Postgres>, +{ + trace!("prune_pre_commits"); + let start = Instant::now(); + + sqlx::query!("DELETE FROM pre_commit WHERE height < $1", oldest_to_keep) + .execute(executor) + .await?; + log_db_operation_time("prune_pre_commits", start); + + Ok(()) +} + +pub(crate) async fn prune_transactions<'a, E>( + oldest_to_keep: i64, + executor: E, +) -> Result<(), sqlx::Error> +where + E: Executor<'a, Database = Postgres>, +{ + trace!("prune_transactions"); + let start = Instant::now(); + sqlx::query!("DELETE FROM transaction WHERE height < $1", oldest_to_keep) + .execute(executor) + .await?; + log_db_operation_time("prune_transactions", start); + + Ok(()) +} + +pub(crate) async fn prune_messages<'a, E>( + oldest_to_keep: i64, + executor: E, +) -> Result<(), sqlx::Error> +where + E: Executor<'a, Database = Postgres>, +{ + trace!("prune_messages"); + let start = Instant::now(); + sqlx::query!("DELETE FROM message WHERE height < $1", oldest_to_keep) + .execute(executor) + .await?; + log_db_operation_time("prune_messages", start); + + Ok(()) +} diff --git a/common/nyxd-scraper-psql/src/storage/mod.rs b/common/nyxd-scraper-psql/src/storage/mod.rs new file mode 100644 index 00000000000..091f5c0f2e4 --- /dev/null +++ b/common/nyxd-scraper-psql/src/storage/mod.rs @@ -0,0 +1,8 @@ +// Copyright 2023 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +pub mod block_storage; +mod helpers; +mod manager; +pub mod models; +pub mod transaction; diff --git a/common/nyxd-scraper-psql/src/storage/models.rs b/common/nyxd-scraper-psql/src/storage/models.rs new file mode 100644 index 00000000000..59b4f529513 --- /dev/null +++ b/common/nyxd-scraper-psql/src/storage/models.rs @@ -0,0 +1,47 @@ +// Copyright 2023 - Nym 
Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use serde::{Deserialize, Serialize}; +use sqlx::FromRow; +use sqlx::types::time::OffsetDateTime; + +#[derive(Debug, Clone, Eq, PartialEq, Hash, FromRow)] +pub struct Validator { + pub consensus_address: String, + pub consensus_pubkey: String, +} + +#[derive(Debug, Clone, FromRow)] +pub struct Block { + pub height: i64, + pub hash: String, + pub num_txs: u32, + pub total_gas: i64, + pub proposer_address: String, + pub timestamp: OffsetDateTime, +} + +#[derive(Debug, Clone, FromRow)] +pub struct CommitSignature { + pub height: i64, + pub validator_address: String, + pub voting_power: i64, + pub proposer_priority: i64, + pub timestamp: OffsetDateTime, +} + +#[derive(Debug, Serialize, Deserialize, sqlx::Type)] +#[sqlx(type_name = "coin")] +pub struct DbCoin { + pub amount: String, + pub denom: String, +} + +impl From for DbCoin { + fn from(coin: cosmrs::proto::cosmos::base::v1beta1::Coin) -> Self { + Self { + amount: coin.amount, + denom: coin.denom, + } + } +} diff --git a/common/nyxd-scraper-psql/src/storage/transaction.rs b/common/nyxd-scraper-psql/src/storage/transaction.rs new file mode 100644 index 00000000000..5aad23dfe19 --- /dev/null +++ b/common/nyxd-scraper-psql/src/storage/transaction.rs @@ -0,0 +1,299 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::error::PostgresScraperError; +use crate::storage::helpers::parse_addresses_from_events; +use crate::storage::manager::{ + insert_block, insert_message, insert_precommit, insert_transaction, insert_validator, + update_last_processed, +}; +use async_trait::async_trait; +use base64::Engine as _; +use base64::engine::general_purpose; +use cosmrs::proto; +use nyxd_scraper_shared::ParsedTransactionResponse; +use nyxd_scraper_shared::helpers::{ + validator_consensus_address, validator_info, validator_pubkey_to_bech32, +}; +use nyxd_scraper_shared::storage::validators::Response; +use 
nyxd_scraper_shared::storage::{ + Block, Commit, CommitSig, NyxdScraperStorageError, NyxdScraperTransaction, validators, +}; +use serde_json::json; +use sqlx::types::time::{OffsetDateTime, PrimitiveDateTime}; +use sqlx::{Postgres, Transaction}; +use std::ops::{Deref, DerefMut}; +use tracing::{debug, error, trace, warn}; + +pub struct PostgresStorageTransaction { + pub(super) inner: Transaction<'static, Postgres>, +} + +impl Deref for PostgresStorageTransaction { + type Target = Transaction<'static, Postgres>; + fn deref(&self) -> &Self::Target { + &self.inner + } +} + +impl DerefMut for PostgresStorageTransaction { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.inner + } +} + +impl PostgresStorageTransaction { + async fn persist_validators( + &mut self, + validators: &validators::Response, + ) -> Result<(), PostgresScraperError> { + debug!("persisting {} validators", validators.total); + for validator in &validators.validators { + let consensus_address = validator_consensus_address(validator.address)?; + let consensus_pubkey = validator_pubkey_to_bech32(validator.pub_key)?; + + insert_validator( + consensus_address.to_string(), + consensus_pubkey.to_string(), + self.inner.as_mut(), + ) + .await?; + } + + Ok(()) + } + + async fn persist_block_data( + &mut self, + block: &Block, + total_gas: i64, + ) -> Result<(), PostgresScraperError> { + let proposer_address = + validator_consensus_address(block.header.proposer_address)?.to_string(); + + let offset_datetime: OffsetDateTime = block.header.time.into(); + let time = PrimitiveDateTime::new(offset_datetime.date(), offset_datetime.time()); + + insert_block( + block.header.height.into(), + block.header.hash().to_string(), + block.data.len() as i32, + total_gas, + proposer_address, + time, + self.inner.as_mut(), + ) + .await?; + Ok(()) + } + + async fn persist_commits( + &mut self, + commits: &Commit, + validators: &validators::Response, + ) -> Result<(), PostgresScraperError> { + debug!("persisting up to {} 
commits", commits.signatures.len()); + let height: i64 = commits.height.into(); + + for commit_sig in &commits.signatures { + let (validator_id, timestamp, signature) = match commit_sig { + CommitSig::BlockIdFlagAbsent => { + trace!("absent signature"); + continue; + } + CommitSig::BlockIdFlagCommit { + validator_address, + timestamp, + signature, + } => (validator_address, timestamp, signature), + CommitSig::BlockIdFlagNil { + validator_address, + timestamp, + signature, + } => (validator_address, timestamp, signature), + }; + + let validator = validator_info(*validator_id, validators)?; + let validator_address = validator_consensus_address(*validator_id)?; + + if signature.is_none() { + warn!("empty signature for {validator_address} at height {height}"); + continue; + } + + let offset_datetime: OffsetDateTime = (*timestamp).into(); + let time = PrimitiveDateTime::new(offset_datetime.date(), offset_datetime.time()); + + insert_precommit( + validator_address.to_string(), + height, + time, + validator.power.into(), + validator.proposer_priority.value(), + self.inner.as_mut(), + ) + .await?; + } + + Ok(()) + } + + async fn persist_txs( + &mut self, + txs: &[ParsedTransactionResponse], + ) -> Result<(), PostgresScraperError> { + debug!("persisting {} txs", txs.len()); + + for chain_tx in txs { + // bdjuno style, base64 encode them + let signatures = chain_tx + .tx + .signatures + .iter() + .map(|sig| general_purpose::STANDARD.encode(sig)) + .collect(); + + let messages = chain_tx + .parsed_messages + .values() + .cloned() + .collect::>(); + + let signer_infos = chain_tx + .tx + .auth_info + .signer_infos + .iter() + .map(|info| proto::cosmos::tx::v1beta1::SignerInfo::from(info.clone())) + .collect::>(); + + let hash = chain_tx.hash.to_string(); + let height = chain_tx.height.into(); + let index = chain_tx.index as i32; + + let log = serde_json::to_value(chain_tx.tx_result.log.clone()) + .inspect_err(|e| error!(hash, height, index, "Failed to parse logs: {e}")) + 
.unwrap_or_default(); + let events = &chain_tx.tx_result.events; + + insert_transaction( + hash, + height, + index, + chain_tx.tx_result.code.is_ok(), + serde_json::Value::Array(messages), + chain_tx.tx.body.memo.clone(), + signatures, + serde_json::to_value(signer_infos)?, + serde_json::to_value(&chain_tx.tx.auth_info.fee)?, + chain_tx.tx_result.gas_wanted, + chain_tx.tx_result.gas_used, + chain_tx.tx_result.log.clone(), + json!(log), + json!(events), + self.inner.as_mut(), + ) + .await?; + } + + Ok(()) + } + + async fn persist_messages( + &mut self, + txs: &[ParsedTransactionResponse], + ) -> Result<(), PostgresScraperError> { + debug!("persisting messages"); + + for chain_tx in txs { + let involved_addresses = parse_addresses_from_events(chain_tx); + for (index, msg) in chain_tx.tx.body.messages.iter().enumerate() { + let parsed_message = chain_tx.parsed_messages.get(&index); + let value = serde_json::to_value(parsed_message)?; + + insert_message( + chain_tx.hash.to_string(), + index as i64, + msg.type_url.clone(), + value, + involved_addresses.clone(), + chain_tx.height.into(), + self.inner.as_mut(), + ) + .await? 
+ } + } + + Ok(()) + } + + async fn update_last_processed(&mut self, height: i64) -> Result<(), PostgresScraperError> { + debug!("update_last_processed"); + update_last_processed(height, self.inner.as_mut()).await?; + Ok(()) + } +} + +#[async_trait] +impl NyxdScraperTransaction for PostgresStorageTransaction { + async fn commit(self) -> Result<(), NyxdScraperStorageError> { + self.inner + .commit() + .await + .map_err(PostgresScraperError::from) + .map_err(NyxdScraperStorageError::from) + } + + async fn persist_validators( + &mut self, + validators: &Response, + ) -> Result<(), NyxdScraperStorageError> { + self.persist_validators(validators) + .await + .map_err(NyxdScraperStorageError::from) + } + + async fn persist_block_data( + &mut self, + block: &Block, + total_gas: i64, + ) -> Result<(), NyxdScraperStorageError> { + self.persist_block_data(block, total_gas) + .await + .map_err(NyxdScraperStorageError::from) + } + + async fn persist_commits( + &mut self, + commits: &Commit, + validators: &Response, + ) -> Result<(), NyxdScraperStorageError> { + self.persist_commits(commits, validators) + .await + .map_err(NyxdScraperStorageError::from) + } + + async fn persist_txs( + &mut self, + txs: &[ParsedTransactionResponse], + ) -> Result<(), NyxdScraperStorageError> { + self.persist_txs(txs) + .await + .map_err(NyxdScraperStorageError::from) + } + + async fn persist_messages( + &mut self, + txs: &[ParsedTransactionResponse], + ) -> Result<(), NyxdScraperStorageError> { + self.persist_messages(txs) + .await + .map_err(NyxdScraperStorageError::from) + } + + async fn update_last_processed(&mut self, height: i64) -> Result<(), NyxdScraperStorageError> { + self.update_last_processed(height) + .await + .map_err(NyxdScraperStorageError::from) + } +} diff --git a/common/nyxd-scraper/Cargo.toml b/common/nyxd-scraper-shared/Cargo.toml similarity index 61% rename from common/nyxd-scraper/Cargo.toml rename to common/nyxd-scraper-shared/Cargo.toml index 025e906d56e..3f7a343ec23 
100644 --- a/common/nyxd-scraper/Cargo.toml +++ b/common/nyxd-scraper-shared/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "nyxd-scraper" +name = "nyxd-scraper-shared" version = "0.1.0" authors.workspace = true repository.workspace = true @@ -8,19 +8,22 @@ documentation.workspace = true edition.workspace = true license.workspace = true rust-version.workspace = true - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +readme.workspace = true [dependencies] async-trait.workspace = true +base64.workspace = true const_format = { workspace = true } cosmrs.workspace = true +cosmos-sdk-proto = { workspace = true, features = ["serde", "cosmwasm"] } # we need to explicitly include serde feature eyre = { workspace = true } futures.workspace = true humantime = { workspace = true } +ibc-proto = { workspace = true, features = ["serde"] } +prost = { workspace = true } sha2 = { workspace = true } serde = { workspace = true, features = ["derive"] } -sqlx = { workspace = true, features = ["runtime-tokio-rustls", "sqlite", "macros", "migrate", "time"] } +serde_json = { workspace = true } tendermint.workspace = true tendermint-rpc = { workspace = true, features = ["websocket-client", "http-client"] } thiserror.workspace = true @@ -32,11 +35,5 @@ tracing.workspace = true url.workspace = true -# TEMP -#nym-bin-common = { path = "../bin-common", features = ["basic_tracing"]} - - -[build-dependencies] -anyhow = { workspace = true } -sqlx = { workspace = true, features = ["runtime-tokio-rustls", "sqlite", "macros", "migrate"] } -tokio = { workspace = true, features = ["rt-multi-thread", "macros"] } +[lints] +workspace = true diff --git a/common/nyxd-scraper/src/block_processor/helpers.rs b/common/nyxd-scraper-shared/src/block_processor/helpers.rs similarity index 100% rename from common/nyxd-scraper/src/block_processor/helpers.rs rename to common/nyxd-scraper-shared/src/block_processor/helpers.rs diff --git 
a/common/nyxd-scraper/src/block_processor/mod.rs b/common/nyxd-scraper-shared/src/block_processor/mod.rs similarity index 97% rename from common/nyxd-scraper/src/block_processor/mod.rs rename to common/nyxd-scraper-shared/src/block_processor/mod.rs index a99bc1c21f9..0d30df40b78 100644 --- a/common/nyxd-scraper/src/block_processor/mod.rs +++ b/common/nyxd-scraper-shared/src/block_processor/mod.rs @@ -8,7 +8,7 @@ use crate::block_requester::BlockRequest; use crate::error::ScraperError; use crate::modules::{BlockModule, MsgModule, TxModule}; use crate::rpc_client::RpcClient; -use crate::storage::{ScraperStorage, persist_block}; +use crate::storage::{NyxdScraperStorage, NyxdScraperTransaction, persist_block}; use futures::StreamExt; use std::cmp::max; use std::collections::{BTreeMap, HashSet, VecDeque}; @@ -77,7 +77,7 @@ impl BlockProcessorConfig { } } -pub struct BlockProcessor { +pub struct BlockProcessor { config: BlockProcessorConfig, cancel: CancellationToken, synced: Arc, @@ -90,7 +90,7 @@ pub struct BlockProcessor { rpc_client: RpcClient, incoming: UnboundedReceiverStream, block_requester: Sender, - storage: ScraperStorage, + storage: S, // future work: rather than sending each msg to every msg module, // let them subscribe based on `type_url` inside the message itself @@ -101,14 +101,17 @@ pub struct BlockProcessor { } #[allow(clippy::too_many_arguments)] -impl BlockProcessor { +impl BlockProcessor +where + S: NyxdScraperStorage, +{ pub async fn new( config: BlockProcessorConfig, cancel: CancellationToken, synced: Arc, incoming: UnboundedReceiver, block_requester: Sender, - storage: ScraperStorage, + storage: S, rpc_client: RpcClient, ) -> Result { let last_processed = storage.get_last_processed_height().await?; @@ -164,7 +167,11 @@ impl BlockProcessor { // process the entire block as a transaction so that if anything fails, // we won't end up with a corrupted storage. 
- let mut tx = self.storage.begin_processing_tx().await?; + let mut tx = self + .storage + .begin_processing_tx() + .await + .map_err(ScraperError::tx_begin_failure)?; persist_block(&full_info, &mut tx, self.config.store_precommits).await?; @@ -192,10 +199,8 @@ impl BlockProcessor { } let commit_start = Instant::now(); - tx.commit() - .await - .map_err(|source| ScraperError::StorageTxCommitFailure { source })?; - crate::storage::log_db_operation_time("committing processing tx", commit_start); + tx.commit().await.map_err(ScraperError::tx_commit_failure)?; + crate::storage::helpers::log_db_operation_time("committing processing tx", commit_start); self.last_processed_height = full_info.block.header.height.value() as u32; self.last_processed_at = Instant::now(); diff --git a/common/nyxd-scraper/src/block_processor/pruning.rs b/common/nyxd-scraper-shared/src/block_processor/pruning.rs similarity index 100% rename from common/nyxd-scraper/src/block_processor/pruning.rs rename to common/nyxd-scraper-shared/src/block_processor/pruning.rs diff --git a/common/nyxd-scraper/src/block_processor/types.rs b/common/nyxd-scraper-shared/src/block_processor/types.rs similarity index 95% rename from common/nyxd-scraper/src/block_processor/types.rs rename to common/nyxd-scraper-shared/src/block_processor/types.rs index 1c456b93181..8bf184c0d9b 100644 --- a/common/nyxd-scraper/src/block_processor/types.rs +++ b/common/nyxd-scraper-shared/src/block_processor/types.rs @@ -3,6 +3,7 @@ use crate::error::ScraperError; use crate::helpers; +use std::collections::HashMap; use tendermint::{Block, Hash, abci, block, tx}; use tendermint_rpc::endpoint::{block as block_endpoint, block_results, validators}; use tendermint_rpc::event::{Event, EventData}; @@ -26,6 +27,12 @@ pub struct ParsedTransactionResponse { pub tx: cosmrs::tx::Tx, pub proof: Option, + + pub parsed_messages: HashMap, + + pub parsed_message_urls: HashMap, + + pub block: Block, } #[derive(Debug)] diff --git 
a/common/nyxd-scraper/src/block_requester/mod.rs b/common/nyxd-scraper-shared/src/block_requester/mod.rs similarity index 100% rename from common/nyxd-scraper/src/block_requester/mod.rs rename to common/nyxd-scraper-shared/src/block_requester/mod.rs diff --git a/common/nyxd-scraper/src/constants.rs b/common/nyxd-scraper-shared/src/constants.rs similarity index 100% rename from common/nyxd-scraper/src/constants.rs rename to common/nyxd-scraper-shared/src/constants.rs diff --git a/common/nyxd-scraper-shared/src/cosmos_module/message_registry.rs b/common/nyxd-scraper-shared/src/cosmos_module/message_registry.rs new file mode 100644 index 00000000000..45bfe786dd5 --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/message_registry.rs @@ -0,0 +1,146 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::cosmos_module::CosmosModule; +use crate::cosmos_module::modules::auth::Auth; +use crate::cosmos_module::modules::authz::Authz; +use crate::cosmos_module::modules::bank::Bank; +use crate::cosmos_module::modules::capability::Capability; +use crate::cosmos_module::modules::consensus::Consensus; +use crate::cosmos_module::modules::crisis::Crisis; +use crate::cosmos_module::modules::distribution::Distribution; +use crate::cosmos_module::modules::evidence::Evidence; +use crate::cosmos_module::modules::feegrant::Feegrant; +use crate::cosmos_module::modules::gov_v1::GovV1; +use crate::cosmos_module::modules::gov_v1beta1::GovV1Beta1; +use crate::cosmos_module::modules::group::Group; +use crate::cosmos_module::modules::ibc_core::IbcCore; +use crate::cosmos_module::modules::ibc_fee::IbcFee; +use crate::cosmos_module::modules::ibc_interchain_accounts_controller::IbcInterchainAccountsController; +use crate::cosmos_module::modules::ibc_transfer_v1::IbcTransferV1; +use crate::cosmos_module::modules::ibc_transfer_v2::IbcTransferV2; +use crate::cosmos_module::modules::mint::Mint; +use crate::cosmos_module::modules::nft::Nft; +use 
crate::cosmos_module::modules::params::Params; +use crate::cosmos_module::modules::slashing::Slashing; +use crate::cosmos_module::modules::staking::Staking; +use crate::cosmos_module::modules::upgrade::Upgrade; +use crate::cosmos_module::modules::vesting::Vesting; +use crate::cosmos_module::modules::wasm::Wasm; +use crate::error::ScraperError; +use cosmrs::Any; +use cosmrs::proto::prost::Name; +use cosmrs::proto::traits::Message; +use serde::Serialize; +use std::collections::HashMap; + +pub(crate) fn default_proto_to_json( + msg: &Any, +) -> Result { + let proto = ::decode(msg.value.as_slice()).map_err(|error| { + ScraperError::InvalidProtoRepresentation { + type_url: msg.type_url.clone(), + error, + } + })?; + let mut base_serde = + serde_json::to_value(&proto).map_err(|error| ScraperError::JsonSerialisationFailure { + type_url: msg.type_url.clone(), + error, + })?; + + // in bdjuno's output we also had @type field with the type_url + let obj = base_serde.as_object_mut().ok_or_else(|| { + ScraperError::JsonSerialisationFailureNotObject { + type_url: msg.type_url.clone(), + } + })?; + obj.insert( + "@type".to_string(), + serde_json::Value::String(msg.type_url.clone()), + ); + + Ok(base_serde) +} + +type ConvertFn = fn(&Any) -> Result; + +#[derive(Default, Clone)] +pub struct MessageRegistry { + // type url to function converting bytes to proto and finally to json + registered_types: HashMap, +} + +impl MessageRegistry { + pub fn new() -> Self { + MessageRegistry { + registered_types: Default::default(), + } + } + + pub fn register(&mut self) + where + T: Message + Default + Name + Serialize + 'static, + { + self.register_with_custom_fn::(default_proto_to_json::) + } + + #[allow(clippy::panic)] + pub fn register_with_custom_fn(&mut self, convert_fn: ConvertFn) + where + T: Message + Default + Name + Serialize + 'static, + { + if self + .registered_types + .insert(::type_url(), convert_fn) + .is_some() + { + // don't allow duplicate registration because it most 
likely implies bug in the code + panic!("duplicate registration of type {}", ::type_url()); + } + } + + pub fn try_decode(&self, raw: &Any) -> Result { + self.registered_types.get(&raw.type_url).ok_or( + ScraperError::MissingTypeUrlRegistration { + type_url: raw.type_url.clone(), + }, + )?(raw) + } +} + +pub fn default_message_registry() -> MessageRegistry { + let mut registry = MessageRegistry::new(); + let modules: Vec> = vec![ + Box::new(Auth), + Box::new(Authz), + Box::new(Bank), + Box::new(Capability), + Box::new(Consensus), + Box::new(Wasm), + Box::new(Crisis), + Box::new(Distribution), + Box::new(Evidence), + Box::new(Feegrant), + Box::new(GovV1), + Box::new(GovV1Beta1), + Box::new(Group), + Box::new(IbcCore), + Box::new(IbcFee), + Box::new(IbcTransferV1), + Box::new(IbcTransferV2), + Box::new(IbcInterchainAccountsController), + Box::new(Mint), + Box::new(Nft), + Box::new(Params), + Box::new(Slashing), + Box::new(Staking), + Box::new(Upgrade), + Box::new(Vesting), + ]; + + for module in modules { + module.register_messages(&mut registry) + } + registry +} diff --git a/common/nyxd-scraper-shared/src/cosmos_module/mod.rs b/common/nyxd-scraper-shared/src/cosmos_module/mod.rs new file mode 100644 index 00000000000..d60bb7caf1f --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/mod.rs @@ -0,0 +1,11 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::cosmos_module::message_registry::MessageRegistry; + +pub mod message_registry; +mod modules; + +pub trait CosmosModule { + fn register_messages(&self, registry: &mut MessageRegistry); +} diff --git a/common/nyxd-scraper-shared/src/cosmos_module/modules/auth.rs b/common/nyxd-scraper-shared/src/cosmos_module/modules/auth.rs new file mode 100644 index 00000000000..79a7da4b901 --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/auth.rs @@ -0,0 +1,14 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use 
crate::cosmos_module::CosmosModule; +use crate::cosmos_module::message_registry::MessageRegistry; +use cosmos_sdk_proto::cosmos::auth::v1beta1::MsgUpdateParams; + +pub(crate) struct Auth; + +impl CosmosModule for Auth { + fn register_messages(&self, registry: &mut MessageRegistry) { + registry.register::() + } +} diff --git a/common/nyxd-scraper-shared/src/cosmos_module/modules/authz.rs b/common/nyxd-scraper-shared/src/cosmos_module/modules/authz.rs new file mode 100644 index 00000000000..5088f7f7477 --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/authz.rs @@ -0,0 +1,16 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::cosmos_module::CosmosModule; +use crate::cosmos_module::message_registry::MessageRegistry; +use cosmos_sdk_proto::cosmos::authz::v1beta1::{MsgExec, MsgGrant, MsgRevoke}; + +pub(crate) struct Authz; + +impl CosmosModule for Authz { + fn register_messages(&self, registry: &mut MessageRegistry) { + registry.register::(); + registry.register::(); + registry.register::(); + } +} diff --git a/common/nyxd-scraper-shared/src/cosmos_module/modules/bank.rs b/common/nyxd-scraper-shared/src/cosmos_module/modules/bank.rs new file mode 100644 index 00000000000..b84b0bec5d7 --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/bank.rs @@ -0,0 +1,19 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::cosmos_module::CosmosModule; +use crate::cosmos_module::message_registry::MessageRegistry; +use cosmos_sdk_proto::cosmos::bank::v1beta1::{ + MsgMultiSend, MsgSend, MsgSetSendEnabled, MsgUpdateParams, +}; + +pub(crate) struct Bank; + +impl CosmosModule for Bank { + fn register_messages(&self, registry: &mut MessageRegistry) { + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + } +} diff --git a/common/nyxd-scraper-shared/src/cosmos_module/modules/capability.rs 
b/common/nyxd-scraper-shared/src/cosmos_module/modules/capability.rs new file mode 100644 index 00000000000..2a8d785d758 --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/capability.rs @@ -0,0 +1,11 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::cosmos_module::CosmosModule; +use crate::cosmos_module::message_registry::MessageRegistry; + +pub(crate) struct Capability; + +impl CosmosModule for Capability { + fn register_messages(&self, _registry: &mut MessageRegistry) {} +} diff --git a/common/nyxd-scraper-shared/src/cosmos_module/modules/consensus.rs b/common/nyxd-scraper-shared/src/cosmos_module/modules/consensus.rs new file mode 100644 index 00000000000..6d8321ddeac --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/consensus.rs @@ -0,0 +1,11 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::cosmos_module::CosmosModule; +use crate::cosmos_module::message_registry::MessageRegistry; + +pub(crate) struct Consensus; + +impl CosmosModule for Consensus { + fn register_messages(&self, _registry: &mut MessageRegistry) {} +} diff --git a/common/nyxd-scraper-shared/src/cosmos_module/modules/crisis.rs b/common/nyxd-scraper-shared/src/cosmos_module/modules/crisis.rs new file mode 100644 index 00000000000..0f3ace22dd2 --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/crisis.rs @@ -0,0 +1,15 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::cosmos_module::CosmosModule; +use crate::cosmos_module::message_registry::MessageRegistry; +use cosmos_sdk_proto::cosmos::crisis::v1beta1::{MsgUpdateParams, MsgVerifyInvariant}; + +pub(crate) struct Crisis; + +impl CosmosModule for Crisis { + fn register_messages(&self, registry: &mut MessageRegistry) { + registry.register::(); + registry.register::(); + } +} diff --git 
a/common/nyxd-scraper-shared/src/cosmos_module/modules/distribution.rs b/common/nyxd-scraper-shared/src/cosmos_module/modules/distribution.rs new file mode 100644 index 00000000000..810ac75e5c6 --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/distribution.rs @@ -0,0 +1,22 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::cosmos_module::CosmosModule; +use crate::cosmos_module::message_registry::MessageRegistry; +use cosmos_sdk_proto::cosmos::distribution::v1beta1::{ + MsgCommunityPoolSpend, MsgFundCommunityPool, MsgSetWithdrawAddress, MsgUpdateParams, + MsgWithdrawDelegatorReward, MsgWithdrawValidatorCommission, +}; + +pub(crate) struct Distribution; + +impl CosmosModule for Distribution { + fn register_messages(&self, registry: &mut MessageRegistry) { + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + } +} diff --git a/common/nyxd-scraper-shared/src/cosmos_module/modules/evidence.rs b/common/nyxd-scraper-shared/src/cosmos_module/modules/evidence.rs new file mode 100644 index 00000000000..0a370776d88 --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/evidence.rs @@ -0,0 +1,14 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::cosmos_module::CosmosModule; +use crate::cosmos_module::message_registry::MessageRegistry; +use cosmos_sdk_proto::cosmos::evidence::v1beta1::MsgSubmitEvidence; + +pub(crate) struct Evidence; + +impl CosmosModule for Evidence { + fn register_messages(&self, registry: &mut MessageRegistry) { + registry.register::() + } +} diff --git a/common/nyxd-scraper-shared/src/cosmos_module/modules/feegrant.rs b/common/nyxd-scraper-shared/src/cosmos_module/modules/feegrant.rs new file mode 100644 index 00000000000..844ff8ec892 --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/feegrant.rs @@ -0,0 
+1,18 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::cosmos_module::CosmosModule; +use crate::cosmos_module::message_registry::MessageRegistry; +use cosmos_sdk_proto::cosmos::feegrant::v1beta1::{ + MsgGrantAllowance, MsgPruneAllowances, MsgRevokeAllowance, +}; + +pub(crate) struct Feegrant; + +impl CosmosModule for Feegrant { + fn register_messages(&self, registry: &mut MessageRegistry) { + registry.register::(); + registry.register::(); + registry.register::(); + } +} diff --git a/common/nyxd-scraper-shared/src/cosmos_module/modules/gov_v1.rs b/common/nyxd-scraper-shared/src/cosmos_module/modules/gov_v1.rs new file mode 100644 index 00000000000..0b852db22b9 --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/gov_v1.rs @@ -0,0 +1,21 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::cosmos_module::CosmosModule; +use crate::cosmos_module::message_registry::MessageRegistry; +use cosmos_sdk_proto::cosmos::gov::v1::{ + MsgDeposit, MsgExecLegacyContent, MsgSubmitProposal, MsgUpdateParams, MsgVote, MsgVoteWeighted, +}; + +pub(crate) struct GovV1; + +impl CosmosModule for GovV1 { + fn register_messages(&self, registry: &mut MessageRegistry) { + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + } +} diff --git a/common/nyxd-scraper-shared/src/cosmos_module/modules/gov_v1beta1.rs b/common/nyxd-scraper-shared/src/cosmos_module/modules/gov_v1beta1.rs new file mode 100644 index 00000000000..25ef3e56795 --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/gov_v1beta1.rs @@ -0,0 +1,19 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::cosmos_module::CosmosModule; +use crate::cosmos_module::message_registry::MessageRegistry; +use cosmos_sdk_proto::cosmos::gov::v1beta1::{ + MsgDeposit, 
MsgSubmitProposal, MsgVote, MsgVoteWeighted, +}; + +pub(crate) struct GovV1Beta1; + +impl CosmosModule for GovV1Beta1 { + fn register_messages(&self, registry: &mut MessageRegistry) { + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + } +} diff --git a/common/nyxd-scraper-shared/src/cosmos_module/modules/group.rs b/common/nyxd-scraper-shared/src/cosmos_module/modules/group.rs new file mode 100644 index 00000000000..d78bd936aec --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/group.rs @@ -0,0 +1,29 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::cosmos_module::CosmosModule; +use crate::cosmos_module::message_registry::MessageRegistry; +use tracing::warn; + +pub(crate) struct Group; + +impl CosmosModule for Group { + fn register_messages(&self, _registry: &mut MessageRegistry) { + warn!("missing cosmos-sdk-proto definition for 'group::MsgCreateGroup'"); + warn!("missing cosmos-sdk-proto definition for 'group::MsgUpdateGroupMembers'"); + warn!("missing cosmos-sdk-proto definition for 'group::MsgUpdateGroupAdmin'"); + warn!("missing cosmos-sdk-proto definition for 'group::MsgUpdateGroupMetadata'"); + warn!("missing cosmos-sdk-proto definition for 'group::MsgCreateGroupWithPolicy'"); + warn!("missing cosmos-sdk-proto definition for 'group::MsgCreateGroupPolicy'"); + warn!("missing cosmos-sdk-proto definition for 'group::MsgUpdateGroupPolicyAdmin'"); + warn!( + "missing cosmos-sdk-proto definition for 'group::MsgUpdateGroupPolicyDecisionPolicy'" + ); + warn!("missing cosmos-sdk-proto definition for 'group::MsgUpdateGroupPolicyMetadata'"); + warn!("missing cosmos-sdk-proto definition for 'group::MsgSubmitProposal'"); + warn!("missing cosmos-sdk-proto definition for 'group::MsgWithdrawProposal'"); + warn!("missing cosmos-sdk-proto definition for 'group::MsgVote'"); + warn!("missing cosmos-sdk-proto definition for 'group::MsgExec'"); + warn!("missing 
cosmos-sdk-proto definition for 'group::MsgLeaveGroup'"); + } +} diff --git a/common/nyxd-scraper-shared/src/cosmos_module/modules/ibc_core.rs b/common/nyxd-scraper-shared/src/cosmos_module/modules/ibc_core.rs new file mode 100644 index 00000000000..304602fa545 --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/ibc_core.rs @@ -0,0 +1,70 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::{CosmosModule, MessageRegistry}; +use ibc_proto::ibc::core::channel::{ + self, + v1::{ + MsgAcknowledgement, MsgChannelCloseConfirm, MsgChannelCloseInit, MsgChannelOpenAck, + MsgChannelOpenConfirm, MsgChannelOpenInit, MsgChannelOpenTry, MsgChannelUpgradeAck, + MsgChannelUpgradeCancel, MsgChannelUpgradeConfirm, MsgChannelUpgradeInit, + MsgChannelUpgradeOpen, MsgChannelUpgradeTimeout, MsgChannelUpgradeTry, + MsgPruneAcknowledgements, MsgRecvPacket, MsgTimeout, MsgTimeoutOnClose, + }, +}; +use ibc_proto::ibc::core::client::{ + self, + v1::{ + MsgCreateClient, MsgIbcSoftwareUpgrade, MsgRecoverClient, MsgSubmitMisbehaviour, + MsgUpdateClient, MsgUpgradeClient, + }, +}; +use ibc_proto::ibc::core::connection::{ + self, + v1::{ + MsgConnectionOpenAck, MsgConnectionOpenConfirm, MsgConnectionOpenInit, MsgConnectionOpenTry, + }, +}; + +pub(crate) struct IbcCore; + +impl CosmosModule for IbcCore { + fn register_messages(&self, registry: &mut MessageRegistry) { + // channel + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + + // client + registry.register::(); + registry.register::(); + registry.register::(); 
+ registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + + // connection + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + } +} diff --git a/common/nyxd-scraper-shared/src/cosmos_module/modules/ibc_fee.rs b/common/nyxd-scraper-shared/src/cosmos_module/modules/ibc_fee.rs new file mode 100644 index 00000000000..b5e3d16b534 --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/ibc_fee.rs @@ -0,0 +1,18 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::{CosmosModule, MessageRegistry}; +use ibc_proto::ibc::applications::fee::v1::{ + MsgPayPacketFee, MsgPayPacketFeeAsync, MsgRegisterPayee, RegisteredCounterpartyPayee, +}; + +pub(crate) struct IbcFee; + +impl CosmosModule for IbcFee { + fn register_messages(&self, registry: &mut MessageRegistry) { + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + } +} diff --git a/common/nyxd-scraper-shared/src/cosmos_module/modules/ibc_interchain_accounts_controller.rs b/common/nyxd-scraper-shared/src/cosmos_module/modules/ibc_interchain_accounts_controller.rs new file mode 100644 index 00000000000..fc6ef915eb3 --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/ibc_interchain_accounts_controller.rs @@ -0,0 +1,17 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::{CosmosModule, MessageRegistry}; +use ibc_proto::ibc::applications::interchain_accounts::controller::v1::{ + MsgRegisterInterchainAccount, MsgSendTx, MsgUpdateParams, +}; + +pub(crate) struct IbcInterchainAccountsController; + +impl CosmosModule for IbcInterchainAccountsController { + fn register_messages(&self, registry: &mut MessageRegistry) { + registry.register::(); + registry.register::(); + registry.register::(); + } +} diff --git 
a/common/nyxd-scraper-shared/src/cosmos_module/modules/ibc_transfer_v1.rs b/common/nyxd-scraper-shared/src/cosmos_module/modules/ibc_transfer_v1.rs new file mode 100644 index 00000000000..0f2f92524a1 --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/ibc_transfer_v1.rs @@ -0,0 +1,14 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::{CosmosModule, MessageRegistry}; +use ibc_proto::ibc::applications::transfer::v1::{MsgTransfer, MsgUpdateParams}; + +pub(crate) struct IbcTransferV1; + +impl CosmosModule for IbcTransferV1 { + fn register_messages(&self, registry: &mut MessageRegistry) { + registry.register::(); + registry.register::(); + } +} diff --git a/common/nyxd-scraper-shared/src/cosmos_module/modules/ibc_transfer_v2.rs b/common/nyxd-scraper-shared/src/cosmos_module/modules/ibc_transfer_v2.rs new file mode 100644 index 00000000000..d0e707e35fa --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/ibc_transfer_v2.rs @@ -0,0 +1,10 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::{CosmosModule, MessageRegistry}; + +pub(crate) struct IbcTransferV2; + +impl CosmosModule for IbcTransferV2 { + fn register_messages(&self, _registry: &mut MessageRegistry) {} +} diff --git a/common/nyxd-scraper-shared/src/cosmos_module/modules/mint.rs b/common/nyxd-scraper-shared/src/cosmos_module/modules/mint.rs new file mode 100644 index 00000000000..be6625b237f --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/mint.rs @@ -0,0 +1,14 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::cosmos_module::CosmosModule; +use crate::cosmos_module::message_registry::MessageRegistry; +use cosmos_sdk_proto::cosmos::mint::v1beta1::MsgUpdateParams; + +pub(crate) struct Mint; + +impl CosmosModule for Mint { + fn register_messages(&self, registry: &mut MessageRegistry) { + registry.register::() + } +} 
diff --git a/common/nyxd-scraper-shared/src/cosmos_module/modules/mod.rs b/common/nyxd-scraper-shared/src/cosmos_module/modules/mod.rs new file mode 100644 index 00000000000..5c13923dcba --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/mod.rs @@ -0,0 +1,28 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +pub(crate) mod auth; +pub(crate) mod authz; +pub(crate) mod bank; +pub(crate) mod capability; +pub(crate) mod consensus; +pub(crate) mod crisis; +pub(crate) mod distribution; +pub(crate) mod evidence; +pub(crate) mod feegrant; +pub(crate) mod gov_v1; +pub(crate) mod gov_v1beta1; +pub(crate) mod group; +pub(crate) mod ibc_core; +pub(crate) mod ibc_fee; +pub(crate) mod ibc_interchain_accounts_controller; +pub(crate) mod ibc_transfer_v1; +pub(crate) mod ibc_transfer_v2; +pub(crate) mod mint; +pub(crate) mod nft; +pub(crate) mod params; +pub(crate) mod slashing; +pub(crate) mod staking; +pub(crate) mod upgrade; +pub(crate) mod vesting; +pub(crate) mod wasm; diff --git a/common/nyxd-scraper-shared/src/cosmos_module/modules/nft.rs b/common/nyxd-scraper-shared/src/cosmos_module/modules/nft.rs new file mode 100644 index 00000000000..e4e96f57d56 --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/nft.rs @@ -0,0 +1,11 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::cosmos_module::CosmosModule; +use crate::cosmos_module::message_registry::MessageRegistry; + +pub(crate) struct Nft; + +impl CosmosModule for Nft { + fn register_messages(&self, _registry: &mut MessageRegistry) {} +} diff --git a/common/nyxd-scraper-shared/src/cosmos_module/modules/params.rs b/common/nyxd-scraper-shared/src/cosmos_module/modules/params.rs new file mode 100644 index 00000000000..70fe04783ad --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/params.rs @@ -0,0 +1,11 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 
+ +use crate::cosmos_module::CosmosModule; +use crate::cosmos_module::message_registry::MessageRegistry; + +pub(crate) struct Params; + +impl CosmosModule for Params { + fn register_messages(&self, _registry: &mut MessageRegistry) {} +} diff --git a/common/nyxd-scraper-shared/src/cosmos_module/modules/slashing.rs b/common/nyxd-scraper-shared/src/cosmos_module/modules/slashing.rs new file mode 100644 index 00000000000..481295516f5 --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/slashing.rs @@ -0,0 +1,11 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::cosmos_module::CosmosModule; +use crate::cosmos_module::message_registry::MessageRegistry; + +pub(crate) struct Slashing; + +impl CosmosModule for Slashing { + fn register_messages(&self, _registry: &mut MessageRegistry) {} +} diff --git a/common/nyxd-scraper-shared/src/cosmos_module/modules/staking.rs b/common/nyxd-scraper-shared/src/cosmos_module/modules/staking.rs new file mode 100644 index 00000000000..dc87d6cfea9 --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/staking.rs @@ -0,0 +1,23 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::cosmos_module::CosmosModule; +use crate::cosmos_module::message_registry::MessageRegistry; +use cosmos_sdk_proto::cosmos::staking::v1beta1::{ + MsgBeginRedelegate, MsgCancelUnbondingDelegation, MsgCreateValidator, MsgDelegate, + MsgEditValidator, MsgUndelegate, MsgUpdateParams, +}; + +pub(crate) struct Staking; + +impl CosmosModule for Staking { + fn register_messages(&self, registry: &mut MessageRegistry) { + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + } +} diff --git a/common/nyxd-scraper-shared/src/cosmos_module/modules/upgrade.rs b/common/nyxd-scraper-shared/src/cosmos_module/modules/upgrade.rs new file mode 
100644 index 00000000000..cf8174fe5ff --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/upgrade.rs @@ -0,0 +1,15 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::cosmos_module::CosmosModule; +use crate::cosmos_module::message_registry::MessageRegistry; +use cosmos_sdk_proto::cosmos::upgrade::v1beta1::{MsgCancelUpgrade, MsgSoftwareUpgrade}; + +pub(crate) struct Upgrade; + +impl CosmosModule for Upgrade { + fn register_messages(&self, registry: &mut MessageRegistry) { + registry.register::(); + registry.register::(); + } +} diff --git a/common/nyxd-scraper-shared/src/cosmos_module/modules/vesting.rs b/common/nyxd-scraper-shared/src/cosmos_module/modules/vesting.rs new file mode 100644 index 00000000000..e518829b7f8 --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/vesting.rs @@ -0,0 +1,18 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::cosmos_module::CosmosModule; +use crate::cosmos_module::message_registry::MessageRegistry; +use cosmos_sdk_proto::cosmos::vesting::v1beta1::{ + MsgCreatePeriodicVestingAccount, MsgCreatePermanentLockedAccount, MsgCreateVestingAccount, +}; + +pub(crate) struct Vesting; + +impl CosmosModule for Vesting { + fn register_messages(&self, registry: &mut MessageRegistry) { + registry.register::(); + registry.register::(); + registry.register::(); + } +} diff --git a/common/nyxd-scraper-shared/src/cosmos_module/modules/wasm.rs b/common/nyxd-scraper-shared/src/cosmos_module/modules/wasm.rs new file mode 100644 index 00000000000..c9b7ad5bb0e --- /dev/null +++ b/common/nyxd-scraper-shared/src/cosmos_module/modules/wasm.rs @@ -0,0 +1,104 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::cosmos_module::CosmosModule; +use crate::cosmos_module::message_registry::{MessageRegistry, default_proto_to_json}; +use crate::error::ScraperError; +use base64::Engine; +use 
base64::engine::general_purpose::STANDARD; +use cosmos_sdk_proto::cosmwasm::wasm::v1::{ + MsgAddCodeUploadParamsAddresses, MsgClearAdmin, MsgExecuteContract, MsgIbcCloseChannel, + MsgIbcSend, MsgInstantiateContract, MsgInstantiateContract2, MsgMigrateContract, MsgPinCodes, + MsgRemoveCodeUploadParamsAddresses, MsgStoreAndInstantiateContract, MsgStoreAndMigrateContract, + MsgStoreCode, MsgSudoContract, MsgUnpinCodes, MsgUpdateAdmin, MsgUpdateContractLabel, + MsgUpdateInstantiateConfig, MsgUpdateParams, +}; +use cosmrs::Any; +use prost::Message; +use serde::Serialize; +use tracing::warn; + +pub(crate) struct Wasm; + +fn decode_wasm_message( + msg: &Any, +) -> Result { + let field = "msg"; + // 1. perform basic decoding + let mut base = default_proto_to_json::(msg)?; + let Some(encoded_field) = base.get_mut(field) else { + warn!( + "missing field 'msg' in wasm message of type {} - can't perform additional decoding", + msg.type_url + ); + return Ok(base); + }; + + // 2. decode 'msg' field + let as_str = + encoded_field + .as_str() + .ok_or(ScraperError::JsonWasmSerialisationFailureNotString { + field: field.to_string(), + type_url: msg.type_url.clone(), + })?; + + let decoded = STANDARD.decode(as_str).map_err(|error| { + ScraperError::JsonWasmSerialisationFailureInvalidBase64Encoding { + field: field.to_string(), + type_url: msg.type_url.clone(), + error, + } + })?; + + // 3. 
replace original 'msg' with the new json + let re_decoded: serde_json::Value = serde_json::from_slice(&decoded).map_err(|error| { + ScraperError::JsonSerialisationFailure { + type_url: format!("{}.{field}", msg.type_url), + error, + } + })?; + + *encoded_field = re_decoded; + Ok(base) +} + +impl CosmosModule for Wasm { + fn register_messages(&self, registry: &mut MessageRegistry) { + registry.register::(); + registry.register::(); + registry.register::(); + + registry.register_with_custom_fn::(|msg| { + decode_wasm_message::(msg) + }); + registry.register_with_custom_fn::(|msg| { + decode_wasm_message::(msg) + }); + registry.register_with_custom_fn::(|msg| { + decode_wasm_message::(msg) + }); + registry.register_with_custom_fn::(|msg| { + decode_wasm_message::(msg) + }); + registry.register_with_custom_fn::(|msg| { + decode_wasm_message::(msg) + }); + registry.register_with_custom_fn::(|msg| { + decode_wasm_message::(msg) + }); + registry.register_with_custom_fn::(|msg| { + decode_wasm_message::(msg) + }); + + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + registry.register::(); + } +} diff --git a/common/nyxd-scraper/src/error.rs b/common/nyxd-scraper-shared/src/error.rs similarity index 72% rename from common/nyxd-scraper/src/error.rs rename to common/nyxd-scraper-shared/src/error.rs index d9dd359f6ad..76723950bef 100644 --- a/common/nyxd-scraper/src/error.rs +++ b/common/nyxd-scraper-shared/src/error.rs @@ -4,17 +4,16 @@ use crate::block_processor::pruning::{ EVERYTHING_PRUNING_INTERVAL, EVERYTHING_PRUNING_KEEP_RECENT, }; +use crate::helpers::MalformedDataError; +use crate::storage::NyxdScraperStorageError; use tendermint::Hash; use thiserror::Error; use tokio::sync::mpsc::error::SendError; #[derive(Debug, Error)] pub enum ScraperError { - #[error("experienced internal database error: {0}")] - 
InternalDatabaseError(#[from] sqlx::Error), - - #[error("failed to perform startup SQL migration: {0}")] - StartupMigrationFailure(#[from] sqlx::migrate::MigrateError), + #[error("storage error: {0}")] + StorageError(#[from] NyxdScraperStorageError), #[error("the block scraper is already running")] ScraperAlreadyRunning, @@ -106,40 +105,26 @@ pub enum ScraperError { #[error("failed to begin storage tx: {source}")] StorageTxBeginFailure { #[source] - source: sqlx::Error, + source: NyxdScraperStorageError, }, #[error("failed to commit storage tx: {source}")] StorageTxCommitFailure { #[source] - source: sqlx::Error, + source: NyxdScraperStorageError, }, #[error("failed to send on a closed channel")] ClosedChannelError, - #[error("failed to parse validator's address: {source}")] - MalformedValidatorAddress { - #[source] - source: eyre::Report, - }, - - #[error("failed to parse validator's address: {source}")] - MalformedValidatorPubkey { - #[source] - source: eyre::Report, - }, + #[error(transparent)] + MalformedData(#[from] MalformedDataError), #[error( "could not find the block proposer ('{proposer}') for height {height} in the validator set" )] BlockProposerNotInValidatorSet { height: u32, proposer: String }, - #[error( - "could not find validator information for {address}; the validator has signed a commit" - )] - MissingValidatorInfoCommitted { address: String }, - #[error( "pruning.interval must not be set to 0. 
If you want to disable pruning, select pruning.strategy = \"nothing\"" )] @@ -156,6 +141,49 @@ pub enum ScraperError { EVERYTHING_PRUNING_KEEP_RECENT )] TooSmallKeepRecent { keep_recent: u32 }, + + #[error("'{type_url}' is not registered in the message registry")] + MissingTypeUrlRegistration { type_url: String }, + + #[error("failed to decode message of type '{type_url}': {error}")] + InvalidProtoRepresentation { + type_url: String, + #[source] + error: prost::DecodeError, + }, + + #[error("failed to encode message of type '{type_url}' to json: '{error}'")] + JsonSerialisationFailure { + type_url: String, + #[source] + error: serde_json::Error, + }, + + #[error("serialisation of message of type '{type_url}' didn't result in an object!")] + JsonSerialisationFailureNotObject { type_url: String }, + + #[error("field '{field}' in '{type_url}' is not a string")] + JsonWasmSerialisationFailureNotString { field: String, type_url: String }, + + #[error("field '{field}' in '{type_url}' has invalid base64 encoding: {error}")] + JsonWasmSerialisationFailureInvalidBase64Encoding { + field: String, + type_url: String, + #[source] + error: base64::DecodeError, + }, +} + +impl ScraperError { + pub fn tx_begin_failure(source: NyxdScraperStorageError) -> ScraperError +where { + ScraperError::StorageTxBeginFailure { source } + } + + pub fn tx_commit_failure(source: NyxdScraperStorageError) -> ScraperError +where { + ScraperError::StorageTxCommitFailure { source } + } } impl From> for ScraperError { diff --git a/common/nyxd-scraper-shared/src/helpers.rs b/common/nyxd-scraper-shared/src/helpers.rs new file mode 100644 index 00000000000..54c5a1cbc00 --- /dev/null +++ b/common/nyxd-scraper-shared/src/helpers.rs @@ -0,0 +1,66 @@ +// Copyright 2023 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::block_processor::types::ParsedTransactionResponse; +use crate::constants::{BECH32_CONESNSUS_PUBKEY_PREFIX, BECH32_CONSENSUS_ADDRESS_PREFIX}; +use cosmrs::AccountId; 
+use sha2::{Digest, Sha256}; +use tendermint::{Hash, validator}; +use tendermint::{PublicKey, account}; +use tendermint_rpc::endpoint::validators; +use thiserror::Error; + +#[derive(Error, Debug)] +pub enum MalformedDataError { + #[error("failed to parse validator's address: {source}")] + MalformedValidatorAddress { + #[source] + source: eyre::Report, + }, + + #[error("failed to parse validator's address: {source}")] + MalformedValidatorPubkey { + #[source] + source: eyre::Report, + }, + + #[error( + "could not find validator information for {address}; the validator has signed a commit" + )] + MissingValidatorInfoCommitted { address: String }, +} + +pub fn tx_hash>(raw_tx: M) -> Hash { + Hash::Sha256(Sha256::digest(raw_tx).into()) +} + +pub fn validator_pubkey_to_bech32(pubkey: PublicKey) -> Result { + // TODO: this one seem to attach additional prefix to they pubkeys, is that what we want instead maybe? + // Ok(pubkey.to_bech32(BECH32_CONESNSUS_PUBKEY_PREFIX)) + AccountId::new(BECH32_CONESNSUS_PUBKEY_PREFIX, &pubkey.to_bytes()) + .map_err(|source| MalformedDataError::MalformedValidatorPubkey { source }) +} + +pub fn validator_consensus_address(id: account::Id) -> Result { + AccountId::new(BECH32_CONSENSUS_ADDRESS_PREFIX, id.as_ref()) + .map_err(|source| MalformedDataError::MalformedValidatorAddress { source }) +} + +pub fn tx_gas_sum(txs: &[ParsedTransactionResponse]) -> i64 { + txs.iter().map(|tx| tx.tx_result.gas_used).sum() +} + +pub fn validator_info( + id: account::Id, + validators: &validators::Response, +) -> Result<&validator::Info, MalformedDataError> { + match validators.validators.iter().find(|v| v.address == id) { + Some(info) => Ok(info), + None => { + let addr = validator_consensus_address(id)?; + Err(MalformedDataError::MissingValidatorInfoCommitted { + address: addr.to_string(), + }) + } + } +} diff --git a/common/nyxd-scraper/src/lib.rs b/common/nyxd-scraper-shared/src/lib.rs similarity index 69% rename from common/nyxd-scraper/src/lib.rs rename 
to common/nyxd-scraper-shared/src/lib.rs index 7d22921a3eb..7d6b18509c3 100644 --- a/common/nyxd-scraper/src/lib.rs +++ b/common/nyxd-scraper-shared/src/lib.rs @@ -1,14 +1,12 @@ // Copyright 2023 - Nym Technologies SA // SPDX-License-Identifier: Apache-2.0 -#![warn(clippy::expect_used)] -#![warn(clippy::unwrap_used)] - pub(crate) mod block_processor; pub(crate) mod block_requester; pub mod constants; +mod cosmos_module; pub mod error; -pub(crate) mod helpers; +pub mod helpers; pub mod modules; pub(crate) mod rpc_client; pub(crate) mod scraper; @@ -16,6 +14,11 @@ pub mod storage; pub use block_processor::pruning::{PruningOptions, PruningStrategy}; pub use block_processor::types::ParsedTransactionResponse; +pub use cosmos_module::{ + CosmosModule, + message_registry::{MessageRegistry, default_message_registry}, +}; +pub use cosmrs::Any; pub use modules::{BlockModule, MsgModule, TxModule}; pub use scraper::{Config, NyxdScraper, StartingBlockOpts}; -pub use storage::models; +pub use storage::{NyxdScraperStorage, NyxdScraperTransaction}; diff --git a/common/nyxd-scraper/src/modules/block_module.rs b/common/nyxd-scraper-shared/src/modules/block_module.rs similarity index 79% rename from common/nyxd-scraper/src/modules/block_module.rs rename to common/nyxd-scraper-shared/src/modules/block_module.rs index 9ca1ba9b204..1ea3c2899d9 100644 --- a/common/nyxd-scraper/src/modules/block_module.rs +++ b/common/nyxd-scraper-shared/src/modules/block_module.rs @@ -3,7 +3,7 @@ use crate::block_processor::types::FullBlockInformation; use crate::error::ScraperError; -use crate::storage::StorageTransaction; +use crate::storage::NyxdScraperTransaction; use async_trait::async_trait; #[async_trait] @@ -11,6 +11,6 @@ pub trait BlockModule { async fn handle_block( &mut self, block: &FullBlockInformation, - storage_tx: &mut StorageTransaction, + storage_tx: &mut dyn NyxdScraperTransaction, ) -> Result<(), ScraperError>; } diff --git a/common/nyxd-scraper/src/modules/mod.rs 
b/common/nyxd-scraper-shared/src/modules/mod.rs similarity index 100% rename from common/nyxd-scraper/src/modules/mod.rs rename to common/nyxd-scraper-shared/src/modules/mod.rs diff --git a/common/nyxd-scraper/src/modules/msg_module.rs b/common/nyxd-scraper-shared/src/modules/msg_module.rs similarity index 83% rename from common/nyxd-scraper/src/modules/msg_module.rs rename to common/nyxd-scraper-shared/src/modules/msg_module.rs index 1d195bee14a..60f53b1553d 100644 --- a/common/nyxd-scraper/src/modules/msg_module.rs +++ b/common/nyxd-scraper-shared/src/modules/msg_module.rs @@ -3,7 +3,7 @@ use crate::block_processor::types::ParsedTransactionResponse; use crate::error::ScraperError; -use crate::storage::StorageTransaction; +use crate::storage::NyxdScraperTransaction; use async_trait::async_trait; use cosmrs::Any; @@ -16,6 +16,6 @@ pub trait MsgModule { index: usize, msg: &Any, tx: &ParsedTransactionResponse, - storage_tx: &mut StorageTransaction, + storage_tx: &mut dyn NyxdScraperTransaction, ) -> Result<(), ScraperError>; } diff --git a/common/nyxd-scraper/src/modules/tx_module.rs b/common/nyxd-scraper-shared/src/modules/tx_module.rs similarity index 79% rename from common/nyxd-scraper/src/modules/tx_module.rs rename to common/nyxd-scraper-shared/src/modules/tx_module.rs index 07d012ab5a8..8d2f5b22b1e 100644 --- a/common/nyxd-scraper/src/modules/tx_module.rs +++ b/common/nyxd-scraper-shared/src/modules/tx_module.rs @@ -3,7 +3,7 @@ use crate::block_processor::types::ParsedTransactionResponse; use crate::error::ScraperError; -use crate::storage::StorageTransaction; +use crate::storage::NyxdScraperTransaction; use async_trait::async_trait; #[async_trait] @@ -11,6 +11,6 @@ pub trait TxModule { async fn handle_tx( &mut self, tx: &ParsedTransactionResponse, - storage_tx: &mut StorageTransaction, + storage_tx: &mut dyn NyxdScraperTransaction, ) -> Result<(), ScraperError>; } diff --git a/common/nyxd-scraper/src/rpc_client.rs b/common/nyxd-scraper-shared/src/rpc_client.rs 
similarity index 79% rename from common/nyxd-scraper/src/rpc_client.rs rename to common/nyxd-scraper-shared/src/rpc_client.rs index 5a3621abd4a..3f4ee84d26e 100644 --- a/common/nyxd-scraper/src/rpc_client.rs +++ b/common/nyxd-scraper-shared/src/rpc_client.rs @@ -6,15 +6,16 @@ use crate::block_processor::types::{ }; use crate::error::ScraperError; use crate::helpers::tx_hash; +use crate::{Any, MessageRegistry, default_message_registry}; use futures::StreamExt; use futures::future::join3; -use std::collections::BTreeMap; +use std::collections::{BTreeMap, HashMap}; use std::sync::Arc; use tendermint::Hash; use tendermint_rpc::endpoint::{block, block_results, tx, validators}; use tendermint_rpc::{Client, HttpClient, Paging}; use tokio::sync::Mutex; -use tracing::{debug, instrument}; +use tracing::{debug, instrument, warn}; use url::Url; #[derive(Clone)] @@ -22,6 +23,9 @@ pub struct RpcClient { // right now I don't care about anything nym specific, so a simple http client is sufficient, // once this is inadequate, we can switch to a NyxdClient inner: Arc, + + // kinda like very limited cosmos sdk codec + pub(crate) message_registry: MessageRegistry, } impl RpcClient { @@ -35,9 +39,20 @@ impl RpcClient { Ok(RpcClient { inner: Arc::new(http_client), + message_registry: default_message_registry(), }) } + fn decode_or_skip(&self, msg: &Any) -> Option { + match self.message_registry.try_decode(msg) { + Ok(decoded) => Some(decoded), + Err(err) => { + warn!("Failed to decode raw message: {err}"); + None + } + } + } + #[instrument(skip(self, block), fields(height = block.height))] pub async fn try_get_full_details( &self, @@ -56,19 +71,33 @@ impl RpcClient { let raw_transactions = raw_transactions?; let mut transactions = Vec::with_capacity(raw_transactions.len()); - for tx in raw_transactions { + for raw_tx in raw_transactions { + let mut parsed_messages = HashMap::new(); + let mut parsed_message_urls = HashMap::new(); + let tx = 
cosmrs::Tx::from_bytes(&raw_tx.tx).map_err(|source| { + ScraperError::TxParseFailure { + hash: raw_tx.hash, + source, + } + })?; + + for (index, msg) in tx.body.messages.iter().enumerate() { + if let Some(value) = self.decode_or_skip(msg) { + parsed_messages.insert(index, value); + parsed_message_urls.insert(index, msg.type_url.clone()); + } + } + transactions.push(ParsedTransactionResponse { - hash: tx.hash, - height: tx.height, - index: tx.index, - tx_result: tx.tx_result, - tx: cosmrs::Tx::from_bytes(&tx.tx).map_err(|source| { - ScraperError::TxParseFailure { - hash: tx.hash, - source, - } - })?, - proof: tx.proof, + hash: raw_tx.hash, + height: raw_tx.height, + index: raw_tx.index, + tx_result: raw_tx.tx_result, + tx, + proof: raw_tx.proof, + parsed_messages, + parsed_message_urls, + block: block.block.clone(), }) } diff --git a/common/nyxd-scraper/src/scraper/mod.rs b/common/nyxd-scraper-shared/src/scraper/mod.rs similarity index 92% rename from common/nyxd-scraper/src/scraper/mod.rs rename to common/nyxd-scraper-shared/src/scraper/mod.rs index 5d067044da5..df9224ae220 100644 --- a/common/nyxd-scraper/src/scraper/mod.rs +++ b/common/nyxd-scraper-shared/src/scraper/mod.rs @@ -9,9 +9,9 @@ use crate::error::ScraperError; use crate::modules::{BlockModule, MsgModule, TxModule}; use crate::rpc_client::RpcClient; use crate::scraper::subscriber::ChainSubscriber; -use crate::storage::ScraperStorage; +use crate::storage::NyxdScraperStorage; use futures::future::join_all; -use std::path::PathBuf; +use std::marker::PhantomData; use std::sync::Arc; use tokio::sync::Notify; use tokio::sync::mpsc::{ @@ -40,7 +40,8 @@ pub struct Config { /// Url to the rpc endpoint of a validator, for example `https://rpc.nymtech.net/` pub rpc_url: Url, - pub database_path: PathBuf, + /// Points to either underlying file (sqlite) or connection string (postgres) + pub database_storage: String, pub pruning_options: PruningOptions, @@ -49,7 +50,8 @@ pub struct Config { pub start_block: 
StartingBlockOpts, } -pub struct NyxdScraperBuilder { +pub struct NyxdScraperBuilder { + _storage: PhantomData, config: Config, block_modules: Vec>, @@ -57,9 +59,13 @@ pub struct NyxdScraperBuilder { msg_modules: Vec>, } -impl NyxdScraperBuilder { - pub async fn build_and_start(self) -> Result { - let scraper = NyxdScraper::new(self.config).await?; +impl NyxdScraperBuilder +where + S: NyxdScraperStorage + Send + Sync + 'static, + S::StorageTransaction: Send + Sync + 'static, +{ + pub async fn build_and_start(self) -> Result, ScraperError> { + let scraper = NyxdScraper::::new(self.config).await?; let (processing_tx, processing_rx) = unbounded_channel(); let (req_tx, req_rx) = channel(5); @@ -110,6 +116,7 @@ impl NyxdScraperBuilder { pub fn new(config: Config) -> Self { NyxdScraperBuilder { + _storage: PhantomData, config, block_modules: vec![], tx_modules: vec![], @@ -133,24 +140,28 @@ impl NyxdScraperBuilder { } } -pub struct NyxdScraper { +pub struct NyxdScraper { config: Config, task_tracker: TaskTracker, cancel_token: CancellationToken, startup_sync: Arc, - storage: ScraperStorage, + storage: S, rpc_client: RpcClient, } -impl NyxdScraper { - pub fn builder(config: Config) -> NyxdScraperBuilder { +impl NyxdScraper +where + S: NyxdScraperStorage + Send + Sync + 'static, + S::StorageTransaction: Send + Sync + 'static, +{ + pub fn builder(config: Config) -> NyxdScraperBuilder { NyxdScraperBuilder::new(config) } pub async fn new(config: Config) -> Result { config.pruning_options.validate()?; - let storage = ScraperStorage::init(&config.database_path).await?; + let storage = S::initialise(&config.database_storage).await?; let rpc_client = RpcClient::new(&config.rpc_url)?; Ok(NyxdScraper { @@ -163,14 +174,14 @@ impl NyxdScraper { }) } - pub fn storage(&self) -> ScraperStorage { - self.storage.clone() + pub fn storage(&self) -> &S { + &self.storage } fn start_tasks( &self, mut block_requester: BlockRequester, - mut block_processor: BlockProcessor, + mut block_processor: 
BlockProcessor, mut chain_subscriber: ChainSubscriber, ) { self.task_tracker @@ -336,7 +347,7 @@ impl NyxdScraper { &self, req_tx: Sender, processing_rx: UnboundedReceiver, - ) -> Result { + ) -> Result, ScraperError> { let block_processor_config = BlockProcessorConfig::new( self.config.pruning_options, self.config.store_precommits, @@ -344,7 +355,7 @@ impl NyxdScraper { self.config.start_block.use_best_effort_start_height, ); - BlockProcessor::new( + BlockProcessor::::new( block_processor_config, self.cancel_token.clone(), self.startup_sync.clone(), diff --git a/common/nyxd-scraper/src/scraper/subscriber.rs b/common/nyxd-scraper-shared/src/scraper/subscriber.rs similarity index 100% rename from common/nyxd-scraper/src/scraper/subscriber.rs rename to common/nyxd-scraper-shared/src/scraper/subscriber.rs diff --git a/common/nyxd-scraper-shared/src/storage/helpers.rs b/common/nyxd-scraper-shared/src/storage/helpers.rs new file mode 100644 index 00000000000..488103fcc64 --- /dev/null +++ b/common/nyxd-scraper-shared/src/storage/helpers.rs @@ -0,0 +1,18 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use tokio::time::Instant; +use tracing::{debug, error, info, trace, warn}; + +pub fn log_db_operation_time(op_name: &str, start_time: Instant) { + let elapsed = start_time.elapsed(); + let formatted = humantime::format_duration(elapsed); + + match elapsed.as_millis() { + v if v > 10000 => error!("{op_name} took {formatted} to execute"), + v if v > 1000 => warn!("{op_name} took {formatted} to execute"), + v if v > 100 => info!("{op_name} took {formatted} to execute"), + v if v > 10 => debug!("{op_name} took {formatted} to execute"), + _ => trace!("{op_name} took {formatted} to execute"), + } +} diff --git a/common/nyxd-scraper-shared/src/storage/mod.rs b/common/nyxd-scraper-shared/src/storage/mod.rs new file mode 100644 index 00000000000..a847f2a6bf1 --- /dev/null +++ b/common/nyxd-scraper-shared/src/storage/mod.rs @@ -0,0 +1,124 @@ +// 
Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::error::ScraperError; +use async_trait::async_trait; +use thiserror::Error; +use tracing::warn; + +pub use crate::ParsedTransactionResponse; +pub use crate::block_processor::types::FullBlockInformation; +pub use tendermint::Block; +pub use tendermint::block::{Commit, CommitSig}; +pub use tendermint_rpc::endpoint::validators; + +pub mod helpers; + +// a workaround for needing associated type (which is a no-no in dynamic dispatch) +#[derive(Error, Debug)] +#[error(transparent)] +pub struct NyxdScraperStorageError(Box); + +impl NyxdScraperStorageError { + pub fn new(error: E) -> Self + where + E: std::error::Error + Send + Sync + 'static, + { + NyxdScraperStorageError(Box::new(error)) + } +} + +#[async_trait] +pub trait NyxdScraperStorage: Clone + Sized { + type StorageTransaction: NyxdScraperTransaction; + + /// Either connection string (postgres) or storage path (sqlite) + async fn initialise(storage: &str) -> Result; + + async fn begin_processing_tx( + &self, + ) -> Result; + + async fn get_last_processed_height(&self) -> Result; + + async fn get_pruned_height(&self) -> Result; + + async fn lowest_block_height(&self) -> Result, NyxdScraperStorageError>; + + async fn prune_storage( + &self, + oldest_to_keep: u32, + current_height: u32, + ) -> Result<(), NyxdScraperStorageError>; +} + +#[async_trait] +pub trait NyxdScraperTransaction { + async fn commit(mut self) -> Result<(), NyxdScraperStorageError>; + + async fn persist_validators( + &mut self, + validators: &validators::Response, + ) -> Result<(), NyxdScraperStorageError>; + + async fn persist_block_data( + &mut self, + block: &Block, + total_gas: i64, + ) -> Result<(), NyxdScraperStorageError>; + + async fn persist_commits( + &mut self, + commits: &Commit, + validators: &validators::Response, + ) -> Result<(), NyxdScraperStorageError>; + + async fn persist_txs( + &mut self, + txs: &[ParsedTransactionResponse], + ) -> 
Result<(), NyxdScraperStorageError>; + + async fn persist_messages( + &mut self, + txs: &[ParsedTransactionResponse], + ) -> Result<(), NyxdScraperStorageError>; + + async fn update_last_processed(&mut self, height: i64) -> Result<(), NyxdScraperStorageError>; +} + +pub async fn persist_block( + block: &FullBlockInformation, + tx: &mut Tx, + store_precommits: bool, +) -> Result<(), ScraperError> +where + Tx: NyxdScraperTransaction, +{ + let total_gas = crate::helpers::tx_gas_sum(&block.transactions); + + // SANITY CHECK: make sure the block proposer is present in the validator set + block.ensure_proposer()?; + + tx.persist_validators(&block.validators).await?; + + tx.persist_block_data(&block.block, total_gas).await?; + + if store_precommits { + if let Some(commit) = &block.block.last_commit { + tx.persist_commits(commit, &block.validators).await?; + } else { + warn!("no commits for block {}", block.block.header.height) + } + } + + // persist txs + tx.persist_txs(&block.transactions).await?; + + // persist messages (inside the transactions) + tx.persist_messages(&block.transactions).await?; + + tx.update_last_processed(block.block.header.height.into()) + .await?; + + Ok(()) +} diff --git a/common/nyxd-scraper-sqlite/Cargo.toml b/common/nyxd-scraper-sqlite/Cargo.toml new file mode 100644 index 00000000000..eb10336f0bb --- /dev/null +++ b/common/nyxd-scraper-sqlite/Cargo.toml @@ -0,0 +1,29 @@ +[package] +name = "nyxd-scraper-sqlite" +version = "0.1.0" +authors.workspace = true +repository.workspace = true +homepage.workspace = true +documentation.workspace = true +edition.workspace = true +license.workspace = true + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +async-trait = { workspace = true } +sqlx = { workspace = true, features = ["runtime-tokio-rustls", "sqlite", "macros", "migrate", "time"] } +thiserror = { workspace = true } +tokio = { workspace = true, features = ["full"] } 
+tracing.workspace = true + +nyxd-scraper-shared = { path = "../nyxd-scraper-shared" } + + +[build-dependencies] +sqlx = { workspace = true, features = ["runtime-tokio-rustls", "sqlite", "macros", "migrate"] } +tokio = { workspace = true, features = ["rt-multi-thread", "macros"] } +anyhow.workspace = true + +[lints] +workspace = true \ No newline at end of file diff --git a/common/nyxd-scraper/README.md b/common/nyxd-scraper-sqlite/README.md similarity index 100% rename from common/nyxd-scraper/README.md rename to common/nyxd-scraper-sqlite/README.md diff --git a/common/nyxd-scraper/build.rs b/common/nyxd-scraper-sqlite/build.rs similarity index 100% rename from common/nyxd-scraper/build.rs rename to common/nyxd-scraper-sqlite/build.rs diff --git a/common/nyxd-scraper/sql_migrations/01_metadata.sql b/common/nyxd-scraper-sqlite/sql_migrations/01_metadata.sql similarity index 100% rename from common/nyxd-scraper/sql_migrations/01_metadata.sql rename to common/nyxd-scraper-sqlite/sql_migrations/01_metadata.sql diff --git a/common/nyxd-scraper/sql_migrations/02_cosmos.sql b/common/nyxd-scraper-sqlite/sql_migrations/02_cosmos.sql similarity index 100% rename from common/nyxd-scraper/sql_migrations/02_cosmos.sql rename to common/nyxd-scraper-sqlite/sql_migrations/02_cosmos.sql diff --git a/common/nyxd-scraper-sqlite/src/error.rs b/common/nyxd-scraper-sqlite/src/error.rs new file mode 100644 index 00000000000..c91a2c2b8d8 --- /dev/null +++ b/common/nyxd-scraper-sqlite/src/error.rs @@ -0,0 +1,36 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use nyxd_scraper_shared::helpers::MalformedDataError; +use nyxd_scraper_shared::storage::NyxdScraperStorageError; +use thiserror::Error; + +#[derive(Error, Debug)] +pub enum SqliteScraperError { + #[error("experienced internal database error: {0}")] + InternalDatabaseError(#[from] sqlx::error::Error), + + #[error("failed to perform startup SQL migration: {0}")] + StartupMigrationFailure(#[from] 
sqlx::migrate::MigrateError), + + #[error("failed to begin storage tx: {source}")] + StorageTxBeginFailure { + #[source] + source: sqlx::error::Error, + }, + + #[error("failed to commit storage tx: {source}")] + StorageTxCommitFailure { + #[source] + source: sqlx::error::Error, + }, + + #[error(transparent)] + MalformedData(#[from] MalformedDataError), +} + +impl From for NyxdScraperStorageError { + fn from(err: SqliteScraperError) -> Self { + NyxdScraperStorageError::new(err) + } +} diff --git a/common/nyxd-scraper-sqlite/src/lib.rs b/common/nyxd-scraper-sqlite/src/lib.rs new file mode 100644 index 00000000000..56c202b63ff --- /dev/null +++ b/common/nyxd-scraper-sqlite/src/lib.rs @@ -0,0 +1,21 @@ +// Copyright 2023 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::storage::block_storage::SqliteScraperStorage; +use nyxd_scraper_shared::NyxdScraper; + +pub use nyxd_scraper_shared::constants; +pub use nyxd_scraper_shared::error::ScraperError; +pub use nyxd_scraper_shared::{ + BlockModule, MsgModule, NyxdScraperTransaction, ParsedTransactionResponse, PruningOptions, + PruningStrategy, StartingBlockOpts, TxModule, +}; +pub use storage::models; + +pub mod error; +pub mod storage; + +pub type SqliteNyxdScraper = NyxdScraper; + +// TODO: for now just use exactly the same config +pub use nyxd_scraper_shared::Config; diff --git a/common/nyxd-scraper-sqlite/src/storage/block_storage.rs b/common/nyxd-scraper-sqlite/src/storage/block_storage.rs new file mode 100644 index 00000000000..9a0a33d52ab --- /dev/null +++ b/common/nyxd-scraper-sqlite/src/storage/block_storage.rs @@ -0,0 +1,251 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::error::SqliteScraperError; +use crate::models::{CommitSignature, Validator}; +use crate::storage::manager::{ + StorageManager, prune_blocks, prune_messages, prune_pre_commits, prune_transactions, + update_last_pruned, +}; +use 
crate::storage::transaction::SqliteStorageTransaction; +use async_trait::async_trait; +use nyxd_scraper_shared::storage::helpers::log_db_operation_time; +use nyxd_scraper_shared::storage::{NyxdScraperStorage, NyxdScraperStorageError}; +use sqlx::ConnectOptions; +use sqlx::sqlite::{SqliteAutoVacuum, SqliteSynchronous}; +use sqlx::types::time::OffsetDateTime; +use std::fmt::Debug; +use std::path::Path; +use tokio::time::Instant; +use tracing::{debug, error, info, instrument}; + +#[derive(Clone)] +pub struct SqliteScraperStorage { + pub(crate) manager: StorageManager, +} + +impl SqliteScraperStorage { + #[instrument] + pub async fn init + Debug>( + database_path: P, + ) -> Result { + let database_path = database_path.as_ref(); + debug!( + "initialising scraper database path to '{}'", + database_path.display() + ); + + let opts = sqlx::sqlite::SqliteConnectOptions::new() + .journal_mode(sqlx::sqlite::SqliteJournalMode::Wal) + .synchronous(SqliteSynchronous::Normal) + .auto_vacuum(SqliteAutoVacuum::Incremental) + .filename(database_path) + .create_if_missing(true) + .disable_statement_logging(); + + // TODO: do we want auto_vacuum ? 
+ + let connection_pool = match sqlx::SqlitePool::connect_with(opts).await { + Ok(db) => db, + Err(err) => { + error!("Failed to connect to SQLx database: {err}"); + return Err(err.into()); + } + }; + + if let Err(err) = sqlx::migrate!("./sql_migrations") + .run(&connection_pool) + .await + { + error!("Failed to initialize SQLx database: {err}"); + return Err(err.into()); + } + + info!("Database migration finished!"); + + let manager = StorageManager { connection_pool }; + manager.set_initial_metadata().await?; + + let storage = SqliteScraperStorage { manager }; + + Ok(storage) + } + + #[instrument(skip(self))] + pub async fn prune_storage( + &self, + oldest_to_keep: u32, + current_height: u32, + ) -> Result<(), SqliteScraperError> { + let start = Instant::now(); + + let mut tx = self.begin_processing_tx().await?; + + prune_messages(oldest_to_keep.into(), &mut **tx).await?; + prune_transactions(oldest_to_keep.into(), &mut **tx).await?; + prune_pre_commits(oldest_to_keep.into(), &mut **tx).await?; + prune_blocks(oldest_to_keep.into(), &mut **tx).await?; + update_last_pruned(current_height.into(), &mut **tx).await?; + + let commit_start = Instant::now(); + tx.0.commit() + .await + .map_err(|source| SqliteScraperError::StorageTxCommitFailure { source })?; + log_db_operation_time("committing pruning tx", commit_start); + + log_db_operation_time("pruning storage", start); + Ok(()) + } + + #[instrument(skip_all)] + pub async fn begin_processing_tx( + &self, + ) -> Result { + debug!("starting storage tx"); + self.manager + .connection_pool + .begin() + .await + .map(SqliteStorageTransaction) + .map_err(|source| SqliteScraperError::StorageTxBeginFailure { source }) + } + + pub async fn lowest_block_height(&self) -> Result, SqliteScraperError> { + Ok(self.manager.get_lowest_block().await?) + } + + pub async fn get_first_block_height_after( + &self, + time: OffsetDateTime, + ) -> Result, SqliteScraperError> { + Ok(self.manager.get_first_block_height_after(time).await?) 
+ } + + pub async fn get_last_block_height_before( + &self, + time: OffsetDateTime, + ) -> Result, SqliteScraperError> { + Ok(self.manager.get_last_block_height_before(time).await?) + } + + pub async fn get_blocks_between( + &self, + start_time: OffsetDateTime, + end_time: OffsetDateTime, + ) -> Result { + let Some(block_start) = self.get_first_block_height_after(start_time).await? else { + return Ok(0); + }; + let Some(block_end) = self.get_last_block_height_before(end_time).await? else { + return Ok(0); + }; + + Ok(block_end - block_start) + } + + pub async fn get_signed_between( + &self, + consensus_address: &str, + start_height: i64, + end_height: i64, + ) -> Result { + Ok(self + .manager + .get_signed_between(consensus_address, start_height, end_height) + .await?) + } + + pub async fn get_signed_between_times( + &self, + consensus_address: &str, + start_time: OffsetDateTime, + end_time: OffsetDateTime, + ) -> Result { + let Some(block_start) = self.get_first_block_height_after(start_time).await? else { + return Ok(0); + }; + let Some(block_end) = self.get_last_block_height_before(end_time).await? else { + return Ok(0); + }; + + self.get_signed_between(consensus_address, block_start, block_end) + .await + } + + pub async fn get_precommit( + &self, + consensus_address: &str, + height: i64, + ) -> Result, SqliteScraperError> { + Ok(self + .manager + .get_precommit(consensus_address, height) + .await?) + } + + pub async fn get_block_signers( + &self, + height: i64, + ) -> Result, SqliteScraperError> { + Ok(self.manager.get_block_validators(height).await?) + } + + pub async fn get_all_known_validators(&self) -> Result, SqliteScraperError> { + Ok(self.manager.get_validators().await?) + } + + pub async fn get_last_processed_height(&self) -> Result { + Ok(self.manager.get_last_processed_height().await?) + } + + pub async fn get_pruned_height(&self) -> Result { + Ok(self.manager.get_pruned_height().await?) 
+ } +} + +#[async_trait] +impl NyxdScraperStorage for SqliteScraperStorage { + type StorageTransaction = SqliteStorageTransaction; + + async fn initialise(storage: &str) -> Result { + SqliteScraperStorage::init(storage) + .await + .map_err(NyxdScraperStorageError::from) + } + + async fn begin_processing_tx( + &self, + ) -> Result { + self.begin_processing_tx() + .await + .map_err(NyxdScraperStorageError::from) + } + + async fn get_last_processed_height(&self) -> Result { + self.get_last_processed_height() + .await + .map_err(NyxdScraperStorageError::from) + } + + async fn get_pruned_height(&self) -> Result { + self.get_pruned_height() + .await + .map_err(NyxdScraperStorageError::from) + } + + async fn lowest_block_height(&self) -> Result, NyxdScraperStorageError> { + self.lowest_block_height() + .await + .map_err(NyxdScraperStorageError::from) + } + + async fn prune_storage( + &self, + oldest_to_keep: u32, + current_height: u32, + ) -> Result<(), NyxdScraperStorageError> { + self.prune_storage(oldest_to_keep, current_height) + .await + .map_err(NyxdScraperStorageError::from) + } +} diff --git a/common/nyxd-scraper/src/storage/manager.rs b/common/nyxd-scraper-sqlite/src/storage/manager.rs similarity index 99% rename from common/nyxd-scraper/src/storage/manager.rs rename to common/nyxd-scraper-sqlite/src/storage/manager.rs index fcc3485b952..34d894407db 100644 --- a/common/nyxd-scraper/src/storage/manager.rs +++ b/common/nyxd-scraper-sqlite/src/storage/manager.rs @@ -1,8 +1,8 @@ // Copyright 2023 - Nym Technologies SA // SPDX-License-Identifier: Apache-2.0 -use crate::storage::log_db_operation_time; use crate::storage::models::{CommitSignature, Validator}; +use nyxd_scraper_shared::storage::helpers::log_db_operation_time; use sqlx::types::time::OffsetDateTime; use sqlx::{Executor, Sqlite}; use tokio::time::Instant; diff --git a/common/nyxd-scraper/src/storage/helpers.rs b/common/nyxd-scraper-sqlite/src/storage/mod.rs similarity index 57% rename from 
common/nyxd-scraper/src/storage/helpers.rs rename to common/nyxd-scraper-sqlite/src/storage/mod.rs index 38dc36ebc38..e16997df982 100644 --- a/common/nyxd-scraper/src/storage/helpers.rs +++ b/common/nyxd-scraper-sqlite/src/storage/mod.rs @@ -1,2 +1,7 @@ // Copyright 2023 - Nym Technologies SA // SPDX-License-Identifier: Apache-2.0 + +pub mod block_storage; +mod manager; +pub mod models; +pub mod transaction; diff --git a/common/nyxd-scraper/src/storage/models.rs b/common/nyxd-scraper-sqlite/src/storage/models.rs similarity index 100% rename from common/nyxd-scraper/src/storage/models.rs rename to common/nyxd-scraper-sqlite/src/storage/models.rs diff --git a/common/nyxd-scraper-sqlite/src/storage/transaction.rs b/common/nyxd-scraper-sqlite/src/storage/transaction.rs new file mode 100644 index 00000000000..0cd57b78c88 --- /dev/null +++ b/common/nyxd-scraper-sqlite/src/storage/transaction.rs @@ -0,0 +1,243 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 + +use crate::error::SqliteScraperError; +use crate::storage::manager::{ + insert_block, insert_message, insert_precommit, insert_transaction, insert_validator, + update_last_processed, +}; +use async_trait::async_trait; +use nyxd_scraper_shared::ParsedTransactionResponse; +use nyxd_scraper_shared::helpers::{ + validator_consensus_address, validator_info, validator_pubkey_to_bech32, +}; +use nyxd_scraper_shared::storage::validators::Response; +use nyxd_scraper_shared::storage::{ + Block, Commit, CommitSig, NyxdScraperStorageError, NyxdScraperTransaction, validators, +}; +use sqlx::{Sqlite, Transaction}; +use std::ops::{Deref, DerefMut}; +use tracing::{debug, trace, warn}; + +pub struct SqliteStorageTransaction(pub(crate) Transaction<'static, Sqlite>); + +impl Deref for SqliteStorageTransaction { + type Target = Transaction<'static, Sqlite>; + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for SqliteStorageTransaction { + fn deref_mut(&mut self) -> &mut 
Self::Target { + &mut self.0 + } +} + +impl SqliteStorageTransaction { + async fn persist_validators( + &mut self, + validators: &validators::Response, + ) -> Result<(), SqliteScraperError> { + debug!("persisting {} validators", validators.total); + for validator in &validators.validators { + let consensus_address = validator_consensus_address(validator.address)?; + let consensus_pubkey = validator_pubkey_to_bech32(validator.pub_key)?; + + insert_validator( + consensus_address.to_string(), + consensus_pubkey.to_string(), + self.0.as_mut(), + ) + .await?; + } + + Ok(()) + } + + async fn persist_block_data( + &mut self, + block: &Block, + total_gas: i64, + ) -> Result<(), SqliteScraperError> { + let proposer_address = + validator_consensus_address(block.header.proposer_address)?.to_string(); + + insert_block( + block.header.height.into(), + block.header.hash().to_string(), + block.data.len() as u32, + total_gas, + proposer_address, + block.header.time.into(), + self.0.as_mut(), + ) + .await?; + Ok(()) + } + + async fn persist_commits( + &mut self, + commits: &Commit, + validators: &validators::Response, + ) -> Result<(), SqliteScraperError> { + debug!("persisting up to {} commits", commits.signatures.len()); + let height: i64 = commits.height.into(); + + for commit_sig in &commits.signatures { + let (validator_id, timestamp, signature) = match commit_sig { + CommitSig::BlockIdFlagAbsent => { + trace!("absent signature"); + continue; + } + CommitSig::BlockIdFlagCommit { + validator_address, + timestamp, + signature, + } => (validator_address, timestamp, signature), + CommitSig::BlockIdFlagNil { + validator_address, + timestamp, + signature, + } => (validator_address, timestamp, signature), + }; + + let validator = validator_info(*validator_id, validators)?; + let validator_address = validator_consensus_address(*validator_id)?; + + if signature.is_none() { + warn!("empty signature for {validator_address} at height {height}"); + continue; + } + + insert_precommit( + 
validator_address.to_string(), + height, + (*timestamp).into(), + validator.power.into(), + validator.proposer_priority.value(), + self.0.as_mut(), + ) + .await?; + } + + Ok(()) + } + + async fn persist_txs( + &mut self, + txs: &[ParsedTransactionResponse], + ) -> Result<(), SqliteScraperError> { + debug!("persisting {} txs", txs.len()); + + for chain_tx in txs { + insert_transaction( + chain_tx.hash.to_string(), + chain_tx.height.into(), + chain_tx.index as i64, + chain_tx.tx_result.code.is_ok(), + chain_tx.tx.body.messages.len() as i64, + chain_tx.tx.body.memo.clone(), + chain_tx.tx_result.gas_wanted, + chain_tx.tx_result.gas_used, + chain_tx.tx_result.log.clone(), + self.0.as_mut(), + ) + .await?; + } + + Ok(()) + } + + async fn persist_messages( + &mut self, + txs: &[ParsedTransactionResponse], + ) -> Result<(), SqliteScraperError> { + debug!("persisting messages"); + + for chain_tx in txs { + for (index, msg) in chain_tx.tx.body.messages.iter().enumerate() { + insert_message( + chain_tx.hash.to_string(), + index as i64, + msg.type_url.clone(), + chain_tx.height.into(), + self.0.as_mut(), + ) + .await? 
+ } + } + + Ok(()) + } + + async fn update_last_processed(&mut self, height: i64) -> Result<(), SqliteScraperError> { + debug!("update_last_processed"); + update_last_processed(height, self.0.as_mut()).await?; + Ok(()) + } +} + +#[async_trait] +impl NyxdScraperTransaction for SqliteStorageTransaction { + async fn commit(self) -> Result<(), NyxdScraperStorageError> { + self.0 + .commit() + .await + .map_err(SqliteScraperError::from) + .map_err(NyxdScraperStorageError::from) + } + + async fn persist_validators( + &mut self, + validators: &Response, + ) -> Result<(), NyxdScraperStorageError> { + self.persist_validators(validators) + .await + .map_err(NyxdScraperStorageError::from) + } + + async fn persist_block_data( + &mut self, + block: &Block, + total_gas: i64, + ) -> Result<(), NyxdScraperStorageError> { + self.persist_block_data(block, total_gas) + .await + .map_err(NyxdScraperStorageError::from) + } + + async fn persist_commits( + &mut self, + commits: &Commit, + validators: &Response, + ) -> Result<(), NyxdScraperStorageError> { + self.persist_commits(commits, validators) + .await + .map_err(NyxdScraperStorageError::from) + } + + async fn persist_txs( + &mut self, + txs: &[ParsedTransactionResponse], + ) -> Result<(), NyxdScraperStorageError> { + self.persist_txs(txs) + .await + .map_err(NyxdScraperStorageError::from) + } + + async fn persist_messages( + &mut self, + txs: &[ParsedTransactionResponse], + ) -> Result<(), NyxdScraperStorageError> { + self.persist_messages(txs) + .await + .map_err(NyxdScraperStorageError::from) + } + + async fn update_last_processed(&mut self, height: i64) -> Result<(), NyxdScraperStorageError> { + self.update_last_processed(height) + .await + .map_err(NyxdScraperStorageError::from) + } +} diff --git a/common/nyxd-scraper/src/helpers.rs b/common/nyxd-scraper/src/helpers.rs deleted file mode 100644 index 44bef81b89c..00000000000 --- a/common/nyxd-scraper/src/helpers.rs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright 2023 - Nym 
Technologies SA -// SPDX-License-Identifier: Apache-2.0 - -use crate::block_processor::types::ParsedTransactionResponse; -use crate::constants::{BECH32_CONESNSUS_PUBKEY_PREFIX, BECH32_CONSENSUS_ADDRESS_PREFIX}; -use crate::error::ScraperError; -use cosmrs::AccountId; -use sha2::{Digest, Sha256}; -use tendermint::{Hash, validator}; -use tendermint::{PublicKey, account}; -use tendermint_rpc::endpoint::validators; - -pub(crate) fn tx_hash>(raw_tx: M) -> Hash { - Hash::Sha256(Sha256::digest(raw_tx).into()) -} - -pub(crate) fn validator_pubkey_to_bech32(pubkey: PublicKey) -> Result { - // TODO: this one seem to attach additional prefix to they pubkeys, is that what we want instead maybe? - // Ok(pubkey.to_bech32(BECH32_CONESNSUS_PUBKEY_PREFIX)) - AccountId::new(BECH32_CONESNSUS_PUBKEY_PREFIX, &pubkey.to_bytes()) - .map_err(|source| ScraperError::MalformedValidatorPubkey { source }) -} - -pub(crate) fn validator_consensus_address(id: account::Id) -> Result { - AccountId::new(BECH32_CONSENSUS_ADDRESS_PREFIX, id.as_ref()) - .map_err(|source| ScraperError::MalformedValidatorAddress { source }) -} - -pub(crate) fn tx_gas_sum(txs: &[ParsedTransactionResponse]) -> i64 { - txs.iter().map(|tx| tx.tx_result.gas_used).sum() -} - -pub(crate) fn validator_info( - id: account::Id, - validators: &validators::Response, -) -> Result<&validator::Info, ScraperError> { - match validators.validators.iter().find(|v| v.address == id) { - Some(info) => Ok(info), - None => { - let addr = validator_consensus_address(id)?; - Err(ScraperError::MissingValidatorInfoCommitted { - address: addr.to_string(), - }) - } - } -} diff --git a/common/nyxd-scraper/src/storage/mod.rs b/common/nyxd-scraper/src/storage/mod.rs deleted file mode 100644 index 2f0a0a8660c..00000000000 --- a/common/nyxd-scraper/src/storage/mod.rs +++ /dev/null @@ -1,400 +0,0 @@ -// Copyright 2023 - Nym Technologies SA -// SPDX-License-Identifier: Apache-2.0 - -use crate::{ - block_processor::types::{FullBlockInformation, 
ParsedTransactionResponse}, - error::ScraperError, - storage::{ - manager::{ - StorageManager, insert_block, insert_message, insert_precommit, insert_transaction, - insert_validator, prune_blocks, prune_messages, prune_pre_commits, prune_transactions, - update_last_processed, update_last_pruned, - }, - models::{CommitSignature, Validator}, - }, -}; -use sqlx::{ - ConnectOptions, Sqlite, Transaction, - sqlite::{SqliteAutoVacuum, SqliteSynchronous}, - types::time::OffsetDateTime, -}; -use std::{fmt::Debug, path::Path}; -use tendermint::{ - Block, - block::{Commit, CommitSig}, -}; -use tendermint_rpc::endpoint::validators; -use tokio::time::Instant; -use tracing::{debug, error, info, instrument, trace, warn}; - -mod helpers; -mod manager; -pub mod models; - -pub type StorageTransaction = Transaction<'static, Sqlite>; - -#[derive(Clone)] -pub struct ScraperStorage { - pub(crate) manager: StorageManager, -} - -pub(crate) fn log_db_operation_time(op_name: &str, start_time: Instant) { - let elapsed = start_time.elapsed(); - let formatted = humantime::format_duration(elapsed); - - match elapsed.as_millis() { - v if v > 10000 => error!("{op_name} took {formatted} to execute"), - v if v > 1000 => warn!("{op_name} took {formatted} to execute"), - v if v > 100 => info!("{op_name} took {formatted} to execute"), - v if v > 10 => debug!("{op_name} took {formatted} to execute"), - _ => trace!("{op_name} took {formatted} to execute"), - } -} - -impl ScraperStorage { - #[instrument] - pub async fn init + Debug>(database_path: P) -> Result { - let database_path = database_path.as_ref(); - debug!( - "initialising scraper database path to '{}'", - database_path.display() - ); - - let opts = sqlx::sqlite::SqliteConnectOptions::new() - .journal_mode(sqlx::sqlite::SqliteJournalMode::Wal) - .synchronous(SqliteSynchronous::Normal) - .auto_vacuum(SqliteAutoVacuum::Incremental) - .filename(database_path) - .create_if_missing(true) - .disable_statement_logging(); - - // TODO: do we want 
auto_vacuum ? - - let connection_pool = match sqlx::SqlitePool::connect_with(opts).await { - Ok(db) => db, - Err(err) => { - error!("Failed to connect to SQLx database: {err}"); - return Err(err.into()); - } - }; - - if let Err(err) = sqlx::migrate!("./sql_migrations") - .run(&connection_pool) - .await - { - error!("Failed to initialize SQLx database: {err}"); - return Err(err.into()); - } - - info!("Database migration finished!"); - - let manager = StorageManager { connection_pool }; - manager.set_initial_metadata().await?; - - let storage = ScraperStorage { manager }; - - Ok(storage) - } - - #[instrument(skip(self))] - pub async fn prune_storage( - &self, - oldest_to_keep: u32, - current_height: u32, - ) -> Result<(), ScraperError> { - let start = Instant::now(); - - let mut tx = self.begin_processing_tx().await?; - - prune_messages(oldest_to_keep.into(), &mut *tx).await?; - prune_transactions(oldest_to_keep.into(), &mut *tx).await?; - prune_pre_commits(oldest_to_keep.into(), &mut *tx).await?; - prune_blocks(oldest_to_keep.into(), &mut *tx).await?; - update_last_pruned(current_height.into(), &mut *tx).await?; - - let commit_start = Instant::now(); - tx.commit() - .await - .map_err(|source| ScraperError::StorageTxCommitFailure { source })?; - log_db_operation_time("committing pruning tx", commit_start); - - log_db_operation_time("pruning storage", start); - Ok(()) - } - - #[instrument(skip_all)] - pub async fn begin_processing_tx(&self) -> Result { - debug!("starting storage tx"); - self.manager - .connection_pool - .begin() - .await - .map_err(|source| ScraperError::StorageTxBeginFailure { source }) - } - - pub async fn lowest_block_height(&self) -> Result, ScraperError> { - Ok(self.manager.get_lowest_block().await?) - } - - pub async fn get_first_block_height_after( - &self, - time: OffsetDateTime, - ) -> Result, ScraperError> { - Ok(self.manager.get_first_block_height_after(time).await?) 
- } - - pub async fn get_last_block_height_before( - &self, - time: OffsetDateTime, - ) -> Result, ScraperError> { - Ok(self.manager.get_last_block_height_before(time).await?) - } - - pub async fn get_blocks_between( - &self, - start_time: OffsetDateTime, - end_time: OffsetDateTime, - ) -> Result { - let Some(block_start) = self.get_first_block_height_after(start_time).await? else { - return Ok(0); - }; - let Some(block_end) = self.get_last_block_height_before(end_time).await? else { - return Ok(0); - }; - - Ok(block_end - block_start) - } - - pub async fn get_signed_between( - &self, - consensus_address: &str, - start_height: i64, - end_height: i64, - ) -> Result { - Ok(self - .manager - .get_signed_between(consensus_address, start_height, end_height) - .await?) - } - - pub async fn get_signed_between_times( - &self, - consensus_address: &str, - start_time: OffsetDateTime, - end_time: OffsetDateTime, - ) -> Result { - let Some(block_start) = self.get_first_block_height_after(start_time).await? else { - return Ok(0); - }; - let Some(block_end) = self.get_last_block_height_before(end_time).await? else { - return Ok(0); - }; - - self.get_signed_between(consensus_address, block_start, block_end) - .await - } - - pub async fn get_precommit( - &self, - consensus_address: &str, - height: i64, - ) -> Result, ScraperError> { - Ok(self - .manager - .get_precommit(consensus_address, height) - .await?) - } - - pub async fn get_block_signers(&self, height: i64) -> Result, ScraperError> { - Ok(self.manager.get_block_validators(height).await?) - } - - pub async fn get_all_known_validators(&self) -> Result, ScraperError> { - Ok(self.manager.get_validators().await?) - } - - pub async fn get_last_processed_height(&self) -> Result { - Ok(self.manager.get_last_processed_height().await?) - } - - pub async fn get_pruned_height(&self) -> Result { - Ok(self.manager.get_pruned_height().await?) 
- } -} - -pub async fn persist_block( - block: &FullBlockInformation, - tx: &mut StorageTransaction, - store_precommits: bool, -) -> Result<(), ScraperError> { - let total_gas = crate::helpers::tx_gas_sum(&block.transactions); - - // SANITY CHECK: make sure the block proposer is present in the validator set - block.ensure_proposer()?; - - // persist validators - persist_validators(&block.validators, tx).await?; - - // persist block data - persist_block_data(&block.block, total_gas, tx).await?; - - if store_precommits { - if let Some(commit) = &block.block.last_commit { - persist_commits(commit, &block.validators, tx).await?; - } else { - warn!("no commits for block {}", block.block.header.height) - } - } - - // persist txs - persist_txs(&block.transactions, tx).await?; - - // persist messages (inside the transactions) - persist_messages(&block.transactions, tx).await?; - - update_last_processed(block.block.header.height.into(), tx.as_mut()).await?; - - Ok(()) -} - -async fn persist_validators( - validators: &validators::Response, - tx: &mut StorageTransaction, -) -> Result<(), ScraperError> { - debug!("persisting {} validators", validators.total); - for validator in &validators.validators { - let consensus_address = crate::helpers::validator_consensus_address(validator.address)?; - let consensus_pubkey = crate::helpers::validator_pubkey_to_bech32(validator.pub_key)?; - - insert_validator( - consensus_address.to_string(), - consensus_pubkey.to_string(), - tx.as_mut(), - ) - .await?; - } - - Ok(()) -} - -async fn persist_block_data( - block: &Block, - total_gas: i64, - tx: &mut StorageTransaction, -) -> Result<(), ScraperError> { - let proposer_address = - crate::helpers::validator_consensus_address(block.header.proposer_address)?.to_string(); - - insert_block( - block.header.height.into(), - block.header.hash().to_string(), - block.data.len() as u32, - total_gas, - proposer_address, - block.header.time.into(), - tx.as_mut(), - ) - .await?; - Ok(()) -} - -async fn 
persist_commits( - commits: &Commit, - validators: &validators::Response, - tx: &mut StorageTransaction, -) -> Result<(), ScraperError> { - debug!("persisting up to {} commits", commits.signatures.len()); - let height: i64 = commits.height.into(); - - for commit_sig in &commits.signatures { - let (validator_id, timestamp, signature) = match commit_sig { - CommitSig::BlockIdFlagAbsent => { - trace!("absent signature"); - continue; - } - CommitSig::BlockIdFlagCommit { - validator_address, - timestamp, - signature, - } => (validator_address, timestamp, signature), - CommitSig::BlockIdFlagNil { - validator_address, - timestamp, - signature, - } => (validator_address, timestamp, signature), - }; - - let validator = match crate::helpers::validator_info(*validator_id, validators) { - Ok(validator_info) => validator_info, - Err(err) => { - error!("{err}"); - continue; - } - }; - let validator_address = crate::helpers::validator_consensus_address(*validator_id)?; - - if signature.is_none() { - warn!("empty signature for {validator_address} at height {height}"); - continue; - } - - insert_precommit( - validator_address.to_string(), - height, - (*timestamp).into(), - validator.power.into(), - validator.proposer_priority.value(), - tx.as_mut(), - ) - .await?; - } - - Ok(()) -} - -async fn persist_txs( - txs: &[ParsedTransactionResponse], - tx: &mut StorageTransaction, -) -> Result<(), ScraperError> { - debug!("persisting {} txs", txs.len()); - - for chain_tx in txs { - insert_transaction( - chain_tx.hash.to_string(), - chain_tx.height.into(), - chain_tx.index as i64, - chain_tx.tx_result.code.is_ok(), - chain_tx.tx.body.messages.len() as i64, - chain_tx.tx.body.memo.clone(), - chain_tx.tx_result.gas_wanted, - chain_tx.tx_result.gas_used, - chain_tx.tx_result.log.clone(), - tx.as_mut(), - ) - .await?; - } - - Ok(()) -} - -async fn persist_messages( - txs: &[ParsedTransactionResponse], - tx: &mut StorageTransaction, -) -> Result<(), ScraperError> { - debug!("persisting 
messages"); - - for chain_tx in txs { - for (index, msg) in chain_tx.tx.body.messages.iter().enumerate() { - insert_message( - chain_tx.hash.to_string(), - index as i64, - msg.type_url.clone(), - chain_tx.height.into(), - tx.as_mut(), - ) - .await? - } - } - - Ok(()) -} diff --git a/nym-data-observatory/.sqlx/query-08f4e54ac24fccd54f4208797b3749e457f8cd4ba3d7d906a7ab3bf5b4e7dc9c.json b/nym-data-observatory/.sqlx/query-08f4e54ac24fccd54f4208797b3749e457f8cd4ba3d7d906a7ab3bf5b4e7dc9c.json new file mode 100644 index 00000000000..cc5863fd0ea --- /dev/null +++ b/nym-data-observatory/.sqlx/query-08f4e54ac24fccd54f4208797b3749e457f8cd4ba3d7d906a7ab3bf5b4e7dc9c.json @@ -0,0 +1,27 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO transaction\n (hash, height, index, success, messages, memo, signatures, signer_infos, fee, gas_wanted, gas_used, raw_log, logs, events)\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)\n ON CONFLICT (hash) DO UPDATE\n SET height = excluded.height,\n index = excluded.index,\n success = excluded.success,\n messages = excluded.messages,\n memo = excluded.memo,\n signatures = excluded.signatures,\n signer_infos = excluded.signer_infos,\n fee = excluded.fee,\n gas_wanted = excluded.gas_wanted,\n gas_used = excluded.gas_used,\n raw_log = excluded.raw_log,\n logs = excluded.logs,\n events = excluded.events\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Int8", + "Int4", + "Bool", + "Jsonb", + "Text", + "TextArray", + "Jsonb", + "Jsonb", + "Int8", + "Int8", + "Text", + "Jsonb", + "Jsonb" + ] + }, + "nullable": [] + }, + "hash": "08f4e54ac24fccd54f4208797b3749e457f8cd4ba3d7d906a7ab3bf5b4e7dc9c" +} diff --git a/nym-data-observatory/.sqlx/query-0d3709efacf763b06bf14803bb803b5ee5b27879b0026bb0480b3f2722318a75.json b/nym-data-observatory/.sqlx/query-0d3709efacf763b06bf14803bb803b5ee5b27879b0026bb0480b3f2722318a75.json new file mode 100644 index 00000000000..36ba8bb96b3 --- /dev/null +++ 
b/nym-data-observatory/.sqlx/query-0d3709efacf763b06bf14803bb803b5ee5b27879b0026bb0480b3f2722318a75.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO validator (consensus_address, consensus_pubkey)\n VALUES ($1, $2)\n ON CONFLICT DO NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Text" + ] + }, + "nullable": [] + }, + "hash": "0d3709efacf763b06bf14803bb803b5ee5b27879b0026bb0480b3f2722318a75" +} diff --git a/nym-data-observatory/.sqlx/query-0f4f26923d4fdf4541deb1fa5e7e3c542713e7443b6f0cd4ddb0c00f7a644060.json b/nym-data-observatory/.sqlx/query-0f4f26923d4fdf4541deb1fa5e7e3c542713e7443b6f0cd4ddb0c00f7a644060.json new file mode 100644 index 00000000000..a88f36b51ac --- /dev/null +++ b/nym-data-observatory/.sqlx/query-0f4f26923d4fdf4541deb1fa5e7e3c542713e7443b6f0cd4ddb0c00f7a644060.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO startup_info(start_ts, end_ts, error_message)\n VALUES ($1, $2, $3)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Timestamptz", + "Timestamptz", + "Text" + ] + }, + "nullable": [] + }, + "hash": "0f4f26923d4fdf4541deb1fa5e7e3c542713e7443b6f0cd4ddb0c00f7a644060" +} diff --git a/nym-data-observatory/.sqlx/query-140df23f816ff5d7501128682ce378d582b7da78c45bc0de934f92c1abe14bda.json b/nym-data-observatory/.sqlx/query-140df23f816ff5d7501128682ce378d582b7da78c45bc0de934f92c1abe14bda.json new file mode 100644 index 00000000000..b190fd0fb17 --- /dev/null +++ b/nym-data-observatory/.sqlx/query-140df23f816ff5d7501128682ce378d582b7da78c45bc0de934f92c1abe14bda.json @@ -0,0 +1,19 @@ +{ + "db_name": "PostgreSQL", + "query": "INSERT INTO price_history\n (timestamp, chf, usd, eur, gbp, btc)\n VALUES\n ($1, $2, $3, $4, $5, $6)\n ON CONFLICT(timestamp) DO UPDATE SET\n chf=excluded.chf,\n usd=excluded.usd,\n eur=excluded.eur,\n gbp=excluded.gbp,\n btc=excluded.btc;", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + 
"Float8", + "Float8", + "Float8", + "Float8", + "Float8" + ] + }, + "nullable": [] + }, + "hash": "140df23f816ff5d7501128682ce378d582b7da78c45bc0de934f92c1abe14bda" +} diff --git a/nym-data-observatory/.sqlx/query-1c2fb0e9ffceca21ef8dbea19b116422b1f723d0a316314b50c43c8b29f8891d.json b/nym-data-observatory/.sqlx/query-1c2fb0e9ffceca21ef8dbea19b116422b1f723d0a316314b50c43c8b29f8891d.json new file mode 100644 index 00000000000..2e10a89220b --- /dev/null +++ b/nym-data-observatory/.sqlx/query-1c2fb0e9ffceca21ef8dbea19b116422b1f723d0a316314b50c43c8b29f8891d.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM pre_commit WHERE height < $1", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "1c2fb0e9ffceca21ef8dbea19b116422b1f723d0a316314b50c43c8b29f8891d" +} diff --git a/nym-data-observatory/.sqlx/query-2561fb016951ea4cd29e43fb9a4a93e944b0d44ed1f7c1036f306e34372da11c.json b/nym-data-observatory/.sqlx/query-2561fb016951ea4cd29e43fb9a4a93e944b0d44ed1f7c1036f306e34372da11c.json new file mode 100644 index 00000000000..0d1b70f8cce --- /dev/null +++ b/nym-data-observatory/.sqlx/query-2561fb016951ea4cd29e43fb9a4a93e944b0d44ed1f7c1036f306e34372da11c.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT height\n FROM block\n ORDER BY height ASC\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "height", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false + ] + }, + "hash": "2561fb016951ea4cd29e43fb9a4a93e944b0d44ed1f7c1036f306e34372da11c" +} diff --git a/nym-data-observatory/.sqlx/query-2679cdf11fa66c7920678cde860c57402119ec7c3aae731b0da831327301466f.json b/nym-data-observatory/.sqlx/query-2679cdf11fa66c7920678cde860c57402119ec7c3aae731b0da831327301466f.json new file mode 100644 index 00000000000..b97ea34d16a --- /dev/null +++ 
b/nym-data-observatory/.sqlx/query-2679cdf11fa66c7920678cde860c57402119ec7c3aae731b0da831327301466f.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE metadata SET last_processed_height = GREATEST(last_processed_height, $1)", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "2679cdf11fa66c7920678cde860c57402119ec7c3aae731b0da831327301466f" +} diff --git a/nym-data-observatory/.sqlx/query-36ba5941aca6e7b604a10b8b0aba70635028f392fe794d6131827b083e1755e1.json b/nym-data-observatory/.sqlx/query-36ba5941aca6e7b604a10b8b0aba70635028f392fe794d6131827b083e1755e1.json new file mode 100644 index 00000000000..dede45475e4 --- /dev/null +++ b/nym-data-observatory/.sqlx/query-36ba5941aca6e7b604a10b8b0aba70635028f392fe794d6131827b083e1755e1.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE pruning SET last_pruned_height = $1", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "36ba5941aca6e7b604a10b8b0aba70635028f392fe794d6131827b083e1755e1" +} diff --git a/nym-data-observatory/.sqlx/query-3bdf81a9db6075f6f77224c30553f419a849d4ec45af40b052a4cbf09b44f3ec.json b/nym-data-observatory/.sqlx/query-3bdf81a9db6075f6f77224c30553f419a849d4ec45af40b052a4cbf09b44f3ec.json new file mode 100644 index 00000000000..e638bce9220 --- /dev/null +++ b/nym-data-observatory/.sqlx/query-3bdf81a9db6075f6f77224c30553f419a849d4ec45af40b052a4cbf09b44f3ec.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT last_pruned_height FROM pruning\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "last_pruned_height", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false + ] + }, + "hash": "3bdf81a9db6075f6f77224c30553f419a849d4ec45af40b052a4cbf09b44f3ec" +} diff --git a/nym-data-observatory/.sqlx/query-4b16ddeda8e6e3571836b09e63ad1e764a35477f06dcdc2fcb1420334f07e38d.json 
b/nym-data-observatory/.sqlx/query-4b16ddeda8e6e3571836b09e63ad1e764a35477f06dcdc2fcb1420334f07e38d.json new file mode 100644 index 00000000000..e3ec034c958 --- /dev/null +++ b/nym-data-observatory/.sqlx/query-4b16ddeda8e6e3571836b09e63ad1e764a35477f06dcdc2fcb1420334f07e38d.json @@ -0,0 +1,70 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO wasm_execute_contract (\n sender,\n contract_address,\n message_type,\n raw_contract_message,\n funds,\n executed_at,\n height,\n hash,\n message_index,\n memo,\n fee\n ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Text", + "Text", + "Jsonb", + { + "Custom": { + "name": "coin[]", + "kind": { + "Array": { + "Custom": { + "name": "coin", + "kind": { + "Composite": [ + [ + "denom", + "Text" + ], + [ + "amount", + "Text" + ] + ] + } + } + } + } + } + }, + "Timestamp", + "Int8", + "Text", + "Int8", + "Text", + { + "Custom": { + "name": "coin[]", + "kind": { + "Array": { + "Custom": { + "name": "coin", + "kind": { + "Composite": [ + [ + "denom", + "Text" + ], + [ + "amount", + "Text" + ] + ] + } + } + } + } + } + } + ] + }, + "nullable": [] + }, + "hash": "4b16ddeda8e6e3571836b09e63ad1e764a35477f06dcdc2fcb1420334f07e38d" +} diff --git a/nym-data-observatory/.sqlx/query-52c27143720ddfdfd0f5644b60f5b67fd9281ce1de0653efa53b9d9b93cf335d.json b/nym-data-observatory/.sqlx/query-52c27143720ddfdfd0f5644b60f5b67fd9281ce1de0653efa53b9d9b93cf335d.json new file mode 100644 index 00000000000..58af4f89c42 --- /dev/null +++ b/nym-data-observatory/.sqlx/query-52c27143720ddfdfd0f5644b60f5b67fd9281ce1de0653efa53b9d9b93cf335d.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM message WHERE height < $1", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "52c27143720ddfdfd0f5644b60f5b67fd9281ce1de0653efa53b9d9b93cf335d" +} diff --git 
a/nym-data-observatory/.sqlx/query-62e14613f5ffe692346a79086857a22f0444fbc679db1c06b651fb8b5538b278.json b/nym-data-observatory/.sqlx/query-62e14613f5ffe692346a79086857a22f0444fbc679db1c06b651fb8b5538b278.json new file mode 100644 index 00000000000..a7c102469df --- /dev/null +++ b/nym-data-observatory/.sqlx/query-62e14613f5ffe692346a79086857a22f0444fbc679db1c06b651fb8b5538b278.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO pre_commit (validator_address, height, timestamp, voting_power, proposer_priority)\n VALUES ($1, $2, $3, $4, $5)\n ON CONFLICT (validator_address, timestamp) DO NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Int8", + "Timestamp", + "Int8", + "Int8" + ] + }, + "nullable": [] + }, + "hash": "62e14613f5ffe692346a79086857a22f0444fbc679db1c06b651fb8b5538b278" +} diff --git a/nym-data-observatory/.sqlx/query-64a484fd46d8ec46797f944a4cced56b6e270ce186f0e49528865d1924343b78.json b/nym-data-observatory/.sqlx/query-64a484fd46d8ec46797f944a4cced56b6e270ce186f0e49528865d1924343b78.json new file mode 100644 index 00000000000..08983f2af9f --- /dev/null +++ b/nym-data-observatory/.sqlx/query-64a484fd46d8ec46797f944a4cced56b6e270ce186f0e49528865d1924343b78.json @@ -0,0 +1,19 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO block (height, hash, num_txs, total_gas, proposer_address, timestamp)\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT DO NOTHING\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Text", + "Int4", + "Int8", + "Text", + "Timestamp" + ] + }, + "nullable": [] + }, + "hash": "64a484fd46d8ec46797f944a4cced56b6e270ce186f0e49528865d1924343b78" +} diff --git a/nym-data-observatory/.sqlx/query-7e82426f5dbcadf1631ba1a806e19cc462d04222fb20ad76de2a40f3f4f8fe15.json b/nym-data-observatory/.sqlx/query-7e82426f5dbcadf1631ba1a806e19cc462d04222fb20ad76de2a40f3f4f8fe15.json new file mode 100644 index 00000000000..3a60c573ed8 --- /dev/null +++ 
b/nym-data-observatory/.sqlx/query-7e82426f5dbcadf1631ba1a806e19cc462d04222fb20ad76de2a40f3f4f8fe15.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT height\n FROM block\n WHERE timestamp < $1\n ORDER BY timestamp DESC\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "height", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Timestamp" + ] + }, + "nullable": [ + false + ] + }, + "hash": "7e82426f5dbcadf1631ba1a806e19cc462d04222fb20ad76de2a40f3f4f8fe15" +} diff --git a/nym-data-observatory/.sqlx/query-9455331f9be5a3be28e2bd399a36b2e2d6a9ad4b225c4c883aafc4e9f0428008.json b/nym-data-observatory/.sqlx/query-9455331f9be5a3be28e2bd399a36b2e2d6a9ad4b225c4c883aafc4e9f0428008.json new file mode 100644 index 00000000000..309aa81d9c7 --- /dev/null +++ b/nym-data-observatory/.sqlx/query-9455331f9be5a3be28e2bd399a36b2e2d6a9ad4b225c4c883aafc4e9f0428008.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT height\n FROM block\n WHERE timestamp > $1\n ORDER BY timestamp\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "height", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Timestamp" + ] + }, + "nullable": [ + false + ] + }, + "hash": "9455331f9be5a3be28e2bd399a36b2e2d6a9ad4b225c4c883aafc4e9f0428008" +} diff --git a/nym-data-observatory/.sqlx/query-a57b74a049b33aee36b72741056d60df8ad35a747808d5d1d3d525a76bbf0618.json b/nym-data-observatory/.sqlx/query-a57b74a049b33aee36b72741056d60df8ad35a747808d5d1d3d525a76bbf0618.json new file mode 100644 index 00000000000..0a8728ccb80 --- /dev/null +++ b/nym-data-observatory/.sqlx/query-a57b74a049b33aee36b72741056d60df8ad35a747808d5d1d3d525a76bbf0618.json @@ -0,0 +1,52 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT timestamp, chf, usd, eur, gbp, btc FROM price_history WHERE timestamp >= $1;", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "timestamp", + "type_info": "Int8" + }, + { + "ordinal": 1, 
+ "name": "chf", + "type_info": "Float8" + }, + { + "ordinal": 2, + "name": "usd", + "type_info": "Float8" + }, + { + "ordinal": 3, + "name": "eur", + "type_info": "Float8" + }, + { + "ordinal": 4, + "name": "gbp", + "type_info": "Float8" + }, + { + "ordinal": 5, + "name": "btc", + "type_info": "Float8" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false + ] + }, + "hash": "a57b74a049b33aee36b72741056d60df8ad35a747808d5d1d3d525a76bbf0618" +} diff --git a/nym-data-observatory/.sqlx/query-bc7795e58ce71893c3f32a19db8e77b7bc0a1af315ffd42c3e68156d6e4ace70.json b/nym-data-observatory/.sqlx/query-bc7795e58ce71893c3f32a19db8e77b7bc0a1af315ffd42c3e68156d6e4ace70.json new file mode 100644 index 00000000000..caca484b94d --- /dev/null +++ b/nym-data-observatory/.sqlx/query-bc7795e58ce71893c3f32a19db8e77b7bc0a1af315ffd42c3e68156d6e4ace70.json @@ -0,0 +1,24 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT COUNT(*) as count FROM pre_commit\n WHERE\n validator_address = $1\n AND height >= $2\n AND height <= $3\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Text", + "Int8", + "Int8" + ] + }, + "nullable": [ + null + ] + }, + "hash": "bc7795e58ce71893c3f32a19db8e77b7bc0a1af315ffd42c3e68156d6e4ace70" +} diff --git a/nym-data-observatory/.sqlx/query-be43d4873911deca784b7be0531ab7bd82ecd68041aa932a56c8ce09623251e4.json b/nym-data-observatory/.sqlx/query-be43d4873911deca784b7be0531ab7bd82ecd68041aa932a56c8ce09623251e4.json new file mode 100644 index 00000000000..f1df706371b --- /dev/null +++ b/nym-data-observatory/.sqlx/query-be43d4873911deca784b7be0531ab7bd82ecd68041aa932a56c8ce09623251e4.json @@ -0,0 +1,28 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT * FROM validator\n WHERE EXISTS (\n SELECT 1 FROM pre_commit\n WHERE height = $1\n AND pre_commit.validator_address = validator.consensus_address\n )\n ", + 
"describe": { + "columns": [ + { + "ordinal": 0, + "name": "consensus_address", + "type_info": "Text" + }, + { + "ordinal": 1, + "name": "consensus_pubkey", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false + ] + }, + "hash": "be43d4873911deca784b7be0531ab7bd82ecd68041aa932a56c8ce09623251e4" +} diff --git a/nym-data-observatory/.sqlx/query-c88d07fecc3f33deaa6e93db1469ce71582635df47f52dcf3fd1df4e7be6b96d.json b/nym-data-observatory/.sqlx/query-c88d07fecc3f33deaa6e93db1469ce71582635df47f52dcf3fd1df4e7be6b96d.json new file mode 100644 index 00000000000..9bf3eaf97be --- /dev/null +++ b/nym-data-observatory/.sqlx/query-c88d07fecc3f33deaa6e93db1469ce71582635df47f52dcf3fd1df4e7be6b96d.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT last_processed_height FROM metadata\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "last_processed_height", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false + ] + }, + "hash": "c88d07fecc3f33deaa6e93db1469ce71582635df47f52dcf3fd1df4e7be6b96d" +} diff --git a/nym-data-observatory/.sqlx/query-cc0ae74082d7d8a89f2d3364676890bbf6150ab394c72783114340d4def5f9ef.json b/nym-data-observatory/.sqlx/query-cc0ae74082d7d8a89f2d3364676890bbf6150ab394c72783114340d4def5f9ef.json new file mode 100644 index 00000000000..5c0da1448a3 --- /dev/null +++ b/nym-data-observatory/.sqlx/query-cc0ae74082d7d8a89f2d3364676890bbf6150ab394c72783114340d4def5f9ef.json @@ -0,0 +1,19 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO message(transaction_hash, index, type, value, involved_accounts_addresses, height)\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (transaction_hash, index) DO UPDATE\n SET height = excluded.height,\n type = excluded.type,\n value = excluded.value,\n involved_accounts_addresses = excluded.involved_accounts_addresses\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + 
"Int8", + "Text", + "Jsonb", + "TextArray", + "Int8" + ] + }, + "nullable": [] + }, + "hash": "cc0ae74082d7d8a89f2d3364676890bbf6150ab394c72783114340d4def5f9ef" +} diff --git a/nym-data-observatory/.sqlx/query-cdba9b267f143c8a8c6c3d6ed713cf00236490b86779559d84740ec18bcfa3a9.json b/nym-data-observatory/.sqlx/query-cdba9b267f143c8a8c6c3d6ed713cf00236490b86779559d84740ec18bcfa3a9.json new file mode 100644 index 00000000000..2ae11a8fbb4 --- /dev/null +++ b/nym-data-observatory/.sqlx/query-cdba9b267f143c8a8c6c3d6ed713cf00236490b86779559d84740ec18bcfa3a9.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM block WHERE height < $1", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "cdba9b267f143c8a8c6c3d6ed713cf00236490b86779559d84740ec18bcfa3a9" +} diff --git a/nym-data-observatory/.sqlx/query-d89558c37c51e8e6b1e6a9d5a2b13d0598fd856aa019a0cbbae12d7cafb4672f.json b/nym-data-observatory/.sqlx/query-d89558c37c51e8e6b1e6a9d5a2b13d0598fd856aa019a0cbbae12d7cafb4672f.json new file mode 100644 index 00000000000..1970629169b --- /dev/null +++ b/nym-data-observatory/.sqlx/query-d89558c37c51e8e6b1e6a9d5a2b13d0598fd856aa019a0cbbae12d7cafb4672f.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM transaction WHERE height < $1", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "d89558c37c51e8e6b1e6a9d5a2b13d0598fd856aa019a0cbbae12d7cafb4672f" +} diff --git a/nym-data-observatory/.sqlx/query-f81a3275a1c7cbeefb3fdf7904c677d46a284e0446b96a2fc5bd77630c62d4b8.json b/nym-data-observatory/.sqlx/query-f81a3275a1c7cbeefb3fdf7904c677d46a284e0446b96a2fc5bd77630c62d4b8.json new file mode 100644 index 00000000000..e58a1a74516 --- /dev/null +++ b/nym-data-observatory/.sqlx/query-f81a3275a1c7cbeefb3fdf7904c677d46a284e0446b96a2fc5bd77630c62d4b8.json @@ -0,0 +1,50 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT timestamp, chf, 
usd, eur, gbp, btc FROM price_history ORDER BY timestamp DESC LIMIT 1;", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "timestamp", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "chf", + "type_info": "Float8" + }, + { + "ordinal": 2, + "name": "usd", + "type_info": "Float8" + }, + { + "ordinal": 3, + "name": "eur", + "type_info": "Float8" + }, + { + "ordinal": 4, + "name": "gbp", + "type_info": "Float8" + }, + { + "ordinal": 5, + "name": "btc", + "type_info": "Float8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + false, + false, + false, + false, + false + ] + }, + "hash": "f81a3275a1c7cbeefb3fdf7904c677d46a284e0446b96a2fc5bd77630c62d4b8" +} diff --git a/nym-data-observatory/Cargo.toml b/nym-data-observatory/Cargo.toml new file mode 100644 index 00000000000..d23b40f20de --- /dev/null +++ b/nym-data-observatory/Cargo.toml @@ -0,0 +1,52 @@ +# Copyright 2024 - Nym Technologies SA +# SPDX-License-Identifier: GPL-3.0-only + +[package] +name = "nym-data-observatory" +version = "1.0.0" +authors.workspace = true +repository.workspace = true +homepage.workspace = true +documentation.workspace = true +edition.workspace = true +license.workspace = true +rust-version.workspace = true + +[dependencies] +anyhow = { workspace = true } +async-trait.workspace = true +axum = { workspace = true, features = ["tokio"] } +chrono = { workspace = true } +clap = { workspace = true, features = ["cargo", "derive", "env"] } +cosmrs = { workspace = true } +nym-config = { path = "../common/config" } +nym-bin-common = { path = "../common/bin-common", features = ["output_format"] } +nym-network-defaults = { path = "../common/network-defaults" } +nym-task = { path = "../common/task" } +nym-validator-client = { path = "../common/client-libs/validator-client" } +nyxd-scraper-psql = { path = "../common/nyxd-scraper-psql" } +nyxd-scraper-shared = { path = "../common/nyxd-scraper-shared" } +reqwest = { workspace = true, features = ["rustls-tls"] } 
+schemars = { workspace = true } +serde = { workspace = true, features = ["derive"] } +serde_json = { workspace = true } +sqlx = { workspace = true, features = ["runtime-tokio-rustls", "postgres", "time"] } +thiserror = { workspace = true } +time = { workspace = true } +tokio = { workspace = true, features = ["process", "rt-multi-thread"] } +tokio-util = { workspace = true } +tracing = { workspace = true } +tracing-subscriber = { workspace = true, features = ["env-filter"] } +tower-http = { workspace = true, features = ["cors", "trace"] } +url = { workspace = true } +utoipa = { workspace = true, features = ["axum_extras", "time"] } +utoipa-swagger-ui = { workspace = true, features = ["axum"] } +utoipauto = { workspace = true } + + +[build-dependencies] +anyhow = { workspace = true } +blake3 = { workspace = true } +glob = { workspace = true } +tokio = { workspace = true, features = ["macros", "rt-multi-thread"] } +sqlx = { workspace = true, features = ["runtime-tokio-rustls", "postgres"] } \ No newline at end of file diff --git a/nym-data-observatory/Dockerfile b/nym-data-observatory/Dockerfile new file mode 100644 index 00000000000..780564bec18 --- /dev/null +++ b/nym-data-observatory/Dockerfile @@ -0,0 +1,32 @@ +# this will only work with VPN, otherwise remove the harbor part +FROM harbor.nymte.ch/dockerhub/rust:latest AS builder + +COPY ./ /usr/src/nym +WORKDIR /usr/src/nym/nym-data-observatory + +RUN cargo build --release + +#------------------------------------------------------------------- +# The following environment variables are required at runtime: +# +# NYM_DATA_OBSERVATORY_DB_URL=postgres://nym_data_observatory:data-data-data@localhost/nym_data_observatory_data +# +# And optionally: +# +# NYM_DATA_OBSERVATORY_WEBHOOK_URL="https://webhook.site" +# NYM_DATA_OBSERVATORY_WEBHOOK_AUTH=1234 +# NYX_CHAIN_WATCHER_CONFIG_ENV_FILE_ARG = /mnt/sandbox.env for sandbox environment +# +# see 
https://github.com/nymtech/nym/blob/develop/nym-data-observatory/src/cli/commands/run/args.rs for details +# and https://github.com/nymtech/nym/blob/develop/nym-data-observatory/src/env.rs for env vars +#------------------------------------------------------------------- + +FROM harbor.nymte.ch/dockerhub/ubuntu:24.04 + +RUN apt update && apt install -yy curl ca-certificates + +WORKDIR /nym + +COPY --from=builder /usr/src/nym/target/release/nym-data-observatory ./ +ENTRYPOINT [ "/nym/nym-data-observatory", "run" ] + diff --git a/nym-data-observatory/Makefile b/nym-data-observatory/Makefile new file mode 100644 index 00000000000..017263df031 --- /dev/null +++ b/nym-data-observatory/Makefile @@ -0,0 +1,107 @@ +# Makefile for nyx_chain_scraper database management + +# --- Configuration --- +TEST_DATABASE_URL := postgres://testuser:testpass@localhost:5433/nym_data_observatory_test + +# Docker compose service names +DB_SERVICE_NAME := postgres-test +DB_CONTAINER_NAME := nym_data_observatory_test + +# Default target +.PHONY: default +default: help + +# --- Main Targets --- +.PHONY: prepare-pg +prepare-pg: test-db-up test-db-wait test-db-migrate test-db-prepare test-db-down ## Setup PostgreSQL and prepare SQLx offline cache + +.PHONY: test-db +test-db: test-db-up test-db-wait test-db-migrate test-db-run test-db-down ## Run tests with PostgreSQL database + +.PHONY: dev-db +dev-db: test-db-up test-db-wait test-db-migrate ## Start PostgreSQL for development (keeps running) + @echo "PostgreSQL is running on port 5433" + @echo "Connection string: $(TEST_DATABASE_URL)" + +.PHONY: dev-db-restart +dev-db-restart: clean-db dev-db + +# --- Docker Compose Targets --- +.PHONY: test-db-up +test-db-up: ## Start the PostgreSQL test database in the background + @echo "Starting PostgreSQL test database..." + docker compose up -d $(DB_SERVICE_NAME) + +.PHONY: test-db-wait +test-db-wait: ## Wait for the PostgreSQL database to be healthy + @echo "Waiting for PostgreSQL database..." + @while ! 
docker inspect --format='{{.State.Health.Status}}' $(DB_CONTAINER_NAME) 2>/dev/null | grep -q 'healthy'; do \ + echo -n "."; \ + sleep 1; \ + done; \ + echo " Database is healthy!" + +.PHONY: test-db-down +test-db-down: ## Stop and remove the test database + @echo "Stopping PostgreSQL test database..." + docker compose down + +# --- SQLx Targets --- +.PHONY: test-db-migrate +test-db-migrate: ## Run database migrations against PostgreSQL + @echo "Copying common PostgreSQL migrations..." + cp ../common/nyxd-scraper-psql/sql_migrations/* migrations + @echo "Running watcher PostgreSQL migrations..." + RUST_LOG=debug DATABASE_URL="$(TEST_DATABASE_URL)" sqlx migrate run --source migrations + +.PHONY: test-db-prepare +test-db-prepare: ## Run sqlx prepare for compile-time query verification + @echo "Running sqlx prepare for PostgreSQL..." + DATABASE_URL="$(TEST_DATABASE_URL)" cargo sqlx prepare -- + +# --- Build and Test Targets --- +.PHONY: test-db-run +test-db-run: ## Run tests with PostgreSQL feature + @echo "Running tests with PostgreSQL..." + DATABASE_URL="$(TEST_DATABASE_URL)" cargo test --no-default-features + +.PHONY: build-pg +build-pg: ## Build with PostgreSQL feature + @echo "Building with PostgreSQL feature..." + cargo build --no-default-features + +.PHONY: check-pg +check-pg: ## Check code with PostgreSQL feature + @echo "Checking code with PostgreSQL feature..." + cargo check --no-default-features + +.PHONY: clippy +clippy: clippy-pg + +.PHONY: clippy-pg +clippy-pg: ## Run clippy with PostgreSQL feature + @echo "Running clippy with PostgreSQL feature..." 
+ DATABASE_URL="$(TEST_DATABASE_URL)" cargo clippy --no-default-features -- -D warnings + +# --- Cleanup Targets --- +.PHONY: clean +clean: ## Clean build artifacts and SQLx cache + cargo clean + rm -rf .sqlx + +.PHONY: clean-db +clean-db: test-db-down ## Stop database and clean volumes + docker volume rm -f nym_data_observatory_test_data 2>/dev/null || true + +# --- Utility Targets --- +.PHONY: sqlx-cli +sqlx-cli: ## Install sqlx-cli if not already installed + @command -v sqlx >/dev/null 2>&1 || cargo install sqlx-cli --features postgres + +.PHONY: psql +psql: ## Connect to the running PostgreSQL database with psql + @docker exec -it $(DB_CONTAINER_NAME) psql -U testuser -d nym_data_observatory_test + +.PHONY: help +help: ## Show help for Makefile targets + @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' \ No newline at end of file diff --git a/nym-data-observatory/README.md b/nym-data-observatory/README.md new file mode 100644 index 00000000000..5a88b7024de --- /dev/null +++ b/nym-data-observatory/README.md @@ -0,0 +1,110 @@ +# Nym Data Observatory + +Collects data about the Nym network including: + +- **Chain scraper** - that parses blocks, transactions and messages on the Nyx chain +- **Price scraper** - to get the NYM/USD token price from CoinGecko +- **Webhooks** - trigger on messages or all messages to call with details + +## Running locally + +### 1. Install Prerequisites + +```bash +# Install sqlx-cli if not already installed +make sqlx-cli +``` + +### 2. Prepare PostgreSQL for Development + +```bash +# This will: +# - Start PostgreSQL in Docker +# - Run migrations +# - Generate SQLx offline query cache +# - Stop the database +make prepare-pg +``` + +### 3. Build + +```bash +make build-pg +``` + +### 4. 
Run with PostgreSQL + +```bash +# Start PostgreSQL for development (keeps running) +make test-db-up + +# In another terminal, run the application +NYM_DATA_OBSERVATORY_DB_URL=postgres://testuser:testpass@localhost:5433/nym_data_observatory_test \ +NYM_DATA_OBSERVATORY_WEBHOOK_URL="https://webhook.site" \ +NYM_DATA_OBSERVATORY_WEBHOOK_AUTH=1234 \ +cargo run -- run +``` + +To start from a block add the env var: `NYXD_SCRAPER_START_HEIGHT=19266184`. + +## Deploying + +Connect with `psql` to your local database: + +```sql +CREATE USER nym_data_observatory WITH PASSWORD 'data-data-data'; + +CREATE DATABASE nym_data_observatory_data; +GRANT ALL ON DATABASE nym_data_observatory_data TO nym_data_observatory; +``` + +Then run: + +``` +cargo run -- init --db_url postgres://testuser:testpass@localhost:5433/nym_data_observatory_test +``` + +and then: + +``` +NYM_DATA_OBSERVATORY_DB_URL=postgres://testuser:testpass@localhost:5433/nym_data_observatory_test \ +NYM_DATA_OBSERVATORY_WEBHOOK_URL="https://webhook.site" \ +NYM_DATA_OBSERVATORY_WEBHOOK_AUTH=1234 \ +cargo run -- run --websocket-url wss://rpc.nymtech.net/websocket --rpc-url https://rpc.nymtech.net +``` + +or just: + +``` +NYM_DATA_OBSERVATORY_DB_URL=postgres://testuser:testpass@localhost:5433/nym_data_observatory_test cargo run -- run --websocket-url wss://rpc.nymtech.net/websocket --rpc-url https://rpc.nymtech.net +``` + +If you want to watch for cosmwasm messages and send to a webhook: + +``` +NYM_DATA_OBSERVATORY_WEBHOOK_URL=https://webhook.site \ +NYM_DATA_OBSERVATORY_DB_URL=postgres://testuser:testpass@localhost:5433/nym_data_observatory_test\ +cargo run -- run --websocket-url wss://rpc.nymtech.net/websocket --rpc-url https://rpc.nymtech.net --start-block-height 20966360 --watch-for-chain-message-types "/cosmwasm.wasm.v1.MsgExecuteContract" +``` + +## Troubleshooting + +### SQLx Offline Mode + +If you see "no cached data for this query" errors: + +1. Ensure PostgreSQL is running: `make dev-db` +2. 
Run: `make test-db-prepare` + +Also see [README_SQLX.md](../nyx-chain-watcher/README_SQLX.md). + +### Connection Refused + +If you see "Connection refused" errors: + +1. Check Docker is running: `docker ps` +2. Check PostgreSQL container: `docker ps | grep nym_data_observatory +3. Restart database: `make test-db-down && make dev-db` + + + diff --git a/nym-data-observatory/build.rs b/nym-data-observatory/build.rs new file mode 100644 index 00000000000..a56d73fe37b --- /dev/null +++ b/nym-data-observatory/build.rs @@ -0,0 +1,87 @@ +use std::collections::HashMap; +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: Apache-2.0 +use glob::glob; +use std::env; +use std::path::Path; + +fn main() -> anyhow::Result<()> { + // check if migrations in "../common/nyxd-scraper-psql/sql_migrations/* are in "nym-data-observatory/migrations" + println!("Checking common migrations..."); + let manifest_dir_string = env::var("CARGO_MANIFEST_DIR").unwrap(); + let common_migrations_path = Path::new(&manifest_dir_string) + .join("../common/nyxd-scraper-psql/sql_migrations/") + .canonicalize()?; + let output_path = Path::new(&manifest_dir_string) + .join("migrations") + .canonicalize()?; + println!( + "output_path: {:?} (exists = {})", + output_path, + output_path.exists() + ); + let common_migrations_path = common_migrations_path.as_path(); + println!( + "common_migrations_path: {:?} (exists = {})", + common_migrations_path, + common_migrations_path.exists() + ); + + // hash contents of files in common migrations + let mut common_migrations_hashes = HashMap::new(); + for file in glob(&format!("{}/*", common_migrations_path.to_str().unwrap())) + .unwrap() + .flatten() + { + let hash = blake3::hash(std::fs::read(&file)?.as_slice()); + common_migrations_hashes.insert(hash, file); + } + + // hash contents of files in data observatory migrations + let mut data_observatory_migrations_hashes = HashMap::new(); + for file in glob(&format!("{}/*", output_path.to_str().unwrap())) + 
.unwrap() + .flatten() + { + let hash = blake3::hash(std::fs::read(&file)?.as_slice()); + data_observatory_migrations_hashes.insert(hash, file); + } + + let mut errors = vec![]; + + for entry in common_migrations_hashes { + println!( + "- checking if {:?} exists in nym-data-observatory/migrations directory...", + entry.1 + ); + let res = data_observatory_migrations_hashes.get(&entry.0); + let res_path = res.and_then(|r| r.to_str()).unwrap_or("(not found)"); + println!( + "- {} {} => {res_path} (content matches = {})", + if res.is_some() { "✅" } else { "❌" }, + entry.1.as_path().to_str().unwrap(), + res.is_some() + ); + + if res.is_none() { + errors.push(format!("- {:?}", entry.1.as_path())); + } + } + + // show all errors + if !errors.is_empty() { + anyhow::bail!( + "the following migrations have changed or do not exist in nym-data-observatory/migrations directory, please check and copy them:\n{}", + errors.join("\n") + ); + } + + // sqlx + if let Ok(database_url) = std::env::var("DATABASE_URL") { + println!("cargo:rustc-env=DATABASE_URL={database_url}"); + } + + println!("✅ done"); + + Ok(()) +} diff --git a/nym-data-observatory/docker-compose.yml b/nym-data-observatory/docker-compose.yml new file mode 100644 index 00000000000..64152aa1bd0 --- /dev/null +++ b/nym-data-observatory/docker-compose.yml @@ -0,0 +1,21 @@ +services: + postgres-test: + image: postgres:16-alpine + container_name: nym_data_observatory_test + environment: + POSTGRES_DB: nym_data_observatory_test + POSTGRES_USER: testuser + POSTGRES_PASSWORD: testpass + ports: + - '5433:5432' # Map to 5433 to avoid conflicts with default PostgreSQL + healthcheck: + test: ['CMD-SHELL', 'pg_isready -U testuser -d nym_data_observatory_test'] + interval: 5s + timeout: 5s + retries: 5 + # Optional: Add volume for persistent data during development +# volumes: +# - nym_data_observatory_test_data:/var/lib/postgresql/data + +#volumes: +# nym_data_observatory_test_data: \ No newline at end of file diff --git 
a/nym-data-observatory/migrations/0001_metadata.sql b/nym-data-observatory/migrations/0001_metadata.sql new file mode 100644 index 00000000000..43070210c4a --- /dev/null +++ b/nym-data-observatory/migrations/0001_metadata.sql @@ -0,0 +1,10 @@ +/* + * Copyright 2023 - Nym Technologies SA + * SPDX-License-Identifier: Apache-2.0 + */ + +CREATE TABLE METADATA +( + id INTEGER PRIMARY KEY CHECK (id = 0), + last_processed_height BIGINT NOT NULL +); \ No newline at end of file diff --git a/nym-data-observatory/migrations/0002_cosmos.sql b/nym-data-observatory/migrations/0002_cosmos.sql new file mode 100644 index 00000000000..00440004ada --- /dev/null +++ b/nym-data-observatory/migrations/0002_cosmos.sql @@ -0,0 +1,127 @@ +CREATE TABLE validator +( + consensus_address TEXT NOT NULL PRIMARY KEY, /* Validator consensus address */ + consensus_pubkey TEXT NOT NULL UNIQUE /* Validator consensus public key */ +); + +CREATE TABLE pre_commit +( + validator_address TEXT NOT NULL REFERENCES validator (consensus_address), + height BIGINT NOT NULL, + timestamp TIMESTAMP WITHOUT TIME ZONE NOT NULL, + voting_power BIGINT NOT NULL, + proposer_priority BIGINT NOT NULL, + UNIQUE (validator_address, timestamp) +); +CREATE INDEX pre_commit_validator_address_index ON pre_commit (validator_address); +CREATE INDEX pre_commit_height_index ON pre_commit (height); + +CREATE TABLE block +( + height BIGINT UNIQUE PRIMARY KEY, + hash TEXT NOT NULL UNIQUE, + num_txs INTEGER DEFAULT 0, + total_gas BIGINT DEFAULT 0, + proposer_address TEXT REFERENCES validator (consensus_address), + timestamp TIMESTAMP WITHOUT TIME ZONE NOT NULL +); +CREATE INDEX block_height_index ON block (height); +CREATE INDEX block_hash_index ON block (hash); +CREATE INDEX block_proposer_address_index ON block (proposer_address); +ALTER TABLE block + SET ( + autovacuum_vacuum_scale_factor = 0, + autovacuum_analyze_scale_factor = 0, + autovacuum_vacuum_threshold = 10000, + autovacuum_analyze_threshold = 10000 + ); + +CREATE TABLE 
transaction +( + hash TEXT NOT NULL, + height BIGINT NOT NULL REFERENCES block (height), + "index" INTEGER NOT NULL, -- <<<=== not present in original bdjuno table, but it's quite useful + success BOOLEAN NOT NULL, + + /* Body */ + messages JSONB NOT NULL DEFAULT '[]'::JSONB, + memo TEXT, + signatures TEXT[] NOT NULL, + + /* AuthInfo */ + signer_infos JSONB NOT NULL DEFAULT '[]'::JSONB, + fee JSONB NOT NULL DEFAULT '{}'::JSONB, + + /* Tx response */ + gas_wanted BIGINT DEFAULT 0, + gas_used BIGINT DEFAULT 0, + raw_log TEXT, + logs JSONB, + events JSONB, + + CONSTRAINT unique_tx UNIQUE (hash) +); +CREATE INDEX transaction_hash_index ON transaction (hash); +CREATE INDEX transaction_height_index ON transaction (height); + +CREATE TYPE COIN AS +( + denom TEXT, + amount TEXT +); + +CREATE TABLE message +( + transaction_hash TEXT NOT NULL, + index BIGINT NOT NULL, + type TEXT NOT NULL, + value JSONB NOT NULL, + involved_accounts_addresses TEXT[] NOT NULL, + height BIGINT NOT NULL, + + funds COIN[] DEFAULT '{}', + + FOREIGN KEY (transaction_hash) REFERENCES transaction (hash), + CONSTRAINT unique_message_per_tx UNIQUE (transaction_hash, index) +); +CREATE INDEX message_transaction_hash_index ON message (transaction_hash); +CREATE INDEX message_type_index ON message (type); +CREATE INDEX message_involved_accounts_index ON message USING GIN (involved_accounts_addresses); + +/** + * This function is used to find all the messages that involve any of the given addresses and have + * type that is one of the specified types. 
+ */ +CREATE FUNCTION messages_by_address( + addresses TEXT[], + types TEXT[], + "limit" BIGINT = 100, + "offset" BIGINT = 0) + RETURNS SETOF message AS +$$ +SELECT * +FROM message +WHERE (cardinality(types) = 0 OR type = ANY (types)) + AND addresses && involved_accounts_addresses +ORDER BY height DESC +LIMIT "limit" OFFSET "offset" +$$ LANGUAGE sql STABLE; + +CREATE FUNCTION messages_by_type( + types text[], + "limit" bigint DEFAULT 100, + "offset" bigint DEFAULT 0) + RETURNS SETOF message AS +$$ +SELECT * +FROM message +WHERE (cardinality(types) = 0 OR type = ANY (types)) +ORDER BY height DESC +LIMIT "limit" OFFSET "offset" +$$ LANGUAGE sql STABLE; + +CREATE TABLE pruning +( + last_pruned_height BIGINT NOT NULL +); + diff --git a/nym-data-observatory/migrations/0103_wasm.sql b/nym-data-observatory/migrations/0103_wasm.sql new file mode 100644 index 00000000000..8cdd5206d9d --- /dev/null +++ b/nym-data-observatory/migrations/0103_wasm.sql @@ -0,0 +1,18 @@ +CREATE TABLE wasm_execute_contract +( + sender TEXT NOT NULL, + contract_address TEXT NOT NULL, + message_type TEXT NULL, + raw_contract_message JSONB NOT NULL DEFAULT '{}'::JSONB, + funds COIN[] NOT NULL DEFAULT '{}', + fee COIN[] NOT NULL DEFAULT '{}', + executed_at TIMESTAMP NOT NULL, + height BIGINT NOT NULL, + hash TEXT NOT NULL, + message_index BIGINT NOT NULL, + memo TEXT NULL +); +CREATE INDEX execute_contract_height_index ON wasm_execute_contract (height); +CREATE INDEX execute_contract_executed_at_index ON wasm_execute_contract (executed_at); +CREATE INDEX execute_contract_message_type_index ON wasm_execute_contract (message_type); +CREATE INDEX execute_contract_sender ON wasm_execute_contract (sender); diff --git a/nym-data-observatory/migrations/1000_startup_info.sql b/nym-data-observatory/migrations/1000_startup_info.sql new file mode 100644 index 00000000000..1e16bde072b --- /dev/null +++ b/nym-data-observatory/migrations/1000_startup_info.sql @@ -0,0 +1,11 @@ +/* + * Copyright 2025 - Nym 
Technologies SA + * SPDX-License-Identifier: GPL-3.0-only + */ + +CREATE TABLE startup_info +( + start_ts TIMESTAMPTZ NOT NULL, + end_ts TIMESTAMPTZ NOT NULL, + error_message TEXT +); \ No newline at end of file diff --git a/nym-data-observatory/migrations/1101_price_data.sql b/nym-data-observatory/migrations/1101_price_data.sql new file mode 100644 index 00000000000..80f30d736c8 --- /dev/null +++ b/nym-data-observatory/migrations/1101_price_data.sql @@ -0,0 +1,8 @@ +CREATE TABLE price_history ( + timestamp bigint PRIMARY KEY, + chf double precision NOT NULL, + usd double precision NOT NULL, + eur double precision NOT NULL, + btc double precision NOT NULL, + gbp double precision NOT NULL +); \ No newline at end of file diff --git a/nym-data-observatory/src/chain_scraper/mod.rs b/nym-data-observatory/src/chain_scraper/mod.rs new file mode 100644 index 00000000000..a9cc9b9c216 --- /dev/null +++ b/nym-data-observatory/src/chain_scraper/mod.rs @@ -0,0 +1,47 @@ +use crate::cli::commands::run::Args; +use crate::db::DbPool; +use nyxd_scraper_psql::{PostgresNyxdScraper, PruningOptions}; +use std::fs; +use tracing::{info, warn}; + +pub(crate) mod webhook; + +pub(crate) async fn run_chain_scraper( + args: Args, + config: &crate::config::Config, + connection_pool: DbPool, +) -> anyhow::Result { + let use_best_effort_start_height = args.start_block_height.is_some(); + + if args.nuke_db { + warn!("☢️☢️☢️ NUKING THE SCRAPER DATABASE"); + fs::remove_file(config.chain_scraper_connection_string())?; + } + + let database_storage = config + .chain_scraper_connection_string + .clone() + .and(args.db_connection_string) + .expect("no database connection string set in config"); + + let scraper = PostgresNyxdScraper::builder(nyxd_scraper_psql::Config { + websocket_url: args.websocket_url, + rpc_url: args.rpc_url, + database_storage, + pruning_options: PruningOptions::nothing(), + store_precommits: false, + start_block: nyxd_scraper_psql::StartingBlockOpts { + start_block_height: 
args.start_block_height, + use_best_effort_start_height, + }, + }) + .with_msg_module(crate::modules::wasm::WasmModule::new(connection_pool)) + .with_tx_module(webhook::WebhookModule::new(config.clone())?); + + let instance = scraper.build_and_start().await?; + + info!("🚧 blocking until the chain has caught up..."); + instance.wait_for_startup_sync().await; + + Ok(instance) +} diff --git a/nym-data-observatory/src/chain_scraper/webhook.rs b/nym-data-observatory/src/chain_scraper/webhook.rs new file mode 100644 index 00000000000..641f5741d70 --- /dev/null +++ b/nym-data-observatory/src/chain_scraper/webhook.rs @@ -0,0 +1,143 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: GPL-3.0-only + +use crate::config::data_observatory::{HttpAuthenticationOptions, WebhookConfig}; +use crate::models::WebhookPayload; +use anyhow::Context; +use async_trait::async_trait; +use nyxd_scraper_psql::{ + NyxdScraperTransaction, ParsedTransactionResponse, ScraperError, TxModule, +}; +use reqwest::{Client, Url}; +use tracing::{error, info}; + +pub struct WebhookModule { + webhooks: Vec, +} + +impl WebhookModule { + pub fn new(config: crate::config::Config) -> anyhow::Result { + let webhooks = config + .data_observatory_config + .webhooks + .iter() + .map(|watcher_cfg| Webhook::new(watcher_cfg.clone())) + .collect::>>()?; + Ok(Self { webhooks }) + } +} + +#[async_trait] +impl TxModule for WebhookModule { + async fn handle_tx( + &mut self, + tx: &ParsedTransactionResponse, + _: &mut dyn NyxdScraperTransaction, + ) -> Result<(), ScraperError> { + for (index, msg) in &tx.parsed_messages { + if let Some(parsed_message_type_url) = tx.parsed_message_urls.get(index) { + let payload = WebhookPayload { + height: tx.height.value(), + message_index: *index as u64, + transaction_hash: tx.hash.to_string(), + message: Some(msg.clone()), + }; + + // println!( + // "->>>>>>>>>>>>>>>>>>>>>>>>> {}", + // serde_json::to_string(&payload).unwrap() + // ); + + for webhook in 
self.webhooks.clone() { + // if the webhook requires a type and the parsed message type doesn't match, skip + if !webhook.config.watch_for_chain_message_types.is_empty() + && !webhook + .config + .watch_for_chain_message_types + .contains(parsed_message_type_url) + { + continue; + } + + let payload = payload.clone(); + + // TODO: some excellent advice from Andrew, for another day: + // - pass a cancellation token for shutdown + // - use TaskManager and limit number of webhooks to spawn at once + tokio::spawn(async move { + if let Err(e) = webhook.invoke_webhook(&payload).await { + error!("webhook error: {}", e); + } + }); + } + } + } + Ok(()) + } +} + +#[derive(Clone)] +pub(crate) struct Webhook { + webhook_url: Url, + config: WebhookConfig, +} + +impl Webhook { + pub(crate) fn new(config: WebhookConfig) -> anyhow::Result { + Ok(Webhook { + webhook_url: config + .webhook_url + .as_str() + .parse() + .context("invalid config: provided webhook URL is malformed")?, + config, + }) + } + + pub(crate) fn id(&self) -> &str { + &self.config.id + } + + pub(crate) async fn invoke_webhook(&self, payload: &WebhookPayload) -> anyhow::Result<()> { + let client = Client::builder() + .user_agent(format!( + "nym-data-observatory/{}/webhook-{}", + env!("CARGO_PKG_VERSION"), + self.id() + )) + .build() + .context("failed to build reqwest client")?; + + let mut request_builder = client.post(self.webhook_url.clone()).json(payload); + + if let Some(auth) = &self.config.authentication { + match auth { + HttpAuthenticationOptions::AuthorizationBearerToken { token } => { + request_builder = request_builder.bearer_auth(token); + } + } + } + + match request_builder.send().await { + Ok(res) => info!( + "[webhook = {}] ✅ Webhook {} {} - tx {}, index {}", + self.config.id, + res.status(), + res.url(), + payload.transaction_hash, + payload.message_index, + ), + Err(err) => { + error!( + "[webhook = {}] ❌ Webhook {:?} {:?} error = {err}", + self.config.id, + err.status(), + err.url(), + ); + 
return Err(err.into()); + } + } + + Ok(()) + } +} diff --git a/nym-data-observatory/src/cli/commands/build_info.rs b/nym-data-observatory/src/cli/commands/build_info.rs new file mode 100644 index 00000000000..e3957b66fa2 --- /dev/null +++ b/nym-data-observatory/src/cli/commands/build_info.rs @@ -0,0 +1,17 @@ +// Copyright 2024 - Nym Technologies SA +// SPDX-License-Identifier: GPL-3.0-only + +use crate::error::NymDataObservatoryError; +use nym_bin_common::bin_info_owned; +use nym_bin_common::output_format::OutputFormat; + +#[derive(clap::Args, Debug)] +pub(crate) struct Args { + #[clap(short, long, default_value_t = OutputFormat::default())] + output: OutputFormat, +} + +pub(crate) fn execute(args: Args) -> Result<(), NymDataObservatoryError> { + println!("{}", args.output.format(&bin_info_owned!())); + Ok(()) +} diff --git a/nym-data-observatory/src/cli/commands/init.rs b/nym-data-observatory/src/cli/commands/init.rs new file mode 100644 index 00000000000..4a0a2b9e928 --- /dev/null +++ b/nym-data-observatory/src/cli/commands/init.rs @@ -0,0 +1,46 @@ +// Copyright 2024 - Nym Technologies SA +// SPDX-License-Identifier: GPL-3.0-only + +use crate::cli::DEFAULT_NYM_DATA_OBSERVATORY_ID; +use crate::config::data_observatory::HttpAuthenticationOptions::AuthorizationBearerToken; +use crate::config::data_observatory::WebhookConfig; +use crate::config::{Config, ConfigBuilder, DataObservatoryConfig, default_config_filepath}; +use crate::env::vars::*; +use crate::error::NymDataObservatoryError; +use nym_config::save_unformatted_config_to_file; + +#[derive(clap::Args, Debug)] +pub(crate) struct Args { + /// (Override) Postgres connection string for data storage + #[arg(long, env = NYM_DATA_OBSERVATORY_DB_URL, alias = "db_url")] + pub(crate) chain_history_db_connection_string: String, +} + +pub(crate) async fn execute(args: Args) -> Result<(), NymDataObservatoryError> { + let config_path = default_config_filepath(); + let data_dir = 
Config::default_data_directory(&config_path)?; + + let builder = ConfigBuilder::new( + config_path.clone(), + data_dir, + args.chain_history_db_connection_string, + ) + .with_data_observatory_config(DataObservatoryConfig { + webhooks: vec![WebhookConfig { + id: DEFAULT_NYM_DATA_OBSERVATORY_ID.to_string(), + webhook_url: url::Url::parse("https://webhook.site")?, + authentication: Some(AuthorizationBearerToken { + token: "1234".to_string(), + }), + description: None, + watch_for_chain_message_types: vec![ + "/cosmos.bank.v1beta1.MsgSend".to_string(), + "/ibc.applications.transfer.v1.MsgTransfer".to_string(), + ], + }], + }); + + let config = builder.build(); + + Ok(save_unformatted_config_to_file(&config, &config_path)?) +} diff --git a/nym-data-observatory/src/cli/commands/mod.rs b/nym-data-observatory/src/cli/commands/mod.rs new file mode 100644 index 00000000000..b1f63f4ae53 --- /dev/null +++ b/nym-data-observatory/src/cli/commands/mod.rs @@ -0,0 +1,3 @@ +pub(crate) mod build_info; +pub(crate) mod init; +pub(crate) mod run; diff --git a/nym-data-observatory/src/cli/commands/run/args.rs b/nym-data-observatory/src/cli/commands/run/args.rs new file mode 100644 index 00000000000..6d4f71b2723 --- /dev/null +++ b/nym-data-observatory/src/cli/commands/run/args.rs @@ -0,0 +1,46 @@ +// Copyright 2024 - Nym Technologies SA +// SPDX-License-Identifier: GPL-3.0-only + +use crate::env::vars::*; +use url::Url; + +#[derive(clap::Args, Debug, Clone)] +pub(crate) struct Args { + #[arg(long, env = NYXD_WS, alias = "nyxd_ws")] + pub(crate) websocket_url: Url, + + #[arg(long, env = NYXD, alias = "nyxd")] + pub(crate) rpc_url: Url, + + #[arg(long, env = NYXD_SCRAPER_START_HEIGHT)] + pub(crate) start_block_height: Option, + + #[arg(long, env = NYXD_SCRAPER_UNSAFE_NUKE_DB, default_value = "false")] + pub(crate) nuke_db: bool, + + /// (Override) Postgres connection string for chain scraper history + #[arg(long, env = NYM_DATA_OBSERVATORY_DB_URL, alias = "db_url")] + pub(crate) 
db_connection_string: Option, + + /// (Override) Watch for chain messages of these types + #[clap( + long, + value_delimiter = ',', + env = NYM_DATA_OBSERVATORY_WATCH_CHAIN_MESSAGE_TYPES + )] + pub watch_for_chain_message_types: Vec, + + /// (Override) The webhook to call when we find something + #[clap( + long, + env = NYM_DATA_OBSERVATORY_WEBHOOK_URL + )] + pub webhook_url: Option, + + /// (Override) Optionally, authenticate with the webhook + #[clap( + long, + env = NYM_DATA_OBSERVATORY_WEBHOOK_AUTH + )] + pub webhook_auth: Option, +} diff --git a/nym-data-observatory/src/cli/commands/run/config.rs b/nym-data-observatory/src/cli/commands/run/config.rs new file mode 100644 index 00000000000..61eb5d8d1c3 --- /dev/null +++ b/nym-data-observatory/src/cli/commands/run/config.rs @@ -0,0 +1,61 @@ +use crate::cli::DEFAULT_NYM_DATA_OBSERVATORY_ID; +use crate::cli::commands::run::args::Args; +use crate::config::data_observatory::{HttpAuthenticationOptions, WebhookConfig}; +use crate::config::{Config, ConfigBuilder, DataObservatoryConfig, default_config_filepath}; +use crate::error::NymDataObservatoryError; +use tracing::{info, warn}; + +pub(crate) fn get_run_config(args: Args) -> Result { + let Args { + watch_for_chain_message_types, + webhook_auth, + webhook_url, + .. 
+ } = args; + + // if there are no args set, then try load the config + if args.db_connection_string.is_none() { + info!("Loading default config file..."); + return Config::read_from_toml_file_in_default_location(); + } + + let config_path = default_config_filepath(); + let data_dir = Config::default_data_directory(&config_path)?; + + if args.db_connection_string.is_none() { + return Err(NymDataObservatoryError::DbConnectionStringMissing); + } + + let mut builder = ConfigBuilder::new( + config_path, + data_dir, + args.db_connection_string + .expect("db connection string is required"), + ); + + if let Some(webhook_url) = webhook_url { + let authentication = + webhook_auth.map(|token| HttpAuthenticationOptions::AuthorizationBearerToken { token }); + + let watcher_config = DataObservatoryConfig { + webhooks: vec![WebhookConfig { + id: DEFAULT_NYM_DATA_OBSERVATORY_ID.to_string(), + description: None, + watch_for_chain_message_types, + webhook_url, + authentication, + }], + }; + + info!("Overriding watcher config with env vars"); + + builder = builder.with_data_observatory_config(watcher_config); + } else { + warn!( + "You did not specify a webhook in {}. 
Only database items will be stored.", + crate::env::vars::NYM_DATA_OBSERVATORY_WEBHOOK_URL + ); + } + + Ok(builder.build()) +} diff --git a/nym-data-observatory/src/cli/commands/run/mod.rs b/nym-data-observatory/src/cli/commands/run/mod.rs new file mode 100644 index 00000000000..d6d3188cbf6 --- /dev/null +++ b/nym-data-observatory/src/cli/commands/run/mod.rs @@ -0,0 +1,215 @@ +// Copyright 2024 - Nym Technologies SA +// SPDX-License-Identifier: GPL-3.0-only + +use crate::error::NymDataObservatoryError; +use anyhow::Context; +use std::time::Duration; +use time::OffsetDateTime; +use tokio::task::{JoinHandle, JoinSet}; +use tokio_util::sync::CancellationToken; +use tracing::{error, info}; + +mod args; +mod config; + +use crate::chain_scraper::run_chain_scraper; +use crate::db::DbPool; +use crate::http::state::PriceScraperState; +use crate::price_scraper::PriceScraper; +use crate::{db, http}; +pub(crate) use args::Args; +use nym_task::signal::wait_for_signal; + +async fn try_insert_startup_information( + db_pool: DbPool, + start: OffsetDateTime, + end: OffsetDateTime, + error_message: Option, +) { + let _ = sqlx::query!( + r#" + INSERT INTO startup_info(start_ts, end_ts, error_message) + VALUES ($1, $2, $3) + "#, + start.into(), + end.into(), + error_message + ) + .execute(&db_pool) + .await + .inspect_err(|err| error!("failed to insert run information: {err}")); +} + +async fn wait_for_shutdown( + db_pool: DbPool, + start: OffsetDateTime, + main_cancellation_token: CancellationToken, + scraper_cancellation_token: CancellationToken, + mut tasks: JoinSet>>, +) { + async fn finalize_shutdown( + db_pool: DbPool, + start: OffsetDateTime, + main_cancellation_token: CancellationToken, + scraper_cancellation_token: CancellationToken, + mut tasks: JoinSet>>, + error_message: Option, + ) { + // cancel all tasks + main_cancellation_token.cancel(); + scraper_cancellation_token.cancel(); + + // stupid nasty and hacky workaround to make sure all relevant tasks have finished before 
hard aborting them + // nasty stupid and hacky workaround + tokio::time::sleep(Duration::from_secs(1)).await; + tasks.abort_all(); + + // insert execution result into the db + try_insert_startup_information(db_pool, start, OffsetDateTime::now_utc(), error_message) + .await + } + + tokio::select! { + // graceful shutdown + _ = wait_for_signal() => { + info!("received shutdown signal"); + finalize_shutdown(db_pool, start, main_cancellation_token, scraper_cancellation_token, tasks, None).await; + } + _ = scraper_cancellation_token.cancelled() => { + info!("the scraper has issued cancellation"); + finalize_shutdown(db_pool, start, main_cancellation_token, scraper_cancellation_token, tasks, Some("unexpected scraper task cancellation".into())).await; + } + _ = main_cancellation_token.cancelled() => { + info!("one of the tasks has cancelled the token"); + finalize_shutdown(db_pool, start, main_cancellation_token, scraper_cancellation_token, tasks, Some("unexpected main task cancellation".into())).await; + } + task_result = tasks.join_next() => { + // the first unwrap is fine => join set was not empty + let error_message = match task_result.unwrap() { + Err(_join_err) => Some("unexpected join error".to_string()), + Ok(Some(Ok(_))) => None, + Ok(Some(Err(err))) => Some(err.to_string()), + Ok(None) => { + Some("unexpected task cancellation".to_string()) + } + }; + + error!("unexpected task termination: {error_message:?}"); + finalize_shutdown(db_pool, start, main_cancellation_token, scraper_cancellation_token, tasks, error_message).await; + } + + } +} + +pub(crate) async fn execute(args: Args, http_port: u16) -> Result<(), NymDataObservatoryError> { + let start = OffsetDateTime::now_utc(); + + let config = config::get_run_config(args.clone())?; + + let db_connection_string = config.chain_scraper_connection_string(); + + info!("nyxd wss: {}", args.websocket_url.to_string()); + info!("nyxd rpc: {}", args.rpc_url.to_string()); + info!("start_block_height: {:#?}", 
args.start_block_height); + info!( + "webhooks: {}", + config.data_observatory_config.webhooks.len() + ); + for w in &config.data_observatory_config.webhooks { + info!( + "- {}: {} {:?}", + &w.id, + w.webhook_url.as_str(), + w.watch_for_chain_message_types + ); + } + info!("nuke_db: {}", args.nuke_db); + + let storage = db::Storage::init(db_connection_string).await?; + let watcher_pool = storage.pool_owned(); + + let mut tasks = JoinSet::new(); + let cancellation_token = CancellationToken::new(); + + let scraper_pool = storage.pool_owned(); + let shutdown_pool = storage.pool_owned(); + + // construct shared state + let price_scraper_shared_state = PriceScraperState::new(); + + // spawn all the tasks + + // 1. chain scraper (note: this doesn't really spawn the full scraper on this task, but we don't want to be blocking waiting for its startup) + let scraper_token_handle: JoinHandle> = tokio::spawn({ + let config = config.clone(); + async move { + // this only blocks until startup sync is done; it then runs on its own set of tasks + let scraper = run_chain_scraper(args, &config, scraper_pool).await?; + Ok(scraper.cancel_token()) + } + }); + + // 2. price scraper (note, this task never terminates on its own) + let price_scraper = PriceScraper::new(price_scraper_shared_state.clone(), watcher_pool); + { + let token = cancellation_token.clone(); + tasks.spawn(async move { + token + .run_until_cancelled(async move { + price_scraper.run().await; + Ok(()) + }) + .await + }); + } + + // 3. http api + let http_server = http::server::build_http_api( + storage.pool_owned(), + &config, + http_port, + price_scraper_shared_state, + ) + .await?; + { + let token = cancellation_token.clone(); + tasks.spawn(async move { + info!("Starting HTTP server on port {http_port}",); + async move { + Some( + http_server + .run(token.cancelled_owned()) + .await + .context("http server failure"), + ) + } + .await + }); + } + + // 1. 
wait for either shutdown or scraper having finished startup + tokio::select! { + _ = wait_for_signal() => { + info!("received shutdown signal while waiting for scraper to finish its startup"); + return Ok(()) + } + scraper_token = scraper_token_handle => { + let scraper_token = match scraper_token { + Ok(Ok(token)) => token, + Ok(Err(startup_err)) => { + error!("failed to startup the chain scraper: {startup_err}"); + return Err(startup_err.into()); + } + Err(runtime_err) => { + error!("failed to finish the scraper startup task: {runtime_err}"); + return Ok(()) + + } + }; + + wait_for_shutdown(shutdown_pool, start, cancellation_token, scraper_token, tasks).await + } + } + + Ok(()) +} diff --git a/nym-data-observatory/src/cli/mod.rs b/nym-data-observatory/src/cli/mod.rs new file mode 100644 index 00000000000..7aa4d99dbba --- /dev/null +++ b/nym-data-observatory/src/cli/mod.rs @@ -0,0 +1,67 @@ +// Copyright 2024 - Nym Technologies SA +// SPDX-License-Identifier: GPL-3.0-only + +use crate::cli::commands::{build_info, init, run}; +use crate::env::vars::*; +use crate::error::NymDataObservatoryError; +use clap::{Parser, Subcommand}; +use nym_bin_common::bin_info; +use std::sync::OnceLock; + +pub(crate) mod commands; + +pub const DEFAULT_NYM_DATA_OBSERVATORY_ID: &str = "default-nym-data-observatory"; + +// Helper for passing LONG_VERSION to clap +fn pretty_build_info_static() -> &'static str { + static PRETTY_BUILD_INFORMATION: OnceLock = OnceLock::new(); + PRETTY_BUILD_INFORMATION.get_or_init(|| bin_info!().pretty_print()) +} + +#[derive(Parser, Debug)] +#[clap(author = "Nymtech", version, long_version = pretty_build_info_static(), about)] +pub(crate) struct Cli { + /// Path pointing to an env file that configures the nym-data-observatory and overrides any preconfigured values. + #[clap( + short, + long, + env = NYM_DATA_OBSERVATORY_CONFIG_ENV_FILE_ARG + )] + pub(crate) config_env_file: Option, + + /// Flag used for disabling the printed banner in tty. 
+ #[clap( + long, + env = NYM_DATA_OBSERVATORY_NO_BANNER_ARG + )] + pub(crate) no_banner: bool, + + /// Port to listen on + #[arg(long, default_value_t = 8000, env = "NYM_DATA_OBSERVATORY_HTTP_PORT")] + pub(crate) http_port: u16, + + #[clap(subcommand)] + command: Commands, +} + +impl Cli { + pub(crate) async fn execute(self) -> Result<(), NymDataObservatoryError> { + match self.command { + Commands::BuildInfo(args) => build_info::execute(args), + Commands::Run(args) => run::execute(*args, self.http_port).await, + Commands::Init(args) => init::execute(args).await, + } + } +} + +#[derive(Subcommand, Debug)] +pub(crate) enum Commands { + /// Show build information of this binary + BuildInfo(build_info::Args), + + /// Start this nym-chain-watcher + Run(Box), + + /// Initialise config + Init(init::Args), +} diff --git a/nym-data-observatory/src/config/data_observatory.rs b/nym-data-observatory/src/config/data_observatory.rs new file mode 100644 index 00000000000..fa9341e35b6 --- /dev/null +++ b/nym-data-observatory/src/config/data_observatory.rs @@ -0,0 +1,20 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Default, Clone, Serialize, Deserialize)] +pub struct DataObservatoryConfig { + pub webhooks: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct WebhookConfig { + pub id: String, + pub description: Option, + pub webhook_url: url::Url, + pub watch_for_chain_message_types: Vec, + pub authentication: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub enum HttpAuthenticationOptions { + AuthorizationBearerToken { token: String }, +} diff --git a/nym-data-observatory/src/config/mod.rs b/nym-data-observatory/src/config/mod.rs new file mode 100644 index 00000000000..86cd357faa5 --- /dev/null +++ b/nym-data-observatory/src/config/mod.rs @@ -0,0 +1,222 @@ +// Copyright 2024 - Nym Technologies SA +// SPDX-License-Identifier: GPL-3.0-only + +use crate::config::template::CONFIG_TEMPLATE; +use nym_bin_common::logging::LoggingSettings; 
+use nym_config::{ + DEFAULT_CONFIG_DIR, DEFAULT_CONFIG_FILENAME, DEFAULT_DATA_DIR, NYM_DIR, NymConfigTemplate, + must_get_home, read_config_from_toml_file, save_unformatted_config_to_file, +}; +use serde::{Deserialize, Serialize}; +use std::path::{Path, PathBuf}; +use tracing::{debug, error}; + +pub(crate) mod data_observatory; +mod template; + +pub use crate::config::data_observatory::DataObservatoryConfig; +use crate::error::NymDataObservatoryError; + +const DEFAULT_NYM_DATA_OBSERVATORY_DIR: &str = "nym-data-observatory"; + +/// Derive default path to nym-data-observatory's config directory. +/// It should get resolved to `$HOME/.nym/nym-data-observatory/config` +pub fn default_config_directory() -> PathBuf { + must_get_home() + .join(NYM_DIR) + .join(DEFAULT_NYM_DATA_OBSERVATORY_DIR) + .join(DEFAULT_CONFIG_DIR) +} + +/// Derive default path to nym-data-observatory's config file. +/// It should get resolved to `$HOME/.nym/nym-data-observatory/config/config.toml` +pub fn default_config_filepath() -> PathBuf { + default_config_directory().join(DEFAULT_CONFIG_FILENAME) +} + +pub struct ConfigBuilder { + pub config_path: PathBuf, + + pub data_dir: PathBuf, + + pub chain_scraper_connection_string: String, + pub data_observatory_config: Option, + + pub logging: Option, +} + +impl ConfigBuilder { + pub fn new( + config_path: PathBuf, + data_dir: PathBuf, + chain_scraper_connection_string: String, + ) -> Self { + ConfigBuilder { + config_path, + data_dir, + data_observatory_config: None, + logging: None, + chain_scraper_connection_string, + } + } + + #[allow(dead_code)] + pub fn with_data_observatory_config( + mut self, + data_observatory_config: impl Into, + ) -> Self { + self.data_observatory_config = Some(data_observatory_config.into()); + self + } + + #[allow(dead_code)] + pub fn with_logging(mut self, section: impl Into>) -> Self { + self.logging = section.into(); + self + } + + pub fn build(self) -> Config { + Config { + logging: self.logging.unwrap_or_default(), 
+ save_path: Some(self.config_path), + data_observatory_config: self.data_observatory_config.unwrap_or_default(), + data_dir: self.data_dir, + chain_scraper_connection_string: Some(self.chain_scraper_connection_string), + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct Config { + // additional metadata holding on-disk location of this config file + #[serde(skip)] + pub(crate) save_path: Option, + + #[serde(skip)] + pub(crate) data_dir: PathBuf, + + pub chain_scraper_connection_string: Option, + + #[serde(default)] + pub data_observatory_config: DataObservatoryConfig, + + #[serde(default)] + pub logging: LoggingSettings, +} + +impl NymConfigTemplate for Config { + fn template(&self) -> &'static str { + CONFIG_TEMPLATE + } +} + +impl Config { + #[allow(unused)] + pub fn save(&self) -> Result<(), NymDataObservatoryError> { + let save_location = self.save_location(); + debug!( + "attempting to save config file to '{}'", + save_location.display() + ); + save_unformatted_config_to_file(self, &save_location).map_err(|source| { + NymDataObservatoryError::UnformattedConfigSaveFailure { + path: save_location, + source, + } + }) + } + + #[allow(unused)] + pub fn save_location(&self) -> PathBuf { + self.save_path + .clone() + .unwrap_or(self.default_save_location()) + } + + #[allow(unused)] + pub fn default_save_location(&self) -> PathBuf { + default_config_filepath() + } + + pub fn default_data_directory>( + config_path: P, + ) -> Result { + let config_path = config_path.as_ref(); + + // we got a proper path to the .toml file + let Some(config_dir) = config_path.parent() else { + error!( + "'{}' does not have a parent directory. Have you pointed to the fs root?", + config_path.display() + ); + return Err(NymDataObservatoryError::DataDirDerivationFailure); + }; + + let Some(config_dir_name) = config_dir.file_name() else { + error!( + "could not obtain parent directory name of '{}'. 
Have you used relative paths?", + config_path.display() + ); + return Err(NymDataObservatoryError::DataDirDerivationFailure); + }; + + if config_dir_name != DEFAULT_CONFIG_DIR { + error!( + "the parent directory of '{}' ({}) is not {DEFAULT_CONFIG_DIR}. currently this is not supported", + config_path.display(), + config_dir_name.to_str().unwrap_or("UNKNOWN") + ); + return Err(NymDataObservatoryError::DataDirDerivationFailure); + } + + let Some(node_dir) = config_dir.parent() else { + error!( + "'{}' does not have a parent directory. Have you pointed to the fs root?", + config_dir.display() + ); + return Err(NymDataObservatoryError::DataDirDerivationFailure); + }; + + Ok(node_dir.join(DEFAULT_DATA_DIR)) + } + + pub fn chain_scraper_connection_string(&self) -> String { + self.chain_scraper_connection_string + .clone() + .expect("database connection string not set") + } + + // simple wrapper that reads config file and assigns path location + fn read_from_path>( + path: P, + data_dir: P, + ) -> Result { + let path = path.as_ref(); + let data_dir = data_dir.as_ref(); + let mut loaded: Config = read_config_from_toml_file(path).map_err(|source| { + NymDataObservatoryError::ConfigLoadFailure { + path: path.to_path_buf(), + source, + } + })?; + loaded.data_dir = data_dir.to_path_buf(); + loaded.save_path = Some(path.to_path_buf()); + debug!("loaded config file from {}", path.display()); + Ok(loaded) + } + + #[allow(unused)] + pub fn read_from_toml_file>( + path: P, + data_dir: P, + ) -> Result { + Self::read_from_path(path, data_dir) + } + + pub fn read_from_toml_file_in_default_location() -> Result { + let config_path = default_config_filepath(); + let data_dir = Config::default_data_directory(&config_path)?; + Self::read_from_path(config_path, data_dir) + } +} diff --git a/nym-data-observatory/src/config/template.rs b/nym-data-observatory/src/config/template.rs new file mode 100644 index 00000000000..ddc3475aac4 --- /dev/null +++ 
b/nym-data-observatory/src/config/template.rs
@@ -0,0 +1,29 @@
+// Copyright 2024 - Nym Technologies SA
+// SPDX-License-Identifier: GPL-3.0-only
+
+// While using normal toml marshalling would have been way simpler with less overhead,
+// I think it's useful to have comments attached to the saved config file to explain behaviour of
+// particular fields.
+// Note: any changes to the template must be reflected in the appropriate structs.
+pub(crate) const CONFIG_TEMPLATE: &str = r#"
+# This is a TOML config file.
+# For more information, see https://github.com/toml-lang/toml
+
+[data_observatory_config]
+{{#each data_observatory_config.webhooks }}
+[[data_observatory_config.webhooks]]
+id='{{this.id}}'
+description='{{this.description}}'
+webhook_url='{{this.webhook_url}}'
+{{/each}}
+
+
+
+
+##### logging configuration options #####
+
+[logging]
+
+# TODO
+
+"#;
diff --git a/nym-data-observatory/src/db/mod.rs b/nym-data-observatory/src/db/mod.rs
new file mode 100644
index 00000000000..70c36a7b618
--- /dev/null
+++ b/nym-data-observatory/src/db/mod.rs
@@ -0,0 +1,32 @@
+use anyhow::{Result, anyhow};
+use sqlx::{Postgres, postgres::PgConnectOptions};
+use std::str::FromStr;
+
+pub(crate) mod models;
+pub(crate) mod queries {
+    pub mod price;
+    pub mod wasm;
+}
+
+pub(crate) type DbPool = sqlx::Pool<Postgres>;
+
+pub(crate) struct Storage {
+    pool: DbPool,
+}
+
+impl Storage {
+    pub async fn init(connection_url: String) -> Result<Storage> {
+        let connect_options = PgConnectOptions::from_str(&connection_url)?;
+
+        let pool = DbPool::connect_with(connect_options)
+            .await
+            .map_err(|err| anyhow!("Failed to connect to {}: {}", &connection_url, err))?;
+
+        Ok(Storage { pool })
+    }
+
+    /// Cloning pool is cheap, it's the same underlying set of connections
+    pub fn pool_owned(&self) -> DbPool {
+        self.pool.clone()
+    }
+}
diff --git a/nym-data-observatory/src/db/models.rs b/nym-data-observatory/src/db/models.rs
new file mode 100644
index 00000000000..31e754c7dab
--- /dev/null
+++ 
b/nym-data-observatory/src/db/models.rs @@ -0,0 +1,33 @@ +use serde::{Deserialize, Serialize}; +use utoipa::ToSchema; + +#[derive(Clone, Serialize, Deserialize, Debug, ToSchema)] +pub(crate) struct CurrencyPrices { + pub(crate) chf: f64, + pub(crate) usd: f64, + pub(crate) eur: f64, + pub(crate) gbp: f64, + pub(crate) btc: f64, +} + +// Struct to hold Coingecko response +#[derive(Clone, Serialize, Deserialize, Debug, ToSchema)] +pub(crate) struct CoingeckoPriceResponse { + pub(crate) nym: CurrencyPrices, +} + +#[derive(Clone, Deserialize, Debug, ToSchema)] +pub(crate) struct PriceRecord { + pub(crate) timestamp: i64, + pub(crate) nym: CurrencyPrices, +} + +#[derive(Serialize, Deserialize, Debug, ToSchema)] +pub(crate) struct PriceHistory { + pub(crate) timestamp: i64, + pub(crate) chf: f64, + pub(crate) usd: f64, + pub(crate) eur: f64, + pub(crate) gbp: f64, + pub(crate) btc: f64, +} diff --git a/nym-data-observatory/src/db/queries/mod.rs b/nym-data-observatory/src/db/queries/mod.rs new file mode 100644 index 00000000000..5cd63b2e1cc --- /dev/null +++ b/nym-data-observatory/src/db/queries/mod.rs @@ -0,0 +1,7 @@ +mod price; +mod wasm; + +// re-exporting allows us to access all queries via `queries::bla`` +pub(crate) use payments::{get_last_checked_height, insert_payment}; +pub(crate) use price::{get_latest_price, insert_nym_prices}; +pub(crate) use wasm::{insert_wasm_execute}; diff --git a/nym-data-observatory/src/db/queries/price.rs b/nym-data-observatory/src/db/queries/price.rs new file mode 100644 index 00000000000..493a1031d73 --- /dev/null +++ b/nym-data-observatory/src/db/queries/price.rs @@ -0,0 +1,119 @@ +use crate::db::DbPool; +use crate::db::models::{PriceHistory, PriceRecord}; +use chrono::Local; +use std::ops::Sub; + +pub(crate) async fn insert_nym_prices( + pool: &DbPool, + price_data: PriceRecord, +) -> anyhow::Result<()> { + let mut conn = pool.acquire().await?; + let timestamp = price_data.timestamp; + sqlx::query!( + "INSERT INTO price_history + 
(timestamp, chf, usd, eur, gbp, btc) + VALUES + ($1, $2, $3, $4, $5, $6) + ON CONFLICT(timestamp) DO UPDATE SET + chf=excluded.chf, + usd=excluded.usd, + eur=excluded.eur, + gbp=excluded.gbp, + btc=excluded.btc;", + timestamp, + price_data.nym.chf, + price_data.nym.usd, + price_data.nym.eur, + price_data.nym.gbp, + price_data.nym.btc, + ) + .execute(&mut *conn) + .await?; + + Ok(()) +} + +pub(crate) async fn get_latest_price(pool: &DbPool) -> anyhow::Result { + let result = sqlx::query!( + "SELECT timestamp, chf, usd, eur, gbp, btc FROM price_history ORDER BY timestamp DESC LIMIT 1;" + ) + .fetch_one(pool) + .await?; + + Ok(PriceHistory { + timestamp: result.timestamp, + chf: result.chf, + usd: result.usd, + eur: result.eur, + gbp: result.gbp, + btc: result.btc, + }) +} + +pub(crate) async fn get_average_price(pool: &DbPool) -> anyhow::Result { + // now less 1 day + let earliest_timestamp = Local::now().sub(chrono::Duration::days(1)).timestamp(); + + let result = sqlx::query!( + "SELECT timestamp, chf, usd, eur, gbp, btc FROM price_history WHERE timestamp >= $1;", + earliest_timestamp + ) + .fetch_all(pool) + .await?; + + let mut price = PriceHistory { + timestamp: Local::now().timestamp(), + chf: 0f64, + usd: 0f64, + eur: 0f64, + gbp: 0f64, + btc: 0f64, + }; + + let mut chf_count = 0; + let mut usd_count = 0; + let mut eur_count = 0; + let mut gbp_count = 0; + let mut btc_count = 0; + + for p in &result { + if p.chf != 0f64 { + price.chf += p.chf; + chf_count += 1; + } + if p.usd != 0f64 { + price.usd += p.usd; + usd_count += 1; + } + if p.eur != 0f64 { + price.eur += p.eur; + eur_count += 1; + } + if p.gbp != 0f64 { + price.gbp += p.gbp; + gbp_count += 1; + } + if p.btc != 0f64 { + price.btc += p.btc; + btc_count += 1; + } + } + + if chf_count > 0 { + price.chf /= chf_count as f64; + } + if usd_count > 0 { + price.usd /= usd_count as f64; + } + if eur_count > 0 { + price.eur /= eur_count as f64; + } + if gbp_count > 0 { + price.gbp /= gbp_count as f64; + } + if 
btc_count > 0 { + price.btc /= btc_count as f64; + } + + Ok(price) +} diff --git a/nym-data-observatory/src/db/queries/wasm.rs b/nym-data-observatory/src/db/queries/wasm.rs new file mode 100644 index 00000000000..252efe69b97 --- /dev/null +++ b/nym-data-observatory/src/db/queries/wasm.rs @@ -0,0 +1,54 @@ +use crate::db::DbPool; +use anyhow::Result; +use nyxd_scraper_psql::models::DbCoin; +use serde_json::Value; +use time::PrimitiveDateTime; + +#[allow(clippy::too_many_arguments)] +pub async fn insert_wasm_execute( + pool: &DbPool, + sender: String, + contract_address: String, + message_type: String, + raw_contract_message: &Value, + funds: Option>, + executed_at: PrimitiveDateTime, + height: i64, + hash: String, + message_index: i64, + memo: String, + fee: &Vec, +) -> Result<()> { + sqlx::query!( + r#" + INSERT INTO wasm_execute_contract ( + sender, + contract_address, + message_type, + raw_contract_message, + funds, + executed_at, + height, + hash, + message_index, + memo, + fee + ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11) + "#, + sender, + contract_address, + message_type, + raw_contract_message, + &funds as _, + executed_at, + height, + hash, + message_index, + memo, + &fee as _, + ) + .execute(pool) + .await?; + + Ok(()) +} diff --git a/nym-data-observatory/src/env.rs b/nym-data-observatory/src/env.rs new file mode 100644 index 00000000000..cff2212b57f --- /dev/null +++ b/nym-data-observatory/src/env.rs @@ -0,0 +1,30 @@ +// Copyright 2024 - Nym Technologies SA +// SPDX-License-Identifier: GPL-3.0-only + +#[allow(unused)] +pub mod vars { + pub const NYXD_WS: &str = "NYXD_WS"; + pub const NYXD: &str = "NYXD"; + + pub const NYM_DATA_OBSERVATORY_NO_BANNER_ARG: &str = "NYM_DATA_OBSERVATORY_NO_BANNER"; + pub const NYM_DATA_OBSERVATORY_CONFIG_ENV_FILE_ARG: &str = + "NYM_DATA_OBSERVATORY_CONFIG_ENV_FILE_ARG"; + + pub const NYM_DATA_OBSERVATORY_DB_URL: &str = "NYM_DATA_OBSERVATORY_DB_URL"; + + pub const NYXD_SCRAPER_START_HEIGHT: &str = 
"NYXD_SCRAPER_START_HEIGHT"; + pub const NYXD_SCRAPER_USE_BEST_EFFORT_START_HEIGHT: &str = + "NYXD_SCRAPER_USE_BEST_EFFORT_START_HEIGHT"; + + pub const NYXD_SCRAPER_UNSAFE_NUKE_DB: &str = "NYXD_SCRAPER_UNSAFE_NUKE_DB"; + + pub const NYM_DATA_OBSERVATORY_ID_ARG: &str = "NYM_DATA_OBSERVATORY_ID"; + pub const NYM_DATA_OBSERVATORY_OUTPUT_ARG: &str = "NYM_DATA_OBSERVATORY_OUTPUT"; + + pub const NYM_DATA_OBSERVATORY_CONFIG_PATH_ARG: &str = "NYM_DATA_OBSERVATORY_CONFIG"; + + pub const NYM_DATA_OBSERVATORY_WATCH_CHAIN_MESSAGE_TYPES: &str = + "NYM_DATA_OBSERVATORY_WATCH_CHAIN_MESSAGE_TYPES"; + pub const NYM_DATA_OBSERVATORY_WEBHOOK_URL: &str = "NYM_DATA_OBSERVATORY_WEBHOOK_URL"; + pub const NYM_DATA_OBSERVATORY_WEBHOOK_AUTH: &str = "NYM_DATA_OBSERVATORY_WEBHOOK_AUTH"; +} diff --git a/nym-data-observatory/src/error.rs b/nym-data-observatory/src/error.rs new file mode 100644 index 00000000000..03d49219ea1 --- /dev/null +++ b/nym-data-observatory/src/error.rs @@ -0,0 +1,48 @@ +use std::io; +use std::path::PathBuf; +use thiserror::Error; + +#[derive(Debug, Error)] +pub enum NymDataObservatoryError { + // #[error("failed to save config file using path '{}'. detailed message: {source}", path.display())] + // ConfigSaveFailure { + // path: PathBuf, + // #[source] + // source: io::Error, + // }, + #[error("failed to save config file using path '{}'. detailed message: {source}", path.display())] + UnformattedConfigSaveFailure { + path: PathBuf, + #[source] + source: nym_config::error::NymConfigTomlError, + }, + + #[error("could not derive path to data directory of this nyx chain watcher")] + DataDirDerivationFailure, + + #[error( + "please provide a database connection string as an env var, cli argument or in a config file" + )] + DbConnectionStringMissing, + + // #[error("could not derive path to config directory of this nyx chain watcher")] + // ConfigDirDerivationFailure, + #[error("failed to load config file using path '{}'. 
detailed message: {source}", path.display())] + ConfigLoadFailure { + path: PathBuf, + #[source] + source: io::Error, + }, + + #[error(transparent)] + FileIoFailure(#[from] io::Error), + + #[error(transparent)] + AnyhowFailure(#[from] anyhow::Error), + + #[error(transparent)] + NymConfigTomlE(#[from] nym_config::error::NymConfigTomlError), + + #[error(transparent)] + UrlParseFailure(#[from] url::ParseError), +} diff --git a/nym-data-observatory/src/http/api/mod.rs b/nym-data-observatory/src/http/api/mod.rs new file mode 100644 index 00000000000..ee0fbe85634 --- /dev/null +++ b/nym-data-observatory/src/http/api/mod.rs @@ -0,0 +1,79 @@ +use anyhow::anyhow; +use axum::{Router, response::Redirect}; +use tokio::net::ToSocketAddrs; +use tower_http::{cors::CorsLayer, trace::TraceLayer}; +use utoipa::OpenApi; +use utoipa_swagger_ui::SwaggerUi; + +use crate::http::{api_docs, server::HttpServer, state::AppState}; + +pub(crate) mod price; +pub(crate) mod status; + +pub(crate) struct RouterBuilder { + unfinished_router: Router, +} + +impl RouterBuilder { + pub(crate) fn with_default_routes() -> Self { + let router = Router::new() + .merge( + SwaggerUi::new("/swagger") + .url("/api-docs/openapi.json", api_docs::ApiDoc::openapi()), + ) + .route( + "/", + axum::routing::get(|| async { Redirect::permanent("/swagger") }), + ) + .nest( + "/v1", + Router::new() + .nest("/status", status::routes()) + .nest("/price", price::routes()), + ); + + Self { + unfinished_router: router, + } + } + + pub(crate) fn with_state(self, state: AppState) -> RouterWithState { + RouterWithState { + router: self.finalize_routes().with_state(state), + } + } + + fn finalize_routes(self) -> Router { + // layers added later wrap earlier layers + self.unfinished_router + // CORS layer needs to wrap other API layers + .layer(setup_cors()) + // logger should be outermost layer + .layer(TraceLayer::new_for_http()) + } +} + +pub(crate) struct RouterWithState { + router: Router, +} + +impl RouterWithState { + 
pub(crate) async fn build_server( + self, + bind_address: A, + ) -> anyhow::Result { + tokio::net::TcpListener::bind(bind_address) + .await + .map(|listener| HttpServer::new(self.router, listener)) + .map_err(|err| anyhow!("Couldn't bind to address due to {}", err)) + } +} + +fn setup_cors() -> CorsLayer { + use axum::http::Method; + CorsLayer::new() + .allow_origin(tower_http::cors::Any) + .allow_methods([Method::POST, Method::GET, Method::PATCH, Method::OPTIONS]) + .allow_headers(tower_http::cors::Any) + .allow_credentials(false) +} diff --git a/nym-data-observatory/src/http/api/price.rs b/nym-data-observatory/src/http/api/price.rs new file mode 100644 index 00000000000..3a7829d5983 --- /dev/null +++ b/nym-data-observatory/src/http/api/price.rs @@ -0,0 +1,44 @@ +use crate::db::models::PriceHistory; +use crate::db::queries::price::{get_average_price, get_latest_price}; +use crate::http::error::Error; +use crate::http::error::HttpResult; +use crate::http::state::AppState; +use axum::{Json, Router, extract::State}; + +pub(crate) fn routes() -> Router { + Router::new() + .route("/", axum::routing::get(price)) + .route("/average", axum::routing::get(average_price)) +} + +#[utoipa::path( + tag = "NYM Price", + get, + path = "/v1/price", + responses( + (status = 200, body = String) + ) +)] +/// Fetch the latest price cached by this API +async fn price(State(state): State) -> HttpResult> { + get_latest_price(state.db_pool()) + .await + .map(Json::from) + .map_err(|_| Error::internal()) +} + +#[utoipa::path( + tag = "NYM Price", + get, + path = "/v1/price/average", + responses( + (status = 200, body = String) + ) +)] +/// Fetch the average price cached by this API +async fn average_price(State(state): State) -> HttpResult> { + get_average_price(state.db_pool()) + .await + .map(Json::from) + .map_err(|_| Error::internal()) +} diff --git a/nym-data-observatory/src/http/api/status.rs b/nym-data-observatory/src/http/api/status.rs new file mode 100644 index 
00000000000..35d7168cd94 --- /dev/null +++ b/nym-data-observatory/src/http/api/status.rs @@ -0,0 +1,79 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: GPL-3.0-only + +use crate::http::models::status::{ + ApiStatus, HealthResponse, PriceScraperLastError, PriceScraperLastSuccess, + PriceScraperStatusResponse, +}; +use crate::http::state::{AppState, PriceScraperState, StatusState}; +use axum::extract::State; +use axum::routing::get; +use axum::{Json, Router}; +use nym_bin_common::build_information::BinaryBuildInformationOwned; + +pub(crate) fn routes() -> Router { + Router::new() + .route("/health", get(health)) + .route("/build-information", get(build_information)) + .route("/price-scraper", get(price_scraper_status)) +} + +#[utoipa::path( + tag = "Status", + get, + path = "/build-information", + context_path = "/v1/status", + responses( + (status = 200, body = BinaryBuildInformationOwned) + ) +)] +async fn build_information(State(state): State) -> Json { + Json(state.build_information.to_owned()) +} + +#[utoipa::path( + tag = "Status", + get, + path = "/health", + context_path = "/v1/status", + responses( + (status = 200, body = HealthResponse) + ) +)] +async fn health(State(state): State) -> Json { + let uptime = state.startup_time.elapsed(); + + let health = HealthResponse { + status: ApiStatus::Up, + uptime: uptime.as_secs(), + }; + Json(health) +} + +#[utoipa::path( + tag = "Status", + get, + path = "/price-scraper", + context_path = "/v1/status", + responses( + (status = 200, body = PriceScraperStatusResponse) + ) +)] +pub(crate) async fn price_scraper_status( + State(state): State, +) -> Json { + let guard = state.inner.read().await; + Json(PriceScraperStatusResponse { + last_success: guard + .last_success + .as_ref() + .map(|s| PriceScraperLastSuccess { + timestamp: s.timestamp, + response: s.response.clone(), + }), + last_failure: guard.last_failure.as_ref().map(|f| PriceScraperLastError { + timestamp: f.timestamp, + message: 
f.message.clone(), + }), + }) +} diff --git a/nym-data-observatory/src/http/api_docs.rs b/nym-data-observatory/src/http/api_docs.rs new file mode 100644 index 00000000000..c7dbe0118d0 --- /dev/null +++ b/nym-data-observatory/src/http/api_docs.rs @@ -0,0 +1,14 @@ +use utoipa::OpenApi; +use utoipauto::utoipauto; + +// manually import external structs which are behind feature flags because they +// can't be automatically discovered +// https://github.com/ProbablyClem/utoipauto/issues/13#issuecomment-1974911829 +#[utoipauto(paths = "./nym-data-observatory/src")] +#[derive(OpenApi)] +#[openapi( + info(title = "Nym Data Observatory API"), + tags(), + components(schemas()) +)] +pub(super) struct ApiDoc; diff --git a/nym-data-observatory/src/http/error.rs b/nym-data-observatory/src/http/error.rs new file mode 100644 index 00000000000..fa6b274b69b --- /dev/null +++ b/nym-data-observatory/src/http/error.rs @@ -0,0 +1,21 @@ +pub(crate) type HttpResult = Result; + +pub(crate) struct Error { + message: String, + status: axum::http::StatusCode, +} + +impl Error { + pub(crate) fn internal() -> Self { + Self { + message: String::from("Internal server error"), + status: axum::http::StatusCode::INTERNAL_SERVER_ERROR, + } + } +} + +impl axum::response::IntoResponse for Error { + fn into_response(self) -> axum::response::Response { + (self.status, self.message).into_response() + } +} diff --git a/nym-data-observatory/src/http/mod.rs b/nym-data-observatory/src/http/mod.rs new file mode 100644 index 00000000000..c57d17c47b3 --- /dev/null +++ b/nym-data-observatory/src/http/mod.rs @@ -0,0 +1,6 @@ +pub(crate) mod api; +pub(crate) mod api_docs; +pub(crate) mod error; +pub(crate) mod models; +pub(crate) mod server; +pub(crate) mod state; diff --git a/nym-data-observatory/src/http/models.rs b/nym-data-observatory/src/http/models.rs new file mode 100644 index 00000000000..34d6d305f1d --- /dev/null +++ b/nym-data-observatory/src/http/models.rs @@ -0,0 +1,70 @@ +// Copyright 2025 - Nym 
Technologies SA +// SPDX-License-Identifier: GPL-3.0-only + +// if we ever create some sort of chain watcher client, those would need to be extracted + +pub mod status { + use crate::config::data_observatory::WebhookConfig; + use crate::db::models::CoingeckoPriceResponse; + use serde::{Deserialize, Serialize}; + use time::OffsetDateTime; + use url::Url; + use utoipa::ToSchema; + + #[derive(Clone, Copy, Debug, Serialize, Deserialize, schemars::JsonSchema, ToSchema)] + #[serde(rename_all = "lowercase")] + pub enum ApiStatus { + Up, + } + + #[derive(Clone, Copy, Debug, Serialize, Deserialize, schemars::JsonSchema, ToSchema)] + pub struct HealthResponse { + pub status: ApiStatus, + pub uptime: u64, + } + + #[derive(Debug, Serialize, Deserialize, ToSchema)] + pub struct ActiveWebhooksResponse { + pub watchers: Vec, + } + + #[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] + pub struct Webhook { + pub id: String, + pub description: String, + #[schema(value_type = String)] + pub webhook_url: Url, + pub watched_message_types: Vec, + } + + impl From<&WebhookConfig> for Webhook { + fn from(value: &WebhookConfig) -> Self { + Webhook { + id: value.id.clone(), + description: value.description.clone().unwrap_or_default(), + webhook_url: value.webhook_url.clone(), + watched_message_types: value.watch_for_chain_message_types.clone(), + } + } + } + + #[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] + pub(crate) struct PriceScraperStatusResponse { + pub(crate) last_success: Option, + pub(crate) last_failure: Option, + } + + #[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] + pub(crate) struct PriceScraperLastSuccess { + #[serde(with = "time::serde::rfc3339")] + pub(crate) timestamp: OffsetDateTime, + pub(crate) response: CoingeckoPriceResponse, + } + + #[derive(Debug, Clone, Serialize, Deserialize, ToSchema)] + pub(crate) struct PriceScraperLastError { + #[serde(with = "time::serde::rfc3339")] + pub(crate) timestamp: OffsetDateTime, + pub(crate) message: 
String, + } +} diff --git a/nym-data-observatory/src/http/server.rs b/nym-data-observatory/src/http/server.rs new file mode 100644 index 00000000000..fd01f01f871 --- /dev/null +++ b/nym-data-observatory/src/http/server.rs @@ -0,0 +1,59 @@ +use axum::Router; +use core::net::SocketAddr; +use tokio::net::TcpListener; +use tokio_util::sync::WaitForCancellationFutureOwned; + +use crate::config::Config; +use crate::http::state::PriceScraperState; +use crate::{ + db::DbPool, + http::{api::RouterBuilder, state::AppState}, +}; + +pub(crate) async fn build_http_api( + db_pool: DbPool, + config: &Config, + http_port: u16, + price_scraper_state: PriceScraperState, +) -> anyhow::Result { + let router_builder = RouterBuilder::with_default_routes(); + + let state = AppState::new( + db_pool, + config + .data_observatory_config + .webhooks + .iter() + .map(Into::into) + .collect(), + price_scraper_state, + ); + let router = router_builder.with_state(state); + + let bind_addr = format!("0.0.0.0:{http_port}"); + let server = router.build_server(bind_addr).await?; + Ok(server) +} + +pub(crate) struct HttpServer { + router: Router, + listener: TcpListener, +} + +impl HttpServer { + pub(crate) fn new(router: Router, listener: TcpListener) -> Self { + Self { router, listener } + } + + pub(crate) async fn run(self, receiver: WaitForCancellationFutureOwned) -> std::io::Result<()> { + // into_make_service_with_connect_info allows us to see client ip address + // in middleware, for logging, TLS, routing etc. 
+ axum::serve( + self.listener, + self.router + .into_make_service_with_connect_info::(), + ) + .with_graceful_shutdown(receiver) + .await + } +} diff --git a/nym-data-observatory/src/http/state.rs b/nym-data-observatory/src/http/state.rs new file mode 100644 index 00000000000..d3ed8acf09d --- /dev/null +++ b/nym-data-observatory/src/http/state.rs @@ -0,0 +1,124 @@ +use crate::db::DbPool; +use crate::db::models::CoingeckoPriceResponse; +use crate::http::models::status::Webhook; +use axum::extract::FromRef; +use nym_bin_common::bin_info; +use nym_bin_common::build_information::BinaryBuildInformation; +use std::ops::Deref; +use std::sync::Arc; +use time::OffsetDateTime; +use tokio::sync::RwLock; +use tokio::time::Instant; + +#[derive(Debug, Clone)] +pub(crate) struct AppState { + db_pool: DbPool, + #[allow(dead_code)] + pub(crate) registered_webhooks: Arc>, + pub(crate) status_state: StatusState, + pub(crate) price_scraper_state: PriceScraperState, +} + +impl AppState { + pub(crate) fn new( + db_pool: DbPool, + registered_payment_watchers: Vec, + price_scraper_state: PriceScraperState, + ) -> Self { + Self { + db_pool, + registered_webhooks: Arc::new(registered_payment_watchers), + status_state: Default::default(), + price_scraper_state, + } + } + + pub(crate) fn db_pool(&self) -> &DbPool { + &self.db_pool + } +} + +#[derive(Clone, Debug)] +pub(crate) struct StatusState { + inner: Arc, +} + +impl Default for StatusState { + fn default() -> Self { + StatusState { + inner: Arc::new(StatusStateInner { + startup_time: Instant::now(), + build_information: bin_info!(), + }), + } + } +} + +impl Deref for StatusState { + type Target = StatusStateInner; + fn deref(&self) -> &Self::Target { + &self.inner + } +} + +#[derive(Debug)] +pub(crate) struct StatusStateInner { + pub(crate) startup_time: Instant, + pub(crate) build_information: BinaryBuildInformation, +} + +#[derive(Debug, Clone)] +pub(crate) struct PriceScraperState { + pub(crate) inner: Arc>, +} + +impl 
PriceScraperState { + pub(crate) fn new() -> Self { + PriceScraperState { + inner: Arc::new(Default::default()), + } + } + + pub(crate) async fn new_failure>(&self, error: S) { + self.inner.write().await.last_failure = Some(PriceScraperLastError { + timestamp: OffsetDateTime::now_utc(), + message: error.into(), + }) + } + pub(crate) async fn new_success(&self, response: CoingeckoPriceResponse) { + self.inner.write().await.last_success = Some(PriceScraperLastSuccess { + timestamp: OffsetDateTime::now_utc(), + response, + }) + } +} + +#[derive(Debug, Default)] +pub(crate) struct PriceScraperStateInner { + pub(crate) last_success: Option, + pub(crate) last_failure: Option, +} + +#[derive(Debug)] +pub(crate) struct PriceScraperLastSuccess { + pub(crate) timestamp: OffsetDateTime, + pub(crate) response: CoingeckoPriceResponse, +} + +#[derive(Debug)] +pub(crate) struct PriceScraperLastError { + pub(crate) timestamp: OffsetDateTime, + pub(crate) message: String, +} + +impl FromRef for StatusState { + fn from_ref(input: &AppState) -> Self { + input.status_state.clone() + } +} + +impl FromRef for PriceScraperState { + fn from_ref(input: &AppState) -> Self { + input.price_scraper_state.clone() + } +} diff --git a/nym-data-observatory/src/logging.rs b/nym-data-observatory/src/logging.rs new file mode 100644 index 00000000000..dfcdefaadfc --- /dev/null +++ b/nym-data-observatory/src/logging.rs @@ -0,0 +1,43 @@ +use tracing::level_filters::LevelFilter; +use tracing_subscriber::{EnvFilter, filter::Directive}; + +pub(crate) fn setup_tracing_logger() { + fn directive_checked(directive: String) -> Directive { + directive.parse().expect("Failed to parse log directive") + } + + let log_builder = tracing_subscriber::fmt() + // Use a more compact, abbreviated log format + .compact() + // Display source code file paths + .with_file(true) + // Display source code line numbers + .with_line_number(true) + // Don't display the event's target (module path) + .with_target(false); + + let mut 
filter = EnvFilter::builder() + // if RUST_LOG isn't set, set default level + .with_default_directive(LevelFilter::INFO.into()) + .from_env_lossy(); + // these crates are more granularly filtered + let filter_crates = [ + "nym_bin_common", + "nym_explorer_client", + "nym_network_defaults", + "nym_validator_client", + "reqwest", + "rustls", + "hyper", + "sqlx", + "h2", + "tendermint_rpc", + "tower_http", + "axum", + ]; + for crate_name in filter_crates { + filter = filter.add_directive(directive_checked(format!("{crate_name}=warn"))); + } + + log_builder.with_env_filter(filter).init(); +} diff --git a/nym-data-observatory/src/main.rs b/nym-data-observatory/src/main.rs new file mode 100644 index 00000000000..0be492db19d --- /dev/null +++ b/nym-data-observatory/src/main.rs @@ -0,0 +1,35 @@ +use clap::{Parser, crate_name, crate_version}; +use nym_bin_common::bin_info_owned; +use nym_bin_common::logging::maybe_print_banner; +use nym_network_defaults::setup_env; +use tracing::info; + +mod chain_scraper; +mod cli; +mod config; +mod db; +mod env; +mod error; +mod http; +mod logging; +pub mod models; +mod modules; +mod price_scraper; + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + let cli = cli::Cli::parse(); + setup_env(cli.config_env_file.as_ref()); + logging::setup_tracing_logger(); + + if !cli.no_banner { + maybe_print_banner(crate_name!(), crate_version!()); + } + + let bin_info = bin_info_owned!(); + info!("using the following version: {bin_info}"); + + cli.execute().await?; + + Ok(()) +} diff --git a/nym-data-observatory/src/models.rs b/nym-data-observatory/src/models.rs new file mode 100644 index 00000000000..692cfb4802b --- /dev/null +++ b/nym-data-observatory/src/models.rs @@ -0,0 +1,22 @@ +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use utoipa::ToSchema; +use utoipa::r#gen::serde_json; + +#[derive(Serialize, Deserialize, Clone, JsonSchema, ToSchema)] +pub struct WebhookPayload { + pub height: u64, + pub transaction_hash: String, 
+ pub message_index: u64, + pub message: Option, +} + +pub mod openapi_schema { + use super::*; + + #[derive(ToSchema)] + pub struct Coin { + pub denom: String, + pub amount: String, + } +} diff --git a/nym-data-observatory/src/modules/mod.rs b/nym-data-observatory/src/modules/mod.rs new file mode 100644 index 00000000000..30d27c6b5bf --- /dev/null +++ b/nym-data-observatory/src/modules/mod.rs @@ -0,0 +1,4 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: GPL-3.0-only + +pub(crate) mod wasm; diff --git a/nym-data-observatory/src/modules/wasm.rs b/nym-data-observatory/src/modules/wasm.rs new file mode 100644 index 00000000000..98fe1dc8099 --- /dev/null +++ b/nym-data-observatory/src/modules/wasm.rs @@ -0,0 +1,117 @@ +// Copyright 2025 - Nym Technologies SA +// SPDX-License-Identifier: GPL-3.0-only + +use crate::db::DbPool; +use crate::db::queries::wasm::insert_wasm_execute; +use async_trait::async_trait; +use cosmrs::proto::cosmwasm::wasm::v1::MsgExecuteContract; +use cosmrs::proto::prost::Message; +use nym_validator_client::nyxd::{Any, Name}; +use nyxd_scraper_psql::models::DbCoin; +use nyxd_scraper_psql::{ + MsgModule, NyxdScraperTransaction, ParsedTransactionResponse, ScraperError, +}; +use serde_json::Value; +use time::{OffsetDateTime, PrimitiveDateTime}; +use tracing::{error, trace}; +use utoipa::r#gen::serde_json; + +pub struct WasmModule { + connection_pool: DbPool, +} + +impl WasmModule { + pub fn new(connection_pool: DbPool) -> Self { + WasmModule { connection_pool } + } +} + +#[async_trait] +impl MsgModule for WasmModule { + fn type_url(&self) -> String { + MsgExecuteContract::type_url() + } + + async fn handle_msg( + &mut self, + index: usize, + msg: &Any, + tx: &ParsedTransactionResponse, + _storage_tx: &mut dyn NyxdScraperTransaction, + ) -> Result<(), ScraperError> { + let message = serde_json::to_value(tx.parsed_messages.get(&index)).unwrap_or_default(); + let value = serde_json::to_value(message.clone()).unwrap_or_default(); + 
let wasm_message_type = get_wasm_message_type(&value); + let fee: Vec = tx + .tx + .auth_info + .fee + .amount + .clone() + .into_iter() + .map(|x| DbCoin { + amount: x.amount.to_string(), + denom: x.denom.to_string(), + }) + .collect(); + + let offset_datetime: OffsetDateTime = tx.block.header.time.into(); + let executed_at = PrimitiveDateTime::new(offset_datetime.date(), offset_datetime.time()); + + let height = tx.height.value() as i64; + let hash = tx.hash.to_string(); + let memo = tx.tx.body.memo.clone(); + + match MsgExecuteContract::decode(msg.value.as_ref()) { + Ok(wasm_execute_msg) => { + let funds: Vec = wasm_execute_msg + .funds + .clone() + .into_iter() + .map(|x| x.into()) + .collect(); + let contract = wasm_execute_msg.contract; + let sender = wasm_execute_msg.sender; + + if let Err(err) = insert_wasm_execute( + &self.connection_pool, + sender, + contract, + wasm_message_type, + &message, + Some(funds), + executed_at, + height, + hash, + index as i64, + memo, + &fee, + ) + .await + { + error!("Error persisting wasm contract execution message: {}", err); + } + } + Err(err) => { + error!("Error decoding message: {}", err); + } + } + + Ok(()) + } +} + +fn get_first_field_name(value: Option<&Value>) -> Option { + trace!("value:\n{value:?}"); + match value { + Some(value) => match value.as_object() { + Some(map) => map.keys().next().cloned(), + None => None, + }, + None => None, + } +} + +fn get_wasm_message_type(value: &Value) -> String { + get_first_field_name(value.get("msg")).unwrap_or_default() +} diff --git a/nym-data-observatory/src/price_scraper/mod.rs b/nym-data-observatory/src/price_scraper/mod.rs new file mode 100644 index 00000000000..6442118579d --- /dev/null +++ b/nym-data-observatory/src/price_scraper/mod.rs @@ -0,0 +1,76 @@ +use crate::db::{ + models::{CoingeckoPriceResponse, PriceRecord}, + queries::price::insert_nym_prices, +}; +use core::str; +use tokio::time::Duration; + +use crate::db::DbPool; +use 
crate::http::state::PriceScraperState; + +const REFRESH_DELAY: Duration = Duration::from_secs(300); +const FAILURE_RETRY_DELAY: Duration = Duration::from_secs(60 * 2); +const COINGECKO_API_URL: &str = + "https://api.coingecko.com/api/v3/simple/price?ids=nym&vs_currencies=chf,usd,eur,gbp,btc"; + +pub(crate) struct PriceScraper { + shared_state: PriceScraperState, + db_pool: DbPool, +} + +impl PriceScraper { + pub(crate) fn new(shared_state: PriceScraperState, db_pool: DbPool) -> Self { + PriceScraper { + shared_state, + db_pool, + } + } + + async fn get_coingecko_prices(&self) -> anyhow::Result { + tracing::info!("💰 Fetching CoinGecko prices from {COINGECKO_API_URL}"); + + let response = reqwest::get(COINGECKO_API_URL) + .await? + .json::() + .await; + + tracing::info!("Got response {:?}", response); + match response { + Ok(resp) => { + let price_record = PriceRecord { + timestamp: time::OffsetDateTime::now_utc().unix_timestamp(), + nym: resp.nym.clone(), + }; + + insert_nym_prices(&self.db_pool, price_record).await?; + Ok(resp) + } + Err(err) => { + //tracing::info!("💰 CoinGecko price response: {:?}", response); + tracing::error!("Error sending request: {err}"); + Err(err.into()) + } + } + } + + pub(crate) async fn run(&self) { + loop { + tracing::info!("Running in a loop 🏃"); + match self.get_coingecko_prices().await { + Ok(coingecko_price_response) => { + self.shared_state + .new_success(coingecko_price_response) + .await; + tracing::info!("✅ Successfully fetched CoinGecko prices"); + tokio::time::sleep(REFRESH_DELAY).await; + } + Err(err) => { + tracing::error!("❌ Failed to get CoinGecko prices: {err}"); + tracing::info!("Retrying in {}s...", FAILURE_RETRY_DELAY.as_secs()); + self.shared_state.new_failure(err.to_string()).await; + tokio::time::sleep(FAILURE_RETRY_DELAY).await; + } + } + } + } +} diff --git a/nym-validator-rewarder/Cargo.toml b/nym-validator-rewarder/Cargo.toml index be1ae056dc2..0655bd21eff 100644 --- a/nym-validator-rewarder/Cargo.toml +++ 
b/nym-validator-rewarder/Cargo.toml @@ -44,7 +44,7 @@ nym-task = { path = "../common/task" } nym-validator-client = { path = "../common/client-libs/validator-client" } nym-http-api-client = { path = "../common/http-api-client" } nym-coconut-dkg-common = { path = "../common/cosmwasm-smart-contracts/coconut-dkg" } -nyxd-scraper = { path = "../common/nyxd-scraper" } +nyxd-scraper-sqlite = { path = "../common/nyxd-scraper-sqlite" } nym-ticketbooks-merkle = { path = "../common/ticketbooks-merkle" } nym-serde-helpers = { path = "../common/serde-helpers", features = ["base64"] } nym-pemstore = { path = "../common/pemstore" } diff --git a/nym-validator-rewarder/src/cli/process_block.rs b/nym-validator-rewarder/src/cli/process_block.rs index 0174f1165c6..ba802de2750 100644 --- a/nym-validator-rewarder/src/cli/process_block.rs +++ b/nym-validator-rewarder/src/cli/process_block.rs @@ -3,7 +3,7 @@ use crate::cli::{ConfigOverridableArgs, try_load_current_config}; use crate::error::NymRewarderError; -use nyxd_scraper::NyxdScraper; +use nyxd_scraper_sqlite::SqliteNyxdScraper; use std::path::PathBuf; #[derive(Debug, clap::Args)] @@ -24,7 +24,7 @@ pub(crate) async fn execute(args: Args) -> Result<(), NymRewarderError> { let config = try_load_current_config(&args.custom_config_path)?.with_override(args.config_override); - NyxdScraper::new(config.scraper_config()) + SqliteNyxdScraper::new(config.scraper_config()?) .await? 
.unsafe_process_single_block(args.height) .await?; diff --git a/nym-validator-rewarder/src/cli/process_until.rs b/nym-validator-rewarder/src/cli/process_until.rs index 159f954e574..0a34c0852f2 100644 --- a/nym-validator-rewarder/src/cli/process_until.rs +++ b/nym-validator-rewarder/src/cli/process_until.rs @@ -3,7 +3,7 @@ use crate::cli::{ConfigOverridableArgs, try_load_current_config}; use crate::error::NymRewarderError; -use nyxd_scraper::NyxdScraper; +use nyxd_scraper_sqlite::SqliteNyxdScraper; use std::path::PathBuf; #[derive(Debug, clap::Args)] @@ -37,7 +37,7 @@ pub(crate) async fn execute(args: Args) -> Result<(), NymRewarderError> { let config = try_load_current_config(&args.custom_config_path)?.with_override(args.config_override); - NyxdScraper::new(config.scraper_config()) + SqliteNyxdScraper::new(config.scraper_config()?) .await? .unsafe_process_block_range(args.start_height, args.stop_height) .await?; diff --git a/nym-validator-rewarder/src/config/mod.rs b/nym-validator-rewarder/src/config/mod.rs index 618d4ef27a5..146f971bada 100644 --- a/nym-validator-rewarder/src/config/mod.rs +++ b/nym-validator-rewarder/src/config/mod.rs @@ -12,7 +12,7 @@ use nym_config::{ must_get_home, read_config_from_toml_file, save_formatted_config_to_file, }; use nym_validator_client::nyxd::{AccountId, Coin}; -use nyxd_scraper::{PruningOptions, StartingBlockOpts}; +use nyxd_scraper_sqlite::{PruningOptions, StartingBlockOpts}; use serde::{Deserialize, Serialize}; use serde_with::{DisplayFromStr, serde_as}; use std::io; @@ -119,18 +119,23 @@ impl Config { } } - pub fn scraper_config(&self) -> nyxd_scraper::Config { - nyxd_scraper::Config { + pub fn scraper_config(&self) -> Result { + let database_storage = self.storage_paths.nyxd_scraper.as_path(); + let database_storage = database_storage + .to_str() + .ok_or(NymRewarderError::ConfigError)? 
+ .to_string(); + Ok(nyxd_scraper_sqlite::Config { websocket_url: self.nyxd_scraper.websocket_url.clone(), rpc_url: self.base.upstream_nyxd.clone(), - database_path: self.storage_paths.nyxd_scraper.clone(), + database_storage, pruning_options: self.nyxd_scraper.pruning, store_precommits: self.nyxd_scraper.store_precommits, start_block: StartingBlockOpts { start_block_height: None, use_best_effort_start_height: true, }, - } + }) } pub fn verification_config(&self) -> ticketbook_issuance::VerificationConfig { diff --git a/nym-validator-rewarder/src/config/persistence/paths.rs b/nym-validator-rewarder/src/config/persistence/paths.rs index 6d9baa66f97..c003f900d1b 100644 --- a/nym-validator-rewarder/src/config/persistence/paths.rs +++ b/nym-validator-rewarder/src/config/persistence/paths.rs @@ -46,6 +46,7 @@ impl ValidatorRewarderPaths { impl Default for ValidatorRewarderPaths { fn default() -> Self { ValidatorRewarderPaths { + // validator rewarder uses sqlite nyxd_scraper: default_data_directory().join(DEFAULT_SCRAPER_DB_FILENAME), reward_history: default_data_directory().join(DEFAULT_REWARD_HISTORY_DB_FILENAME), private_ed25519_identity_key_file: default_data_directory() diff --git a/nym-validator-rewarder/src/error.rs b/nym-validator-rewarder/src/error.rs index 8adbe9883ff..a3db95e6f68 100644 --- a/nym-validator-rewarder/src/error.rs +++ b/nym-validator-rewarder/src/error.rs @@ -9,6 +9,7 @@ use nym_validator_client::nym_api::error::NymAPIError; use nym_validator_client::nyxd::error::NyxdError; use nym_validator_client::nyxd::tx::ErrorReport; use nym_validator_client::nyxd::{AccountId, Coin}; +use nyxd_scraper_sqlite::error::SqliteScraperError; use std::io; use std::path::PathBuf; use thiserror::Error; @@ -24,6 +25,9 @@ pub enum NymRewarderError { #[error("failed to perform startup SQL migration: {0}")] StartupMigrationFailure(#[from] sqlx::migrate::MigrateError), + #[error("config error: database storage path invalid")] + ConfigError, + #[error( "failed to load 
config file using path '{}'. detailed message: {source}", path.display() )] @@ -81,7 +85,13 @@ pub enum NymRewarderError { #[error("chain scraping failure: {source}")] ScraperFailure { #[from] - source: nyxd_scraper::error::ScraperError, + source: nyxd_scraper_sqlite::ScraperError, + }, + + #[error("chain scraper storage failure: {source}")] + ScraperStorageFailure { + #[from] + source: SqliteScraperError, }, // this should never happen but unwrapping everywhere was more cumbersome than just propagating the error diff --git a/nym-validator-rewarder/src/rewarder/block_signing/mod.rs b/nym-validator-rewarder/src/rewarder/block_signing/mod.rs index 57406c34ccb..d86a3365b85 100644 --- a/nym-validator-rewarder/src/rewarder/block_signing/mod.rs +++ b/nym-validator-rewarder/src/rewarder/block_signing/mod.rs @@ -7,7 +7,7 @@ use crate::rewarder::epoch::Epoch; use crate::rewarder::nyxd_client::NyxdClient; use nym_validator_client::nyxd::module_traits::staking; use nym_validator_client::nyxd::{AccountId, PageRequest}; -use nyxd_scraper::NyxdScraper; +use nyxd_scraper_sqlite::SqliteNyxdScraper; use std::cmp::min; use std::collections::HashMap; use std::ops::Range; @@ -17,7 +17,7 @@ pub(crate) mod types; pub struct EpochSigning { pub(crate) nyxd_client: NyxdClient, - pub(crate) nyxd_scraper: NyxdScraper, + pub(crate) nyxd_scraper: SqliteNyxdScraper, pub(crate) whitelist: Vec, } diff --git a/nym-validator-rewarder/src/rewarder/block_signing/types.rs b/nym-validator-rewarder/src/rewarder/block_signing/types.rs index 497d6546da1..559e6991b6e 100644 --- a/nym-validator-rewarder/src/rewarder/block_signing/types.rs +++ b/nym-validator-rewarder/src/rewarder/block_signing/types.rs @@ -7,7 +7,7 @@ use crate::{ }; use cosmwasm_std::{Decimal, Uint128}; use nym_validator_client::nyxd::{AccountId, Coin, module_traits::staking}; -use nyxd_scraper::models; +use nyxd_scraper_sqlite::models; use std::collections::HashMap; use tracing::info; diff --git 
a/nym-validator-rewarder/src/rewarder/helpers.rs b/nym-validator-rewarder/src/rewarder/helpers.rs index 578d49c498c..572e4ecbc79 100644 --- a/nym-validator-rewarder/src/rewarder/helpers.rs +++ b/nym-validator-rewarder/src/rewarder/helpers.rs @@ -3,7 +3,7 @@ use crate::error::NymRewarderError; use nym_validator_client::nyxd::{AccountId, PublicKey}; -use nyxd_scraper::constants::{BECH32_CONSENSUS_ADDRESS_PREFIX, BECH32_PREFIX}; +use nyxd_scraper_sqlite::constants::{BECH32_CONSENSUS_ADDRESS_PREFIX, BECH32_PREFIX}; use sha2::{Digest, Sha256}; pub(crate) fn consensus_pubkey_to_address( diff --git a/nym-validator-rewarder/src/rewarder/mod.rs b/nym-validator-rewarder/src/rewarder/mod.rs index e46dcc3e791..03ca36dff02 100644 --- a/nym-validator-rewarder/src/rewarder/mod.rs +++ b/nym-validator-rewarder/src/rewarder/mod.rs @@ -16,7 +16,7 @@ use nym_crypto::asymmetric::ed25519; use nym_ecash_time::{EcashTime, ecash_today, ecash_today_date}; use nym_task::ShutdownManager; use nym_validator_client::nyxd::{AccountId, Coin, Hash}; -use nyxd_scraper::NyxdScraper; +use nyxd_scraper_sqlite::SqliteNyxdScraper; use std::sync::Arc; use time::Date; use tracing::{error, info, instrument, warn}; @@ -187,7 +187,7 @@ impl Rewarder { info!("the block signing rewarding is running in monitor only mode"); } - let nyxd_scraper = NyxdScraper::new(config.scraper_config()).await?; + let nyxd_scraper = SqliteNyxdScraper::new(config.scraper_config()?).await?; Some(EpochSigning { nyxd_scraper, diff --git a/nym-wallet/Cargo.lock b/nym-wallet/Cargo.lock index 19db598a322..a7bd151e9de 100644 --- a/nym-wallet/Cargo.lock +++ b/nym-wallet/Cargo.lock @@ -1241,9 +1241,9 @@ dependencies = [ [[package]] name = "cosmos-sdk-proto" -version = "0.26.1" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "462e1f6a8e005acc8835d32d60cbd7973ed65ea2a8d8473830e675f050956427" +checksum = "95ac39be7373404accccaede7cc1ec942ccef14f0ca18d209967a756bf1dbb1f" dependencies = [ 
"prost", "tendermint-proto", @@ -1251,9 +1251,9 @@ dependencies = [ [[package]] name = "cosmrs" -version = "0.21.1" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1394c263335da09e8ba8c4b2c675d804e3e0deb44cce0866a5f838d3ddd43d02" +checksum = "34e74fa7a22930fe0579bef560f2d64b78415d4c47b9dd976c0635136809471d" dependencies = [ "bip32", "cosmos-sdk-proto", diff --git a/nym-wallet/nym-wallet-types/Cargo.toml b/nym-wallet/nym-wallet-types/Cargo.toml index 9b869ec93f4..1ebcaa9df81 100644 --- a/nym-wallet/nym-wallet-types/Cargo.toml +++ b/nym-wallet/nym-wallet-types/Cargo.toml @@ -14,8 +14,8 @@ strum_macros = "0.27.2" ts-rs = "10.0.0" -cosmwasm-std = "2.2.1" -cosmrs = "=0.21.1" +cosmwasm-std = "=2.2.2" +cosmrs = { version = "0.22.0" } nym-config = { path = "../../common/config" } nym-network-defaults = { path = "../../common/network-defaults" } diff --git a/nym-wallet/src-tauri/Cargo.toml b/nym-wallet/src-tauri/Cargo.toml index 7ea924a9e88..63279911070 100644 --- a/nym-wallet/src-tauri/Cargo.toml +++ b/nym-wallet/src-tauri/Cargo.toml @@ -53,8 +53,8 @@ base64 = "0.13" zeroize = { version = "1.5", features = ["zeroize_derive", "serde"] } plist = "1.6.0" -cosmwasm-std = "2.2.1" -cosmrs = { version = "0.21.0" } +cosmwasm-std = "=2.2.2" +cosmrs = { version = "0.22.0" } nym-node-requests = { path = "../../nym-node/nym-node-requests" } nym-validator-client = { path = "../../common/client-libs/validator-client" } diff --git a/nyx-chain-watcher/Cargo.toml b/nyx-chain-watcher/Cargo.toml index bbc2ed19396..3a2f09149b5 100644 --- a/nyx-chain-watcher/Cargo.toml +++ b/nyx-chain-watcher/Cargo.toml @@ -24,7 +24,7 @@ nym-bin-common = { path = "../common/bin-common", features = ["output_format"] } nym-network-defaults = { path = "../common/network-defaults" } nym-task = { path = "../common/task" } nym-validator-client = { path = "../common/client-libs/validator-client" } -nyxd-scraper = { path = "../common/nyxd-scraper" } 
+nyxd-scraper-sqlite = { path = "../common/nyxd-scraper-sqlite" } reqwest = { workspace = true, features = ["rustls-tls"] } schemars = { workspace = true } serde = { workspace = true, features = ["derive"] } diff --git a/nyx-chain-watcher/src/chain_scraper/mod.rs b/nyx-chain-watcher/src/chain_scraper/mod.rs index 6f2e493f315..c0db1b8e04d 100644 --- a/nyx-chain-watcher/src/chain_scraper/mod.rs +++ b/nyx-chain-watcher/src/chain_scraper/mod.rs @@ -6,9 +6,9 @@ use crate::env::vars::{ use crate::http::state::BankScraperModuleState; use async_trait::async_trait; use nym_validator_client::nyxd::{Any, Coin, CosmosCoin, Hash, Msg, MsgSend, Name}; -use nyxd_scraper::{ - MsgModule, NyxdScraper, ParsedTransactionResponse, PruningOptions, error::ScraperError, - storage::StorageTransaction, +use nyxd_scraper_sqlite::{ + MsgModule, NyxdScraperTransaction, ParsedTransactionResponse, PruningOptions, ScraperError, + SqliteNyxdScraper, }; use sqlx::SqlitePool; use std::fs; @@ -18,7 +18,7 @@ pub(crate) async fn run_chain_scraper( config: &crate::config::Config, db_pool: SqlitePool, shared_state: BankScraperModuleState, -) -> anyhow::Result { +) -> anyhow::Result { let websocket_url = std::env::var("NYXD_WS").expect("NYXD_WS not defined"); let rpc_url = std::env::var("NYXD").expect("NYXD not defined"); @@ -50,13 +50,13 @@ pub(crate) async fn run_chain_scraper( fs::remove_file(config.chain_scraper_database_path())?; } - let scraper = NyxdScraper::builder(nyxd_scraper::Config { + let scraper = SqliteNyxdScraper::builder(nyxd_scraper_sqlite::Config { websocket_url, rpc_url, - database_path: config.chain_scraper_database_path().into(), + database_storage: config.chain_scraper_database_path(), pruning_options: PruningOptions::nothing(), store_precommits: false, - start_block: nyxd_scraper::StartingBlockOpts { + start_block: nyxd_scraper_sqlite::StartingBlockOpts { start_block_height, use_best_effort_start_height, }, @@ -157,7 +157,7 @@ impl MsgModule for BankScraperModule { index: usize, 
msg: &Any, tx: &ParsedTransactionResponse, - _storage_tx: &mut StorageTransaction, + _storage_tx: &mut dyn NyxdScraperTransaction, ) -> Result<(), ScraperError> { let memo = tx.tx.body.memo.clone(); diff --git a/nyx-chain-watcher/src/cli/commands/run/mod.rs b/nyx-chain-watcher/src/cli/commands/run/mod.rs index 150738dfb6e..704a292141c 100644 --- a/nyx-chain-watcher/src/cli/commands/run/mod.rs +++ b/nyx-chain-watcher/src/cli/commands/run/mod.rs @@ -15,7 +15,7 @@ mod config; use crate::chain_scraper::run_chain_scraper; use crate::db::DbPool; use crate::http::state::{BankScraperModuleState, PaymentListenerState, PriceScraperState}; -use crate::payment_listener::PaymentListener; +use crate::listener::PaymentListener; use crate::price_scraper::PriceScraper; use crate::{db, http}; pub(crate) use args::Args; diff --git a/nyx-chain-watcher/src/http/state.rs b/nyx-chain-watcher/src/http/state.rs index 8cb9d93e9f5..8ab26dff9b1 100644 --- a/nyx-chain-watcher/src/http/state.rs +++ b/nyx-chain-watcher/src/http/state.rs @@ -7,7 +7,7 @@ use axum::extract::FromRef; use nym_bin_common::bin_info; use nym_bin_common::build_information::BinaryBuildInformation; use nym_validator_client::nyxd::{Coin, MsgSend}; -use nyxd_scraper::ParsedTransactionResponse; +use nyxd_scraper_sqlite::ParsedTransactionResponse; use serde::{Deserialize, Serialize}; use std::collections::HashMap; use std::ops::Deref; diff --git a/nyx-chain-watcher/src/payment_listener/mod.rs b/nyx-chain-watcher/src/listener/mod.rs similarity index 98% rename from nyx-chain-watcher/src/payment_listener/mod.rs rename to nyx-chain-watcher/src/listener/mod.rs index f941e6c0126..f98db43521b 100644 --- a/nyx-chain-watcher/src/payment_listener/mod.rs +++ b/nyx-chain-watcher/src/listener/mod.rs @@ -7,8 +7,8 @@ use crate::db::queries; use crate::http::state::{ PaymentListenerFailureDetails, PaymentListenerState, ProcessedPayment, WatcherFailureDetails, }; +use crate::listener::watcher::PaymentWatcher; use 
crate::models::WebhookPayload; -use crate::payment_listener::watcher::PaymentWatcher; use anyhow::Context; use sqlx::SqlitePool; use tokio::time::{self, Duration}; diff --git a/nyx-chain-watcher/src/payment_listener/watcher.rs b/nyx-chain-watcher/src/listener/watcher.rs similarity index 100% rename from nyx-chain-watcher/src/payment_listener/watcher.rs rename to nyx-chain-watcher/src/listener/watcher.rs diff --git a/nyx-chain-watcher/src/main.rs b/nyx-chain-watcher/src/main.rs index 725eee7a06d..8281be987c4 100644 --- a/nyx-chain-watcher/src/main.rs +++ b/nyx-chain-watcher/src/main.rs @@ -12,9 +12,9 @@ mod env; mod error; pub(crate) mod helpers; mod http; +mod listener; mod logging; pub mod models; -mod payment_listener; mod price_scraper; #[tokio::main]