From 45c658248894bcef0f12ec20b288be1f289375f8 Mon Sep 17 00:00:00 2001 From: erhant Date: Thu, 13 Jun 2024 19:23:52 +0300 Subject: [PATCH 01/29] add workflows --- Cargo.lock | 1007 +++++++++++++++++++++++++++++++---- Cargo.toml | 2 + compose.yml | 2 +- src/compute/payload.rs | 4 +- src/node.rs | 1 + src/workers/mod.rs | 1 + src/workers/workflow.rs | 86 +++ tests/mock_messages_test.rs | 4 +- 8 files changed, 984 insertions(+), 123 deletions(-) create mode 100644 src/workers/workflow.rs diff --git a/Cargo.lock b/Cargo.lock index c512f4f..1c93d91 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -33,7 +33,7 @@ version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "cipher", "cpufeatures", ] @@ -58,7 +58,7 @@ version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "getrandom 0.2.14", "once_cell", "version_check", @@ -183,7 +183,7 @@ dependencies = [ "async-convert", "backoff", "base64 0.22.0", - "bytes", + "bytes 1.6.0", "derive_builder", "futures", "rand 0.8.5", @@ -193,9 +193,9 @@ dependencies = [ "serde", "serde_json", "thiserror", - "tokio", + "tokio 1.37.0", "tokio-stream", - "tokio-util", + "tokio-util 0.7.10", "tracing", ] @@ -218,7 +218,7 @@ checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" dependencies = [ "async-stream-impl", "futures-core", - "pin-project-lite", + "pin-project-lite 0.2.14", ] [[package]] @@ -243,6 +243,17 @@ dependencies = [ "syn 2.0.59", ] +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi 0.1.19", + "libc", + "winapi 0.3.9", +] + [[package]] name 
= "auto_enums" version = "0.8.5" @@ -270,9 +281,9 @@ dependencies = [ "futures-core", "getrandom 0.2.14", "instant", - "pin-project-lite", + "pin-project-lite 0.2.14", "rand 0.8.5", - "tokio", + "tokio 1.37.0", ] [[package]] @@ -283,7 +294,7 @@ checksum = "26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d" dependencies = [ "addr2line", "cc", - "cfg-if", + "cfg-if 1.0.0", "libc", "miniz_oxide", "object", @@ -376,6 +387,12 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" +[[package]] +name = "bytes" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e4cec68f03f32e44924783795810fa50a7035d8c8ebe78580ad7e6c703fba38" + [[package]] name = "bytes" version = "1.6.0" @@ -388,6 +405,12 @@ version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "17f6e324229dc011159fcc089755d1e2e216a90d43a7dea6853ca740b84f35e7" +[[package]] +name = "cfg-if" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" + [[package]] name = "cfg-if" version = "1.0.0" @@ -432,6 +455,12 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + [[package]] name = "core-foundation" version = "0.9.4" @@ -463,7 +492,7 @@ version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", ] [[package]] @@ -517,6 +546,23 @@ dependencies = [ "subtle", ] +[[package]] +name = "cssparser" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "754b69d351cdc2d8ee09ae203db831e005560fc6030da058f86ad60c92a9cb0a" +dependencies = [ + "cssparser-macros", + "dtoa-short", + "itoa 0.4.8", + "matches", + "phf 0.8.0", + "proc-macro2", + "quote", + "smallvec", + "syn 1.0.109", +] + [[package]] name = "cssparser" version = "0.31.2" @@ -525,7 +571,7 @@ checksum = "5b3df4f93e5fbbe73ec01ec8d3f68bba73107993a5b1e7519273c32db9b0d5be" dependencies = [ "cssparser-macros", "dtoa-short", - "itoa", + "itoa 1.0.11", "phf 0.11.2", "smallvec", ] @@ -547,7 +593,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac574ff4d437a7b5ad237ef331c17ccca63c46479e5b5453eb8e10bb99a759fe" dependencies = [ "csv-core", - "itoa", + "itoa 1.0.11", "ryu", "serde", ] @@ -662,8 +708,10 @@ version = "0.99.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ + "convert_case", "proc-macro2", "quote", + "rustc_version", "syn 1.0.109", ] @@ -705,7 +753,7 @@ dependencies = [ "base64 0.22.0", "colored", "ecies", - "env_logger", + "env_logger 0.11.3", "fastbloom-rs", "hex", "hex-literal", @@ -714,6 +762,7 @@ dependencies = [ "log", "ollama-rs 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", "ollama-rs 0.1.9 (git+https://github.com/pepperoni21/ollama-rs.git?branch=master)", + "ollama-workflows", "parking_lot", "rand 0.8.5", "reqwest 0.12.4", @@ -721,13 +770,19 @@ dependencies = [ "serde_json", "sha2 0.10.8", "sha3", - "tokio", - "tokio-util", + "tokio 1.37.0", + "tokio-util 0.7.10", "url", "urlencoding", "uuid", ] +[[package]] +name = "dotenv" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f" + [[package]] name = "dtoa" version = "1.0.9" @@ -779,7 +834,7 @@ version = "0.8.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", ] [[package]] @@ -792,6 +847,19 @@ dependencies = [ "regex", ] +[[package]] +name = "env_logger" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a12e6657c4c97ebab115a42dcee77225f7f482cdd841cf7088c657a42e9e00e7" +dependencies = [ + "atty", + "humantime", + "log", + "regex", + "termcolor", +] + [[package]] name = "env_logger" version = "0.11.3" @@ -829,7 +897,7 @@ checksum = "74fef4569247a5f429d9156b9d0a2599914385dd189c539334c625d8099d90ab" dependencies = [ "futures-core", "nom", - "pin-project-lite", + "pin-project-lite 0.2.14", ] [[package]] @@ -906,6 +974,22 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "fuchsia-zircon" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82" +dependencies = [ + "bitflags 1.3.2", + "fuchsia-zircon-sys", +] + +[[package]] +name = "fuchsia-zircon-sys" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" + [[package]] name = "futf" version = "0.1.5" @@ -1006,7 +1090,7 @@ dependencies = [ "futures-sink", "futures-task", "memchr", - "pin-project-lite", + "pin-project-lite 0.2.14", "pin-utils", "slab", ] @@ -1045,7 +1129,7 @@ version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "libc", "wasi 0.9.0+wasi-snapshot-preview1", ] @@ -1056,7 +1140,7 @@ version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "js-sys", "libc", "wasi 
0.11.0+wasi-snapshot-preview1", @@ -1085,22 +1169,42 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" +[[package]] +name = "h2" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e4728fd124914ad25e99e3d15a9361a879f6620f63cb56bbb08f95abb97a535" +dependencies = [ + "bytes 0.5.6", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http 0.2.12", + "indexmap 1.9.3", + "slab", + "tokio 0.2.25", + "tokio-util 0.3.1", + "tracing", + "tracing-futures", +] + [[package]] name = "h2" version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" dependencies = [ - "bytes", + "bytes 1.6.0", "fnv", "futures-core", "futures-sink", "futures-util", "http 0.2.12", - "indexmap", + "indexmap 2.2.6", "slab", - "tokio", - "tokio-util", + "tokio 1.37.0", + "tokio-util 0.7.10", "tracing", ] @@ -1110,19 +1214,25 @@ version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "816ec7294445779408f36fe57bc5b7fc1cf59664059096c65f905c1c61f58069" dependencies = [ - "bytes", + "bytes 1.6.0", "fnv", "futures-core", "futures-sink", "futures-util", "http 1.1.0", - "indexmap", + "indexmap 2.2.6", "slab", - "tokio", - "tokio-util", + "tokio 1.37.0", + "tokio-util 0.7.10", "tracing", ] +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + [[package]] name = "hashbrown" version = "0.14.3" @@ -1139,6 +1249,15 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + [[package]] name = "hermit-abi" version = "0.3.9" @@ -1205,6 +1324,33 @@ dependencies = [ "utf8-width", ] +[[package]] +name = "html2text" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c66ee488a63a92237d5b48875b7e05bb293be8fb2894641c8118b60c08ab5ef" +dependencies = [ + "html5ever 0.27.0", + "markup5ever 0.12.1", + "tendril", + "thiserror", + "unicode-width", +] + +[[package]] +name = "html5ever" +version = "0.25.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5c13fb08e5d4dfc151ee5e88bae63f7773d61852f3bdc73c9f4b9e1bde03148" +dependencies = [ + "log", + "mac", + "markup5ever 0.10.1", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "html5ever" version = "0.26.0" @@ -1213,21 +1359,35 @@ checksum = "bea68cab48b8459f17cf1c944c67ddc572d272d9f2b274140f223ecb1da4a3b7" dependencies = [ "log", "mac", - "markup5ever", + "markup5ever 0.11.0", "proc-macro2", "quote", "syn 1.0.109", ] +[[package]] +name = "html5ever" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c13771afe0e6e846f1e67d038d4cb29998a6779f93c809212e4e9c32efd244d4" +dependencies = [ + "log", + "mac", + "markup5ever 0.12.1", + "proc-macro2", + "quote", + "syn 2.0.59", +] + [[package]] name = "http" version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" dependencies = [ - "bytes", + "bytes 1.6.0", "fnv", - "itoa", + "itoa 1.0.11", ] [[package]] @@ -1236,9 +1396,19 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" dependencies = [ - "bytes", + "bytes 1.6.0", "fnv", - "itoa", + 
"itoa 1.0.11", +] + +[[package]] +name = "http-body" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13d5ff830006f7646652e057693569bfe0d51760c0085a071769d142a205111b" +dependencies = [ + "bytes 0.5.6", + "http 0.2.12", ] [[package]] @@ -1247,9 +1417,9 @@ version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ - "bytes", + "bytes 1.6.0", "http 0.2.12", - "pin-project-lite", + "pin-project-lite 0.2.14", ] [[package]] @@ -1258,7 +1428,7 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" dependencies = [ - "bytes", + "bytes 1.6.0", "http 1.1.0", ] @@ -1268,11 +1438,11 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0475f8b2ac86659c21b64320d5d653f9efe42acd2a4e560073ec61a155a34f1d" dependencies = [ - "bytes", + "bytes 1.6.0", "futures-core", "http 1.1.0", "http-body 1.0.0", - "pin-project-lite", + "pin-project-lite 0.2.14", ] [[package]] @@ -1281,6 +1451,12 @@ version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" +[[package]] +name = "httpdate" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "494b4d60369511e7dea41cf646832512a94e542f68bb9c49e54518e0f468eb47" + [[package]] name = "httpdate" version = "1.0.3" @@ -1293,13 +1469,37 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" +[[package]] +name = "hyper" +version = "0.13.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a6f157065790a3ed2f88679250419b5cdd96e714a0d65f7797fd337186e96bb" 
+dependencies = [ + "bytes 0.5.6", + "futures-channel", + "futures-core", + "futures-util", + "h2 0.2.7", + "http 0.2.12", + "http-body 0.3.1", + "httparse", + "httpdate 0.3.2", + "itoa 0.4.8", + "pin-project", + "socket2 0.3.19", + "tokio 0.2.25", + "tower-service", + "tracing", + "want", +] + [[package]] name = "hyper" version = "0.14.29" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f361cde2f109281a220d4307746cdfd5ee3f410da58a70377762396775634b33" dependencies = [ - "bytes", + "bytes 1.6.0", "futures-channel", "futures-core", "futures-util", @@ -1307,11 +1507,11 @@ dependencies = [ "http 0.2.12", "http-body 0.4.6", "httparse", - "httpdate", - "itoa", - "pin-project-lite", - "socket2", - "tokio", + "httpdate 1.0.3", + "itoa 1.0.11", + "pin-project-lite 0.2.14", + "socket2 0.5.6", + "tokio 1.37.0", "tower-service", "tracing", "want", @@ -1323,17 +1523,17 @@ version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fe575dd17d0862a9a33781c8c4696a55c320909004a67a00fb286ba8b1bc496d" dependencies = [ - "bytes", + "bytes 1.6.0", "futures-channel", "futures-util", "h2 0.4.4", "http 1.1.0", "http-body 1.0.0", "httparse", - "itoa", - "pin-project-lite", + "itoa 1.0.11", + "pin-project-lite 0.2.14", "smallvec", - "tokio", + "tokio 1.37.0", "want", ] @@ -1349,21 +1549,34 @@ dependencies = [ "hyper-util", "rustls", "rustls-pki-types", - "tokio", + "tokio 1.37.0", "tokio-rustls", "tower-service", ] +[[package]] +name = "hyper-tls" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d979acc56dcb5b8dddba3917601745e877576475aa046df3226eabdecef78eed" +dependencies = [ + "bytes 0.5.6", + "hyper 0.13.10", + "native-tls", + "tokio 0.2.25", + "tokio-tls", +] + [[package]] name = "hyper-tls" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ - 
"bytes", + "bytes 1.6.0", "hyper 0.14.29", "native-tls", - "tokio", + "tokio 1.37.0", "tokio-native-tls", ] @@ -1373,12 +1586,12 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ - "bytes", + "bytes 1.6.0", "http-body-util", "hyper 1.3.1", "hyper-util", "native-tls", - "tokio", + "tokio 1.37.0", "tokio-native-tls", "tower-service", ] @@ -1389,15 +1602,15 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa" dependencies = [ - "bytes", + "bytes 1.6.0", "futures-channel", "futures-util", "http 1.1.0", "http-body 1.0.0", "hyper 1.3.1", - "pin-project-lite", - "socket2", - "tokio", + "pin-project-lite 0.2.14", + "socket2 0.5.6", + "tokio 1.37.0", "tower", "tower-service", "tracing", @@ -1442,6 +1655,16 @@ dependencies = [ "unicode-normalization", ] +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", +] + [[package]] name = "indexmap" version = "2.2.6" @@ -1449,7 +1672,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" dependencies = [ "equivalent", - "hashbrown", + "hashbrown 0.14.3", ] [[package]] @@ -1467,7 +1690,16 @@ version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", +] + +[[package]] +name = "iovec" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e" +dependencies = [ + 
"libc", ] [[package]] @@ -1485,6 +1717,12 @@ dependencies = [ "either", ] +[[package]] +name = "itoa" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" + [[package]] name = "itoa" version = "1.0.11" @@ -1509,6 +1747,16 @@ dependencies = [ "cpufeatures", ] +[[package]] +name = "kernel32-sys" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" +dependencies = [ + "winapi 0.2.8", + "winapi-build", +] + [[package]] name = "langchain-rust" version = "4.2.0" @@ -1532,7 +1780,7 @@ dependencies = [ "regex", "reqwest 0.12.4", "reqwest-eventsource", - "scraper", + "scraper 0.19.0", "secrecy", "serde", "serde_json", @@ -1540,7 +1788,7 @@ dependencies = [ "text-splitter", "thiserror", "tiktoken-rs", - "tokio", + "tokio 1.37.0", "tokio-stream", "url", "urlencoding", @@ -1643,14 +1891,14 @@ dependencies = [ "chrono", "encoding_rs", "flate2", - "itoa", + "itoa 1.0.11", "linked-hash-map", "log", "md5", "nom", "pom", "rayon", - "time", + "time 0.3.36", "weezl", ] @@ -1660,6 +1908,20 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" +[[package]] +name = "markup5ever" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a24f40fb03852d1cdd84330cddcaf98e9ec08a7b7768e952fad3b4cf048ec8fd" +dependencies = [ + "log", + "phf 0.8.0", + "phf_codegen 0.8.0", + "string_cache", + "string_cache_codegen", + "tendril", +] + [[package]] name = "markup5ever" version = "0.11.0" @@ -1668,24 +1930,56 @@ checksum = "7a2629bb1404f3d34c2e921f21fd34ba00b206124c81f65c50b43b6aaefeb016" dependencies = [ "log", "phf 0.10.1", - "phf_codegen", + "phf_codegen 0.10.0", + "string_cache", + "string_cache_codegen", + "tendril", +] + 
+[[package]] +name = "markup5ever" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16ce3abbeba692c8b8441d036ef91aea6df8da2c6b6e21c7e14d3c18e526be45" +dependencies = [ + "log", + "phf 0.11.2", + "phf_codegen 0.11.2", "string_cache", "string_cache_codegen", "tendril", ] +[[package]] +name = "markup5ever_rcdom" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f015da43bcd8d4f144559a3423f4591d69b8ce0652c905374da7205df336ae2b" +dependencies = [ + "html5ever 0.25.2", + "markup5ever 0.10.1", + "tendril", + "xml5ever 0.16.2", +] + [[package]] name = "markup5ever_rcdom" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9521dd6750f8e80ee6c53d65e2e4656d7de37064f3a7a5d2d11d05df93839c2" dependencies = [ - "html5ever", - "markup5ever", + "html5ever 0.26.0", + "markup5ever 0.11.0", "tendril", - "xml5ever", + "xml5ever 0.17.0", ] +[[package]] +name = "matches" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" + [[package]] name = "md5" version = "0.7.0" @@ -1729,6 +2023,25 @@ dependencies = [ "adler", ] +[[package]] +name = "mio" +version = "0.6.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4afd66f5b91bf2a3bc13fad0e21caedac168ca4c707504e75585648ae80e4cc4" +dependencies = [ + "cfg-if 0.1.10", + "fuchsia-zircon", + "fuchsia-zircon-sys", + "iovec", + "kernel32-sys", + "libc", + "log", + "miow", + "net2", + "slab", + "winapi 0.2.8", +] + [[package]] name = "mio" version = "0.8.11" @@ -1740,6 +2053,18 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "miow" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebd808424166322d4a38da87083bfddd3ac4c131334ed55856112eb06d46944d" +dependencies = [ + "kernel32-sys", + "net2", + "winapi 
0.2.8", + "ws2_32-sys", +] + [[package]] name = "mockito" version = "1.4.0" @@ -1756,7 +2081,7 @@ dependencies = [ "serde_json", "serde_urlencoded", "similar", - "tokio", + "tokio 1.37.0", ] [[package]] @@ -1777,12 +2102,29 @@ dependencies = [ "tempfile", ] +[[package]] +name = "net2" +version = "0.2.39" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b13b648036a2339d06de780866fbdfda0dde886de7b3af2ddeba8b14f4ee34ac" +dependencies = [ + "cfg-if 0.1.10", + "libc", + "winapi 0.3.9", +] + [[package]] name = "new_debug_unreachable" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" +[[package]] +name = "nodrop" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" + [[package]] name = "nom" version = "7.1.3" @@ -1814,7 +2156,7 @@ version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi", + "hermit-abi 0.3.9", "libc", ] @@ -1836,7 +2178,7 @@ dependencies = [ "reqwest 0.12.4", "serde", "serde_json", - "tokio", + "tokio 1.37.0", "tokio-stream", ] @@ -1851,6 +2193,50 @@ dependencies = [ "url", ] +[[package]] +name = "ollama-rs" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "255252ec57e13d2d6ae074c7b7cd8c004d17dafb1e03f954ba2fd5cc226f8f49" +dependencies = [ + "async-trait", + "log", + "regex", + "reqwest 0.12.4", + "scraper 0.19.0", + "serde", + "serde_json", + "text-splitter", + "url", +] + +[[package]] +name = "ollama-workflows" +version = "0.1.0" +source = "git+https://github.com/andthattoo/ollama-workflows?branch=main#fe24157a0997f6a1966b6da4751ee556cd21c233" +dependencies = [ + "async-trait", + "colored", + "dotenv", + "env_logger 0.9.3", 
+ "html2text", + "langchain-rust", + "log", + "ollama-rs 0.2.0", + "parking_lot", + "rand 0.8.5", + "regex", + "reqwest 0.12.4", + "scraper 0.19.0", + "search_with_google", + "serde", + "serde_json", + "simsimd", + "text-splitter", + "tokio 1.37.0", + "tokio-util 0.7.10", +] + [[package]] name = "once_cell" version = "1.19.0" @@ -1870,7 +2256,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95a0481286a310808298130d22dd1fef0fa571e05a8f44ec801801e84b216b1f" dependencies = [ "bitflags 2.5.0", - "cfg-if", + "cfg-if 1.0.0", "foreign-types", "libc", "once_cell", @@ -1923,7 +2309,7 @@ version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "libc", "redox_syscall", "smallvec", @@ -1938,7 +2324,18 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "phf" -version = "0.10.1" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3dfb61232e34fcb633f43d12c58f83c1df82962dcdfa565a4e866ffc17dafe12" +dependencies = [ + "phf_macros 0.8.0", + "phf_shared 0.8.0", + "proc-macro-hack", +] + +[[package]] +name = "phf" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fabbf1ead8a5bcbc20f5f8b939ee3f5b0f6f281b6ad3468b84656b658b455259" dependencies = [ @@ -1951,10 +2348,20 @@ version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" dependencies = [ - "phf_macros", + "phf_macros 0.11.2", "phf_shared 0.11.2", ] +[[package]] +name = "phf_codegen" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbffee61585b0411840d3ece935cce9cb6321f01c45477d30066498cd5e1a815" +dependencies = [ + "phf_generator 0.8.0", + "phf_shared 0.8.0", +] 
+ [[package]] name = "phf_codegen" version = "0.10.0" @@ -1965,6 +2372,26 @@ dependencies = [ "phf_shared 0.10.0", ] +[[package]] +name = "phf_codegen" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8d39688d359e6b34654d328e262234662d16cc0f60ec8dcbe5e718709342a5a" +dependencies = [ + "phf_generator 0.11.2", + "phf_shared 0.11.2", +] + +[[package]] +name = "phf_generator" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17367f0cc86f2d25802b2c26ee58a7b23faeccf78a396094c13dced0d0182526" +dependencies = [ + "phf_shared 0.8.0", + "rand 0.7.3", +] + [[package]] name = "phf_generator" version = "0.10.0" @@ -1985,6 +2412,20 @@ dependencies = [ "rand 0.8.5", ] +[[package]] +name = "phf_macros" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f6fde18ff429ffc8fe78e2bf7f8b7a5a5a6e2a8b58bc5a9ac69198bbda9189c" +dependencies = [ + "phf_generator 0.8.0", + "phf_shared 0.8.0", + "proc-macro-hack", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "phf_macros" version = "0.11.2" @@ -1998,6 +2439,15 @@ dependencies = [ "syn 2.0.59", ] +[[package]] +name = "phf_shared" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c00cf8b9eafe68dde5e9eaa2cef8ee84a9336a47d566ec55ca16589633b65af7" +dependencies = [ + "siphasher", +] + [[package]] name = "phf_shared" version = "0.10.0" @@ -2036,6 +2486,12 @@ dependencies = [ "syn 2.0.59", ] +[[package]] +name = "pin-project-lite" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "257b64915a082f7811703966789728173279bdebb956b143dbcd23f6f970a777" + [[package]] name = "pin-project-lite" version = "0.2.14" @@ -2060,7 +2516,7 @@ version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25" 
dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "cpufeatures", "opaque-debug", "universal-hash", @@ -2093,6 +2549,12 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" +[[package]] +name = "proc-macro-hack" +version = "0.5.20+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" + [[package]] name = "proc-macro2" version = "1.0.80" @@ -2133,6 +2595,7 @@ dependencies = [ "rand_chacha 0.2.2", "rand_core 0.5.1", "rand_hc", + "rand_pcg", ] [[package]] @@ -2193,6 +2656,15 @@ dependencies = [ "rand_core 0.5.1", ] +[[package]] +name = "rand_pcg" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16abd0c1b639e9eb4d7c50c0b8100b0d0f849be2349829c740fe8e6eb4816429" +dependencies = [ + "rand_core 0.5.1", +] + [[package]] name = "rayon" version = "1.10.0" @@ -2219,9 +2691,9 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e56596e20a6d3cf715182d9b6829220621e6e985cec04d00410cee29821b4220" dependencies = [ - "html5ever", + "html5ever 0.26.0", "lazy_static", - "markup5ever_rcdom", + "markup5ever_rcdom 0.2.0", "regex", "reqwest 0.11.27", "url", @@ -2265,6 +2737,41 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" +[[package]] +name = "reqwest" +version = "0.10.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0718f81a8e14c4dbb3b34cf23dc6aaf9ab8a0dfec160c534b3dbca1aaa21f47c" +dependencies = [ + "base64 0.13.1", + "bytes 0.5.6", + "encoding_rs", + "futures-core", + "futures-util", + "http 0.2.12", + "http-body 0.3.1", + "hyper 0.13.10", + "hyper-tls 0.4.3", + "ipnet", + "js-sys", + "lazy_static", + "log", + "mime", + "mime_guess", + 
"native-tls", + "percent-encoding", + "pin-project-lite 0.2.14", + "serde", + "serde_urlencoded", + "tokio 0.2.25", + "tokio-tls", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "winreg 0.7.0", +] + [[package]] name = "reqwest" version = "0.11.27" @@ -2272,7 +2779,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" dependencies = [ "base64 0.21.7", - "bytes", + "bytes 1.6.0", "encoding_rs", "futures-core", "futures-util", @@ -2288,14 +2795,14 @@ dependencies = [ "native-tls", "once_cell", "percent-encoding", - "pin-project-lite", + "pin-project-lite 0.2.14", "rustls-pemfile 1.0.4", "serde", "serde_json", "serde_urlencoded", "sync_wrapper", "system-configuration", - "tokio", + "tokio 1.37.0", "tokio-native-tls", "tower-service", "url", @@ -2312,7 +2819,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "566cafdd92868e0939d3fb961bd0dc25fcfaaed179291093b3d43e6b3150ea10" dependencies = [ "base64 0.22.0", - "bytes", + "bytes 1.6.0", "encoding_rs", "futures-core", "futures-util", @@ -2332,7 +2839,7 @@ dependencies = [ "native-tls", "once_cell", "percent-encoding", - "pin-project-lite", + "pin-project-lite 0.2.14", "rustls", "rustls-native-certs", "rustls-pemfile 2.1.2", @@ -2342,10 +2849,10 @@ dependencies = [ "serde_urlencoded", "sync_wrapper", "system-configuration", - "tokio", + "tokio 1.37.0", "tokio-native-tls", "tokio-rustls", - "tokio-util", + "tokio-util 0.7.10", "tower-service", "url", "wasm-bindgen", @@ -2366,7 +2873,7 @@ dependencies = [ "futures-timer", "mime", "nom", - "pin-project-lite", + "pin-project-lite 0.2.14", "reqwest 0.12.4", "thiserror", ] @@ -2378,7 +2885,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" dependencies = [ "cc", - "cfg-if", + "cfg-if 1.0.0", "getrandom 0.2.14", "libc", "spin", @@ -2398,6 
+2905,15 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] + [[package]] name = "rustix" version = "0.38.32" @@ -2501,6 +3017,22 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" +[[package]] +name = "scraper" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48e02aa790c80c2e494130dec6a522033b6a23603ffc06360e9fe6c611ea2c12" +dependencies = [ + "cssparser 0.27.2", + "ego-tree", + "getopts", + "html5ever 0.25.2", + "matches", + "selectors 0.22.0", + "smallvec", + "tendril", +] + [[package]] name = "scraper" version = "0.19.0" @@ -2508,15 +3040,28 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b80b33679ff7a0ea53d37f3b39de77ea0c75b12c5805ac43ec0c33b3051af1b" dependencies = [ "ahash", - "cssparser", + "cssparser 0.31.2", "ego-tree", "getopts", - "html5ever", + "html5ever 0.26.0", "once_cell", - "selectors", + "selectors 0.25.0", "tendril", ] +[[package]] +name = "search_with_google" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f6a618e5da50d28e74c49b9f26c357ff5a7c7d68f688d4f21db96eb906e4382" +dependencies = [ + "reqwest 0.10.10", + "scraper 0.12.0", + "select", + "thiserror", + "tokio 0.2.25", +] + [[package]] name = "secrecy" version = "0.8.0" @@ -2550,6 +3095,37 @@ dependencies = [ "libc", ] +[[package]] +name = "select" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ee061f90afcc8678bef7a78d0d121683f0ba753f740ff7005f833ec445876b7" 
+dependencies = [ + "bit-set", + "html5ever 0.25.2", + "markup5ever_rcdom 0.1.0", +] + +[[package]] +name = "selectors" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df320f1889ac4ba6bc0cdc9c9af7af4bd64bb927bccdf32d81140dc1f9be12fe" +dependencies = [ + "bitflags 1.3.2", + "cssparser 0.27.2", + "derive_more", + "fxhash", + "log", + "matches", + "phf 0.8.0", + "phf_codegen 0.8.0", + "precomputed-hash", + "servo_arc 0.1.1", + "smallvec", + "thin-slice", +] + [[package]] name = "selectors" version = "0.25.0" @@ -2557,18 +3133,24 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4eb30575f3638fc8f6815f448d50cb1a2e255b0897985c8c59f4d37b72a07b06" dependencies = [ "bitflags 2.5.0", - "cssparser", + "cssparser 0.31.2", "derive_more", "fxhash", "log", "new_debug_unreachable", "phf 0.10.1", - "phf_codegen", + "phf_codegen 0.10.0", "precomputed-hash", - "servo_arc", + "servo_arc 0.3.0", "smallvec", ] +[[package]] +name = "semver" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" + [[package]] name = "serde" version = "1.0.197" @@ -2595,7 +3177,7 @@ version = "1.0.116" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e17db7126d17feb94eb3fad46bf1a96b034e8aacbc2e775fe81505f8b0b2813" dependencies = [ - "itoa", + "itoa 1.0.11", "ryu", "serde", ] @@ -2607,11 +3189,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" dependencies = [ "form_urlencoded", - "itoa", + "itoa 1.0.11", "ryu", "serde", ] +[[package]] +name = "servo_arc" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d98238b800e0d1576d8b6e3de32827c2d74bee68bb97748dcf5071fb53965432" +dependencies = [ + "nodrop", + "stable_deref_trait", +] + [[package]] name = 
"servo_arc" version = "0.3.0" @@ -2628,7 +3220,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" dependencies = [ "block-buffer 0.9.0", - "cfg-if", + "cfg-if 1.0.0", "cpufeatures", "digest 0.9.0", "opaque-debug", @@ -2640,7 +3232,7 @@ version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "cpufeatures", "digest 0.10.7", ] @@ -2670,6 +3262,15 @@ version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa42c91313f1d05da9b26f267f931cf178d4aba455b4c4622dd7355eb80c6640" +[[package]] +name = "simsimd" +version = "4.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc9b7427cabeed25b18b43cc7d7ec466d8d1953a13ed56c46dc414c99ca4754e" +dependencies = [ + "cc", +] + [[package]] name = "siphasher" version = "0.3.11" @@ -2691,6 +3292,17 @@ version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" +[[package]] +name = "socket2" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "122e570113d28d773067fab24266b66753f6ea915758651696b6e35e49f88d6e" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "winapi 0.3.9", +] + [[package]] name = "socket2" version = "0.5.6" @@ -2828,7 +3440,7 @@ version = "3.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "fastrand", "rustix", "windows-sys 0.52.0", @@ -2845,6 +3457,15 @@ dependencies = [ "utf-8", ] +[[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", +] + [[package]] name = "text-splitter" version = "0.13.3" @@ -2864,6 +3485,12 @@ dependencies = [ "unicode-segmentation", ] +[[package]] +name = "thin-slice" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaa81235c7058867fa8c0e7314f33dcce9c215f535d1913822a2b3f5e289f3c" + [[package]] name = "thiserror" version = "1.0.61" @@ -2899,6 +3526,17 @@ dependencies = [ "rustc-hash", ] +[[package]] +name = "time" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" +dependencies = [ + "libc", + "wasi 0.10.0+wasi-snapshot-preview1", + "winapi 0.3.9", +] + [[package]] name = "time" version = "0.3.36" @@ -2906,7 +3544,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" dependencies = [ "deranged", - "itoa", + "itoa 1.0.11", "num-conv", "powerfmt", "serde", @@ -2945,6 +3583,23 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" +[[package]] +name = "tokio" +version = "0.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6703a273949a90131b290be1fe7b039d0fc884aa1935860dfcbe056f28cd8092" +dependencies = [ + "bytes 0.5.6", + "fnv", + "futures-core", + "iovec", + "lazy_static", + "memchr", + "mio 0.6.23", + "pin-project-lite 0.1.12", + "slab", +] + [[package]] name = "tokio" version = "1.37.0" @@ -2952,14 +3607,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1adbebffeca75fcfd058afa480fb6c0b81e165a0323f9c9d39c9697e37c46787" dependencies = [ "backtrace", - "bytes", + "bytes 1.6.0", "libc", - "mio", + "mio 0.8.11", "num_cpus", "parking_lot", - 
"pin-project-lite", + "pin-project-lite 0.2.14", "signal-hook-registry", - "socket2", + "socket2 0.5.6", "tokio-macros", "windows-sys 0.48.0", ] @@ -2982,7 +3637,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" dependencies = [ "native-tls", - "tokio", + "tokio 1.37.0", ] [[package]] @@ -2993,7 +3648,7 @@ checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" dependencies = [ "rustls", "rustls-pki-types", - "tokio", + "tokio 1.37.0", ] [[package]] @@ -3003,8 +3658,32 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" dependencies = [ "futures-core", - "pin-project-lite", - "tokio", + "pin-project-lite 0.2.14", + "tokio 1.37.0", +] + +[[package]] +name = "tokio-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a70f4fcd7b3b24fb194f837560168208f669ca8cb70d0c4b862944452396343" +dependencies = [ + "native-tls", + "tokio 0.2.25", +] + +[[package]] +name = "tokio-util" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be8242891f2b6cbef26a2d7e8605133c2c554cd35b3e4948ea892d6d68436499" +dependencies = [ + "bytes 0.5.6", + "futures-core", + "futures-sink", + "log", + "pin-project-lite 0.1.12", + "tokio 0.2.25", ] [[package]] @@ -3013,13 +3692,13 @@ version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" dependencies = [ - "bytes", + "bytes 1.6.0", "futures-core", "futures-sink", "futures-util", - "hashbrown", - "pin-project-lite", - "tokio", + "hashbrown 0.14.3", + "pin-project-lite 0.2.14", + "tokio 1.37.0", "tracing", ] @@ -3032,8 +3711,8 @@ dependencies = [ "futures-core", "futures-util", "pin-project", - "pin-project-lite", - "tokio", + "pin-project-lite 
0.2.14", + "tokio 1.37.0", "tower-layer", "tower-service", "tracing", @@ -3058,7 +3737,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ "log", - "pin-project-lite", + "pin-project-lite 0.2.14", "tracing-attributes", "tracing-core", ] @@ -3083,6 +3762,16 @@ dependencies = [ "once_cell", ] +[[package]] +name = "tracing-futures" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" +dependencies = [ + "pin-project", + "tracing", +] + [[package]] name = "try-lock" version = "0.2.5" @@ -3224,6 +3913,12 @@ version = "0.9.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" +[[package]] +name = "wasi" +version = "0.10.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" + [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -3236,7 +3931,9 @@ version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", + "serde", + "serde_json", "wasm-bindgen-macro", ] @@ -3261,7 +3958,7 @@ version = "0.4.42" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "js-sys", "wasm-bindgen", "web-sys", @@ -3325,6 +4022,49 @@ version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53a85b86a771b1c87058196170769dd264f66c0782acf1ae6cc51bfd64b39082" +[[package]] +name = "winapi" +version = "0.2.8" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-build" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + [[package]] name = "windows-core" version = "0.52.0" @@ -3473,13 +4213,22 @@ version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" +[[package]] +name = "winreg" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0120db82e8a1e0b9fb3345a539c478767c0048d842860994d96113d5b667bd69" +dependencies = [ + "winapi 0.3.9", +] + [[package]] name = "winreg" version = "0.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" dependencies = [ - "cfg-if", + 
"cfg-if 1.0.0", "windows-sys 0.48.0", ] @@ -3489,10 +4238,32 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "windows-sys 0.48.0", ] +[[package]] +name = "ws2_32-sys" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e" +dependencies = [ + "winapi 0.2.8", + "winapi-build", +] + +[[package]] +name = "xml5ever" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9234163818fd8e2418fcde330655e757900d4236acd8cc70fef345ef91f6d865" +dependencies = [ + "log", + "mac", + "markup5ever 0.10.1", + "time 0.1.45", +] + [[package]] name = "xml5ever" version = "0.17.0" @@ -3501,7 +4272,7 @@ checksum = "4034e1d05af98b51ad7214527730626f019682d797ba38b51689212118d8e650" dependencies = [ "log", "mac", - "markup5ever", + "markup5ever 0.11.0", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index b885b69..2956688 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -46,6 +46,8 @@ sha3 = "0.10.8" # llm stuff langchain-rust = { version = "4.2.0", features = ["ollama"] } ollama-rs = "0.1.9" +ollama-workflows = { git = "https://github.com/andthattoo/ollama-workflows", branch = "main" } + uuid = { version = "1.8.0", features = ["v4"] } [dev-dependencies] diff --git a/compose.yml b/compose.yml index 0c96901..b109b24 100644 --- a/compose.yml +++ b/compose.yml @@ -127,7 +127,7 @@ services: OPENAI_API_KEY: ${OPENAI_API_KEY} SERPER_API_KEY: ${SERPER_API_KEY} BROWSERLESS_TOKEN: ${BROWSERLESS_TOKEN} - AGENT_MAX_ITER: 1 + AGENT_MAX_ITER: 3 OLLAMA_URL: ${OLLAMA_HOST}:${OLLAMA_PORT} QDRANT_URL: http://host.docker.internal:6333 diff --git a/src/compute/payload.rs b/src/compute/payload.rs index e84cb82..fd2b836 100644 --- a/src/compute/payload.rs +++ b/src/compute/payload.rs @@ -45,13 +45,13 
@@ pub struct TaskRequestPayload { } impl TaskRequestPayload { - pub fn new(input: T, filter: BloomFilter, time_ns: u128) -> Self { + pub fn new(input: T, filter: BloomFilter, time_ns: u128, public_key: Option) -> Self { Self { task_id: Uuid::new_v4().into(), deadline: get_current_time_nanos() + time_ns, input, filter: filter.into(), - public_key: "32".to_string(), + public_key: public_key.unwrap_or_default(), } } } diff --git a/src/node.rs b/src/node.rs index f2883d8..c9e0be7 100644 --- a/src/node.rs +++ b/src/node.rs @@ -215,6 +215,7 @@ impl DriaComputeNode { /// Given a list of messages, this function: /// /// - parses them into their respective payloads + /// - checks the signatures (if `signed = true`) w.r.t admin public key /// - filters out past-deadline & non-selected (with the Bloom Filter) tasks /// - sorts the tasks by their deadline pub fn parse_messages(&self, messages: Vec, signed: bool) -> Vec> diff --git a/src/workers/mod.rs b/src/workers/mod.rs index e5239ba..05871e0 100644 --- a/src/workers/mod.rs +++ b/src/workers/mod.rs @@ -2,3 +2,4 @@ pub mod diagnostic; pub mod heartbeat; pub mod search_python; pub mod synthesis; +pub mod workflow; diff --git a/src/workers/workflow.rs b/src/workers/workflow.rs new file mode 100644 index 0000000..fe7fc2b --- /dev/null +++ b/src/workers/workflow.rs @@ -0,0 +1,86 @@ +use ollama_workflows::{Executor, Model, ProgramMemory, Workflow}; +use std::sync::Arc; +use std::time::Duration; + +use crate::node::DriaComputeNode; + +pub fn workflow_worker( + node: Arc, + topic: &'static str, + sleep_amount: Duration, + model: Option, +) -> tokio::task::JoinHandle<()> { + let model = if let Some(model) = model { + Model::try_from(model).unwrap_or_else(|model| { + log::error!("Invalid model provided: {}, defaulting.", model); + Model::default() + }) + } else { + Model::default() + }; + + // this ID is given in the workflow itself, but within Dria we always + // use "final_result" for this ID. 
+ let final_result_id = "final_result".to_string(); + + tokio::spawn(async move { + let exe = Executor::new(model); + + node.subscribe_topic(topic).await; + + loop { + tokio::select! { + _ = node.cancellation.cancelled() => { + if let Err(e) = node.unsubscribe_topic(topic).await { + log::error!("Error unsubscribing from {}: {}\nContinuing anyway.", topic, e); + } + break; + } + _ = tokio::time::sleep(sleep_amount) => { + let tasks = match node.process_topic(topic, true).await { + Ok(messages) => { + if messages.is_empty() { + continue; + } + node.parse_messages::(messages, true) + } + Err(e) => { + log::error!("Error processing topic {}: {}", topic, e); + continue; + } + }; + if tasks.is_empty() { + log::info!("No {} tasks.", topic); + } else { + node.set_busy(true); + log::info!("Processing {} {} tasks.", tasks.len(), topic); + for task in &tasks { + log::debug!("Task ID: {}", task.task_id); + } + + for task in tasks { + let mut memory = ProgramMemory::new(); + exe.execute(None, task.input, &mut memory).await; + + let result = match memory.read(&final_result_id) { + Some(entry) => entry.to_string(), + None => { + log::error!("No final result found in memory for task {}", task.task_id); + continue; + }, + + }; + if let Err(e) = node.send_task_result(&task.task_id, &task.public_key, result).await { + log::error!("Error sending task result: {}", e); + }; + } + + node.set_busy(false); + } + + + } + } + } + }) +} diff --git a/tests/mock_messages_test.rs b/tests/mock_messages_test.rs index 647ec9e..8c283df 100644 --- a/tests/mock_messages_test.rs +++ b/tests/mock_messages_test.rs @@ -24,7 +24,7 @@ mod mock_messages_test { let mut filter = FilterBuilder::new(128, 0.01).build_bloom_filter(); filter.add(&node.address()); - let payload_tasked = TaskRequestPayload::new(input.clone(), filter, time); + let payload_tasked = TaskRequestPayload::new(input.clone(), filter, time, None); let payload_str = serde_json::to_string(&payload_tasked).unwrap(); 
messages.push(WakuMessage::new(payload_str, topic)); @@ -32,7 +32,7 @@ mod mock_messages_test { let mut filter = FilterBuilder::new(128, 0.01).build_bloom_filter(); filter.add(&Uuid::new_v4().to_string().as_bytes()); // something dummy - let payload_not_tasked = TaskRequestPayload::new(input, filter, time); + let payload_not_tasked = TaskRequestPayload::new(input, filter, time, None); let payload_str = serde_json::to_string(&payload_not_tasked).unwrap(); messages.push(WakuMessage::new(payload_str, topic)); From 3bb7a3f43f82c624f5e03325a7fd68543810bac6 Mon Sep 17 00:00:00 2001 From: erhant Date: Fri, 14 Jun 2024 12:43:27 +0300 Subject: [PATCH 02/29] added workflow test --- Cargo.lock | 21 +++++++-------- Cargo.toml | 2 -- Makefile | 4 --- src/workers/workflow.rs | 9 +++---- tests/compute_test.rs | 60 ++++++++++++++++++++++++++++++++++++++--- 5 files changed, 71 insertions(+), 25 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1c93d91..1986e3e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2185,35 +2185,34 @@ dependencies = [ [[package]] name = "ollama-rs" version = "0.1.9" -source = "git+https://github.com/pepperoni21/ollama-rs.git?branch=master#56e8157d98d4185bc171fe9468d3d09bc56e9dd3" +source = "git+https://github.com/andthattoo/ollama-rs?branch=master#426b6d698a58a737f145054cd12555658b5e5e4b" dependencies = [ + "async-trait", + "log", + "regex", "reqwest 0.12.4", + "scraper 0.19.0", "serde", "serde_json", + "text-splitter", "url", ] [[package]] name = "ollama-rs" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "255252ec57e13d2d6ae074c7b7cd8c004d17dafb1e03f954ba2fd5cc226f8f49" +version = "0.1.9" +source = "git+https://github.com/pepperoni21/ollama-rs.git?branch=master#56e8157d98d4185bc171fe9468d3d09bc56e9dd3" dependencies = [ - "async-trait", - "log", - "regex", "reqwest 0.12.4", - "scraper 0.19.0", "serde", "serde_json", - "text-splitter", "url", ] [[package]] name = "ollama-workflows" version = "0.1.0" 
-source = "git+https://github.com/andthattoo/ollama-workflows?branch=main#fe24157a0997f6a1966b6da4751ee556cd21c233" +source = "git+https://github.com/andthattoo/ollama-workflows?branch=main#eeac2a6c104d9f46794995511c09ce58e204e2b6" dependencies = [ "async-trait", "colored", @@ -2222,7 +2221,7 @@ dependencies = [ "html2text", "langchain-rust", "log", - "ollama-rs 0.2.0", + "ollama-rs 0.1.9 (git+https://github.com/andthattoo/ollama-rs?branch=master)", "parking_lot", "rand 0.8.5", "regex", diff --git a/Cargo.toml b/Cargo.toml index 2956688..dbeaa7e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,8 +10,6 @@ default = [] # test features waku_test = [] -ollama_test = [] -search_python_test = [] [dependencies] tokio-util = { version = "0.7.10", features = ["rt"] } diff --git a/Makefile b/Makefile index 7d09d67..1105604 100644 --- a/Makefile +++ b/Makefile @@ -26,10 +26,6 @@ build-all: test: cargo test -.PHONY: test-ollama # | Run Ollama integration tests only -test-ollama: - cargo test ollama_test --features=ollama_test - .PHONY: test-waku # | Run Waku integration tests only test-waku: cargo test waku_test --features=waku_test diff --git a/src/workers/workflow.rs b/src/workers/workflow.rs index fe7fc2b..accbe79 100644 --- a/src/workers/workflow.rs +++ b/src/workers/workflow.rs @@ -10,6 +10,7 @@ pub fn workflow_worker( sleep_amount: Duration, model: Option, ) -> tokio::task::JoinHandle<()> { + // TODO: decide the model based on workflow let model = if let Some(model) = model { Model::try_from(model).unwrap_or_else(|model| { log::error!("Invalid model provided: {}, defaulting.", model); @@ -18,14 +19,13 @@ pub fn workflow_worker( } else { Model::default() }; + log::info!("Using model: {:?}", model); // this ID is given in the workflow itself, but within Dria we always // use "final_result" for this ID. 
let final_result_id = "final_result".to_string(); tokio::spawn(async move { - let exe = Executor::new(model); - node.subscribe_topic(topic).await; loop { @@ -59,6 +59,7 @@ pub fn workflow_worker( } for task in tasks { + let exe = Executor::new(model.clone()); // TODO: model shall be workflow specific let mut memory = ProgramMemory::new(); exe.execute(None, task.input, &mut memory).await; @@ -68,8 +69,8 @@ pub fn workflow_worker( log::error!("No final result found in memory for task {}", task.task_id); continue; }, - }; + if let Err(e) = node.send_task_result(&task.task_id, &task.public_key, result).await { log::error!("Error sending task result: {}", e); }; @@ -77,8 +78,6 @@ pub fn workflow_worker( node.set_busy(false); } - - } } } diff --git a/tests/compute_test.rs b/tests/compute_test.rs index cec9e3c..57b9d2c 100644 --- a/tests/compute_test.rs +++ b/tests/compute_test.rs @@ -3,10 +3,10 @@ mod compute_test { use dkn_compute::compute::{llm::ollama::create_ollama, search_python::SearchPythonClient}; use langchain_rust::{language_models::llm::LLM, llm::client::Ollama}; + use ollama_workflows::{Entry, Executor, Model, ProgramMemory, Workflow}; use std::env; use tokio_util::sync::CancellationToken; - #[cfg_attr(test, cfg(feature = "search_python_test"))] #[tokio::test] #[ignore = "run this manually"] async fn test_search_python() { @@ -21,8 +21,8 @@ mod compute_test { println!("Result: {:?}", result); } - #[cfg_attr(test, cfg(feature = "ollama_test"))] #[tokio::test] + #[ignore = "run this manually"] async fn test_ollama_prompt() { let model = "orca-mini".to_string(); let ollama = Ollama::default().with_model(model); @@ -39,8 +39,8 @@ mod compute_test { println!("Prompt: {}\n\nResponse:{}", prompt, response); } - #[cfg_attr(test, cfg(feature = "ollama_test"))] #[tokio::test] + #[ignore = "run this manually"] async fn test_ollama_bad_model() { let model = "thismodeldoesnotexistlol".to_string(); let setup_res = create_ollama(CancellationToken::default(), model).await; 
@@ -49,4 +49,58 @@ mod compute_test { "Should give error due to non-existing model." ); } + + #[tokio::test] + #[ignore = "run this manually"] + async fn test_workflow() { + let workflow = r#"{ + "name": "Simple", + "description": "This is a simple workflow", + "config": { + "max_steps": 5, + "max_time": 100, + "tools": [] + }, + "tasks":[ + { + "id": "A", + "name": "Random Poem", + "description": "Writes a poem about Kapadokya.", + "prompt": "Please write a poem about Kapadokya.", + "inputs":[], + "operator": "generation", + "outputs": [ + { + "type": "write", + "key": "final_result", + "value": "__result" + } + ] + }, + { + "id": "__end", + "name": "end", + "description": "End of the task", + "prompt": "End of the task", + "inputs": [], + "operator": "end", + "outputs": [] + } + ], + "steps":[ + { + "source":"A", + "target":"end" + } + ] +}"#; + let workflow: Workflow = serde_json::from_str(workflow).unwrap(); + let exe = Executor::new(Model::Phi3Mini); + let mut memory = ProgramMemory::new(); + + exe.execute(None, workflow, &mut memory).await; + + let result = memory.read(&"final_result".to_string()).unwrap(); + println!("Result: {}", result); + } } From db25b55a5cb1a76ca1efe363cb6905ae4321a292 Mon Sep 17 00:00:00 2001 From: erhant Date: Mon, 17 Jun 2024 20:48:47 +0300 Subject: [PATCH 03/29] read models from env, remove other workers --- .env.example | 21 +++---- compose.yml | 38 ------------- src/compute/mod.rs | 1 - src/compute/search_python.rs | 65 ---------------------- src/config/constants.rs | 12 +--- src/config/mod.rs | 49 +++++++++------- src/config/models.rs | 36 ++++++++++++ src/config/tasks.rs | 68 ----------------------- src/main.rs | 33 +++-------- src/node.rs | 20 +++---- src/workers/heartbeat.rs | 17 ++---- src/workers/mod.rs | 2 - src/workers/search_python.rs | 67 ---------------------- src/workers/synthesis.rs | 105 ----------------------------------- src/workers/workflow.rs | 38 +++++++------ tests/compute_test.rs | 18 +----- 
tests/mock_messages_test.rs | 2 +- 17 files changed, 118 insertions(+), 474 deletions(-) delete mode 100644 src/compute/search_python.rs create mode 100644 src/config/models.rs delete mode 100644 src/config/tasks.rs delete mode 100644 src/workers/search_python.rs delete mode 100644 src/workers/synthesis.rs diff --git a/.env.example b/.env.example index b2d0061..0404c04 100644 --- a/.env.example +++ b/.env.example @@ -10,19 +10,12 @@ WAKU_LOG_LEVEL="DEBUG" # INFO | DEBUG | ERROR DKN_WALLET_SECRET_KEY=${ETH_TESTNET_KEY} # Dria uses the same key as Waku DKN_ADMIN_PUBLIC_KEY= # Public key of Dria (33-byte compressed, hexadecimal). DKN_TASKS=synthesis # task1,task2,task3,... (comma separated, case-insensitive) -DKN_SYNTHESIS_MODEL_PROVIDER=Ollama # Ollama | OpenAI -DKN_SYNTHESIS_MODEL_NAME=phi3 # model name -DKN_LOG_LEVEL=info # maps to RUST_LOG +DKN_MODEL_PROVIDERS=openai,ollama # provider1,provider2,provider3,... (comma separated, case-insensitive) +DKN_LOG_LEVEL=info # info | debug | error ## OLLAMA ## -OLLAMA_HOST="http://127.0.0.1" # default -OLLAMA_PORT="11434" # default -OLLAMA_KEEP_ALIVE="5m" # duration of model's life in memory - -## SEARCH AGENT ## -AGENT_MODEL_PROVIDER="Ollama" # OpenAI | Claude | Ollama -AGENT_MODEL_NAME="phi3" -ANTHROPIC_API_KEY="api-key" -OPENAI_API_KEY="api-key" -SERPER_API_KEY="api-key" -BROWSERLESS_TOKEN="token" +OLLAMA_HOST="http://127.0.0.1" +OLLAMA_PORT="11434" + +## Open AI ## +OPENAI_API_KEY= diff --git a/compose.yml b/compose.yml index b109b24..572fa58 100644 --- a/compose.yml +++ b/compose.yml @@ -96,43 +96,5 @@ services: capabilities: [gpu] profiles: [ollama-cuda] - # Qdrant VectorDB for Search Agent - qdrant: - image: qdrant/qdrant - ports: - - "6333:6333" - - "6334:6334" - volumes: - - ./qdrant_storage:/qdrant/storage:z - profiles: [search-python] - - # Browser automation for Search Agent - browserless: - image: ghcr.io/browserless/chromium - environment: - - TOKEN=${BROWSERLESS_TOKEN} - ports: - - "3030:3000" - profiles: 
[search-python] - - # Dria Search Agent (Python) - search-agent: - image: firstbatch/dria-searching-agent:latest - ports: - - 5059:5000 - env_file: - - .env.compose - environment: - ANTHROPIC_API_KEY: ${ANTHROPIC_API_KEY} - OPENAI_API_KEY: ${OPENAI_API_KEY} - SERPER_API_KEY: ${SERPER_API_KEY} - BROWSERLESS_TOKEN: ${BROWSERLESS_TOKEN} - AGENT_MAX_ITER: 3 - - OLLAMA_URL: ${OLLAMA_HOST}:${OLLAMA_PORT} - QDRANT_URL: http://host.docker.internal:6333 - BROWSERLESS_URL: http://host.docker.internal:3000 - profiles: [search-python] - volumes: ollama: diff --git a/src/compute/mod.rs b/src/compute/mod.rs index 152e424..928ede8 100644 --- a/src/compute/mod.rs +++ b/src/compute/mod.rs @@ -1,3 +1,2 @@ pub mod llm; pub mod payload; -pub mod search_python; diff --git a/src/compute/search_python.rs b/src/compute/search_python.rs deleted file mode 100644 index 1ea20d8..0000000 --- a/src/compute/search_python.rs +++ /dev/null @@ -1,65 +0,0 @@ -use crate::{config::constants::*, utils::http::BaseClient}; -use serde_json::json; -use std::env; - -const DEFAULT_SEARCH_AGENT_URL: &str = "http://localhost:5059"; - -/// A wrapper for the Dria Search agent in Python: . -pub struct SearchPythonClient { - pub client: BaseClient, - /// URL at which the Python search agent is running. - pub url: String, - /// Enables or disables manager, see more [here](https://docs.crewai.com/how-to/Hierarchical/). 
- pub with_manager: bool, -} - -impl Default for SearchPythonClient { - fn default() -> Self { - Self::new() - } -} - -impl SearchPythonClient { - pub fn new() -> Self { - let url = env::var(SEARCH_AGENT_URL).unwrap_or(DEFAULT_SEARCH_AGENT_URL.to_string()); - let with_manager = matches!( - env::var(SEARCH_AGENT_MANAGER) - .unwrap_or_default() - .to_lowercase() - .as_str(), - "1" | "true" | "yes" - ); - - let client = BaseClient::new(url.to_string()); - - Self { - client, - url, - with_manager, - } - } - - pub async fn search(&self, query: String) -> Result { - let body = json!({ - "query": query, - "with_manager": self.with_manager, - }); - let r = match self.client.post("search", body).await { - Ok(response) => response, - Err(e) => { - log::error!("Error sending search query to search-agent-python: {}", e); - return Err(e); - } - }; - - let search_result = match r.text().await { - Ok(response) => response, - Err(e) => { - log::error!("Error parsing search-agent-python response: {}", e); - return Err(e); - } - }; - - Ok(search_result) - } -} diff --git a/src/config/constants.rs b/src/config/constants.rs index 40556a8..cec807a 100644 --- a/src/config/constants.rs +++ b/src/config/constants.rs @@ -2,6 +2,7 @@ use hex_literal::hex; //////////////////// DKN Compute Node //////////////////// pub const DKN_TASKS: &str = "DKN_TASKS"; +pub const DKN_MODELS: &str = "DKN_MODELS"; pub const DKN_ADMIN_PUBLIC_KEY: &str = "DKN_ADMIN_PUBLIC_KEY"; pub const DKN_WALLET_SECRET_KEY: &str = "DKN_WALLET_SECRET_KEY"; pub const DKN_WALLET_PUBLIC_KEY: &str = "DKN_WALLET_PUBLIC_KEY"; @@ -14,17 +15,6 @@ pub const DEFAULT_DKN_ADMIN_PUBLIC_KEY: &[u8; 33] = pub const DEFAULT_DKN_WALLET_SECRET_KEY: &[u8; 32] = &hex!("6e6f64656e6f64656e6f64656e6f64656e6f64656e6f64656e6f64656e6f6465"); -//////////////////// Task: Synthesis //////////////////// -pub const DKN_SYNTHESIS_MODEL_PROVIDER: &str = "DKN_SYNTHESIS_MODEL_PROVIDER"; -pub const DKN_SYNTHESIS_MODEL_NAME: &str = "DKN_SYNTHESIS_MODEL_NAME"; 
-pub const DEFAULT_DKN_SYNTHESIS_MODEL_PROVIDER: &str = "Ollama"; -pub const DEFAULT_DKN_SYNTHESIS_MODEL_NAME_OPENAI: &str = "gtp-3.5-turbo"; -pub const DEFAULT_DKN_SYNTHESIS_MODEL_NAME_OLLAMA: &str = "phi3"; - -///////////////////// Task: Search /////////////////////// -pub const SEARCH_AGENT_URL: &str = "SEARCH_AGENT_URL"; -pub const SEARCH_AGENT_MANAGER: &str = "SEARCH_AGENT_MANAGER"; - //////////////////// Provider: Ollama //////////////////// pub const OLLAMA_HOST: &str = "OLLAMA_HOST"; pub const OLLAMA_PORT: &str = "OLLAMA_PORT"; diff --git a/src/config/mod.rs b/src/config/mod.rs index 37f25e8..eb6342c 100644 --- a/src/config/mod.rs +++ b/src/config/mod.rs @@ -1,23 +1,27 @@ pub mod constants; -pub mod tasks; +pub mod models; use crate::utils::crypto::to_address; use constants::*; use ecies::PublicKey; use libsecp256k1::{PublicKeyFormat, SecretKey}; +use models::parse_dkn_models; +use ollama_workflows::Model; use std::env; #[allow(non_snake_case)] #[derive(Debug, Clone)] pub struct DriaComputeNodeConfig { /// Wallet secret/private key. - pub(crate) DKN_WALLET_SECRET_KEY: SecretKey, + pub(crate) secret_key: SecretKey, /// Wallet public key, derived from the secret key. - pub DKN_WALLET_PUBLIC_KEY: PublicKey, + pub public_key: PublicKey, /// Wallet address, derived from the public key. - pub DKN_WALLET_ADDRESS: [u8; 20], + pub address: [u8; 20], /// Admin public key, used for message authenticity. - pub DKN_ADMIN_PUBLIC_KEY: PublicKey, + pub admin_public_key: PublicKey, + /// Available models for the node. 
+ pub models: Vec, } impl DriaComputeNodeConfig { @@ -31,8 +35,17 @@ impl DriaComputeNodeConfig { Err(_) => SecretKey::parse(DEFAULT_DKN_WALLET_SECRET_KEY) .expect("Should decrypt default secret key."), }; + log::info!( + "Node Secret Key: 0x{}{}", + hex::encode(&secret_key.serialize()[0..1]), + ".".repeat(64) + ); let public_key = PublicKey::from_secret_key(&secret_key); + log::info!( + "Node Public Key: 0x{}", + hex::encode(public_key.serialize_compressed()) + ); let admin_public_key = PublicKey::parse_slice( hex::decode(env::var(DKN_ADMIN_PUBLIC_KEY).unwrap_or_default()) @@ -44,30 +57,26 @@ impl DriaComputeNodeConfig { PublicKey::parse_compressed(DEFAULT_DKN_ADMIN_PUBLIC_KEY) .expect("Should decrypt default Admin public key."), ); - - let address = to_address(&public_key); - log::info!( "Admin Public Key: 0x{}", hex::encode(admin_public_key.serialize_compressed()) ); + let address = to_address(&public_key); log::info!("Node Address: 0x{}", hex::encode(address)); + + let models = parse_dkn_models(env::var(DKN_MODELS).unwrap_or_default()); log::info!( - "Node Public Key: 0x{}", - hex::encode(public_key.serialize_compressed()) - ); - log::info!( - "Node Secret Key: 0x{}{}", - hex::encode(&secret_key.serialize()[0..1]), - ".".repeat(64) + "Models: {:?}", + models.iter().map(|m| m.to_string()).collect::>() ); Self { - DKN_ADMIN_PUBLIC_KEY: admin_public_key, - DKN_WALLET_SECRET_KEY: secret_key, - DKN_WALLET_PUBLIC_KEY: public_key, - DKN_WALLET_ADDRESS: address, + admin_public_key, + secret_key, + public_key, + address, + models, } } } @@ -90,7 +99,7 @@ mod tests { ); let cfg = DriaComputeNodeConfig::new(); assert_eq!( - hex::encode(cfg.DKN_WALLET_ADDRESS), + hex::encode(cfg.address), "1f56f6131705fbf19371122c80d7a2d40fcf9a68" ); } diff --git a/src/config/models.rs b/src/config/models.rs new file mode 100644 index 0000000..d512273 --- /dev/null +++ b/src/config/models.rs @@ -0,0 +1,36 @@ +use ollama_workflows::Model; + +pub fn parse_dkn_models(models_str: String) -> 
Vec { + let tasks: Vec = models_str + .split(',') + .filter_map(|s| { + let s = s.trim().to_lowercase(); + match Model::try_from(s) { + Ok(model) => Some(model), + Err(e) => { + log::warn!("Invalid model: {}", e); + None + } + } + }) + .collect(); + + tasks +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_parser_1() { + let models = parse_dkn_models("idontexist,i dont either,i332287648762".to_string()); + assert_eq!(models.len(), 0); + } + + #[test] + fn test_parser_2() { + let models = parse_dkn_models("phi3:3.8b,phi3:14b-medium-4k-instruct-q4_1".to_string()); + assert_eq!(models.len(), 2); + } +} diff --git a/src/config/tasks.rs b/src/config/tasks.rs deleted file mode 100644 index 3f96ac2..0000000 --- a/src/config/tasks.rs +++ /dev/null @@ -1,68 +0,0 @@ -use std::env; - -#[derive(Debug, Clone)] -pub struct DriaComputeNodeTasks { - pub synthesis: bool, - pub search: bool, -} - -const TASK_SYNTHESIS: &str = "synthesis"; -const TASK_SEARCH: &str = "search"; - -impl Default for DriaComputeNodeTasks { - fn default() -> Self { - Self { - synthesis: true, - search: true, - } - } -} - -impl DriaComputeNodeTasks { - pub fn new() -> Self { - let tasks_str = env::var("DKN_TASKS").unwrap_or_default(); - Self::parse_str(tasks_str) - } - /// Parses a given string, expecting it to be a comma-separated list of task names, such as - /// `synthesis,search`. 
- pub fn parse_str(vec: String) -> Self { - let mut synthesis = false; - let mut search = false; - - let tasks: Vec<&str> = vec.split(',').collect(); - for task in tasks { - match task.trim().to_lowercase().as_str() { - TASK_SYNTHESIS => synthesis = true, - TASK_SEARCH => search = true, - _ => { - log::warn!("Unknown task: {}", task); - } - } - } - - Self { synthesis, search } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_parsers() { - env::set_var("DKN_TASKS", "fsfdshk,SynthEsis,fkdshfjsdk"); - let tasks = DriaComputeNodeTasks::new(); - assert!(tasks.synthesis); - assert!(!tasks.search); - - env::set_var("DKN_TASKS", "fsfdshk, fdgsdg, search "); - let tasks = DriaComputeNodeTasks::new(); - assert!(!tasks.synthesis); - assert!(tasks.search); - - env::set_var("DKN_TASKS", ",,,"); - let tasks = DriaComputeNodeTasks::new(); - assert!(!tasks.synthesis); - assert!(!tasks.search); - } -} diff --git a/src/main.rs b/src/main.rs index 81dc1e7..fe2a45a 100644 --- a/src/main.rs +++ b/src/main.rs @@ -3,15 +3,12 @@ use std::sync::Arc; use tokio_util::{sync::CancellationToken, task::TaskTracker}; use dkn_compute::{ - config::{constants::*, tasks::DriaComputeNodeTasks, DriaComputeNodeConfig}, - node::DriaComputeNode, - utils::wait_for_termination, + config::DriaComputeNodeConfig, node::DriaComputeNode, utils::wait_for_termination, }; use dkn_compute::workers::diagnostic::*; use dkn_compute::workers::heartbeat::*; -use dkn_compute::workers::search_python::*; -use dkn_compute::workers::synthesis::*; +use dkn_compute::workers::workflow::*; #[tokio::main] async fn main() -> Result<(), Box> { @@ -22,13 +19,11 @@ async fn main() -> Result<(), Box> { const VERSION: &str = env!("CARGO_PKG_VERSION"); log::info!("Using Dria Compute Node v{}", VERSION); - let tasks = DriaComputeNodeTasks::new(); let config = DriaComputeNodeConfig::new(); let cancellation = CancellationToken::new(); let node = Arc::new(DriaComputeNode::new(config, cancellation.clone())); 
log::info!("Starting workers..."); - log::info!("{:?}", tasks); let tracker = TaskTracker::new(); tracker.spawn(diagnostic_worker( @@ -42,25 +37,11 @@ async fn main() -> Result<(), Box> { tokio::time::Duration::from_millis(1000), )); - if tasks.synthesis { - tracker.spawn(synthesis_worker( - node.clone(), - "synthesis", - tokio::time::Duration::from_millis(1000), - env::var(DKN_SYNTHESIS_MODEL_PROVIDER).ok(), - env::var(DKN_SYNTHESIS_MODEL_NAME).ok(), - )); - } - - if tasks.search { - // TODO: add a feature / env var to enable/disable search_python - // and use search_rust instead - tracker.spawn(search_worker( - node.clone(), - "search_python", - tokio::time::Duration::from_millis(1000), - )); - } + tracker.spawn(workflow_worker( + node.clone(), + "workflow", + tokio::time::Duration::from_millis(1000), + )); // close tracker after spawning everything tracker.close(); diff --git a/src/node.rs b/src/node.rs index c9e0be7..bffb5b0 100644 --- a/src/node.rs +++ b/src/node.rs @@ -40,16 +40,10 @@ impl DriaComputeNode { } } - /// Returns the wallet address of the node. - #[inline] - pub fn address(&self) -> [u8; 20] { - self.config.DKN_WALLET_ADDRESS - } - /// Shorthand to sign a digest with node's secret key and return signature & recovery id. #[inline] pub fn sign(&self, message: &Message) -> (Signature, RecoveryId) { - sign(message, &self.config.DKN_WALLET_SECRET_KEY) + sign(message, &self.config.secret_key) } /// Returns the state of the node, whether it is busy or not. 
@@ -69,7 +63,7 @@ impl DriaComputeNode { #[inline] pub fn sign_bytes(&self, message: &[u8; 32]) -> String { let message = Message::parse(message); - let (signature, recid) = sign(&message, &self.config.DKN_WALLET_SECRET_KEY); + let (signature, recid) = sign(&message, &self.config.secret_key); format!( "{}{}", @@ -85,7 +79,7 @@ impl DriaComputeNode { pub fn is_tasked(&self, filter: &FilterPayload) -> NodeResult { let filter = BloomFilter::try_from(filter)?; - Ok(filter.contains(&self.address())) + Ok(filter.contains(&self.config.address)) } /// Creates the payload of a computation result, as per Dria Whitepaper section 5.1 algorithm 2: @@ -101,7 +95,7 @@ impl DriaComputeNode { // sign result let result_digest: [u8; 32] = sha256hash(result.as_ref()); let result_msg = Message::parse(&result_digest); - let (signature, recid) = sign(&result_msg, &self.config.DKN_WALLET_SECRET_KEY); + let (signature, recid) = sign(&result_msg, &self.config.secret_key); let signature: [u8; 64] = signature.serialize(); let recid: [u8; 1] = [recid.serialize()]; @@ -198,7 +192,7 @@ impl DriaComputeNode { if signed { messages.retain(|message| { message - .is_signed(&self.config.DKN_ADMIN_PUBLIC_KEY) + .is_signed(&self.config.admin_public_key) .unwrap_or_else(|e| { log::warn!("Could not verify message signature: {}", e); false @@ -344,7 +338,7 @@ mod tests { let result_digest = sha256hash(result); let message = Message::parse(&result_digest); assert!( - verify(&message, &signature, &node.config.DKN_WALLET_PUBLIC_KEY), + verify(&message, &signature, &node.config.public_key), "Could not verify" ); @@ -352,7 +346,7 @@ mod tests { let recovered_public_key = libsecp256k1::recover(&message, &signature, &recid).expect("Could not recover"); assert_eq!( - node.config.DKN_WALLET_PUBLIC_KEY, recovered_public_key, + node.config.public_key, recovered_public_key, "Public key mismatch" ); diff --git a/src/workers/heartbeat.rs b/src/workers/heartbeat.rs index c1f31b5..68608e6 100644 --- 
a/src/workers/heartbeat.rs +++ b/src/workers/heartbeat.rs @@ -23,6 +23,9 @@ pub fn heartbeat_worker( tokio::spawn(async move { node.subscribe_topic(topic).await; + // TODO: respond with models_str + // let models_str = serde_json::to_string(&node.config.models).unwrap(); + loop { tokio::select! { _ = node.cancellation.cancelled() => { @@ -33,9 +36,7 @@ pub fn heartbeat_worker( } _ = tokio::time::sleep(sleep_amount) => { let messages = match node.process_topic(topic, true).await { - Ok(messages) => { - messages - }, + Ok(messages) => messages, Err(e) => { log::error!("Error processing topic {}: {}", topic, e); continue; @@ -49,9 +50,7 @@ pub fn heartbeat_worker( continue; } - log::info!("Received heartbeat: {}", message); - let message = match message.parse_payload::(true) { Ok(body) => { let uuid = body.uuid; @@ -68,11 +67,7 @@ pub fn heartbeat_worker( if let Err(e) = node.send_message_once(message).await { log::error!("Error sending message: {}", e); } - } - - - } } } @@ -132,11 +127,11 @@ mod tests { recover(&heartbeat_message, &heartbeat_signature, &heartbeat_recid) .expect("Could not recover"); assert_eq!( - node.config.DKN_WALLET_PUBLIC_KEY, recovered_public_key, + node.config.public_key, recovered_public_key, "Public key mismatch" ); let address = to_address(&recovered_public_key); - assert_eq!(address, node.address(), "Address mismatch"); + assert_eq!(address, node.config.address, "Address mismatch"); // admin node assigns the task to the compute node via Bloom Filter let mut bloom = FilterBuilder::new(100, 0.01).build_bloom_filter(); diff --git a/src/workers/mod.rs b/src/workers/mod.rs index 05871e0..10560b0 100644 --- a/src/workers/mod.rs +++ b/src/workers/mod.rs @@ -1,5 +1,3 @@ pub mod diagnostic; pub mod heartbeat; -pub mod search_python; -pub mod synthesis; pub mod workflow; diff --git a/src/workers/search_python.rs b/src/workers/search_python.rs deleted file mode 100644 index 3c9a61f..0000000 --- a/src/workers/search_python.rs +++ /dev/null @@ -1,67 
+0,0 @@ -use std::sync::Arc; -use std::time::Duration; - -use crate::{compute::search_python::SearchPythonClient, node::DriaComputeNode}; - -/// # Search -/// -/// A search task tells the agent to search an information on the Web with a set of tools provided, such -/// as web scrapers and search engine APIs. -pub fn search_worker( - node: Arc, - topic: &'static str, - sleep_amount: Duration, -) -> tokio::task::JoinHandle<()> { - let search_client = SearchPythonClient::new(); - - tokio::spawn(async move { - node.subscribe_topic(topic).await; - - loop { - tokio::select! { - _ = node.cancellation.cancelled() => { - if let Err(e) = node.unsubscribe_topic(topic).await { - log::error!("Error unsubscribing from {}: {}\nContinuing anyway.", topic, e); - } - break; - } - _ = tokio::time::sleep(sleep_amount) => { - let tasks = match node.process_topic(topic, true).await { - Ok(messages) => { - if messages.is_empty() { - continue; - } - node.parse_messages::(messages, true) - } - Err(e) => { - log::error!("Error processing topic {}: {}", topic, e); - continue; - } - }; - - node.set_busy(true); - log::info!("Received {} {} tasks.", tasks.len(), topic); - for task in &tasks { - log::debug!("Task ID: {}", task.task_id); - } - - for task in tasks { - let result = match search_client.search(task.input).await { - Ok(result) => result, - Err(e) => { - log::error!("Error searching: {}", e); - continue; - } - }; - - if let Err(e) = node.send_task_result(&task.task_id, &task.public_key, result).await { - log::error!("Error sending task result: {}", e); - }; - } - - node.set_busy(false); - } - } - } - }) -} diff --git a/src/workers/synthesis.rs b/src/workers/synthesis.rs deleted file mode 100644 index 2a98403..0000000 --- a/src/workers/synthesis.rs +++ /dev/null @@ -1,105 +0,0 @@ -use std::sync::Arc; -use std::time::Duration; - -use crate::{ - compute::llm::common::{create_llm, ModelProvider}, - config::constants::*, - node::DriaComputeNode, -}; - -/// # Synthesis -/// -/// A synthesis 
task is the task of putting a prompt to an LLM and obtaining many results, essentially growing the number of data points in a dataset, -/// hence creating synthetic data. -pub fn synthesis_worker( - node: Arc, - topic: &'static str, - sleep_amount: Duration, - model_provider: Option, - model_name: Option, -) -> tokio::task::JoinHandle<()> { - tokio::spawn(async move { - let (model_provider, model_name) = parse_model_info(model_provider, model_name); - log::info!("Using {} with {}", model_provider, model_name); - - let llm = match create_llm(model_provider, model_name, node.cancellation.clone()).await { - Ok(llm) => llm, - Err(e) => { - log::error!("Could not create LLM: {}, exiting worker.", e); - return; - } - }; - - node.subscribe_topic(topic).await; - - loop { - tokio::select! { - _ = node.cancellation.cancelled() => { - if let Err(e) = node.unsubscribe_topic(topic).await { - log::error!("Error unsubscribing from {}: {}\nContinuing anyway.", topic, e); - } - break; - } - _ = tokio::time::sleep(sleep_amount) => { - let tasks = match node.process_topic(topic, true).await { - Ok(messages) => { - if messages.is_empty() { - continue; - } - node.parse_messages::(messages, true) - } - Err(e) => { - log::error!("Error processing topic {}: {}", topic, e); - continue; - } - }; - - node.set_busy(true); - log::info!("Processing {} {} tasks.", tasks.len(), topic); - for task in &tasks { - log::debug!("Task ID: {}", task.task_id); - } - - for task in tasks { - let llm_result = match llm.invoke(&task.input).await { - Ok(result) => result, - Err(e) => { - log::error!("Error generating prompt result: {}", e); - continue; - } - }; - - if let Err(e) = node.send_task_result(&task.task_id, &task.public_key, llm_result).await { - log::error!("Error sending task result: {}", e); - }; - } - - node.set_busy(false); - } - } - } - }) -} - -/// Given a model provier option, and a model name option, return the model provider and model name. 
-/// -/// - If model provider is `None`, it will default. -/// - If model name is `None`, it will default to some model name with respect ot the model provider. -pub fn parse_model_info( - model_provider: Option, - model_name: Option, -) -> (ModelProvider, String) { - let model_provider: ModelProvider = model_provider - .unwrap_or(DEFAULT_DKN_SYNTHESIS_MODEL_PROVIDER.to_string()) - .into(); - - let model_name = model_name.unwrap_or_else(|| { - match &model_provider { - ModelProvider::OpenAI => DEFAULT_DKN_SYNTHESIS_MODEL_NAME_OPENAI.to_string(), - ModelProvider::Ollama => DEFAULT_DKN_SYNTHESIS_MODEL_NAME_OLLAMA.to_string(), - } - .to_string() - }); - - (model_provider, model_name) -} diff --git a/src/workers/workflow.rs b/src/workers/workflow.rs index accbe79..148dab5 100644 --- a/src/workers/workflow.rs +++ b/src/workers/workflow.rs @@ -1,26 +1,22 @@ -use ollama_workflows::{Executor, Model, ProgramMemory, Workflow}; +use ollama_workflows::{Entry, Executor, Model, ProgramMemory, Workflow}; +use serde::Deserialize; use std::sync::Arc; use std::time::Duration; use crate::node::DriaComputeNode; +#[derive(Debug, Deserialize)] +struct WorkflowPayload { + pub(crate) workflow: Workflow, + pub(crate) model: String, + pub(crate) prompt: String, +} + pub fn workflow_worker( node: Arc, topic: &'static str, sleep_amount: Duration, - model: Option, ) -> tokio::task::JoinHandle<()> { - // TODO: decide the model based on workflow - let model = if let Some(model) = model { - Model::try_from(model).unwrap_or_else(|model| { - log::error!("Invalid model provided: {}, defaulting.", model); - Model::default() - }) - } else { - Model::default() - }; - log::info!("Using model: {:?}", model); - // this ID is given in the workflow itself, but within Dria we always // use "final_result" for this ID. 
let final_result_id = "final_result".to_string(); @@ -42,7 +38,7 @@ pub fn workflow_worker( if messages.is_empty() { continue; } - node.parse_messages::(messages, true) + node.parse_messages::(messages, true) } Err(e) => { log::error!("Error processing topic {}: {}", topic, e); @@ -59,10 +55,20 @@ pub fn workflow_worker( } for task in tasks { - let exe = Executor::new(model.clone()); // TODO: model shall be workflow specific + // read model from the task + let model = Model::try_from(task.input.model.clone()).unwrap_or_else(|model| { + log::error!("Invalid model provided: {}, defaulting.", model); + Model::default() + }); + log::info!("Using model {}", model); + + // execute workflow + let executor = Executor::new(model); let mut memory = ProgramMemory::new(); - exe.execute(None, task.input, &mut memory).await; + let entry = Entry::String(task.input.prompt); + executor.execute(Some(&entry), task.input.workflow, &mut memory).await; + // read final result from memory let result = match memory.read(&final_result_id) { Some(entry) => entry.to_string(), None => { diff --git a/tests/compute_test.rs b/tests/compute_test.rs index 57b9d2c..7a149e1 100644 --- a/tests/compute_test.rs +++ b/tests/compute_test.rs @@ -1,26 +1,12 @@ #![allow(unused_imports)] mod compute_test { - use dkn_compute::compute::{llm::ollama::create_ollama, search_python::SearchPythonClient}; + use dkn_compute::compute::llm::ollama::create_ollama; use langchain_rust::{language_models::llm::LLM, llm::client::Ollama}; use ollama_workflows::{Entry, Executor, Model, ProgramMemory, Workflow}; use std::env; use tokio_util::sync::CancellationToken; - #[tokio::test] - #[ignore = "run this manually"] - async fn test_search_python() { - env::set_var("RUST_LOG", "INFO"); - let _ = env_logger::try_init(); - let search_client = SearchPythonClient::new(); - - let result = search_client - .search("Who is the president of the United States?".to_string()) - .await - .expect("should search"); - println!("Result: {:?}", 
result); - } - #[tokio::test] #[ignore = "run this manually"] async fn test_ollama_prompt() { @@ -59,7 +45,7 @@ mod compute_test { "config": { "max_steps": 5, "max_time": 100, - "tools": [] + "tools": [], }, "tasks":[ { diff --git a/tests/mock_messages_test.rs b/tests/mock_messages_test.rs index 8c283df..231564f 100644 --- a/tests/mock_messages_test.rs +++ b/tests/mock_messages_test.rs @@ -22,7 +22,7 @@ mod mock_messages_test { // create filter with your own address let mut filter = FilterBuilder::new(128, 0.01).build_bloom_filter(); - filter.add(&node.address()); + filter.add(&node.config.address); let payload_tasked = TaskRequestPayload::new(input.clone(), filter, time, None); let payload_str = serde_json::to_string(&payload_tasked).unwrap(); From fefcefcdfc0219ac7928d3697369a4c023a3fd21 Mon Sep 17 00:00:00 2001 From: erhant Date: Wed, 19 Jun 2024 14:18:45 +0300 Subject: [PATCH 04/29] rm old workers, change topics for task response --- .env.example | 2 +- Cargo.lock | 2 +- src/compute/llm/common.rs | 61 ------------------------------- src/compute/llm/mod.rs | 1 - src/compute/mod.rs | 4 +- src/config/mod.rs | 12 ++++-- src/config/models.rs | 14 +++---- src/lib.rs | 2 +- src/main.rs | 2 - src/node.rs | 19 ++++++++-- src/utils/mod.rs | 1 + src/{compute => utils}/payload.rs | 0 src/workers/diagnostic.rs | 2 + src/workers/heartbeat.rs | 49 +++++++++++++++---------- src/workers/workflow.rs | 29 ++++++++------- tests/compute_test.rs | 8 +--- tests/mock_messages_test.rs | 2 +- tests/mock_sends_test.rs | 2 +- 18 files changed, 86 insertions(+), 126 deletions(-) delete mode 100644 src/compute/llm/common.rs rename src/{compute => utils}/payload.rs (100%) diff --git a/.env.example b/.env.example index 0404c04..a56087f 100644 --- a/.env.example +++ b/.env.example @@ -10,7 +10,7 @@ WAKU_LOG_LEVEL="DEBUG" # INFO | DEBUG | ERROR DKN_WALLET_SECRET_KEY=${ETH_TESTNET_KEY} # Dria uses the same key as Waku DKN_ADMIN_PUBLIC_KEY= # Public key of Dria (33-byte compressed, hexadecimal). 
DKN_TASKS=synthesis # task1,task2,task3,... (comma separated, case-insensitive) -DKN_MODEL_PROVIDERS=openai,ollama # provider1,provider2,provider3,... (comma separated, case-insensitive) +DKN_MODELS=phi3:3.8b # model1,model2,model3,... (comma separated, case-insensitive) DKN_LOG_LEVEL=info # info | debug | error ## OLLAMA ## diff --git a/Cargo.lock b/Cargo.lock index 1986e3e..c53d0f8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2212,7 +2212,7 @@ dependencies = [ [[package]] name = "ollama-workflows" version = "0.1.0" -source = "git+https://github.com/andthattoo/ollama-workflows?branch=main#eeac2a6c104d9f46794995511c09ce58e204e2b6" +source = "git+https://github.com/andthattoo/ollama-workflows?branch=main#18996ec26adfab02ef1b3397c478e3b068eeeaf1" dependencies = [ "async-trait", "colored", diff --git a/src/compute/llm/common.rs b/src/compute/llm/common.rs deleted file mode 100644 index db9960c..0000000 --- a/src/compute/llm/common.rs +++ /dev/null @@ -1,61 +0,0 @@ -use langchain_rust::language_models::llm::LLM; -use tokio_util::sync::CancellationToken; - -use super::ollama::create_ollama; -use super::openai::create_openai; - -#[derive(Debug, Default)] -pub enum ModelProvider { - #[default] - Ollama, - OpenAI, -} - -impl From for ModelProvider { - fn from(value: String) -> Self { - match value.to_lowercase().as_str().trim() { - "ollama" => Self::Ollama, - "openai" => Self::OpenAI, - _ => { - log::warn!("Unknown LLM type: {}, defaulting.", value); - Self::default() - } - } - } -} - -impl From<&ModelProvider> for String { - fn from(value: &ModelProvider) -> Self { - match value { - ModelProvider::Ollama => "Ollama".to_string(), - ModelProvider::OpenAI => "OpenAI".to_string(), - } - } -} - -impl std::fmt::Display for ModelProvider { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", String::from(self)) - } -} - -/// Creates an LLM of the given type, which is a LangChain object. 
-/// -/// The respective setups of the LLMs are done within this function, -/// e.g. Ollama will pull the model if it does not exist locally. -pub async fn create_llm( - llm: ModelProvider, - model: String, - cancellation: CancellationToken, -) -> Result, String> { - match llm { - ModelProvider::Ollama => { - let client = create_ollama(cancellation, model).await?; - Ok(Box::new(client)) - } - ModelProvider::OpenAI => { - let client = create_openai(model); - Ok(Box::new(client)) - } - } -} diff --git a/src/compute/llm/mod.rs b/src/compute/llm/mod.rs index 016e4b2..3ef32f6 100644 --- a/src/compute/llm/mod.rs +++ b/src/compute/llm/mod.rs @@ -1,3 +1,2 @@ -pub mod common; pub mod ollama; pub mod openai; diff --git a/src/compute/mod.rs b/src/compute/mod.rs index 928ede8..62af56c 100644 --- a/src/compute/mod.rs +++ b/src/compute/mod.rs @@ -1,2 +1,2 @@ -pub mod llm; -pub mod payload; +// pub mod llm; +// pub mod payload; diff --git a/src/config/mod.rs b/src/config/mod.rs index eb6342c..cab8c15 100644 --- a/src/config/mod.rs +++ b/src/config/mod.rs @@ -6,7 +6,7 @@ use constants::*; use ecies::PublicKey; use libsecp256k1::{PublicKeyFormat, SecretKey}; use models::parse_dkn_models; -use ollama_workflows::Model; +use ollama_workflows::{Model, ModelProvider}; use std::env; #[allow(non_snake_case)] @@ -21,7 +21,7 @@ pub struct DriaComputeNodeConfig { /// Admin public key, used for message authenticity. pub admin_public_key: PublicKey, /// Available models for the node. - pub models: Vec, + pub models: Vec<(ModelProvider, Model)>, } impl DriaComputeNodeConfig { @@ -67,8 +67,12 @@ impl DriaComputeNodeConfig { let models = parse_dkn_models(env::var(DKN_MODELS).unwrap_or_default()); log::info!( - "Models: {:?}", - models.iter().map(|m| m.to_string()).collect::>() + "Models: {}", + serde_json::to_string(&models).unwrap_or_default() + ); + assert!( + !models.is_empty(), + "At least one model should be provided in the configuration." 
); Self { diff --git a/src/config/models.rs b/src/config/models.rs index d512273..81069c2 100644 --- a/src/config/models.rs +++ b/src/config/models.rs @@ -1,21 +1,19 @@ -use ollama_workflows::Model; +use ollama_workflows::{Model, ModelProvider}; -pub fn parse_dkn_models(models_str: String) -> Vec { - let tasks: Vec = models_str +pub fn parse_dkn_models(models_str: String) -> Vec<(ModelProvider, Model)> { + models_str .split(',') .filter_map(|s| { let s = s.trim().to_lowercase(); match Model::try_from(s) { - Ok(model) => Some(model), + Ok(model) => Some((model.clone().into(), model)), Err(e) => { - log::warn!("Invalid model: {}", e); + log::warn!("Invalid model: '{}'k", e); None } } }) - .collect(); - - tasks + .collect() } #[cfg(test)] diff --git a/src/lib.rs b/src/lib.rs index 90eb3c6..0bdebbc 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,4 +1,4 @@ -pub mod compute; +// pub mod compute; pub mod config; pub mod errors; pub mod node; diff --git a/src/main.rs b/src/main.rs index fe2a45a..ac97b6f 100644 --- a/src/main.rs +++ b/src/main.rs @@ -33,13 +33,11 @@ async fn main() -> Result<(), Box> { tracker.spawn(heartbeat_worker( node.clone(), - "heartbeat", tokio::time::Duration::from_millis(1000), )); tracker.spawn(workflow_worker( node.clone(), - "workflow", tokio::time::Duration::from_millis(1000), )); diff --git a/src/node.rs b/src/node.rs index bffb5b0..62d5730 100644 --- a/src/node.rs +++ b/src/node.rs @@ -6,9 +6,9 @@ use serde::Deserialize; use tokio_util::sync::CancellationToken; use crate::{ - compute::payload::{TaskRequest, TaskRequestPayload, TaskResponsePayload}, config::DriaComputeNodeConfig, errors::NodeResult, + utils::payload::{TaskRequest, TaskRequestPayload, TaskResponsePayload}, utils::{crypto::sha256hash, filter::FilterPayload, get_current_time_nanos}, waku::{message::WakuMessage, WakuClient}, }; @@ -154,6 +154,17 @@ impl DriaComputeNode { Ok(()) } + /// Unsubscribe from a certain task with its topic, ignoring the error. 
+ pub async fn unsubscribe_topic_ignored(&self, topic: &str) { + if let Err(e) = self.unsubscribe_topic(topic).await { + log::error!( + "Error unsubscribing from {}: {}\nContinuing anyway.", + topic, + e + ); + } + } + /// Send a message via Waku Relay, assuming the content is subscribed to already. pub async fn send_message(&self, message: WakuMessage) -> NodeResult<()> { self.waku.relay.send_message(message).await @@ -276,15 +287,15 @@ impl DriaComputeNode { /// Given a task with `id` and respective `public_key`, encrypts the result and obtains /// the `h || s || e` payload, and sends it to the Waku network. - pub async fn send_task_result>( + pub async fn send_result>( &self, - id: &str, + response_topic: &str, public_key: &[u8], result: R, ) -> NodeResult<()> { let payload = self.create_payload(result.as_ref(), public_key)?; let payload_str = payload.to_string()?; - let message = WakuMessage::new(payload_str, id); + let message = WakuMessage::new(payload_str, response_topic); self.send_message_once(message).await } diff --git a/src/utils/mod.rs b/src/utils/mod.rs index 1357789..ee3f4a0 100644 --- a/src/utils/mod.rs +++ b/src/utils/mod.rs @@ -1,6 +1,7 @@ pub mod crypto; pub mod filter; pub mod http; +pub mod payload; use std::time::{Duration, SystemTime}; use tokio::signal::unix::{signal, SignalKind}; diff --git a/src/compute/payload.rs b/src/utils/payload.rs similarity index 100% rename from src/compute/payload.rs rename to src/utils/payload.rs diff --git a/src/workers/diagnostic.rs b/src/workers/diagnostic.rs index f360c12..c301745 100644 --- a/src/workers/diagnostic.rs +++ b/src/workers/diagnostic.rs @@ -17,6 +17,7 @@ pub fn diagnostic_worker( tokio::spawn(async move { let mut num_peers: usize = 0; let mut num_checks: usize = 0; + loop { tokio::select! 
{ _ = node.cancellation.cancelled() => break, @@ -24,6 +25,7 @@ pub fn diagnostic_worker( match node.waku.peers().await { Ok(peers) => { + // if peer count changes, print it if num_peers != peers.len() { num_peers = peers.len(); log::info!("Active number of peers: {}", num_peers); diff --git a/src/workers/heartbeat.rs b/src/workers/heartbeat.rs index 68608e6..566d126 100644 --- a/src/workers/heartbeat.rs +++ b/src/workers/heartbeat.rs @@ -1,3 +1,4 @@ +use ollama_workflows::{Model, ModelProvider}; use serde::{Deserialize, Serialize}; use std::sync::Arc; use std::time::Duration; @@ -10,6 +11,15 @@ struct HeartbeatPayload { deadline: u128, } +#[derive(Serialize, Deserialize, Debug, Clone)] +struct HeartbeatResponse { + pub(crate) uuid: String, + pub(crate) models: Vec<(ModelProvider, Model)>, +} + +const REQUEST_TOPIC: &str = "heartbeat"; +const RESPONSE_TOPIC: &str = "pong"; + /// # Heartbeat /// /// A heartbeat is a message sent by a node to indicate that it is alive. Dria nodes request @@ -17,28 +27,21 @@ struct HeartbeatPayload { /// identified with the `uuid`. pub fn heartbeat_worker( node: Arc, - topic: &'static str, sleep_amount: Duration, ) -> tokio::task::JoinHandle<()> { tokio::spawn(async move { - node.subscribe_topic(topic).await; - - // TODO: respond with models_str - // let models_str = serde_json::to_string(&node.config.models).unwrap(); + node.subscribe_topic(REQUEST_TOPIC).await; + node.subscribe_topic(RESPONSE_TOPIC).await; loop { tokio::select! 
{ - _ = node.cancellation.cancelled() => { - if let Err(e) = node.unsubscribe_topic(topic).await { - log::error!("Error unsubscribing from {}: {}\nContinuing anyway.", topic, e); - } - break; - } + _ = node.cancellation.cancelled() => break, _ = tokio::time::sleep(sleep_amount) => { - let messages = match node.process_topic(topic, true).await { + + let messages = match node.process_topic(REQUEST_TOPIC, true).await { Ok(messages) => messages, Err(e) => { - log::error!("Error processing topic {}: {}", topic, e); + log::error!("Error processing {}: {}", REQUEST_TOPIC, e); continue; } }; @@ -52,10 +55,13 @@ pub fn heartbeat_worker( log::info!("Received heartbeat: {}", message); let message = match message.parse_payload::(true) { - Ok(body) => { - let uuid = body.uuid; - let signature = node.sign_bytes(&sha256hash(uuid.as_bytes())); - WakuMessage::new(signature, &uuid) + Ok(request_body) => { + let response_body = HeartbeatResponse { + uuid: request_body.uuid.clone(), + models: node.config.models.clone(), + }; + let signature = node.sign_bytes(&sha256hash(serde_json::json!(response_body).to_string())); + WakuMessage::new(signature, RESPONSE_TOPIC) } Err(e) => { log::error!("Error parsing payload: {}", e); @@ -63,14 +69,17 @@ pub fn heartbeat_worker( } }; - // send message - if let Err(e) = node.send_message_once(message).await { - log::error!("Error sending message: {}", e); + if let Err(e) = node.send_message(message).await { + log::error!("Error responding heartbeat: {}", e); + continue; } } } } } + + node.unsubscribe_topic_ignored(REQUEST_TOPIC).await; + node.unsubscribe_topic_ignored(RESPONSE_TOPIC).await; }) } diff --git a/src/workers/workflow.rs b/src/workers/workflow.rs index 148dab5..fded811 100644 --- a/src/workers/workflow.rs +++ b/src/workers/workflow.rs @@ -12,9 +12,11 @@ struct WorkflowPayload { pub(crate) prompt: String, } +const REQUEST_TOPIC: &str = "workflow"; +const RESPONSE_TOPIC: &str = "results"; + pub fn workflow_worker( node: Arc, - topic: 
&'static str, sleep_amount: Duration, ) -> tokio::task::JoinHandle<()> { // this ID is given in the workflow itself, but within Dria we always @@ -22,18 +24,14 @@ pub fn workflow_worker( let final_result_id = "final_result".to_string(); tokio::spawn(async move { - node.subscribe_topic(topic).await; + node.subscribe_topic(REQUEST_TOPIC).await; + node.subscribe_topic(RESPONSE_TOPIC).await; loop { tokio::select! { - _ = node.cancellation.cancelled() => { - if let Err(e) = node.unsubscribe_topic(topic).await { - log::error!("Error unsubscribing from {}: {}\nContinuing anyway.", topic, e); - } - break; - } + _ = node.cancellation.cancelled() => break, _ = tokio::time::sleep(sleep_amount) => { - let tasks = match node.process_topic(topic, true).await { + let tasks = match node.process_topic(REQUEST_TOPIC, true).await { Ok(messages) => { if messages.is_empty() { continue; @@ -41,15 +39,16 @@ pub fn workflow_worker( node.parse_messages::(messages, true) } Err(e) => { - log::error!("Error processing topic {}: {}", topic, e); + log::error!("Error processing topic {}: {}", REQUEST_TOPIC, e); continue; } }; if tasks.is_empty() { - log::info!("No {} tasks.", topic); + log::info!("No {} tasks.", REQUEST_TOPIC); } else { node.set_busy(true); - log::info!("Processing {} {} tasks.", tasks.len(), topic); + + log::info!("Processing {} {} tasks.", tasks.len(), REQUEST_TOPIC); for task in &tasks { log::debug!("Task ID: {}", task.task_id); } @@ -77,8 +76,9 @@ pub fn workflow_worker( }, }; - if let Err(e) = node.send_task_result(&task.task_id, &task.public_key, result).await { + if let Err(e) = node.send_result(&task.task_id, &task.public_key, result).await { log::error!("Error sending task result: {}", e); + continue; }; } @@ -87,5 +87,8 @@ pub fn workflow_worker( } } } + + node.unsubscribe_topic_ignored(REQUEST_TOPIC).await; + node.unsubscribe_topic_ignored(RESPONSE_TOPIC).await; }) } diff --git a/tests/compute_test.rs b/tests/compute_test.rs index 7a149e1..923a820 100644 --- 
a/tests/compute_test.rs +++ b/tests/compute_test.rs @@ -1,7 +1,6 @@ #![allow(unused_imports)] mod compute_test { - use dkn_compute::compute::llm::ollama::create_ollama; use langchain_rust::{language_models::llm::LLM, llm::client::Ollama}; use ollama_workflows::{Entry, Executor, Model, ProgramMemory, Workflow}; use std::env; @@ -29,7 +28,8 @@ mod compute_test { #[ignore = "run this manually"] async fn test_ollama_bad_model() { let model = "thismodeldoesnotexistlol".to_string(); - let setup_res = create_ollama(CancellationToken::default(), model).await; + let ollama = Ollama::default().with_model(model); + let setup_res = ollama.invoke("hola").await; assert!( setup_res.is_err(), "Should give error due to non-existing model." @@ -45,7 +45,6 @@ mod compute_test { "config": { "max_steps": 5, "max_time": 100, - "tools": [], }, "tasks":[ { @@ -53,7 +52,6 @@ mod compute_test { "name": "Random Poem", "description": "Writes a poem about Kapadokya.", "prompt": "Please write a poem about Kapadokya.", - "inputs":[], "operator": "generation", "outputs": [ { @@ -68,9 +66,7 @@ mod compute_test { "name": "end", "description": "End of the task", "prompt": "End of the task", - "inputs": [], "operator": "end", - "outputs": [] } ], "steps":[ diff --git a/tests/mock_messages_test.rs b/tests/mock_messages_test.rs index 231564f..b53938d 100644 --- a/tests/mock_messages_test.rs +++ b/tests/mock_messages_test.rs @@ -1,6 +1,6 @@ mod mock_messages_test { use dkn_compute::{ - compute::payload::TaskRequestPayload, node::DriaComputeNode, waku::message::WakuMessage, + node::DriaComputeNode, utils::payload::TaskRequestPayload, waku::message::WakuMessage, }; use fastbloom_rs::{FilterBuilder, Membership}; use serde::{Deserialize, Serialize}; diff --git a/tests/mock_sends_test.rs b/tests/mock_sends_test.rs index 64384b8..a6e9934 100644 --- a/tests/mock_sends_test.rs +++ b/tests/mock_sends_test.rs @@ -1,6 +1,6 @@ mod mock_sends_test { use dkn_compute::{ - compute::payload::TaskResponsePayload, 
node::DriaComputeNode, utils::crypto::sha256hash, + node::DriaComputeNode, utils::crypto::sha256hash, utils::payload::TaskResponsePayload, waku::message::WakuMessage, }; use std::{env, time::Duration}; From d1570e362a996c6db1f9aa106a4ef080bfb19a37 Mon Sep 17 00:00:00 2001 From: erhant Date: Sun, 23 Jun 2024 00:50:00 +0300 Subject: [PATCH 05/29] use `s || e` instead of `h || s || e` --- src/config/mod.rs | 13 +++++---- src/node.rs | 60 ++++++++++------------------------------ src/utils/crypto.rs | 15 ++++++++++ src/utils/payload.rs | 4 +-- src/waku/message.rs | 31 +++++++++++++++++---- src/workers/heartbeat.rs | 5 ++-- src/workers/workflow.rs | 3 +- tests/mock_sends_test.rs | 41 +++------------------------ 8 files changed, 71 insertions(+), 101 deletions(-) diff --git a/src/config/mod.rs b/src/config/mod.rs index cab8c15..97f184e 100644 --- a/src/config/mod.rs +++ b/src/config/mod.rs @@ -13,7 +13,7 @@ use std::env; #[derive(Debug, Clone)] pub struct DriaComputeNodeConfig { /// Wallet secret/private key. - pub(crate) secret_key: SecretKey, + pub secret_key: SecretKey, /// Wallet public key, derived from the secret key. pub public_key: PublicKey, /// Wallet address, derived from the public key. @@ -70,10 +70,13 @@ impl DriaComputeNodeConfig { "Models: {}", serde_json::to_string(&models).unwrap_or_default() ); - assert!( - !models.is_empty(), - "At least one model should be provided in the configuration." - ); + + if !cfg!(test) { + assert!( + !models.is_empty(), + "At least one model should be provided in the configuration." + ); + } Self { admin_public_key, diff --git a/src/node.rs b/src/node.rs index 62d5730..8db52b1 100644 --- a/src/node.rs +++ b/src/node.rs @@ -58,20 +58,6 @@ impl DriaComputeNode { *self.busy_lock.write() = busy; } - /// Shorthand to sign a digest (bytes) with node's secret key and return signature & recovery id - /// serialized to 65 byte hex-string. 
- #[inline] - pub fn sign_bytes(&self, message: &[u8; 32]) -> String { - let message = Message::parse(message); - let (signature, recid) = sign(&message, &self.config.secret_key); - - format!( - "{}{}", - hex::encode(signature.serialize()), - hex::encode([recid.serialize()]) - ) - } - /// Given a hex-string serialized Bloom Filter of a task, checks if this node is selected to do the task. /// /// This is done by checking if the address of this node is in the filter. @@ -84,33 +70,27 @@ impl DriaComputeNode { /// Creates the payload of a computation result, as per Dria Whitepaper section 5.1 algorithm 2: /// - /// - Sign result with node `self.secret_key` - /// - Encrypt `(signature || result)` with `task_public_key` - /// - Commit to `(signature || result)` using SHA256. + /// - Sign `task_id || result` with node `self.secret_key` + /// - Encrypt `result` with `task_public_key` pub fn create_payload( &self, result: impl AsRef<[u8]>, + task_id: impl AsRef<[u8]>, task_pubkey: &[u8], ) -> NodeResult { // sign result - let result_digest: [u8; 32] = sha256hash(result.as_ref()); - let result_msg = Message::parse(&result_digest); - let (signature, recid) = sign(&result_msg, &self.config.secret_key); + let mut preimage = Vec::new(); + preimage.extend_from_slice(task_id.as_ref()); + preimage.extend_from_slice(result.as_ref()); + let digest = Message::parse(&sha256hash(preimage)); + let (signature, recid) = sign(&digest, &self.config.secret_key); let signature: [u8; 64] = signature.serialize(); let recid: [u8; 1] = [recid.serialize()]; // encrypt result let ciphertext = encrypt(task_pubkey, result.as_ref())?; - // concatenate `signature_bytes` and `digest_bytes` - let mut preimage = Vec::new(); - preimage.extend_from_slice(&signature); - preimage.extend_from_slice(&recid); - preimage.extend_from_slice(&result_digest); - let commitment: [u8; 32] = sha256hash(preimage); - Ok(TaskResponsePayload { - commitment: hex::encode(commitment), ciphertext: hex::encode(ciphertext), 
signature: format!("{}{}", hex::encode(signature), hex::encode(recid)), }) @@ -285,19 +265,19 @@ impl DriaComputeNode { .collect() } - /// Given a task with `id` and respective `public_key`, encrypts the result and obtains - /// the `h || s || e` payload, and sends it to the Waku network. + /// Given a task with `id` and respective `public_key`, sign-then-encrypt the result. pub async fn send_result>( &self, response_topic: &str, public_key: &[u8], + task_id: &str, result: R, ) -> NodeResult<()> { - let payload = self.create_payload(result.as_ref(), public_key)?; + let payload = self.create_payload(result.as_ref(), task_id, public_key)?; let payload_str = payload.to_string()?; let message = WakuMessage::new(payload_str, response_topic); - self.send_message_once(message).await + self.send_message(message).await } } @@ -314,6 +294,7 @@ mod tests { #[test] fn test_payload_generation_verification() { const ADMIN_PRIV_KEY: &[u8; 32] = b"aaaabbbbccccddddddddccccbbbbaaaa"; + const TASK_ID: &str = "12345678abcdef"; const RESULT: &[u8; 28] = b"this is some result you know"; let node = DriaComputeNode::default(); @@ -322,7 +303,7 @@ mod tests { // create payload let payload = node - .create_payload(RESULT, &public_key.serialize()) + .create_payload(RESULT, TASK_ID, &public_key.serialize()) .expect("Should create payload"); // (here we assume the payload is sent to Waku network, and picked up again) @@ -360,18 +341,5 @@ mod tests { node.config.public_key, recovered_public_key, "Public key mismatch" ); - - // verify commitments (algorithm 4 in whitepaper) - let mut preimage = Vec::new(); - preimage.extend_from_slice(&signature_bytes); - preimage.extend_from_slice(&recid_bytes); - preimage.extend_from_slice(&result_digest); - assert_eq!( - sha256hash(preimage), - hex::decode(payload.commitment) - .expect("Should decode") - .as_slice(), - "Commitment mismatch" - ); } } diff --git a/src/utils/crypto.rs b/src/utils/crypto.rs index 7ebfcb4..a33a2d2 100644 --- a/src/utils/crypto.rs 
+++ b/src/utils/crypto.rs @@ -1,4 +1,5 @@ use ecies::PublicKey; +use libsecp256k1::{sign, Message, SecretKey}; use sha2::{Digest, Sha256}; use sha3::Keccak256; @@ -26,6 +27,20 @@ pub fn to_address(public_key: &PublicKey) -> [u8; 20] { addr } +/// Shorthand to sign a digest (bytes) with node's secret key and return signature & recovery id +/// serialized to 65 byte hex-string. +#[inline] +pub fn sign_bytes_recoverable(message: &[u8; 32], secret_key: &SecretKey) -> String { + let message = Message::parse(message); + let (signature, recid) = sign(&message, &secret_key); + + format!( + "{}{}", + hex::encode(signature.serialize()), + hex::encode([recid.serialize()]) + ) +} + #[cfg(test)] mod tests { use super::*; diff --git a/src/utils/payload.rs b/src/utils/payload.rs index fd2b836..26172bf 100644 --- a/src/utils/payload.rs +++ b/src/utils/payload.rs @@ -14,12 +14,10 @@ use crate::{ /// and compute the digest using SHA256. That digest will then be used for the signature check. #[derive(Serialize, Deserialize, Debug, Clone)] pub struct TaskResponsePayload { - /// A signature on the digest of plaintext result. + /// A signature on the digest of plaintext result, prepended with task id. pub signature: String, /// Computation result encrypted with the public key of the task. pub ciphertext: String, - /// A commitment to `signature || result`. 
- pub commitment: String, } impl TaskResponsePayload { diff --git a/src/waku/message.rs b/src/waku/message.rs index 0bb065a..52c5bf5 100644 --- a/src/waku/message.rs +++ b/src/waku/message.rs @@ -1,11 +1,15 @@ use crate::{ errors::NodeResult, - utils::{crypto::sha256hash, get_current_time_nanos}, + utils::{ + crypto::{sha256hash, sign_bytes_recoverable}, + get_current_time_nanos, + }, }; use base64::{prelude::BASE64_STANDARD, Engine}; use core::fmt; use ecies::PublicKey; +use libsecp256k1::SecretKey; use serde::{Deserialize, Serialize}; /// Within Waku Message and Content Topic we specify version to be 0 since @@ -51,13 +55,11 @@ pub struct WakuMessage { /// /// When recovery is not required and only verification is being done, we omit the recovery id /// and therefore use 128 characters: SIGNATURE_SIZE - 2. -const SIGNATURE_SIZE: usize = 130; +const SIGNATURE_SIZE_HEX: usize = 130; impl WakuMessage { /// Creates a new ephemeral Waku message with current timestamp, version 0. /// - /// ## Parameters - /// /// - `payload` is gives as bytes. It is base64 encoded internally. /// - `topic` is the name of the topic itself within the full content topic. The rest of the content topic /// is filled in automatically, e.g. `/dria/0//proto`. @@ -71,6 +73,20 @@ impl WakuMessage { } } + /// Creates a new Waku Message by signing the SHA256 of the payload, and prepending the signature. + pub fn new_signed( + payload: impl AsRef<[u8]> + Clone, + topic: &str, + signing_key: &SecretKey, + ) -> Self { + let signature_bytes = sign_bytes_recoverable(&sha256hash(payload.clone()), signing_key); + + let mut signed_payload = Vec::new(); + signed_payload.extend_from_slice(hex::decode(signature_bytes).unwrap().as_slice()); + signed_payload.extend_from_slice(payload.as_ref()); + WakuMessage::new(signed_payload, topic) + } + /// Decodes the base64 payload into bytes. 
pub fn decode_payload(&self) -> Result, base64::DecodeError> { BASE64_STANDARD.decode(&self.payload) @@ -82,7 +98,7 @@ impl WakuMessage { let body = if signed { // skips the 65 byte hex signature - &payload[SIGNATURE_SIZE..] + &payload[SIGNATURE_SIZE_HEX..] } else { &payload[..] }; @@ -96,7 +112,10 @@ impl WakuMessage { let payload = self.decode_payload()?; // parse signature (64 bytes = 128 hex chars, although the full 65-byte RSV signature is given) - let (signature, body) = (&payload[..SIGNATURE_SIZE - 2], &payload[SIGNATURE_SIZE..]); + let (signature, body) = ( + &payload[..SIGNATURE_SIZE_HEX - 2], + &payload[SIGNATURE_SIZE_HEX..], + ); let signature = hex::decode(signature).expect("could not decode"); let signature = libsecp256k1::Signature::parse_standard_slice(&signature).expect("could not parse"); diff --git a/src/workers/heartbeat.rs b/src/workers/heartbeat.rs index 566d126..613ca1e 100644 --- a/src/workers/heartbeat.rs +++ b/src/workers/heartbeat.rs @@ -3,7 +3,7 @@ use serde::{Deserialize, Serialize}; use std::sync::Arc; use std::time::Duration; -use crate::{node::DriaComputeNode, utils::crypto::sha256hash, waku::message::WakuMessage}; +use crate::{node::DriaComputeNode, waku::message::WakuMessage}; #[derive(Serialize, Deserialize, Debug, Clone)] struct HeartbeatPayload { @@ -60,8 +60,7 @@ pub fn heartbeat_worker( uuid: request_body.uuid.clone(), models: node.config.models.clone(), }; - let signature = node.sign_bytes(&sha256hash(serde_json::json!(response_body).to_string())); - WakuMessage::new(signature, RESPONSE_TOPIC) + WakuMessage::new_signed(serde_json::json!(response_body).to_string(), RESPONSE_TOPIC, &node.config.secret_key) } Err(e) => { log::error!("Error parsing payload: {}", e); diff --git a/src/workers/workflow.rs b/src/workers/workflow.rs index fded811..328e5f2 100644 --- a/src/workers/workflow.rs +++ b/src/workers/workflow.rs @@ -76,7 +76,8 @@ pub fn workflow_worker( }, }; - if let Err(e) = node.send_result(&task.task_id, &task.public_key, 
result).await { + // send result to the response + if let Err(e) = node.send_result(RESPONSE_TOPIC, &task.public_key, &task.task_id, result).await { log::error!("Error sending task result: {}", e); continue; }; diff --git a/tests/mock_sends_test.rs b/tests/mock_sends_test.rs index a6e9934..41fb68a 100644 --- a/tests/mock_sends_test.rs +++ b/tests/mock_sends_test.rs @@ -1,6 +1,7 @@ mod mock_sends_test { use dkn_compute::{ - node::DriaComputeNode, utils::crypto::sha256hash, utils::payload::TaskResponsePayload, + node::DriaComputeNode, + utils::crypto::{sha256hash, sign_bytes_recoverable}, waku::message::WakuMessage, }; use std::{env, time::Duration}; @@ -18,7 +19,8 @@ mod mock_sends_test { let uuid = "59b93cb2-5738-4da4-992d-89a1835738d6"; // some random uuid - let signature = node.sign_bytes(&sha256hash(uuid.as_bytes())); + let signature = + sign_bytes_recoverable(&sha256hash(uuid.as_bytes()), &node.config.secret_key); let message = WakuMessage::new(signature, &uuid); for i in 1..=num_heartbeats { @@ -30,39 +32,4 @@ mod mock_sends_test { tokio::time::sleep(timeout).await; } } - - /// Sends pre-computed signatures on a specific task. This simulates a number of responses to a synthesis task. 
- #[tokio::test] - #[ignore = "run this manually"] - async fn test_send_hse_responses() { - let _ = env_logger::try_init(); - let node = DriaComputeNode::default(); - - let task_uid = "59b93cb2-5738-4da4-992d-89a1835738d6"; - let payloads = vec![ - TaskResponsePayload { - ciphertext:"04418d872f94717351fa2f8e8b5de18182d05fc81818f6a0f7cb9df811931ede6fedb3180572bf6c3c5485628a23d5753efde59d5cb40df617e4c382aed15fbcc3b12dc07182aae21f7ef2a06f654f912a0c7a156dec148a186032b1b25551046612d1b304fc732192ca5665e016d9731b693e7f56e67d28b5fdb404e276c6bbadd23a3711697162dfa81f452d14c7073f8b7f81b5f208a71ee69ba44ba0023707ccbe5e0bf289bc08885e7d9636cb82e7c5e8b36caff2345379fa8a27595bc542668f54c1863b0d5e343b51b86ffd9985835165f4e78106df2024280cea356ffae661b5005473c3b186f5b1dcf27609ae417545ff04fa559a2d2c695fef9d11f9d7d71a323739553e6231289011b90c48c8cbfb0c89be5042724fa6338f40c534f2ac363fcd5e92735ba1c0a36a32b6febd28fb4455c8907dc76558328179749dcb945ebbcb8862ef04f4f783b3a3b86c12519afa8f042f87233aae421774034c564c5afac10b3082abaf47d5a7bf6207761d7907a0161f25e0fd48443d274eb5820ab2d290530fb5a398372931b2f226380263b7706a9c230716b3b0d9862ece1908d279edfe87796d9117096d9066bc4158165e150d54d558bc70452872436e941c673eb1c100adecfb49b233885e5026bfc688e50fbaf7ec7004cafb1ba40708e5b328c427272291cb47cc538b4e57aad5381cf19ab63af749c0792eb9706f286aae05580c36f0b91b09ba46be8e7f89fa253328785e2c61a660c3d794d0ed9ba77a18ac6a8b2d9be8ef35e0e77f3016ab6e506265b587d2a64edb8c0705ed5203c9685f2dff57efd17cf05193f6e77f6cbda04420fd265a9fce8f75bdac2a2dc6d7371fd26206490befb52c51560af8d0a75e9d7d898b84ebabc2367ba3431b5f74e87bb1593273c6e741a7c59de231d5060c2c7317c5d64740c17f7503f6e254a196f6fe95751f9151d64e5549764d066046b7229c23a311cce9aa387c50bda2f73fc5f82426c1d0f5f313c191107a3798890fd70237d248aef8b1f70827e94ec240229f60bfb7135bff6e5a6e9538bff1abfbc781b106c51b8b2f20df837c4837523fd7fa56a7ee3d9643b9d842ca98faf009c467a0dcb28c86f333e2bde0151e174e401edbefe10d8d5aedc7da32a8a3e7c942fac31a76aa726350a2005804862f624f1c164c866d291c33447595a873c62ff31489f
c288b34c799465b1e79d2da1ac3af5dc7abaa3722ccfe9d58d91e46623be83dd9c0a4547dca76da38f289ddc7a3e5ca93182548694959e07afc57c8e989635efbbf6c5947170470cd62dd16e8e037c21c77d77d05d59cd052d72ba8e6dea579f650de9791f7140db98173b1031327d461a43beec51c8e0cfed95a2933ab7a7b8a4d4a6e4317d353704b54a87a1986185e99f268a1e653a549aec1e0fd377ada65b2f49e84090cce2a8d2f8adc9ba0dfda96421ff8e972ab6fe6a3ed79ee6b45eaa2e9267769138d385c8390d6cffd90a4081cf6b617443438f7e1050bff15fc842d8717a669463cc5aa91c72dd319b1893492410a4379256c7322ce99efe8aaec9593f4b8c367a68464d9ba0f5eae906320de632fefadb99bef1833a7b8acfc044fff0e4af9eb6111da02c3e7f1af591eae335b7741f94a5ef0cd6f81265f61952e1b07d13e019edabf864c13426713d6b66edaba6ad980e9f9dfa78b654d9d60ed89bdb5e56cf4d4d626b50d3176792c27c4d478a4245b598d56ee34a1e5d51aaa26af8ad5d4508462d892acb1b14694c98a4650a7c315ac8224b5db0190b8e86f7c9195833a5593f5be6d6205325ce90ceaaa94d692b8d51c0deba7b3e378991f0d0059fc92603e085a9949ba5bc56e4ee24b53be28fc792012813500774bcc00ca406c010981976a67038a20a39080ac36e77a4505fe0fe54a76bb99943a84d58b8b6852724a61138076b2321814c85c75aa5ce8d9b7b0d92eee7e3faa19ce0f96912fdf19c0d382e48da9699a525c0fcc141f5a59fc23b77aca35ffd64d50709caa1c3e9854c2036aa421454e9a1b9bf70d9911920c2a53a404a0d541d4f98cb117605ac9350405fec788e3119732805af13e64b585e49cb710446744d2d4c4901258acebab4dbb5ac3c04334ddc0a5345b0d63e18032f44a35215bb68d4fb49375f0521d4f1b4602eb12a62f80dfac186dacff44ce72844fdfd325a80798308144c427cb70666c12572d3f3e6cd7f40e7544a027893eed0f7ef6fb72a58d91f370a311955512c7d85ed875c66c82fcd77fb960863c58671de393d14f9b88cbe29ac4349f5dca646d79dcadb0885632e567a8ec8793880be18acfd02e0b2a651014fa3e2f273715c100438d75776707bd89c0dc6dd52c90bb95e270d0698d5697a1e16b8a9e9d983a9ecc34318eac5d1bb3be4ee5d078120b97a4923a55a8a4909e1069e8899785805300b9d764f8ffae46ad9ad4f8505f4a04579015b1f3f8a24a828f37bf5f30fa41cb84ebfda4336294a1c40affb627ed9013a3f2861d57f8af6ae8eae809cf7bf42d00f47b09653e30518dadfe62e98bf1039b9d1fba1c7436524370b49668295045d6274fcb313997eec1c93c0975ebe784019d59394b4451a753d2316
5ab303078a4cf575ce7e5638cea06d06a286d1afb35637814036a6edce3f757e5e23ff333142bc4e9b9475ab70e250c1cafe1c11a1735521ec5acdebb450ae8afc0896d8d0124cd7c48d90168850504916ad6fab5d740".to_string(), - commitment:"0e45a393e8140d062dd0162e074dda83efe268fb2ae34e29fcca46a33a5e2476".to_string(), - signature:"207d2f0be0e103953f99a684fa6f357d7f3f6b5505a09972a9f473a8528b1b8c226a47e76eff66152c0114ef2c56c219d2fc605a123ea58fc11bf2474f78cce900".to_string() - }, - TaskResponsePayload { - ciphertext:"04418d872f94717351fa2f8e8b5de18182d05fc81818f6a0f7cb9df811931ede6fedb3180572bf6c3c5485628a23d5753efde59d5cb40df617e4c382aed15fbcc3b12dc07182aae21f7ef2a06f654f912a0c7a156dec148a186032b1b25551046612d1b304fc732192ca5665e016d9731b693e7f56e67d28b5fdb404e276c6bbadd23a3711697162dfa81f452d14c7073f8b7f81b5f208a71ee69ba44ba0023707ccbe5e0bf289bc08885e7d9636cb82e7c5e8b36caff2345379fa8a27595bc542668f54c1863b0d5e343b51b86ffd9985835165f4e78106df2024280cea356ffae661b5005473c3b186f5b1dcf27609ae417545ff04fa559a2d2c695fef9d11f9d7d71a323739553e6231289011b90c48c8cbfb0c89be5042724fa6338f40c534f2ac363fcd5e92735ba1c0a36a32b6febd28fb4455c8907dc76558328179749dcb945ebbcb8862ef04f4f783b3a3b86c12519afa8f042f87233aae421774034c564c5afac10b3082abaf47d5a7bf6207761d7907a0161f25e0fd48443d274eb5820ab2d290530fb5a398372931b2f226380263b7706a9c230716b3b0d9862ece1908d279edfe87796d9117096d9066bc4158165e150d54d558bc70452872436e941c673eb1c100adecfb49b233885e5026bfc688e50fbaf7ec7004cafb1ba40708e5b328c427272291cb47cc538b4e57aad5381cf19ab63af749c0792eb9706f286aae05580c36f0b91b09ba46be8e7f89fa253328785e2c61a660c3d794d0ed9ba77a18ac6a8b2d9be8ef35e0e77f3016ab6e506265b587d2a64edb8c0705ed5203c9685f2dff57efd17cf05193f6e77f6cbda04420fd265a9fce8f75bdac2a2dc6d7371fd26206490befb52c51560af8d0a75e9d7d898b84ebabc2367ba3431b5f74e87bb1593273c6e741a7c59de231d5060c2c7317c5d64740c17f7503f6e254a196f6fe95751f9151d64e5549764d066046b7229c23a311cce9aa387c50bda2f73fc5f82426c1d0f5f313c191107a3798890fd70237d248aef8b1f70827e94ec240229f60bfb7135bff6e5a6e9538bff1abfbc78
1b106c51b8b2f20df837c4837523fd7fa56a7ee3d9643b9d842ca98faf009c467a0dcb28c86f333e2bde0151e174e401edbefe10d8d5aedc7da32a8a3e7c942fac31a76aa726350a2005804862f624f1c164c866d291c33447595a873c62ff31489fc288b34c799465b1e79d2da1ac3af5dc7abaa3722ccfe9d58d91e46623be83dd9c0a4547dca76da38f289ddc7a3e5ca93182548694959e07afc57c8e989635efbbf6c5947170470cd62dd16e8e037c21c77d77d05d59cd052d72ba8e6dea579f650de9791f7140db98173b1031327d461a43beec51c8e0cfed95a2933ab7a7b8a4d4a6e4317d353704b54a87a1986185e99f268a1e653a549aec1e0fd377ada65b2f49e84090cce2a8d2f8adc9ba0dfda96421ff8e972ab6fe6a3ed79ee6b45eaa2e9267769138d385c8390d6cffd90a4081cf6b617443438f7e1050bff15fc842d8717a669463cc5aa91c72dd319b1893492410a4379256c7322ce99efe8aaec9593f4b8c367a68464d9ba0f5eae906320de632fefadb99bef1833a7b8acfc044fff0e4af9eb6111da02c3e7f1af591eae335b7741f94a5ef0cd6f81265f61952e1b07d13e019edabf864c13426713d6b66edaba6ad980e9f9dfa78b654d9d60ed89bdb5e56cf4d4d626b50d3176792c27c4d478a4245b598d56ee34a1e5d51aaa26af8ad5d4508462d892acb1b14694c98a4650a7c315ac8224b5db0190b8e86f7c9195833a5593f5be6d6205325ce90ceaaa94d692b8d51c0deba7b3e378991f0d0059fc92603e085a9949ba5bc56e4ee24b53be28fc792012813500774bcc00ca406c010981976a67038a20a39080ac36e77a4505fe0fe54a76bb99943a84d58b8b6852724a61138076b2321814c85c75aa5ce8d9b7b0d92eee7e3faa19ce0f96912fdf19c0d382e48da9699a525c0fcc141f5a59fc23b77aca35ffd64d50709caa1c3e9854c2036aa421454e9a1b9bf70d9911920c2a53a404a0d541d4f98cb117605ac9350405fec788e3119732805af13e64b585e49cb710446744d2d4c4901258acebab4dbb5ac3c04334ddc0a5345b0d63e18032f44a35215bb68d4fb49375f0521d4f1b4602eb12a62f80dfac186dacff44ce72844fdfd325a80798308144c427cb70666c12572d3f3e6cd7f40e7544a027893eed0f7ef6fb72a58d91f370a311955512c7d85ed875c66c82fcd77fb960863c58671de393d14f9b88cbe29ac4349f5dca646d79dcadb0885632e567a8ec8793880be18acfd02e0b2a651014fa3e2f273715c100438d75776707bd89c0dc6dd52c90bb95e270d0698d5697a1e16b8a9e9d983a9ecc34318eac5d1bb3be4ee5d078120b97a4923a55a8a4909e1069e8899785805300b9d764f8ffae46ad9ad4f8505f4a04579015b1f3f8a24a828f3
7bf5f30fa41cb84ebfda4336294a1c40affb627ed9013a3f2861d57f8af6ae8eae809cf7bf42d00f47b09653e30518dadfe62e98bf1039b9d1fba1c7436524370b49668295045d6274fcb313997eec1c93c0975ebe784019d59394b4451a753d23165ab303078a4cf575ce7e5638cea06d06a286d1afb35637814036a6edce3f757e5e23ff333142bc4e9b9475ab70e250c1cafe1c11a1735521ec5acdebb450ae8afc0896d8d0124cd7c48d90168850504916ad6fab5d740".to_string(), - commitment:"0e45a393e8140d062dd0162e074dda83efe268fb2ae34e29fcca46a33a5e2476".to_string(), - signature:"207d2f0be0e103953f99a684fa6f357d7f3f6b5505a09972a9f473a8528b1b8c226a47e76eff66152c0114ef2c56c219d2fc605a123ea58fc11bf2474f78cce900".to_string() - }, - TaskResponsePayload { - ciphertext:"04418d872f94717351fa2f8e8b5de18182d05fc81818f6a0f7cb9df811931ede6fedb3180572bf6c3c5485628a23d5753efde59d5cb40df617e4c382aed15fbcc3b12dc07182aae21f7ef2a06f654f912a0c7a156dec148a186032b1b25551046612d1b304fc732192ca5665e016d9731b693e7f56e67d28b5fdb404e276c6bbadd23a3711697162dfa81f452d14c7073f8b7f81b5f208a71ee69ba44ba0023707ccbe5e0bf289bc08885e7d9636cb82e7c5e8b36caff2345379fa8a27595bc542668f54c1863b0d5e343b51b86ffd9985835165f4e78106df2024280cea356ffae661b5005473c3b186f5b1dcf27609ae417545ff04fa559a2d2c695fef9d11f9d7d71a323739553e6231289011b90c48c8cbfb0c89be5042724fa6338f40c534f2ac363fcd5e92735ba1c0a36a32b6febd28fb4455c8907dc76558328179749dcb945ebbcb8862ef04f4f783b3a3b86c12519afa8f042f87233aae421774034c564c5afac10b3082abaf47d5a7bf6207761d7907a0161f25e0fd48443d274eb5820ab2d290530fb5a398372931b2f226380263b7706a9c230716b3b0d9862ece1908d279edfe87796d9117096d9066bc4158165e150d54d558bc70452872436e941c673eb1c100adecfb49b233885e5026bfc688e50fbaf7ec7004cafb1ba40708e5b328c427272291cb47cc538b4e57aad5381cf19ab63af749c0792eb9706f286aae05580c36f0b91b09ba46be8e7f89fa253328785e2c61a660c3d794d0ed9ba77a18ac6a8b2d9be8ef35e0e77f3016ab6e506265b587d2a64edb8c0705ed5203c9685f2dff57efd17cf05193f6e77f6cbda04420fd265a9fce8f75bdac2a2dc6d7371fd26206490befb52c51560af8d0a75e9d7d898b84ebabc2367ba3431b5f74e87bb1593273c6e741a7c59de231d5060c2
c7317c5d64740c17f7503f6e254a196f6fe95751f9151d64e5549764d066046b7229c23a311cce9aa387c50bda2f73fc5f82426c1d0f5f313c191107a3798890fd70237d248aef8b1f70827e94ec240229f60bfb7135bff6e5a6e9538bff1abfbc781b106c51b8b2f20df837c4837523fd7fa56a7ee3d9643b9d842ca98faf009c467a0dcb28c86f333e2bde0151e174e401edbefe10d8d5aedc7da32a8a3e7c942fac31a76aa726350a2005804862f624f1c164c866d291c33447595a873c62ff31489fc288b34c799465b1e79d2da1ac3af5dc7abaa3722ccfe9d58d91e46623be83dd9c0a4547dca76da38f289ddc7a3e5ca93182548694959e07afc57c8e989635efbbf6c5947170470cd62dd16e8e037c21c77d77d05d59cd052d72ba8e6dea579f650de9791f7140db98173b1031327d461a43beec51c8e0cfed95a2933ab7a7b8a4d4a6e4317d353704b54a87a1986185e99f268a1e653a549aec1e0fd377ada65b2f49e84090cce2a8d2f8adc9ba0dfda96421ff8e972ab6fe6a3ed79ee6b45eaa2e9267769138d385c8390d6cffd90a4081cf6b617443438f7e1050bff15fc842d8717a669463cc5aa91c72dd319b1893492410a4379256c7322ce99efe8aaec9593f4b8c367a68464d9ba0f5eae906320de632fefadb99bef1833a7b8acfc044fff0e4af9eb6111da02c3e7f1af591eae335b7741f94a5ef0cd6f81265f61952e1b07d13e019edabf864c13426713d6b66edaba6ad980e9f9dfa78b654d9d60ed89bdb5e56cf4d4d626b50d3176792c27c4d478a4245b598d56ee34a1e5d51aaa26af8ad5d4508462d892acb1b14694c98a4650a7c315ac8224b5db0190b8e86f7c9195833a5593f5be6d6205325ce90ceaaa94d692b8d51c0deba7b3e378991f0d0059fc92603e085a9949ba5bc56e4ee24b53be28fc792012813500774bcc00ca406c010981976a67038a20a39080ac36e77a4505fe0fe54a76bb99943a84d58b8b6852724a61138076b2321814c85c75aa5ce8d9b7b0d92eee7e3faa19ce0f96912fdf19c0d382e48da9699a525c0fcc141f5a59fc23b77aca35ffd64d50709caa1c3e9854c2036aa421454e9a1b9bf70d9911920c2a53a404a0d541d4f98cb117605ac9350405fec788e3119732805af13e64b585e49cb710446744d2d4c4901258acebab4dbb5ac3c04334ddc0a5345b0d63e18032f44a35215bb68d4fb49375f0521d4f1b4602eb12a62f80dfac186dacff44ce72844fdfd325a80798308144c427cb70666c12572d3f3e6cd7f40e7544a027893eed0f7ef6fb72a58d91f370a311955512c7d85ed875c66c82fcd77fb960863c58671de393d14f9b88cbe29ac4349f5dca646d79dcadb0885632e567a8ec8793880be18acfd02e0b2a651014f
a3e2f273715c100438d75776707bd89c0dc6dd52c90bb95e270d0698d5697a1e16b8a9e9d983a9ecc34318eac5d1bb3be4ee5d078120b97a4923a55a8a4909e1069e8899785805300b9d764f8ffae46ad9ad4f8505f4a04579015b1f3f8a24a828f37bf5f30fa41cb84ebfda4336294a1c40affb627ed9013a3f2861d57f8af6ae8eae809cf7bf42d00f47b09653e30518dadfe62e98bf1039b9d1fba1c7436524370b49668295045d6274fcb313997eec1c93c0975ebe784019d59394b4451a753d23165ab303078a4cf575ce7e5638cea06d06a286d1afb35637814036a6edce3f757e5e23ff333142bc4e9b9475ab70e250c1cafe1c11a1735521ec5acdebb450ae8afc0896d8d0124cd7c48d90168850504916ad6fab5d740".to_string(), - commitment:"0e45a393e8140d062dd0162e074dda83efe268fb2ae34e29fcca46a33a5e2476".to_string(), - signature:"207d2f0be0e103953f99a684fa6f357d7f3f6b5505a09972a9f473a8528b1b8c226a47e76eff66152c0114ef2c56c219d2fc605a123ea58fc11bf2474f78cce900".to_string() - } - ]; - for payload in payloads { - let payload_str = payload.to_string().unwrap(); - let message: WakuMessage = WakuMessage::new(payload_str, task_uid); - if let Err(e) = node.send_message_once(message).await { - log::error!("Error sending message: {}", e); - continue; - } - } - } } From a01a5ac1a5432cfbc2aa4809233339bf37a5f3a8 Mon Sep 17 00:00:00 2001 From: selimseker Date: Mon, 24 Jun 2024 10:50:30 +0300 Subject: [PATCH 06/29] update startsh --- start.sh | 120 ++++++++++++------------------------------------------- 1 file changed, 26 insertions(+), 94 deletions(-) diff --git a/start.sh b/start.sh index 7b69445..52d78e0 100755 --- a/start.sh +++ b/start.sh @@ -7,25 +7,17 @@ docs() { Required environment variables in .env file; ETH_CLIENT_ADDRESS, ETH_TESTNET_KEY, RLN_RELAY_CRED_PASSWORD Description of command-line arguments: - --synthesis: Runs the node for the synthesis tasks. Can be set as DKN_TASKS="synthesis" env-var (default: false, required for search tasks) - --search: Runs the node for the search tasks. 
Can be set as DKN_TASKS="search" env-var (default: false, required for synthesis tasks) - - --synthesis-model-provider=: Indicates the model provider for synthesis tasks, ollama or openai. Can be set as DKN_SYNTHESIS_MODEL_PROVIDER env-var (required on synthesis tasks) - --search-model-provider=: Indicates the model provider for search tasks, ollama or openai. Can be set as AGENT_MODEL_PROVIDER env-var (required on search tasks) - - --synthesis-model: Indicates the model for synthesis tasks, model needs to be compatible with the given provider. Can be set as DKN_SYNTHESIS_MODEL_NAME env-var (required on synthesis tasks) - --search-model: Indicates the model for search tasks, model needs to be compatible with the given provider. Can be set as AGENT_MODEL_NAME env-var (required on search tasks) + -m | --model: Indicates the model to be used within the compute node. Multiple models can be given --local-ollama=: Indicates the local Ollama environment is being used (default: true) + --waku-ext: Will disable the waku execution within the compute node docker-compose. 
(default: false) --dev: Sets the logging level to debug (default: info) - -b, --background: Enables background mode for running the node (default: FOREGROUND) - -h, --help: Displays this help message - - At least one of --search or --synthesis is required + -b | --background: Enables background mode for running the node (default: FOREGROUND) + -h | --help: Displays this help message Example usage: - ./start.sh --search --synthesis --local-ollama=false --dev + ./start.sh -m nous-hermes2theta-llama3-8b --model phi3:medium --local-ollama=false --dev " exit 0 } @@ -42,8 +34,6 @@ if [ -f "$ENV_FILE" ]; then fi # flag vars -COMPUTE_SEARCH=false -COMPUTE_SYNTHESIS=false START_MODE="FOREGROUND" LOCAL_OLLAMA=true LOGS="info" @@ -51,35 +41,17 @@ EXTERNAL_WAKU=false # script internal COMPOSE_PROFILES=() -TASK_LIST=() +MODELS_LIST=() LOCAL_OLLAMA_PID="" DOCKER_HOST="http://host.docker.internal" # handle command line arguments while [[ "$#" -gt 0 ]]; do - case $1 in - --search) - COMPUTE_SEARCH=true - COMPOSE_PROFILES+=("search-python") - TASK_LIST+=("search") - ;; - --synthesis) - COMPUTE_SYNTHESIS=true - TASK_LIST+=("synthesis") - ;; - - --synthesis-model-provider=*) - DKN_SYNTHESIS_MODEL_PROVIDER="$(echo "${1#*=}" | tr '[:upper:]' '[:lower:]')" - ;; - --search-model-provider=*) - AGENT_MODEL_PROVIDER="$(echo "${1#*=}" | tr '[:upper:]' '[:lower:]')" - ;; - - --synthesis-model=*) - DKN_SYNTHESIS_MODEL_NAME="$(echo "${1#*=}" | tr '[:upper:]' '[:lower:]')" - ;; - --search-model=*) - AGENT_MODEL_NAME="$(echo "${1#*=}" | tr '[:upper:]' '[:lower:]')" + case $1 in + -m|--model) + shift + model="$(echo "${1#*=}" | tr '[:upper:]' '[:lower:]')" + MODELS_LIST+=($model) ;; --local-ollama=*) @@ -155,62 +127,19 @@ handle_compute_env() { compute_env_vars=( "DKN_WALLET_SECRET_KEY" "DKN_ADMIN_PUBLIC_KEY" - "DKN_TASKS" - "DKN_SYNTHESIS_MODEL_PROVIDER" - "DKN_SYNTHESIS_MODEL_NAME" - "AGENT_MODEL_PROVIDER" - "AGENT_MODEL_NAME" "OPENAI_API_KEY" "SERPER_API_KEY" "BROWSERLESS_TOKEN" 
"ANTHROPIC_API_KEY" "DKN_LOG_LEVEL" + "DKN_MODELS" ) compute_envs=($(as_pairs "${compute_env_vars[@]}")) - # handle DKN_TASKS - if [ ${#TASK_LIST[@]} -ne 0 ]; then - # if any task flag is given, pass it to env var - DKN_TASKS=$(IFS=","; echo "${TASK_LIST[*]}") - else - # if no task type argument has given, check DKN_TASKS env var - if [ -n "$DKN_TASKS" ]; then - # split, iterate and validate given tasks in env var - IFS=',' read -ra tsks <<< "$DKN_TASKS" - for ts in "${tsks[@]}"; do - ts="$(echo "${ts#*=}" | tr '[:upper:]' '[:lower:]')" # make all lowercase - if [ "$ts" = "search" ] || [ "$ts" = "search-python" ]; then - TASK_LIST+=("search") - COMPUTE_SEARCH=true - COMPOSE_PROFILES+=("search-python") - elif [ "$ts" = "synthesis" ]; then - TASK_LIST+=("synthesis") - COMPUTE_SYNTHESIS=true - fi - done - else - echo "ERROR: No task type has given, --synthesis and/or --search flags are required" - exit 1 - fi - fi - - # check model providers, they are required - if [ "$COMPUTE_SEARCH" = true ]; then - if [ -z "$AGENT_MODEL_PROVIDER" ]; then - echo "ERROR: Search model provider is required on search tasks. Example usage; --search-model-provider=ollama" - exit 1 - fi - # then all lowercase - AGENT_MODEL_PROVIDER="$(echo "${AGENT_MODEL_PROVIDER#*=}" | tr '[:upper:]' '[:lower:]')" - - fi - if [ "$COMPUTE_SYNTHESIS" = true ]; then - if [ -z "$DKN_SYNTHESIS_MODEL_PROVIDER" ]; then - echo "ERROR: Synthesis model provider is required on synthesis tasks. 
Example usage; --synthesis-model-provider=ollama" - exit 1 - fi - # then all lowercase - DKN_SYNTHESIS_MODEL_PROVIDER="$(echo "${DKN_SYNTHESIS_MODEL_PROVIDER#*=}" | tr '[:upper:]' '[:lower:]')" + # handle DKN_MODELS + if [ ${#MODELS_LIST[@]} -ne 0 ]; then + # if model flag is given, pass it to env var + DKN_MODELS=$(IFS=","; echo "${MODELS_LIST[*]}") fi # update envs @@ -283,15 +212,18 @@ handle_ollama_env() { ) ollama_envs=($(as_pairs "${ollama_env_vars[@]}")) - # if there is no task using ollama, do not add any ollama compose profile + # if there is no ollama model given, do not add any ollama compose profile ollama_needed=false - if [ "$COMPUTE_SYNTHESIS" = true ] && [ "$DKN_SYNTHESIS_MODEL_PROVIDER" == "ollama" ]; then - ollama_needed=true - fi - if [ "$COMPUTE_SEARCH" = true ] && [ "$AGENT_MODEL_PROVIDER" == "ollama" ]; then - ollama_needed=true - fi + ollama_models=("nous-hermes2theta-llama3-8b" "phi3:medium" "phi3:medium-128k" "phi3:3.8b") + IFS=',' read -r -a models <<< "$DKN_MODELS" + for m in "${models[@]}"; do + if [[ " ${ollama_models[@]} " =~ " ${m} " ]]; then + ollama_needed=true + break + fi + done if [ "$ollama_needed" = false ]; then + echo "No Ollama model provided. 
Skipping the Ollama execution" return fi From c66f17784b6e9519fae4cd20bf7a961420120c91 Mon Sep 17 00:00:00 2001 From: selimseker Date: Mon, 24 Jun 2024 11:05:13 +0300 Subject: [PATCH 07/29] add = to -m|--model arg --- start.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/start.sh b/start.sh index 52d78e0..084e2e6 100755 --- a/start.sh +++ b/start.sh @@ -17,7 +17,7 @@ docs() { -h | --help: Displays this help message Example usage: - ./start.sh -m nous-hermes2theta-llama3-8b --model phi3:medium --local-ollama=false --dev + ./start.sh -m=nous-hermes2theta-llama3-8b --model=phi3:medium --local-ollama=false --dev " exit 0 } @@ -48,8 +48,8 @@ DOCKER_HOST="http://host.docker.internal" # handle command line arguments while [[ "$#" -gt 0 ]]; do case $1 in - -m|--model) - shift + -m=*|--model=*) + # shift model="$(echo "${1#*=}" | tr '[:upper:]' '[:lower:]')" MODELS_LIST+=($model) ;; From 412c6b2d595840888ca64d6088701c5d19957f90 Mon Sep 17 00:00:00 2001 From: erhant Date: Mon, 24 Jun 2024 13:03:07 +0300 Subject: [PATCH 08/29] update ollama & langchain, fix payload test --- Cargo.lock | 129 +++++++++++++++++------------------ Cargo.toml | 7 +- examples/common/ollama.rs | 4 +- src/config/mod.rs | 8 +-- src/node.rs | 63 ----------------- src/workers/workflow.rs | 10 ++- tests/compute_test.rs | 76 ++++++++++----------- tests/mock_messages_test.rs | 85 ++++++++++++----------- tests/mock_sends_test.rs | 51 +++++++------- tests/payload_test.rs | 60 +++++++++++++++++ tests/threads_test.rs | 130 ++++++++++++++++++------------------ 11 files changed, 302 insertions(+), 321 deletions(-) create mode 100644 tests/payload_test.rs diff --git a/Cargo.lock b/Cargo.lock index c53d0f8..a650827 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -176,15 +176,16 @@ dependencies = [ [[package]] name = "async-openai" -version = "0.21.0" +version = "0.23.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"007f03f7e27271451af57ced242d6adfa04204d1275a91ec0952bf441fd8d102" +checksum = "47fdad7973458514157072ad405193e7f02fe1492359639e482aeaf8cae1bcd5" dependencies = [ "async-convert", "backoff", - "base64 0.22.0", + "base64 0.22.1", "bytes 1.6.0", "derive_builder", + "eventsource-stream", "futures", "rand 0.8.5", "reqwest 0.12.4", @@ -193,9 +194,9 @@ dependencies = [ "serde", "serde_json", "thiserror", - "tokio 1.37.0", + "tokio 1.38.0", "tokio-stream", - "tokio-util 0.7.10", + "tokio-util 0.7.11", "tracing", ] @@ -283,7 +284,7 @@ dependencies = [ "instant", "pin-project-lite 0.2.14", "rand 0.8.5", - "tokio 1.37.0", + "tokio 1.38.0", ] [[package]] @@ -315,9 +316,9 @@ checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" [[package]] name = "base64" -version = "0.22.0" +version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9475866fec1451be56a3c2400fd081ff546538961565ccb5b7142cbd22bc7a51" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "bit-set" @@ -750,7 +751,7 @@ dependencies = [ name = "dkn-compute" version = "0.1.1" dependencies = [ - "base64 0.22.0", + "base64 0.22.1", "colored", "ecies", "env_logger 0.11.3", @@ -760,8 +761,7 @@ dependencies = [ "langchain-rust", "libsecp256k1", "log", - "ollama-rs 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "ollama-rs 0.1.9 (git+https://github.com/pepperoni21/ollama-rs.git?branch=master)", + "ollama-rs 0.2.0", "ollama-workflows", "parking_lot", "rand 0.8.5", @@ -770,8 +770,8 @@ dependencies = [ "serde_json", "sha2 0.10.8", "sha3", - "tokio 1.37.0", - "tokio-util 0.7.10", + "tokio 1.38.0", + "tokio-util 0.7.11", "url", "urlencoding", "uuid", @@ -1203,8 +1203,8 @@ dependencies = [ "http 0.2.12", "indexmap 2.2.6", "slab", - "tokio 1.37.0", - "tokio-util 0.7.10", + "tokio 1.38.0", + "tokio-util 0.7.11", "tracing", ] @@ -1222,8 +1222,8 @@ dependencies = [ "http 1.1.0", "indexmap 2.2.6", "slab", - 
"tokio 1.37.0", - "tokio-util 0.7.10", + "tokio 1.38.0", + "tokio-util 0.7.11", "tracing", ] @@ -1511,7 +1511,7 @@ dependencies = [ "itoa 1.0.11", "pin-project-lite 0.2.14", "socket2 0.5.6", - "tokio 1.37.0", + "tokio 1.38.0", "tower-service", "tracing", "want", @@ -1533,7 +1533,7 @@ dependencies = [ "itoa 1.0.11", "pin-project-lite 0.2.14", "smallvec", - "tokio 1.37.0", + "tokio 1.38.0", "want", ] @@ -1549,7 +1549,7 @@ dependencies = [ "hyper-util", "rustls", "rustls-pki-types", - "tokio 1.37.0", + "tokio 1.38.0", "tokio-rustls", "tower-service", ] @@ -1576,7 +1576,7 @@ dependencies = [ "bytes 1.6.0", "hyper 0.14.29", "native-tls", - "tokio 1.37.0", + "tokio 1.38.0", "tokio-native-tls", ] @@ -1591,7 +1591,7 @@ dependencies = [ "hyper 1.3.1", "hyper-util", "native-tls", - "tokio 1.37.0", + "tokio 1.38.0", "tokio-native-tls", "tower-service", ] @@ -1610,7 +1610,7 @@ dependencies = [ "hyper 1.3.1", "pin-project-lite 0.2.14", "socket2 0.5.6", - "tokio 1.37.0", + "tokio 1.38.0", "tower", "tower-service", "tracing", @@ -1759,9 +1759,9 @@ dependencies = [ [[package]] name = "langchain-rust" -version = "4.2.0" +version = "4.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bccb555d11abcf4e18189433821aaee48affe8b56a4528d553d7358ddf419a39" +checksum = "ba76d56973d29cd1f2631fac617401f4f79a2f1e8e6b1f33e1e52104459873b7" dependencies = [ "async-openai", "async-recursion", @@ -1775,7 +1775,7 @@ dependencies = [ "log", "lopdf", "mockito", - "ollama-rs 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "ollama-rs 0.2.0", "readability", "regex", "reqwest 0.12.4", @@ -1788,7 +1788,7 @@ dependencies = [ "text-splitter", "thiserror", "tiktoken-rs", - "tokio 1.37.0", + "tokio 1.38.0", "tokio-stream", "url", "urlencoding", @@ -2081,7 +2081,7 @@ dependencies = [ "serde_json", "serde_urlencoded", "similar", - "tokio 1.37.0", + "tokio 1.38.0", ] [[package]] @@ -2169,19 +2169,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "ollama-rs" 
-version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53581ab78176ff3ae830a43236f485fc90d7f472d0081dddc45d8605e1301954" -dependencies = [ - "reqwest 0.12.4", - "serde", - "serde_json", - "tokio 1.37.0", - "tokio-stream", -] - [[package]] name = "ollama-rs" version = "0.1.9" @@ -2200,12 +2187,17 @@ dependencies = [ [[package]] name = "ollama-rs" -version = "0.1.9" -source = "git+https://github.com/pepperoni21/ollama-rs.git?branch=master#56e8157d98d4185bc171fe9468d3d09bc56e9dd3" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "255252ec57e13d2d6ae074c7b7cd8c004d17dafb1e03f954ba2fd5cc226f8f49" dependencies = [ + "async-trait", + "log", "reqwest 0.12.4", "serde", "serde_json", + "tokio 1.38.0", + "tokio-stream", "url", ] @@ -2221,7 +2213,7 @@ dependencies = [ "html2text", "langchain-rust", "log", - "ollama-rs 0.1.9 (git+https://github.com/andthattoo/ollama-rs?branch=master)", + "ollama-rs 0.1.9", "parking_lot", "rand 0.8.5", "regex", @@ -2232,8 +2224,8 @@ dependencies = [ "serde_json", "simsimd", "text-splitter", - "tokio 1.37.0", - "tokio-util 0.7.10", + "tokio 1.38.0", + "tokio-util 0.7.11", ] [[package]] @@ -2801,7 +2793,7 @@ dependencies = [ "serde_urlencoded", "sync_wrapper", "system-configuration", - "tokio 1.37.0", + "tokio 1.38.0", "tokio-native-tls", "tower-service", "url", @@ -2817,7 +2809,7 @@ version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "566cafdd92868e0939d3fb961bd0dc25fcfaaed179291093b3d43e6b3150ea10" dependencies = [ - "base64 0.22.0", + "base64 0.22.1", "bytes 1.6.0", "encoding_rs", "futures-core", @@ -2848,10 +2840,10 @@ dependencies = [ "serde_urlencoded", "sync_wrapper", "system-configuration", - "tokio 1.37.0", + "tokio 1.38.0", "tokio-native-tls", "tokio-rustls", - "tokio-util 0.7.10", + "tokio-util 0.7.11", "tower-service", "url", "wasm-bindgen", @@ -2968,7 +2960,7 @@ version = "2.1.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "29993a25686778eb88d4189742cd713c9bce943bc54251a33509dc63cbacf73d" dependencies = [ - "base64 0.22.0", + "base64 0.22.1", "rustls-pki-types", ] @@ -3152,18 +3144,18 @@ checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" [[package]] name = "serde" -version = "1.0.197" +version = "1.0.203" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" +checksum = "7253ab4de971e72fb7be983802300c30b5a7f0c2e56fab8abfc6a214307c0094" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.197" +version = "1.0.203" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" +checksum = "500cbc0ebeb6f46627f50f3f5811ccf6bf00643be300b4c3eabc0ef55dc5b5ba" dependencies = [ "proc-macro2", "quote", @@ -3172,9 +3164,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.116" +version = "1.0.117" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e17db7126d17feb94eb3fad46bf1a96b034e8aacbc2e775fe81505f8b0b2813" +checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3" dependencies = [ "itoa 1.0.11", "ryu", @@ -3601,9 +3593,9 @@ dependencies = [ [[package]] name = "tokio" -version = "1.37.0" +version = "1.38.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1adbebffeca75fcfd058afa480fb6c0b81e165a0323f9c9d39c9697e37c46787" +checksum = "ba4f4a02a7a80d6f274636f0aa95c7e383b912d41fe721a31f29e29698585a4a" dependencies = [ "backtrace", "bytes 1.6.0", @@ -3620,9 +3612,9 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" +checksum = 
"5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a" dependencies = [ "proc-macro2", "quote", @@ -3636,7 +3628,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" dependencies = [ "native-tls", - "tokio 1.37.0", + "tokio 1.38.0", ] [[package]] @@ -3647,7 +3639,7 @@ checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" dependencies = [ "rustls", "rustls-pki-types", - "tokio 1.37.0", + "tokio 1.38.0", ] [[package]] @@ -3658,7 +3650,7 @@ checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" dependencies = [ "futures-core", "pin-project-lite 0.2.14", - "tokio 1.37.0", + "tokio 1.38.0", ] [[package]] @@ -3687,9 +3679,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.10" +version = "0.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" +checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" dependencies = [ "bytes 1.6.0", "futures-core", @@ -3697,8 +3689,7 @@ dependencies = [ "futures-util", "hashbrown 0.14.3", "pin-project-lite 0.2.14", - "tokio 1.37.0", - "tracing", + "tokio 1.38.0", ] [[package]] @@ -3711,7 +3702,7 @@ dependencies = [ "futures-util", "pin-project", "pin-project-lite 0.2.14", - "tokio 1.37.0", + "tokio 1.38.0", "tower-layer", "tower-service", "tracing", diff --git a/Cargo.toml b/Cargo.toml index dbeaa7e..f1480c4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -42,8 +42,8 @@ sha2 = "0.10.8" sha3 = "0.10.8" # llm stuff -langchain-rust = { version = "4.2.0", features = ["ollama"] } -ollama-rs = "0.1.9" +langchain-rust = { version = "4.3.0", features = ["ollama"] } +ollama-rs = "0.2.0" ollama-workflows = { git = "https://github.com/andthattoo/ollama-workflows", branch = "main" } uuid = { version = "1.8.0", features = ["v4"] } @@ -52,9 +52,6 @@ uuid = { version = 
"1.8.0", features = ["v4"] } colored = "2.1.0" rand = "0.8.5" -# TODO: fixed version of ollama-rs for benchmarks, remove this when the new version is released -ollama-rs-master = { package = "ollama-rs", git = "https://github.com/pepperoni21/ollama-rs.git", branch = "master" } - [[example]] name = "ollama" path = "./examples/benchmarks/ollama.rs" diff --git a/examples/common/ollama.rs b/examples/common/ollama.rs index 30f3b2e..b85b277 100644 --- a/examples/common/ollama.rs +++ b/examples/common/ollama.rs @@ -2,7 +2,7 @@ use dkn_compute::{ config::constants::{DEFAULT_OLLAMA_HOST, DEFAULT_OLLAMA_PORT}, utils::get_current_time_nanos, }; -use ollama_rs_master::{ +use ollama_rs::{ generation::completion::{request::GenerationRequest, GenerationResponse}, Ollama, }; @@ -12,7 +12,7 @@ pub async fn use_model_with_prompt( model: &str, prompt: &str, ) -> (GenerationResponse, tokio::time::Duration) { - let ollama = Ollama::new(DEFAULT_OLLAMA_HOST, DEFAULT_OLLAMA_PORT); + let ollama = Ollama::new(DEFAULT_OLLAMA_HOST.to_string(), DEFAULT_OLLAMA_PORT); let time = get_current_time_nanos(); let prompt = prompt.to_string(); diff --git a/src/config/mod.rs b/src/config/mod.rs index 97f184e..e4b250e 100644 --- a/src/config/mod.rs +++ b/src/config/mod.rs @@ -71,11 +71,9 @@ impl DriaComputeNodeConfig { serde_json::to_string(&models).unwrap_or_default() ); - if !cfg!(test) { - assert!( - !models.is_empty(), - "At least one model should be provided in the configuration." 
- ); + if models.is_empty() { + log::error!("No models were provided, you will not be able to get any tasks!"); + log::error!("Please restart with at least one model provided within DKN_MODELS."); } Self { diff --git a/src/node.rs b/src/node.rs index 8db52b1..3526e8d 100644 --- a/src/node.rs +++ b/src/node.rs @@ -280,66 +280,3 @@ impl DriaComputeNode { self.send_message(message).await } } - -#[cfg(test)] -mod tests { - use super::*; - use ecies::decrypt; - use libsecp256k1::{verify, PublicKey, SecretKey}; - - /// This test demonstrates the creation and parsing of a payload. - /// - /// In DKN, the payload is created by Compute Node but parsed by the Admin Node. - /// At the end, there is also the verification step for the commitments. - #[test] - fn test_payload_generation_verification() { - const ADMIN_PRIV_KEY: &[u8; 32] = b"aaaabbbbccccddddddddccccbbbbaaaa"; - const TASK_ID: &str = "12345678abcdef"; - const RESULT: &[u8; 28] = b"this is some result you know"; - - let node = DriaComputeNode::default(); - let secret_key = SecretKey::parse(ADMIN_PRIV_KEY).expect("Should parse secret key"); - let public_key = PublicKey::from_secret_key(&secret_key); - - // create payload - let payload = node - .create_payload(RESULT, TASK_ID, &public_key.serialize()) - .expect("Should create payload"); - - // (here we assume the payload is sent to Waku network, and picked up again) - - // decrypt result - let result = decrypt( - &secret_key.serialize(), - hex::decode(payload.ciphertext) - .expect("Should decode") - .as_slice(), - ) - .expect("Could not decrypt"); - assert_eq!(result, RESULT, "Result mismatch"); - - // verify signature - let rsv = hex::decode(payload.signature).expect("Should decode"); - let mut signature_bytes = [0u8; 64]; - signature_bytes.copy_from_slice(&rsv[0..64]); - let recid_bytes: [u8; 1] = [rsv[64]]; - let signature = - Signature::parse_standard(&signature_bytes).expect("Should parse signature"); - let recid = 
RecoveryId::parse(recid_bytes[0]).expect("Should parse recovery id"); - - let result_digest = sha256hash(result); - let message = Message::parse(&result_digest); - assert!( - verify(&message, &signature, &node.config.public_key), - "Could not verify" - ); - - // recover verifying key (public key) from signature - let recovered_public_key = - libsecp256k1::recover(&message, &signature, &recid).expect("Could not recover"); - assert_eq!( - node.config.public_key, recovered_public_key, - "Public key mismatch" - ); - } -} diff --git a/src/workers/workflow.rs b/src/workers/workflow.rs index 328e5f2..6e6c072 100644 --- a/src/workers/workflow.rs +++ b/src/workers/workflow.rs @@ -61,11 +61,17 @@ pub fn workflow_worker( }); log::info!("Using model {}", model); - // execute workflow + // execute workflow with cancellation let executor = Executor::new(model); let mut memory = ProgramMemory::new(); let entry = Entry::String(task.input.prompt); - executor.execute(Some(&entry), task.input.workflow, &mut memory).await; + tokio::select! 
{ + _ = node.cancellation.cancelled() => { + log::info!("Received cancellation, quitting all tasks."); + break; + }, + _ = executor.execute(Some(&entry), task.input.workflow, &mut memory) => () + } // read final result from memory let result = match memory.read(&final_result_id) { diff --git a/tests/compute_test.rs b/tests/compute_test.rs index 923a820..116d099 100644 --- a/tests/compute_test.rs +++ b/tests/compute_test.rs @@ -1,45 +1,44 @@ #![allow(unused_imports)] -mod compute_test { - use langchain_rust::{language_models::llm::LLM, llm::client::Ollama}; - use ollama_workflows::{Entry, Executor, Model, ProgramMemory, Workflow}; - use std::env; - use tokio_util::sync::CancellationToken; +use langchain_rust::{language_models::llm::LLM, llm::client::Ollama}; +use ollama_workflows::{Entry, Executor, Model, ProgramMemory, Workflow}; +use std::env; +use tokio_util::sync::CancellationToken; - #[tokio::test] - #[ignore = "run this manually"] - async fn test_ollama_prompt() { - let model = "orca-mini".to_string(); - let ollama = Ollama::default().with_model(model); - let prompt = "The sky appears blue during the day because of a process called scattering. \ +#[tokio::test] +#[ignore = "run this manually"] +async fn test_ollama_prompt() { + let model = "orca-mini".to_string(); + let ollama = Ollama::default().with_model(model); + let prompt = "The sky appears blue during the day because of a process called scattering. \ When sunlight enters the Earth's atmosphere, it collides with air molecules such as oxygen and nitrogen. \ These collisions cause some of the light to be absorbed or reflected, which makes the colors we see appear more vivid and vibrant. \ Blue is one of the brightest colors that is scattered the most by the atmosphere, making it visible to our eyes during the day. 
\ What may be the question this answer?".to_string(); - let response = ollama - .invoke(&prompt) - .await - .expect("Should generate response"); - println!("Prompt: {}\n\nResponse:{}", prompt, response); - } + let response = ollama + .invoke(&prompt) + .await + .expect("Should generate response"); + println!("Prompt: {}\n\nResponse:{}", prompt, response); +} - #[tokio::test] - #[ignore = "run this manually"] - async fn test_ollama_bad_model() { - let model = "thismodeldoesnotexistlol".to_string(); - let ollama = Ollama::default().with_model(model); - let setup_res = ollama.invoke("hola").await; - assert!( - setup_res.is_err(), - "Should give error due to non-existing model." - ); - } +#[tokio::test] +#[ignore = "run this manually"] +async fn test_ollama_bad_model() { + let model = "thismodeldoesnotexistlol".to_string(); + let ollama = Ollama::default().with_model(model); + let setup_res = ollama.invoke("hola").await; + assert!( + setup_res.is_err(), + "Should give error due to non-existing model." 
+ ); +} - #[tokio::test] - #[ignore = "run this manually"] - async fn test_workflow() { - let workflow = r#"{ +#[tokio::test] +#[ignore = "run this manually"] +async fn test_workflow() { + let workflow = r#"{ "name": "Simple", "description": "This is a simple workflow", "config": { @@ -76,13 +75,12 @@ mod compute_test { } ] }"#; - let workflow: Workflow = serde_json::from_str(workflow).unwrap(); - let exe = Executor::new(Model::Phi3Mini); - let mut memory = ProgramMemory::new(); + let workflow: Workflow = serde_json::from_str(workflow).unwrap(); + let exe = Executor::new(Model::Phi3Mini); + let mut memory = ProgramMemory::new(); - exe.execute(None, workflow, &mut memory).await; + exe.execute(None, workflow, &mut memory).await; - let result = memory.read(&"final_result".to_string()).unwrap(); - println!("Result: {}", result); - } + let result = memory.read(&"final_result".to_string()).unwrap(); + println!("Result: {}", result); } diff --git a/tests/mock_messages_test.rs b/tests/mock_messages_test.rs index b53938d..6fa779b 100644 --- a/tests/mock_messages_test.rs +++ b/tests/mock_messages_test.rs @@ -1,44 +1,43 @@ -mod mock_messages_test { - use dkn_compute::{ - node::DriaComputeNode, utils::payload::TaskRequestPayload, waku::message::WakuMessage, - }; - use fastbloom_rs::{FilterBuilder, Membership}; - use serde::{Deserialize, Serialize}; - use std::time::Duration; - use uuid::Uuid; - - #[derive(Serialize, Deserialize, Clone, Debug)] - struct MockPayload { - number: usize, - } - - #[tokio::test] - async fn test_two_tasks() { - let topic = "testing"; - let time = Duration::from_secs(10).as_nanos(); - let input = MockPayload { number: 42 }; - let node = DriaComputeNode::default(); - let mut messages: Vec = Vec::new(); - - // create filter with your own address - let mut filter = FilterBuilder::new(128, 0.01).build_bloom_filter(); - filter.add(&node.config.address); - - let payload_tasked = TaskRequestPayload::new(input.clone(), filter, time, None); - let payload_str = 
serde_json::to_string(&payload_tasked).unwrap(); - messages.push(WakuMessage::new(payload_str, topic)); - - // create another filter without your own address - let mut filter = FilterBuilder::new(128, 0.01).build_bloom_filter(); - filter.add(&Uuid::new_v4().to_string().as_bytes()); // something dummy - - let payload_not_tasked = TaskRequestPayload::new(input, filter, time, None); - let payload_str = serde_json::to_string(&payload_not_tasked).unwrap(); - messages.push(WakuMessage::new(payload_str, topic)); - - let tasks = node.parse_messages::(messages.clone(), false); - assert_eq!(tasks.len(), 1); - assert_eq!(tasks[0].task_id, payload_tasked.task_id); - assert_ne!(tasks[0].task_id, payload_not_tasked.task_id); - } + +use dkn_compute::{ + node::DriaComputeNode, utils::payload::TaskRequestPayload, waku::message::WakuMessage, +}; +use fastbloom_rs::{FilterBuilder, Membership}; +use serde::{Deserialize, Serialize}; +use std::time::Duration; +use uuid::Uuid; + +#[derive(Serialize, Deserialize, Clone, Debug)] +struct MockPayload { + number: usize, +} + +#[tokio::test] +async fn test_two_tasks() { + let topic = "testing"; + let time = Duration::from_secs(10).as_nanos(); + let input = MockPayload { number: 42 }; + let node = DriaComputeNode::default(); + let mut messages: Vec = Vec::new(); + + // create filter with your own address + let mut filter = FilterBuilder::new(128, 0.01).build_bloom_filter(); + filter.add(&node.config.address); + + let payload_tasked = TaskRequestPayload::new(input.clone(), filter, time, None); + let payload_str = serde_json::to_string(&payload_tasked).unwrap(); + messages.push(WakuMessage::new(payload_str, topic)); + + // create another filter without your own address + let mut filter = FilterBuilder::new(128, 0.01).build_bloom_filter(); + filter.add(&Uuid::new_v4().to_string().as_bytes()); // something dummy + + let payload_not_tasked = TaskRequestPayload::new(input, filter, time, None); + let payload_str = 
serde_json::to_string(&payload_not_tasked).unwrap(); + messages.push(WakuMessage::new(payload_str, topic)); + + let tasks = node.parse_messages::(messages.clone(), false); + assert_eq!(tasks.len(), 1); + assert_eq!(tasks[0].task_id, payload_tasked.task_id); + assert_ne!(tasks[0].task_id, payload_not_tasked.task_id); } diff --git a/tests/mock_sends_test.rs b/tests/mock_sends_test.rs index 41fb68a..91ac392 100644 --- a/tests/mock_sends_test.rs +++ b/tests/mock_sends_test.rs @@ -1,35 +1,32 @@ -mod mock_sends_test { - use dkn_compute::{ - node::DriaComputeNode, - utils::crypto::{sha256hash, sign_bytes_recoverable}, - waku::message::WakuMessage, - }; - use std::{env, time::Duration}; +use dkn_compute::{ + node::DriaComputeNode, + utils::crypto::{sha256hash, sign_bytes_recoverable}, + waku::message::WakuMessage, +}; +use std::{env, time::Duration}; - /// Sends pre-computed signatures on a specific task. This simulates a number of responses to a synthesis task. - #[tokio::test] - #[ignore = "run this manually"] - async fn test_send_multiple_heartbeats() { - env::set_var("RUST_LOG", "INFO"); - let _ = env_logger::try_init(); +/// Sends pre-computed signatures on a specific task. This simulates a number of responses to a synthesis task. 
+#[tokio::test] +#[ignore = "run this manually"] +async fn test_send_multiple_heartbeats() { + env::set_var("RUST_LOG", "INFO"); + let _ = env_logger::try_init(); - let node = DriaComputeNode::default(); - let timeout = Duration::from_millis(1000); - let num_heartbeats = 20; + let node = DriaComputeNode::default(); + let timeout = Duration::from_millis(1000); + let num_heartbeats = 20; - let uuid = "59b93cb2-5738-4da4-992d-89a1835738d6"; // some random uuid + let uuid = "59b93cb2-5738-4da4-992d-89a1835738d6"; // some random uuid - let signature = - sign_bytes_recoverable(&sha256hash(uuid.as_bytes()), &node.config.secret_key); - let message = WakuMessage::new(signature, &uuid); + let signature = sign_bytes_recoverable(&sha256hash(uuid.as_bytes()), &node.config.secret_key); + let message = WakuMessage::new(signature, &uuid); - for i in 1..=num_heartbeats { - println!("Sending heartbeat #{}", i); - if let Err(e) = node.send_message_once(message.clone()).await { - println!("Error sending message: {}", e); - continue; - } - tokio::time::sleep(timeout).await; + for i in 1..=num_heartbeats { + println!("Sending heartbeat #{}", i); + if let Err(e) = node.send_message_once(message.clone()).await { + println!("Error sending message: {}", e); + continue; } + tokio::time::sleep(timeout).await; } } diff --git a/tests/payload_test.rs b/tests/payload_test.rs new file mode 100644 index 0000000..c6813db --- /dev/null +++ b/tests/payload_test.rs @@ -0,0 +1,60 @@ +use dkn_compute::{node::DriaComputeNode, utils::crypto::sha256hash}; +use ecies::decrypt; +use libsecp256k1::{verify, Message, PublicKey, RecoveryId, SecretKey, Signature}; + +/// This test demonstrates the creation and parsing of a payload. +/// +/// In DKN, the payload is created by Compute Node but parsed by the Admin Node. +/// At the end, there is also the verification step for the commitments. 
+#[test] +fn test_payload_generation_verification() { + const TASK_SECRET_KEY_HEX: &[u8; 32] = b"aaaabbbbccccddddddddccccbbbbaaaa"; + const TASK_ID: &str = "12345678abcdef"; + const RESULT: &[u8; 28] = b"this is some result you know"; + + let node = DriaComputeNode::default(); + let task_secret_key = SecretKey::parse(TASK_SECRET_KEY_HEX).expect("Should parse secret key"); + let task_public_key = PublicKey::from_secret_key(&task_secret_key); + + // create payload + let payload = node + .create_payload(RESULT, TASK_ID, &task_public_key.serialize()) + .expect("Should create payload"); + + // (here we assume the payload is sent to Waku network, and picked up again) + + // decrypt result + let result = decrypt( + &task_secret_key.serialize(), + hex::decode(payload.ciphertext) + .expect("Should decode") + .as_slice(), + ) + .expect("Could not decrypt"); + assert_eq!(result, RESULT, "Result mismatch"); + + // verify signature + let rsv = hex::decode(payload.signature).expect("Should decode"); + let mut signature_bytes = [0u8; 64]; + signature_bytes.copy_from_slice(&rsv[0..64]); + let recid_bytes: [u8; 1] = [rsv[64]]; + let signature = Signature::parse_standard(&signature_bytes).expect("Should parse signature"); + let recid = RecoveryId::parse(recid_bytes[0]).expect("Should parse recovery id"); + + let mut preimage = vec![]; + preimage.extend_from_slice(TASK_ID.as_bytes()); + preimage.extend_from_slice(&result); + let message = Message::parse(&sha256hash(preimage)); + assert!( + verify(&message, &signature, &node.config.public_key), + "Could not verify" + ); + + // recover verifying key (public key) from signature + let recovered_public_key = + libsecp256k1::recover(&message, &signature, &recid).expect("Could not recover"); + assert_eq!( + node.config.public_key, recovered_public_key, + "Public key mismatch" + ); +} diff --git a/tests/threads_test.rs b/tests/threads_test.rs index 30795b5..2c17894 100644 --- a/tests/threads_test.rs +++ b/tests/threads_test.rs @@ -1,80 +1,78 
@@ -#[cfg(test)] -mod threads_test { - use parking_lot::RwLock; - use std::sync::Arc; - use tokio_util::task::TaskTracker; - struct BusyStruct { - pub busy_lock: RwLock, - } +use parking_lot::RwLock; +use std::sync::Arc; +use tokio_util::task::TaskTracker; - impl BusyStruct { - pub fn new() -> Self { - Self { - busy_lock: RwLock::new(false), - } - } - /// Returns the state of the node, whether it is busy or not. - #[inline] - pub fn is_busy(&self) -> bool { - *self.busy_lock.read() - } +struct BusyStruct { + pub busy_lock: RwLock, +} - /// Set the state of the node, whether it is busy or not. - #[inline] - pub fn set_busy(&self, busy: bool) { - log::info!("Setting busy to {}", busy); - *self.busy_lock.write() = busy; +impl BusyStruct { + pub fn new() -> Self { + Self { + busy_lock: RwLock::new(false), } } + /// Returns the state of the node, whether it is busy or not. + #[inline] + pub fn is_busy(&self) -> bool { + *self.busy_lock.read() + } - /// This test demonstrates that two threads dont wait for each other. - /// We need a separate busy lock for task types, so that heartbeat messages can be - /// repsonded to with the correct task types. - /// Run with: - /// - /// ```sh - /// cargo test --package dkn-compute --test threads_test --all-features -- threads_test::test_mutex --exact --show-output - /// ``` - #[tokio::test] - #[ignore = "only run this for demonstration"] - async fn test_mutex() { - let _ = env_logger::try_init(); - let tracker = TaskTracker::new(); - let obj = Arc::new(BusyStruct::new()); + /// Set the state of the node, whether it is busy or not. + #[inline] + pub fn set_busy(&self, busy: bool) { + log::info!("Setting busy to {}", busy); + *self.busy_lock.write() = busy; + } +} - println!("Starting test"); +/// This test demonstrates that two threads dont wait for each other. +/// We need a separate busy lock for task types, so that heartbeat messages can be +/// repsonded to with the correct task types. 
+/// Run with: +/// +/// ```sh +/// cargo test --package dkn-compute --test threads_test --all-features -- threads_test::test_mutex --exact --show-output +/// ``` +#[tokio::test] +#[ignore = "only run this for demonstration"] +async fn test_mutex() { + let _ = env_logger::try_init(); + let tracker = TaskTracker::new(); + let obj = Arc::new(BusyStruct::new()); - // spawn a thread - let obj1 = obj.clone(); - tracker.spawn(tokio::spawn(async move { - println!("Thread 1 | is_busy: {}", obj1.is_busy()); - println!("Thread 1 | Started"); - obj1.set_busy(true); + println!("Starting test"); - tokio::time::sleep(tokio::time::Duration::from_secs(1)).await; - obj1.set_busy(false); - println!("Thread 1 | Finished"); - })); + // spawn a thread + let obj1 = obj.clone(); + tracker.spawn(tokio::spawn(async move { + println!("Thread 1 | is_busy: {}", obj1.is_busy()); + println!("Thread 1 | Started"); + obj1.set_busy(true); - // wait a bit - tokio::time::sleep(tokio::time::Duration::from_millis(250)).await; + tokio::time::sleep(tokio::time::Duration::from_secs(1)).await; + obj1.set_busy(false); + println!("Thread 1 | Finished"); + })); - // spawn a thread - let obj2 = obj.clone(); - tracker.spawn(tokio::spawn(async move { - println!("Thread 2 | is_busy: {}", obj2.is_busy()); - println!("Thread 2 | Started"); - obj2.set_busy(true); - tokio::time::sleep(tokio::time::Duration::from_secs(1)).await; - obj2.set_busy(false); - println!("Thread 2 | Finished"); - })); + // wait a bit + tokio::time::sleep(tokio::time::Duration::from_millis(250)).await; - tracker.close(); - println!("Waiting..."); - tracker.wait().await; + // spawn a thread + let obj2 = obj.clone(); + tracker.spawn(tokio::spawn(async move { + println!("Thread 2 | is_busy: {}", obj2.is_busy()); + println!("Thread 2 | Started"); + obj2.set_busy(true); + tokio::time::sleep(tokio::time::Duration::from_secs(1)).await; + obj2.set_busy(false); + println!("Thread 2 | Finished"); + })); - println!("Done."); - } + tracker.close(); + 
println!("Waiting..."); + tracker.wait().await; + + println!("Done."); } From c73c0fed74fb2e0ff36e9d8cab617a7eb0578756 Mon Sep 17 00:00:00 2001 From: erhant Date: Mon, 24 Jun 2024 16:03:05 +0300 Subject: [PATCH 09/29] fix docs, fix small typo --- README.md | 91 ++++++++++++++++++++++++++++---------------- src/config/models.rs | 2 +- start.sh | 6 +-- 3 files changed, 62 insertions(+), 37 deletions(-) diff --git a/README.md b/README.md index 646a7e3..218daa8 100644 --- a/README.md +++ b/README.md @@ -27,23 +27,21 @@ A **Dria Compute Node** is a unit of computation within the Dria Knowledge Network. It's purpose is to process tasks given by the **Dria Admin Node**, and receive rewards for providing correct results. These nodes are part of the [Waku](https://waku.org/) network, a privacy-preserving cencorship resistant peer-to-peer network. -### Heartbeat +### Tasks -Dria Admin Node broadcasts heartbeat messages at a set interval, it is a required duty of the compute node to respond to these so that they can be included in the list of available nodes for task assignment. +Compute nodes can technically do any arbitrary task, from computing the square root of a given number to finding LLM outputs from a given prompt, or validating an LLM's output with respect to knowledge available on the web accessed via tools. -### Tasks +#### Heartbeats -Compute nodes can technically do any arbitrary task, from computing the square root of a given number to finding LLM outputs from a given prompt. We currently have the following tasks: +Dria Admin Node broadcasts **heartbeat** messages at a set interval, it is a required duty of the compute node to respond to these so that they can be included in the list of available nodes for task assignment. These tasks will respect the type of model provided, e.g. if a task requires GTP4o and you are running Phi3, you won't be selected for that task. -- **Synthesis**: Generate synthetic data with respect to prompts given by the admin node. 
-- **Search**: Search the web using reasoning-and-action to answer a given query. -- **Validation**: Validate a given query-response pair. _(WIP)_ +#### Workflows -Tasks are enabled or disabled via the `DKN_TASKS` environment variable. Task names are to be provided in a list of comma-separated strings such as `DKN_TASKS=synthesis,search`. +Each task is given in the form of a workflow, based on [Ollama Workflows](https://github.com/andthattoo/ollama-workflows). See the respective repository for more information. ### Waku -We are using a reduced version of [nwaku-compose](https://github.com/waku-org/nwaku-compose) for the Waku node. It only uses the RELAY protocol, and STORE is disabled. The respective files are under the [waku](./waku/) folder. +We are using a reduced version of [nwaku-compose](https://github.com/waku-org/nwaku-compose) for the Waku node. It only uses the Relay protocol, and Store is disabled. The respective files are under the [waku](./waku/) folder. By default, there are no static peers, but you can specify them using duplicate `--staticnode` arguments within the `WAKU_EXTRA_ARGS` variable which is passed to the Waku node, that is: @@ -65,7 +63,7 @@ git clone https://github.com/firstbatchxyz/dkn-compute-node 2. **Prepare Environment Variables**: Dria Compute Node makes use of several environment variables, some of which used by Waku itself as well. First, prepare you environment variable as given in [.env.example](./.env.example). -3. **Fund an Ethereum Wallet with 0.1 Sepolia ETH**: Waku and Dria makes use of the same Ethereum wallet, and Waku uses RLN Relay protocol for further security within the network. If you have not registered to RLN protocol yet, register by running `./waku/register_rln.sh`. If you have already registered, you will have a `keystore.json` which you can place under `./waku/keystore/keystore.json` in this directory. Your secret key will be provided at `ETH_TESTNET_KEY` variable. 
You can set an optional password at `RLN_RELAY_CRED_PASSWORD` as well to encrypt the keystore file, or to decrypt it if you already have one. +3. **Fund an Ethereum Wallet with 0.1 Sepolia ETH (+ gas fees)**: Waku and Dria makes use of the same Ethereum wallet, and Waku uses RLN Relay protocol for further security within the network. If you have not registered to RLN protocol yet, register by running `./waku/register_rln.sh`. If you have already registered, you will have a `keystore.json` which you can place under `./waku/keystore/keystore.json` in this directory. Your secret key will be provided at `ETH_TESTNET_KEY` variable. You can set an optional password at `RLN_RELAY_CRED_PASSWORD` as well to encrypt the keystore file, or to decrypt it if you already have one. 4. **Ethereum Client RPC**: To communicate with Sepolia, you need an RPC URL. You can use [Infura](https://app.infura.io/) or [Alchemy](https://www.alchemy.com/). Your URL will be provided at `ETH_CLIENT_ADDRESS` variable. @@ -79,25 +77,40 @@ chmod +x start.sh # Check the available commands ./start.sh --help +``` -# Example command for synthesis tasks -./start.sh --synthesis --synthesis-model-provider=ollama --synthesis-model=phi3 +Based on the resources of your machine, you must decide which models that you will be running locally. For example, you can simple use OpenAI with theirs models, not running anything locally at all; or you can use Ollama with several models loaded to disk, and only one loaded to memory during its respective task. See [here](https://github.com/andthattoo/ollama-workflows/blob/main/src/program/atomics.rs#L269) for the latest list of available models. 
-# Example command for search tasks -./start.sh --search --search-model-provider=openai --search-model=gpt-4o +Available models are: -# Once you fill the .env file you can skip the given variables -# For instance, assume we have DKN_TASKS=synthesis and DKN_SYNTHESIS_MODEL_PROVIDER=ollama in the .env file -./start.sh --synthesis-model=llama3 # Only model name would be sufficient +- `adrienbrault/nous-hermes2theta-llama3-8b:q8_0` (Ollama) +- `phi3:14b-medium-4k-instruct-q4_1` (Ollama) +- `phi3:14b-medium-128k-instruct-q4_1` (Ollama) +- `phi3:3.8b` (Ollama) +- `gpt-3.5-turbo` (OpenAI) +- `gpt-4-turbo` (OpenAI) +- `gpt-4o` (OpenAI) -# Example command for simultaneous search and synthesis tasks -./start.sh --synthesis --search +```sh +# Run with models +./start.sh -m=llama3 -m=gpt-3.5-turbo ``` -- With the `--local-ollama=true` option (default), the compute node will use the local Ollama server on the host machine. If the server is not running, the start script will initiate it with `ollama serve` and terminate it when stopping the node. - - If `--local-ollama=false` or the local Ollama server is reachable, the compute node will use a Docker Compose service for it. - - There are three Docker Compose Ollama options: `ollama-cpu`, `ollama-cuda`, and `ollama-rocm`. The start script will decide which option to use based on the host machine's GPU specifications. -- Start script will run the containers in the background. You can check their logs either via the terminal or from [Docker Desktop](https://www.docker.com/products/docker-desktop/). +> [!NOTE] +> +> Start script will run the containers in the background. You can check their logs either via the terminal or from [Docker Desktop](https://www.docker.com/products/docker-desktop/). + +#### Using with Local Ollama + +With the `--local-ollama=true` option (default), the compute node will use the local Ollama server on the host machine. 
If the server is not running, the start script will initiate it with `ollama serve` and terminate it when stopping the node. + +- If `--local-ollama=false` or the local Ollama server is reachable, the compute node will use a Docker Compose service for it. +- There are three Docker Compose Ollama options: `ollama-cpu`, `ollama-cuda`, and `ollama-rocm`. The start script will decide which option to use based on the host machine's GPU specifications. + +```sh +# Run with local ollama +./start.sh -m=phi3 --local-ollama=true +``` ### Run from Source @@ -107,24 +120,26 @@ We are using Make as a wrapper for some scripts. You can see the available comma make help ``` -You will need OpenSSL installed as well, see shorthand commands [here](https://github.com/sfackler/rust-openssl/issues/855#issuecomment-450057552). - -While running Waku and Ollama node elsewhere, you can run the compute node with: +You will need OpenSSL installed as well, see shorthand commands [here](https://github.com/sfackler/rust-openssl/issues/855#issuecomment-450057552). While running Waku and Ollama node elsewhere, you can run the compute node with: ```sh make run # info-level logs make debug # debug-level logs ``` -#### Docs +## Contributing -Open crate docs using: +If you have a feature that you would like to add with respect to its respective issue, or a bug fix, feel free to fork & create a PR! See the sections below for development tips. + +### Development Mode + +It is best if Waku is running externally already (such as nwaku-compose) and you simply run the compute node during development, in debug mode. Our start script provides the means for that: ```sh -make docs +./start.sh -m= --dev --waku-ext ``` -#### Testing +### Testing & Benchmarking Besides the unit tests, there are separate tests for Waku network, and for compute tasks such as Ollama. 
@@ -134,8 +149,6 @@ make test-waku # Waku tests (requires a running Waku node) make test-ollama # Ollama tests (requires a running Ollama client) ``` -#### Benchmarking - To measure the speed of some Ollama models we have a benchmark that uses some models for a few prompts: ```sh @@ -148,7 +161,15 @@ You can also benchmark these models using a larger task list at a given path, wi JSON_PATH="./path/to/your.json" cargo run --release --example ollama ``` -#### Styling +### Documentation + +Open crate docs using: + +```sh +make docs +``` + +### Styling Lint and format with: @@ -156,3 +177,7 @@ Lint and format with: make lint # clippy make format # rustfmt ``` + +## License + +This project is licensed under the [Apache License 2.0](https://opensource.org/license/Apache-2.0). diff --git a/src/config/models.rs b/src/config/models.rs index 81069c2..92c6e41 100644 --- a/src/config/models.rs +++ b/src/config/models.rs @@ -8,7 +8,7 @@ pub fn parse_dkn_models(models_str: String) -> Vec<(ModelProvider, Model)> { match Model::try_from(s) { Ok(model) => Some((model.clone().into(), model)), Err(e) => { - log::warn!("Invalid model: '{}'k", e); + log::warn!("Error parsing model: {}", e); None } } diff --git a/start.sh b/start.sh index 084e2e6..ad185c3 100755 --- a/start.sh +++ b/start.sh @@ -7,17 +7,17 @@ docs() { Required environment variables in .env file; ETH_CLIENT_ADDRESS, ETH_TESTNET_KEY, RLN_RELAY_CRED_PASSWORD Description of command-line arguments: - -m | --model: Indicates the model to be used within the compute node. Multiple models can be given + -m | --model: Indicates the model to be used within the compute node. Argument can be given multiple times for multiple models. --local-ollama=: Indicates the local Ollama environment is being used (default: true) - --waku-ext: Will disable the waku execution within the compute node docker-compose. (default: false) + --waku-ext: Will assume Waku is running in another container already, and will not launch it. 
(default: false) --dev: Sets the logging level to debug (default: info) -b | --background: Enables background mode for running the node (default: FOREGROUND) -h | --help: Displays this help message Example usage: - ./start.sh -m=nous-hermes2theta-llama3-8b --model=phi3:medium --local-ollama=false --dev + ./start.sh -m=nous-hermes2theta-llama3-8b --model=phi3:medium --local-ollama=false --dev " exit 0 } From 68b5cacb302ee1b4946e9bf5dc9ed971fe619b32 Mon Sep 17 00:00:00 2001 From: erhant Date: Mon, 24 Jun 2024 16:21:40 +0300 Subject: [PATCH 10/29] fix signature bug --- src/utils/crypto.rs | 2 +- src/waku/message.rs | 18 +++++------------- 2 files changed, 6 insertions(+), 14 deletions(-) diff --git a/src/utils/crypto.rs b/src/utils/crypto.rs index a33a2d2..f2c6937 100644 --- a/src/utils/crypto.rs +++ b/src/utils/crypto.rs @@ -32,7 +32,7 @@ pub fn to_address(public_key: &PublicKey) -> [u8; 20] { #[inline] pub fn sign_bytes_recoverable(message: &[u8; 32], secret_key: &SecretKey) -> String { let message = Message::parse(message); - let (signature, recid) = sign(&message, &secret_key); + let (signature, recid) = sign(&message, secret_key); format!( "{}{}", diff --git a/src/waku/message.rs b/src/waku/message.rs index 52c5bf5..d36cb73 100644 --- a/src/waku/message.rs +++ b/src/waku/message.rs @@ -82,7 +82,7 @@ impl WakuMessage { let signature_bytes = sign_bytes_recoverable(&sha256hash(payload.clone()), signing_key); let mut signed_payload = Vec::new(); - signed_payload.extend_from_slice(hex::decode(signature_bytes).unwrap().as_slice()); + signed_payload.extend_from_slice(signature_bytes.as_ref()); signed_payload.extend_from_slice(payload.as_ref()); WakuMessage::new(signed_payload, topic) } @@ -160,7 +160,7 @@ impl fmt::Display for WakuMessage { #[cfg(test)] mod tests { use super::*; - use libsecp256k1::{Message, SecretKey}; + use libsecp256k1::SecretKey; use rand::thread_rng; use serde_json::json; @@ -218,18 +218,12 @@ mod tests { fn test_signed_message() { let mut rng = 
thread_rng(); let sk = SecretKey::random(&mut rng); + let pk = PublicKey::from_secret_key(&sk); // create payload & message with signature & body let body = TestStruct::default(); - let body_str = serde_json::to_string(&json!(body)).expect("Should stringify"); - let (signature, recid) = libsecp256k1::sign(&Message::parse(&sha256hash(&body_str)), &sk); - let signature_str = format!( - "{}{}", - hex::encode(signature.serialize()), - hex::encode([recid.serialize()]) - ); - let payload = format!("{}{}", signature_str, body_str); - let message = WakuMessage::new(payload, TOPIC); + let body_str = serde_json::to_string(&body).unwrap(); + let message = WakuMessage::new_signed(body_str, TOPIC, &sk); // decode message let message_body = message.decode_payload().expect("Should decode"); @@ -244,8 +238,6 @@ mod tests { assert_eq!(message.ephemeral, true); assert!(message.timestamp > 0); - // check signature - let pk = PublicKey::from_secret_key(&sk); assert!(message.is_signed(&pk).expect("Should check signature")); let parsed_body = message.parse_payload(true).expect("Should decode"); From 09713189d21b44f615c8abc6f46fa0f49446df6e Mon Sep 17 00:00:00 2001 From: erhant Date: Mon, 24 Jun 2024 16:27:57 +0300 Subject: [PATCH 11/29] bump version --- Cargo.lock | 2 +- Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a650827..fb8102c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -749,7 +749,7 @@ dependencies = [ [[package]] name = "dkn-compute" -version = "0.1.1" +version = "0.2.0" dependencies = [ "base64 0.22.1", "colored", diff --git a/Cargo.toml b/Cargo.toml index f1480c4..85a9e55 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "dkn-compute" -version = "0.1.1" +version = "0.2.0" edition = "2021" license = "Apache-2.0" readme = "README.md" From 0931c67144ff4e521adfdad714822b70a2a7c1f9 Mon Sep 17 00:00:00 2001 From: erhant Date: Mon, 24 Jun 2024 21:32:50 +0300 Subject: [PATCH 12/29] fix model & 
provider serialization --- Cargo.lock | 2 +- Cargo.toml | 2 +- README.md | 8 ++------ 3 files changed, 4 insertions(+), 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fb8102c..f16a7e6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2204,7 +2204,7 @@ dependencies = [ [[package]] name = "ollama-workflows" version = "0.1.0" -source = "git+https://github.com/andthattoo/ollama-workflows?branch=main#18996ec26adfab02ef1b3397c478e3b068eeeaf1" +source = "git+https://github.com/andthattoo/ollama-workflows?rev=394d34cb536a0e2c8f18c0539f51130c63a64fc9#394d34cb536a0e2c8f18c0539f51130c63a64fc9" dependencies = [ "async-trait", "colored", diff --git a/Cargo.toml b/Cargo.toml index 85a9e55..46570b5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -44,7 +44,7 @@ sha3 = "0.10.8" # llm stuff langchain-rust = { version = "4.3.0", features = ["ollama"] } ollama-rs = "0.2.0" -ollama-workflows = { git = "https://github.com/andthattoo/ollama-workflows", branch = "main" } +ollama-workflows = { git = "https://github.com/andthattoo/ollama-workflows", rev = "394d34cb536a0e2c8f18c0539f51130c63a64fc9" } uuid = { version = "1.8.0", features = ["v4"] } diff --git a/README.md b/README.md index 218daa8..12c0ea7 100644 --- a/README.md +++ b/README.md @@ -31,13 +31,9 @@ A **Dria Compute Node** is a unit of computation within the Dria Knowledge Netwo Compute nodes can technically do any arbitrary task, from computing the square root of a given number to finding LLM outputs from a given prompt, or validating an LLM's output with respect to knowledge available on the web accessed via tools. -#### Heartbeats +- **Heartbeats**: Dria Admin Node broadcasts **heartbeat** messages at a set interval, it is a required duty of the compute node to respond to these so that they can be included in the list of available nodes for task assignment. These tasks will respect the type of model provided, e.g. if a task requires `gpt-4o` and you are running `phi3`, you won't be selected for that task. 
-Dria Admin Node broadcasts **heartbeat** messages at a set interval, it is a required duty of the compute node to respond to these so that they can be included in the list of available nodes for task assignment. These tasks will respect the type of model provided, e.g. if a task requires GTP4o and you are running Phi3, you won't be selected for that task. - -#### Workflows - -Each task is given in the form of a workflow, based on [Ollama Workflows](https://github.com/andthattoo/ollama-workflows). See the respective repository for more information. +- **Workflows**: Each task is given in the form of a workflow, based on [Ollama Workflows](https://github.com/andthattoo/ollama-workflows) (see repository for more information). In simple terms, each workflow defines the agentic behavior of an LLM, all captured in a single JSON file, and can represent things ranging from simple LLM generations to iterative web searching. ### Waku From 066cbda2442d47221645d6ca1d47a4ee75cb25df Mon Sep 17 00:00:00 2001 From: erhant Date: Tue, 25 Jun 2024 18:50:51 +0300 Subject: [PATCH 13/29] added service checks & prompt is now opt, remove redundants --- .env.example | 4 -- Cargo.toml | 11 ++-- README.md | 4 +- compose.yml | 1 + examples/common/ollama.rs | 7 +-- src/compute/llm/mod.rs | 2 - src/compute/llm/ollama.rs | 110 -------------------------------------- src/compute/llm/openai.rs | 61 --------------------- src/compute/mod.rs | 2 - src/config/constants.rs | 12 ----- src/main.rs | 6 +++ src/node.rs | 36 ++++++++++++- src/utils/mod.rs | 1 + src/utils/provider.rs | 31 +++++++++++ src/workers/workflow.rs | 6 +-- 15 files changed, 85 insertions(+), 209 deletions(-) delete mode 100644 src/compute/llm/mod.rs delete mode 100644 src/compute/llm/ollama.rs delete mode 100644 src/compute/llm/openai.rs delete mode 100644 src/compute/mod.rs create mode 100644 src/utils/provider.rs diff --git a/.env.example b/.env.example index a56087f..a4500a7 100644 --- a/.env.example +++ b/.env.example @@ -12,10 
+12,6 @@ DKN_ADMIN_PUBLIC_KEY= # Public key of Dria (33-byte compressed, DKN_TASKS=synthesis # task1,task2,task3,... (comma separated, case-insensitive) DKN_MODELS=phi3:3.8b # model1,model2,model3,... (comma separated, case-insensitive) DKN_LOG_LEVEL=info # info | debug | error - -## OLLAMA ## -OLLAMA_HOST="http://127.0.0.1" -OLLAMA_PORT="11434" ## Open AI ## OPENAI_API_KEY= diff --git a/Cargo.toml b/Cargo.toml index 46570b5..c86450b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -25,6 +25,7 @@ hex = "0.4.3" hex-literal = "0.4.1" url = "2.5.0" urlencoding = "2.1.3" +uuid = { version = "1.8.0", features = ["v4"] } # logging env_logger = "0.11.3" @@ -41,17 +42,17 @@ fastbloom-rs = "0.5.9" sha2 = "0.10.8" sha3 = "0.10.8" -# llm stuff -langchain-rust = { version = "4.3.0", features = ["ollama"] } -ollama-rs = "0.2.0" +# llm stuff ollama-workflows = { git = "https://github.com/andthattoo/ollama-workflows", rev = "394d34cb536a0e2c8f18c0539f51130c63a64fc9" } -uuid = { version = "1.8.0", features = ["v4"] } - [dev-dependencies] colored = "2.1.0" rand = "0.8.5" +# we dont use these anymore, Ollama Workflows does it for us +langchain-rust = { version = "4.3.0", features = ["ollama"] } +ollama-rs = "0.2.0" + [[example]] name = "ollama" path = "./examples/benchmarks/ollama.rs" diff --git a/README.md b/README.md index 12c0ea7..2beee1c 100644 --- a/README.md +++ b/README.md @@ -75,9 +75,7 @@ chmod +x start.sh ./start.sh --help ``` -Based on the resources of your machine, you must decide which models that you will be running locally. For example, you can simple use OpenAI with theirs models, not running anything locally at all; or you can use Ollama with several models loaded to disk, and only one loaded to memory during its respective task. See [here](https://github.com/andthattoo/ollama-workflows/blob/main/src/program/atomics.rs#L269) for the latest list of available models. 
- -Available models are: +Based on the resources of your machine, you must decide which models that you will be running locally. For example, you can simple use OpenAI with theirs models, not running anything locally at all; or you can use Ollama with several models loaded to disk, and only one loaded to memory during its respective task. Available models (see [here](https://github.com/andthattoo/ollama-workflows/blob/main/src/program/atomics.rs#L269) for latest) are: - `adrienbrault/nous-hermes2theta-llama3-8b:q8_0` (Ollama) - `phi3:14b-medium-4k-instruct-q4_1` (Ollama) diff --git a/compose.yml b/compose.yml index 572fa58..9d50dd4 100644 --- a/compose.yml +++ b/compose.yml @@ -27,6 +27,7 @@ services: RUST_LOG: "${DKN_LOG_LEVEL:-info}" SEARCH_AGENT_URL: "http://host.docker.internal:5059" SEARCH_AGENT_MANAGER: true + restart: "on-failure" # Waku Node nwaku: diff --git a/examples/common/ollama.rs b/examples/common/ollama.rs index b85b277..f46fe0d 100644 --- a/examples/common/ollama.rs +++ b/examples/common/ollama.rs @@ -1,7 +1,4 @@ -use dkn_compute::{ - config::constants::{DEFAULT_OLLAMA_HOST, DEFAULT_OLLAMA_PORT}, - utils::get_current_time_nanos, -}; +use dkn_compute::utils::get_current_time_nanos; use ollama_rs::{ generation::completion::{request::GenerationRequest, GenerationResponse}, Ollama, @@ -12,7 +9,7 @@ pub async fn use_model_with_prompt( model: &str, prompt: &str, ) -> (GenerationResponse, tokio::time::Duration) { - let ollama = Ollama::new(DEFAULT_OLLAMA_HOST.to_string(), DEFAULT_OLLAMA_PORT); + let ollama = Ollama::default(); let time = get_current_time_nanos(); let prompt = prompt.to_string(); diff --git a/src/compute/llm/mod.rs b/src/compute/llm/mod.rs deleted file mode 100644 index 3ef32f6..0000000 --- a/src/compute/llm/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub mod ollama; -pub mod openai; diff --git a/src/compute/llm/ollama.rs b/src/compute/llm/ollama.rs deleted file mode 100644 index 1623f63..0000000 --- a/src/compute/llm/ollama.rs +++ /dev/null @@ 
-1,110 +0,0 @@ -use std::env; -use std::sync::Arc; -use tokio_util::sync::CancellationToken; - -use langchain_rust::llm::client::Ollama as OllamaLang; -use ollama_rs::Ollama; - -use crate::config::constants::*; - -/// Creates an Ollama LangChain client, pulls the model if it does not exist locally. -pub async fn create_ollama( - cancellation: CancellationToken, - model: String, -) -> Result { - let client = create_ollama_client(); - log::info!("Ollama URL: {}", client.uri()); - log::info!("Ollama Model: {}", model); - - pull_model(&client, &model, cancellation).await?; - - Ok(OllamaLang::new(Arc::new(client), model, None)) -} - -/// Creates the underlying OllamaRS client. -fn create_ollama_client() -> Ollama { - let host = env::var(OLLAMA_HOST).unwrap_or(DEFAULT_OLLAMA_HOST.to_string()); - - let port = env::var(OLLAMA_PORT) - .and_then(|port_str| { - port_str - .parse::() - .map_err(|_| env::VarError::NotPresent) - }) - .unwrap_or(DEFAULT_OLLAMA_PORT); - - Ollama::new(host, port) -} - -/// Pulls an LLM if it does not exist locally. -/// Also prints the locally installed models. 
-pub async fn pull_model( - client: &Ollama, - model: &str, - cancellation: CancellationToken, -) -> Result<(), String> { - log::info!("Checking local models"); - let local_models = client - .list_local_models() - .await - .map_err(|e| format!("{:?}", e))?; - - let num_local_modals = local_models.len(); - if num_local_modals == 0 { - log::info!("No local models found."); - } else { - let mut message = format!("{}{}", num_local_modals, " local models found:"); - for model in local_models.iter() { - message.push_str(format!("\n{}", model.name).as_str()) - } - log::info!("{}", message); - } - - log::info!("Pulling model: {}, this may take a while...", model); - const MAX_RETRIES: usize = 3; - let mut retry_count = 0; // retry count for edge case - while let Err(e) = client.pull_model(model.to_string(), false).await { - // edge case: invalid model is given - if e.to_string().contains("file does not exist") { - return Err( - "Invalid Ollama model, please check your environment variables.".to_string(), - ); - } else if retry_count < MAX_RETRIES { - log::error!( - "Error setting up Ollama: {}\nRetrying in 5 seconds ({}/{}).", - e, - retry_count, - MAX_RETRIES - ); - tokio::select! 
{ - _ = cancellation.cancelled() => return Ok(()), - _ = tokio::time::sleep(tokio::time::Duration::from_secs(5)) => { - retry_count += 1; // Increment the retry counter - continue; - } - } - } else { - // Handling the case when maximum retries are exceeded - log::error!("Maximum retry attempts exceeded, stopping retries."); - return Err("Maximum retry attempts exceeded.".to_string()); - } - } - log::info!("Pulled {}", model); - - Ok(()) -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_ollama_config() { - env::set_var(OLLAMA_HOST, "http://im-a-host"); - env::remove_var(OLLAMA_PORT); - - // will use default port, but read host and model from env - let ollama = create_ollama_client(); - assert_eq!(ollama.uri(), "http://im-a-host:11434"); - } -} diff --git a/src/compute/llm/openai.rs b/src/compute/llm/openai.rs deleted file mode 100644 index 5d3b004..0000000 --- a/src/compute/llm/openai.rs +++ /dev/null @@ -1,61 +0,0 @@ -use std::env; - -use langchain_rust::llm::openai::OpenAI; -use langchain_rust::llm::OpenAIConfig; - -use crate::config::constants::*; - -/// Creates an OpenAI langchain client. 
-/// -/// Will check for the following environment variables: -/// -/// - `OPENAI_API_BASE` -/// - `OPENAI_API_KEY` -/// - `OPENAI_ORG_ID` -/// - `OPENAI_PROJECT_ID` -/// -/// ### Examples -/// -/// ```rs -/// fdsjkjfds -/// ``` -pub fn create_openai(model: String) -> OpenAI { - let mut config = OpenAIConfig::default(); - - if let Ok(api_base) = env::var(OPENAI_API_BASE_URL) { - config = config.with_api_base(api_base); - } - if let Ok(api_key) = env::var(OPENAI_API_KEY) { - config = config.with_api_key(api_key); - } - if let Ok(org_id) = env::var(OPENAI_ORG_ID) { - config = config.with_org_id(org_id); - } - if let Ok(project_id) = env::var(OPENAI_PROJECT_ID) { - config = config.with_project_id(project_id); - } - - OpenAI::new(config).with_model(model) -} - -#[cfg(test)] -mod tests { - use super::*; - use langchain_rust::language_models::llm::LLM; - - #[tokio::test] - #[ignore] // cargo test --package dkn-compute --lib --all-features -- compute::openai::tests::test_openai --exact --show-output --ignored - async fn test_openai() { - let value = "FOOBARFOOBAR"; // use with your own key, with caution - env::set_var(OPENAI_API_KEY, value); - - let openai = create_openai("gpt-3.5-turbo".to_string()); - - let prompt = "Once upon a time, in a land far away, there was a dragon."; - let response = openai - .invoke(prompt) - .await - .expect("Should generate response"); - println!("{}", response); - } -} diff --git a/src/compute/mod.rs b/src/compute/mod.rs deleted file mode 100644 index 62af56c..0000000 --- a/src/compute/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -// pub mod llm; -// pub mod payload; diff --git a/src/config/constants.rs b/src/config/constants.rs index cec807a..85421ba 100644 --- a/src/config/constants.rs +++ b/src/config/constants.rs @@ -14,15 +14,3 @@ pub const DEFAULT_DKN_ADMIN_PUBLIC_KEY: &[u8; 33] = /// 32 byte secret key hex(b"node") * 8, dummy only pub const DEFAULT_DKN_WALLET_SECRET_KEY: &[u8; 32] = 
&hex!("6e6f64656e6f64656e6f64656e6f64656e6f64656e6f64656e6f64656e6f6465"); - -//////////////////// Provider: Ollama //////////////////// -pub const OLLAMA_HOST: &str = "OLLAMA_HOST"; -pub const OLLAMA_PORT: &str = "OLLAMA_PORT"; -pub const DEFAULT_OLLAMA_HOST: &str = "http://127.0.0.1"; -pub const DEFAULT_OLLAMA_PORT: u16 = 11434; - -//////////////////// Provider: OpenAI //////////////////// -pub const OPENAI_API_BASE_URL: &str = "OPENAI_API_BASE_URL"; -pub const OPENAI_API_KEY: &str = "OPENAI_API_KEY"; -pub const OPENAI_ORG_ID: &str = "OPENAI_ORG_ID"; -pub const OPENAI_PROJECT_ID: &str = "OPENAI_PROJECT_ID"; diff --git a/src/main.rs b/src/main.rs index ac97b6f..269c059 100644 --- a/src/main.rs +++ b/src/main.rs @@ -23,6 +23,12 @@ async fn main() -> Result<(), Box> { let cancellation = CancellationToken::new(); let node = Arc::new(DriaComputeNode::new(config, cancellation.clone())); + log::info!("Checking required services..."); + if let Err(e) = node.check_services().await { + log::error!("{}", e); + return Err(e.into()); + } + log::info!("Starting workers..."); let tracker = TaskTracker::new(); diff --git a/src/node.rs b/src/node.rs index 3526e8d..8ed4743 100644 --- a/src/node.rs +++ b/src/node.rs @@ -1,6 +1,7 @@ use ecies::encrypt; use fastbloom_rs::{BloomFilter, Membership}; use libsecp256k1::{sign, Message, RecoveryId, Signature}; +use ollama_workflows::ModelProvider; use parking_lot::RwLock; use serde::Deserialize; use tokio_util::sync::CancellationToken; @@ -8,8 +9,13 @@ use tokio_util::sync::CancellationToken; use crate::{ config::DriaComputeNodeConfig, errors::NodeResult, - utils::payload::{TaskRequest, TaskRequestPayload, TaskResponsePayload}, - utils::{crypto::sha256hash, filter::FilterPayload, get_current_time_nanos}, + utils::{ + crypto::sha256hash, + filter::FilterPayload, + get_current_time_nanos, + payload::{TaskRequest, TaskRequestPayload, TaskResponsePayload}, + provider::{check_ollama, check_openai}, + }, waku::{message::WakuMessage, WakuClient}, 
}; @@ -145,6 +151,32 @@ impl DriaComputeNode { } } + /// Check if the required compute services are running, e.g. if Ollama + /// is detected as a provider for the chosen models, it will check that + /// Ollama is running. + pub async fn check_services(&self) -> NodeResult<()> { + let unique_providers: Vec = + self.config + .models + .iter() + .fold(Vec::new(), |mut unique, (provider, _)| { + if !unique.contains(provider) { + unique.push(provider.clone()); + } + unique + }); + + if unique_providers.contains(&ModelProvider::Ollama) { + check_ollama().await?; + } + + if unique_providers.contains(&ModelProvider::OpenAI) { + check_openai()?; + } + + Ok(()) + } + /// Send a message via Waku Relay, assuming the content is subscribed to already. pub async fn send_message(&self, message: WakuMessage) -> NodeResult<()> { self.waku.relay.send_message(message).await diff --git a/src/utils/mod.rs b/src/utils/mod.rs index ee3f4a0..a2779ea 100644 --- a/src/utils/mod.rs +++ b/src/utils/mod.rs @@ -2,6 +2,7 @@ pub mod crypto; pub mod filter; pub mod http; pub mod payload; +pub mod provider; use std::time::{Duration, SystemTime}; use tokio::signal::unix::{signal, SignalKind}; diff --git a/src/utils/provider.rs b/src/utils/provider.rs new file mode 100644 index 0000000..2ee3c5b --- /dev/null +++ b/src/utils/provider.rs @@ -0,0 +1,31 @@ +use reqwest::get; +use std::env; + +/// Checks for OpenAI API key. +pub fn check_openai() -> Result<(), String> { + const OPENAI_API_KEY: &str = "OPENAI_API_KEY"; + + if env::var(OPENAI_API_KEY).is_err() { + return Err("OpenAI API key not found".into()); + } + + Ok(()) +} + +/// Checks for Ollama running at the default port. 
+pub async fn check_ollama() -> Result<(), String> { + const OLLAMA_URL: &str = "http://127.0.0.1:11434"; + + let response = get(OLLAMA_URL).await.map_err(|e| format!("{}", e))?; + + if let Ok(text) = response.text().await { + // Ollama returns this text specifically + if text == "Ollama is running" { + return Ok(()); + } + } + Err(format!( + "Something is running at {} but its not Ollama?", + OLLAMA_URL + )) +} diff --git a/src/workers/workflow.rs b/src/workers/workflow.rs index 6e6c072..103bf23 100644 --- a/src/workers/workflow.rs +++ b/src/workers/workflow.rs @@ -9,7 +9,7 @@ use crate::node::DriaComputeNode; struct WorkflowPayload { pub(crate) workflow: Workflow, pub(crate) model: String, - pub(crate) prompt: String, + pub(crate) prompt: Option, } const REQUEST_TOPIC: &str = "workflow"; @@ -64,13 +64,13 @@ pub fn workflow_worker( // execute workflow with cancellation let executor = Executor::new(model); let mut memory = ProgramMemory::new(); - let entry = Entry::String(task.input.prompt); + let entry: Option = task.input.prompt.map(|prompt| Entry::String(prompt)); tokio::select! { _ = node.cancellation.cancelled() => { log::info!("Received cancellation, quitting all tasks."); break; }, - _ = executor.execute(Some(&entry), task.input.workflow, &mut memory) => () + _ = executor.execute(entry.as_ref(), task.input.workflow, &mut memory) => () } // read final result from memory From 836897bca6204eb5951f23897e4869f1557e1383 Mon Sep 17 00:00:00 2001 From: erhant Date: Wed, 26 Jun 2024 11:53:49 +0300 Subject: [PATCH 14/29] model fix, tmp workflow 1 parser --- src/workers/diagnostic.rs | 1 - src/workers/workflow.rs | 38 ++++++++++++++++++++++++++------------ 2 files changed, 26 insertions(+), 13 deletions(-) diff --git a/src/workers/diagnostic.rs b/src/workers/diagnostic.rs index c301745..2c27de0 100644 --- a/src/workers/diagnostic.rs +++ b/src/workers/diagnostic.rs @@ -22,7 +22,6 @@ pub fn diagnostic_worker( tokio::select! 
{ _ = node.cancellation.cancelled() => break, _ = tokio::time::sleep(sleep_amount) => { - match node.waku.peers().await { Ok(peers) => { // if peer count changes, print it diff --git a/src/workers/workflow.rs b/src/workers/workflow.rs index 103bf23..836185b 100644 --- a/src/workers/workflow.rs +++ b/src/workers/workflow.rs @@ -55,11 +55,14 @@ pub fn workflow_worker( for task in tasks { // read model from the task - let model = Model::try_from(task.input.model.clone()).unwrap_or_else(|model| { - log::error!("Invalid model provided: {}, defaulting.", model); - Model::default() - }); - log::info!("Using model {}", model); + let model = match Model::try_from(task.input.model) { + Ok(model) => model, + Err(e) => { + log::error!("Could not read model: {}\nSkipping task {}", e, task.task_id); + continue; + } + }; + log::info!("Using model {} for task {}", model, task.task_id); // execute workflow with cancellation let executor = Executor::new(model); @@ -74,13 +77,24 @@ pub fn workflow_worker( } // read final result from memory - let result = match memory.read(&final_result_id) { - Some(entry) => entry.to_string(), - None => { - log::error!("No final result found in memory for task {}", task.task_id); - continue; - }, - }; + // let result = match memory.read(&final_result_id) { + // Some(entry) => entry.to_string(), + // None => { + // log::error!("No final result found in memory for task {}", task.task_id); + // continue; + // }, + // }; + // TODO: temporary for fix, for Workflow 1 (w1) + let res: Option> = memory.get_all(&"history".to_string()); + let mut vars_all: Vec = vec![]; + if let Some(res) = res { + for entry in res { + let sstr = entry.to_string(); + let vars: Vec = sstr.split("\n").map(|s| s.to_string()).collect(); + vars_all.extend(vars); + } + } + let result = serde_json::to_string(&vars_all).unwrap(); // send result to the response if let Err(e) = node.send_result(RESPONSE_TOPIC, &task.public_key, &task.task_id, result).await { From 
72cf511b71e5a11ca2671c8b3c953ad40e4b4fe8 Mon Sep 17 00:00:00 2001 From: erhant Date: Wed, 26 Jun 2024 12:44:54 +0300 Subject: [PATCH 15/29] update to fixed workflows --- Cargo.lock | 2 +- Cargo.toml | 2 +- src/utils/filter.rs | 11 +++++++++++ src/workers/workflow.rs | 44 +++++++++++++++-------------------------- 4 files changed, 29 insertions(+), 30 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f16a7e6..decce2e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2204,7 +2204,7 @@ dependencies = [ [[package]] name = "ollama-workflows" version = "0.1.0" -source = "git+https://github.com/andthattoo/ollama-workflows?rev=394d34cb536a0e2c8f18c0539f51130c63a64fc9#394d34cb536a0e2c8f18c0539f51130c63a64fc9" +source = "git+https://github.com/andthattoo/ollama-workflows?rev=049f19bb975eecd457f2889bd8a28b626c33f6da#049f19bb975eecd457f2889bd8a28b626c33f6da" dependencies = [ "async-trait", "colored", diff --git a/Cargo.toml b/Cargo.toml index c86450b..d626c80 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -43,7 +43,7 @@ sha2 = "0.10.8" sha3 = "0.10.8" # llm stuff -ollama-workflows = { git = "https://github.com/andthattoo/ollama-workflows", rev = "394d34cb536a0e2c8f18c0539f51130c63a64fc9" } +ollama-workflows = { git = "https://github.com/andthattoo/ollama-workflows", rev = "049f19bb975eecd457f2889bd8a28b626c33f6da" } [dev-dependencies] colored = "2.1.0" diff --git a/src/utils/filter.rs b/src/utils/filter.rs index ea508b5..9533a6a 100644 --- a/src/utils/filter.rs +++ b/src/utils/filter.rs @@ -88,6 +88,17 @@ mod tests { assert!(!bf.contains(b"im not in this filter")); } + #[test] + #[ignore = "this panics, its a bug within the filter library"] + fn test_filter_empty() { + let filter_payload = FilterPayload { + hex: "".to_string(), + hashes: 0, + }; + + BloomFilter::try_from(&filter_payload).expect("Should parse filter"); + } + #[test] fn test_filter_read_3() { const FILTER_HEX: &str = "e7799ef73dcff3bc"; diff --git a/src/workers/workflow.rs b/src/workers/workflow.rs index 
836185b..7cabb45 100644 --- a/src/workers/workflow.rs +++ b/src/workers/workflow.rs @@ -19,10 +19,6 @@ pub fn workflow_worker( node: Arc, sleep_amount: Duration, ) -> tokio::task::JoinHandle<()> { - // this ID is given in the workflow itself, but within Dria we always - // use "final_result" for this ID. - let final_result_id = "final_result".to_string(); - tokio::spawn(async move { node.subscribe_topic(REQUEST_TOPIC).await; node.subscribe_topic(RESPONSE_TOPIC).await; @@ -67,40 +63,32 @@ pub fn workflow_worker( // execute workflow with cancellation let executor = Executor::new(model); let mut memory = ProgramMemory::new(); - let entry: Option = task.input.prompt.map(|prompt| Entry::String(prompt)); + let entry: Option = task.input.prompt.map(Entry::String); + let result: Option; tokio::select! { _ = node.cancellation.cancelled() => { log::info!("Received cancellation, quitting all tasks."); break; }, - _ = executor.execute(entry.as_ref(), task.input.workflow, &mut memory) => () + exec_result = executor.execute(entry.as_ref(), task.input.workflow, &mut memory) => { + result = Some(exec_result); + } } - // read final result from memory - // let result = match memory.read(&final_result_id) { - // Some(entry) => entry.to_string(), - // None => { - // log::error!("No final result found in memory for task {}", task.task_id); - // continue; - // }, - // }; - // TODO: temporary for fix, for Workflow 1 (w1) - let res: Option> = memory.get_all(&"history".to_string()); - let mut vars_all: Vec = vec![]; - if let Some(res) = res { - for entry in res { - let sstr = entry.to_string(); - let vars: Vec = sstr.split("\n").map(|s| s.to_string()).collect(); - vars_all.extend(vars); + match result { + Some(result) => { + // send result to the network + if let Err(e) = node.send_result(RESPONSE_TOPIC, &task.public_key, &task.task_id, result).await { + log::error!("Error sending task result: {}", e); + continue; + }; + } + None => { + log::error!("No result for task {}", task.task_id); + 
continue; } } - let result = serde_json::to_string(&vars_all).unwrap(); - // send result to the response - if let Err(e) = node.send_result(RESPONSE_TOPIC, &task.public_key, &task.task_id, result).await { - log::error!("Error sending task result: {}", e); - continue; - }; } node.set_busy(false); From d10f15726fa737af14c7b8f8a4881391aef15c6d Mon Sep 17 00:00:00 2001 From: erhant Date: Wed, 26 Jun 2024 13:26:01 +0300 Subject: [PATCH 16/29] send task_id within payload --- src/node.rs | 3 ++- src/utils/payload.rs | 3 +++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/src/node.rs b/src/node.rs index 8ed4743..32ec096 100644 --- a/src/node.rs +++ b/src/node.rs @@ -81,7 +81,7 @@ impl DriaComputeNode { pub fn create_payload( &self, result: impl AsRef<[u8]>, - task_id: impl AsRef<[u8]>, + task_id: &str, task_pubkey: &[u8], ) -> NodeResult { // sign result @@ -99,6 +99,7 @@ impl DriaComputeNode { Ok(TaskResponsePayload { ciphertext: hex::encode(ciphertext), signature: format!("{}{}", hex::encode(signature), hex::encode(recid)), + task_id: task_id.to_string(), }) } diff --git a/src/utils/payload.rs b/src/utils/payload.rs index 26172bf..7ad5e45 100644 --- a/src/utils/payload.rs +++ b/src/utils/payload.rs @@ -13,11 +13,14 @@ use crate::{ /// To check the commitment, one must decrypt the ciphertext and parse plaintext from it, /// and compute the digest using SHA256. That digest will then be used for the signature check. #[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] pub struct TaskResponsePayload { /// A signature on the digest of plaintext result, prepended with task id. pub signature: String, /// Computation result encrypted with the public key of the task. pub ciphertext: String, + /// The unique identifier of the task. 
+ pub task_id: String, } impl TaskResponsePayload { From 94a2f79ba61576545a924920b80bbd6153894806 Mon Sep 17 00:00:00 2001 From: erhant Date: Wed, 26 Jun 2024 14:00:03 +0300 Subject: [PATCH 17/29] update ollama workflows --- Cargo.lock | 2 +- Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index decce2e..da7e13b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2204,7 +2204,7 @@ dependencies = [ [[package]] name = "ollama-workflows" version = "0.1.0" -source = "git+https://github.com/andthattoo/ollama-workflows?rev=049f19bb975eecd457f2889bd8a28b626c33f6da#049f19bb975eecd457f2889bd8a28b626c33f6da" +source = "git+https://github.com/andthattoo/ollama-workflows?rev=9544f27#9544f278bd4a2c8716d22b6496836892b556a57b" dependencies = [ "async-trait", "colored", diff --git a/Cargo.toml b/Cargo.toml index d626c80..a0ad0d9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -43,7 +43,7 @@ sha2 = "0.10.8" sha3 = "0.10.8" # llm stuff -ollama-workflows = { git = "https://github.com/andthattoo/ollama-workflows", rev = "049f19bb975eecd457f2889bd8a28b626c33f6da" } +ollama-workflows = { git = "https://github.com/andthattoo/ollama-workflows", rev = "9544f27" } [dev-dependencies] colored = "2.1.0" From 9b20d06690e3ef3640923e08d0e7f1aab0f8c875 Mon Sep 17 00:00:00 2001 From: erhant Date: Wed, 26 Jun 2024 14:13:05 +0300 Subject: [PATCH 18/29] try value or str for entry --- src/workers/workflow.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/workers/workflow.rs b/src/workers/workflow.rs index 7cabb45..9f9c778 100644 --- a/src/workers/workflow.rs +++ b/src/workers/workflow.rs @@ -63,7 +63,7 @@ pub fn workflow_worker( // execute workflow with cancellation let executor = Executor::new(model); let mut memory = ProgramMemory::new(); - let entry: Option = task.input.prompt.map(Entry::String); + let entry: Option = task.input.prompt.map(|prompt| Entry::try_value_or_str(&prompt)); let result: Option; tokio::select! 
{ _ = node.cancellation.cancelled() => { From 6d7cecb7f3d0a25e048a22d7be46980512d8f12f Mon Sep 17 00:00:00 2001 From: erhant Date: Fri, 28 Jun 2024 21:37:16 +0300 Subject: [PATCH 19/29] bump workflows --- Cargo.lock | 2 +- Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index da7e13b..62a260f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2204,7 +2204,7 @@ dependencies = [ [[package]] name = "ollama-workflows" version = "0.1.0" -source = "git+https://github.com/andthattoo/ollama-workflows?rev=9544f27#9544f278bd4a2c8716d22b6496836892b556a57b" +source = "git+https://github.com/andthattoo/ollama-workflows?rev=58915f3#58915f372d2810588a689f5190e8da15946fd216" dependencies = [ "async-trait", "colored", diff --git a/Cargo.toml b/Cargo.toml index a0ad0d9..c453a72 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -43,7 +43,7 @@ sha2 = "0.10.8" sha3 = "0.10.8" # llm stuff -ollama-workflows = { git = "https://github.com/andthattoo/ollama-workflows", rev = "9544f27" } +ollama-workflows = { git = "https://github.com/andthattoo/ollama-workflows", rev = "58915f3" } [dev-dependencies] colored = "2.1.0" From 2683955407908af51eb8d5f28116bda1261d9ea6 Mon Sep 17 00:00:00 2001 From: erhant Date: Sun, 30 Jun 2024 15:49:42 +0300 Subject: [PATCH 20/29] `cargo update` + fixed ecies --- Cargo.lock | 442 +++++++++++++++++++++++++----------------- src/errors/mod.rs | 4 +- tests/compute_test.rs | 4 +- 3 files changed, 265 insertions(+), 185 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 62a260f..7776380 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,9 +4,9 @@ version = 3 [[package]] name = "addr2line" -version = "0.21.0" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" dependencies = [ "gimli", ] @@ -76,9 +76,9 @@ dependencies = [ [[package]] 
name = "allocator-api2" -version = "0.2.16" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" +checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" [[package]] name = "android-tzdata" @@ -97,47 +97,48 @@ dependencies = [ [[package]] name = "anstream" -version = "0.6.13" +version = "0.6.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d96bd03f33fe50a863e394ee9718a706f988b9079b20c3784fb726e7678b62fb" +checksum = "418c75fa768af9c03be99d17643f93f79bbba589895012a80e3452a19ddda15b" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", + "is_terminal_polyfill", "utf8parse", ] [[package]] name = "anstyle" -version = "1.0.6" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8901269c6307e8d93993578286ac0edf7f195079ffff5ebdeea6a59ffb7e36bc" +checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" [[package]] name = "anstyle-parse" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" +checksum = "c03a11a9034d92058ceb6ee011ce58af4a9bf61491aa7e1e59ecd24bd40d22d4" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.0.2" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" +checksum = "ad186efb764318d35165f1758e7dcef3b10628e26d41a44bc5550652e6804391" dependencies = [ "windows-sys 0.52.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.2" +version = "3.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" +checksum = 
"61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" dependencies = [ "anstyle", "windows-sys 0.52.0", @@ -188,7 +189,7 @@ dependencies = [ "eventsource-stream", "futures", "rand 0.8.5", - "reqwest 0.12.4", + "reqwest 0.12.5", "reqwest-eventsource", "secrecy", "serde", @@ -208,7 +209,7 @@ checksum = "3b43422f69d8ff38f95f1b2bb76517c91589a924d1559a0e935d7c8ce0274c11" dependencies = [ "proc-macro2", "quote", - "syn 2.0.59", + "syn 2.0.68", ] [[package]] @@ -230,7 +231,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.59", + "syn 2.0.68", ] [[package]] @@ -241,9 +242,15 @@ checksum = "c6fa2087f2753a7da8cc1c0dbfcf89579dd57458e36769de5ac750b4671737ca" dependencies = [ "proc-macro2", "quote", - "syn 2.0.59", + "syn 2.0.68", ] +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + [[package]] name = "atty" version = "0.2.14" @@ -264,14 +271,14 @@ dependencies = [ "derive_utils", "proc-macro2", "quote", - "syn 2.0.59", + "syn 2.0.68", ] [[package]] name = "autocfg" -version = "1.2.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1fdabc7756949593fe60f30ec81974b613357de856987752631dea1e3394c80" +checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" [[package]] name = "backoff" @@ -289,9 +296,9 @@ dependencies = [ [[package]] name = "backtrace" -version = "0.3.71" +version = "0.3.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d" +checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" dependencies = [ "addr2line", "cc", @@ -343,9 +350,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = 
"bitflags" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" [[package]] name = "block-buffer" @@ -402,9 +409,9 @@ checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" [[package]] name = "cc" -version = "1.0.94" +version = "1.0.103" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17f6e324229dc011159fcc089755d1e2e216a90d43a7dea6853ca740b84f35e7" +checksum = "2755ff20a1d93490d26ba33a6f092a38a508398a5320df5d4b3014fcccce9410" [[package]] name = "cfg-if" @@ -442,9 +449,9 @@ dependencies = [ [[package]] name = "colorchoice" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" +checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" [[package]] name = "colored" @@ -496,6 +503,12 @@ dependencies = [ "cfg-if 1.0.0", ] +[[package]] +name = "critical-section" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7059fff8937831a9ae6f0fe4d658ffabf58f2ca96aa9dec1c889f936f705f216" + [[package]] name = "crossbeam-deque" version = "0.8.5" @@ -584,7 +597,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331" dependencies = [ "quote", - "syn 2.0.59", + "syn 2.0.68", ] [[package]] @@ -649,7 +662,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.59", + "syn 2.0.68", ] [[package]] @@ -660,7 +673,7 @@ checksum = "733cabb43482b1a1b53eee8583c2b9e8684d592215ea83efd305dd31bc2f0178" dependencies = [ "darling_core", "quote", - "syn 2.0.59", + "syn 2.0.68", ] [[package]] @@ -690,7 +703,7 @@ dependencies = [ "darling", "proc-macro2", 
"quote", - "syn 2.0.59", + "syn 2.0.68", ] [[package]] @@ -700,20 +713,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "206868b8242f27cecce124c19fd88157fbd0dd334df2587f36417bafbc85097b" dependencies = [ "derive_builder_core", - "syn 2.0.59", + "syn 2.0.68", ] [[package]] name = "derive_more" -version = "0.99.17" +version = "0.99.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" dependencies = [ "convert_case", "proc-macro2", "quote", "rustc_version", - "syn 1.0.109", + "syn 2.0.68", ] [[package]] @@ -724,7 +737,7 @@ checksum = "61bb5a1014ce6dfc2a378578509abe775a5aa06bff584a547555d9efdb81b926" dependencies = [ "proc-macro2", "quote", - "syn 2.0.59", + "syn 2.0.68", ] [[package]] @@ -765,7 +778,7 @@ dependencies = [ "ollama-workflows", "parking_lot", "rand 0.8.5", - "reqwest 0.12.4", + "reqwest 0.12.5", "serde", "serde_json", "sha2 0.10.8", @@ -791,18 +804,18 @@ checksum = "dcbb2bf8e87535c23f7a8a321e364ce21462d0ff10cb6407820e8e96dfff6653" [[package]] name = "dtoa-short" -version = "0.3.4" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbaceec3c6e4211c79e7b1800fb9680527106beb2f9c51904a3210c03a448c74" +checksum = "cd1511a7b6a56299bd043a9c167a6d2bfb37bf84a6dfceaba651168adfb43c87" dependencies = [ "dtoa", ] [[package]] name = "ecies" -version = "0.2.6" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53f43496fc04523aa716c5dd76133cb6d7c81eb213375684d06a8b1683f8bc1e" +checksum = "e0206e602d2645ec8b24ed8307fadbc6c3110e2b11ab2f806fc02fee49327079" dependencies = [ "aes-gcm", "getrandom 0.2.14", @@ -824,9 +837,9 @@ checksum = "3a68a4904193147e0a8dec3314640e6db742afd5f6e634f428a6af230d9b3591" [[package]] name = "either" -version = "1.12.0" +version = "1.13.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dca9240753cf90908d7e4aac30f630662b02aebaa1b58a3cadabdb23385b58b" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" [[package]] name = "encoding_rs" @@ -881,9 +894,9 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.8" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" dependencies = [ "libc", "windows-sys 0.52.0", @@ -930,9 +943,9 @@ checksum = "2d7e9bc68be4cdabbb8938140b01a8b5bc1191937f2c7e7ecc2fcebbe2d749df" [[package]] name = "fastrand" -version = "2.0.2" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "658bd65b1cf4c852a3cc96f18a8ce7b5640f6b703f905c7d74532294c2a63984" +checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" [[package]] name = "flate2" @@ -1056,7 +1069,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.59", + "syn 2.0.68", ] [[package]] @@ -1159,9 +1172,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.28.1" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" +checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" [[package]] name = "glob" @@ -1210,15 +1223,15 @@ dependencies = [ [[package]] name = "h2" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "816ec7294445779408f36fe57bc5b7fc1cf59664059096c65f905c1c61f58069" +checksum = "fa82e28a107a8cc405f0839610bdc9b15f1e25ec7d696aa5cf173edbcb1486ab" dependencies = [ + "atomic-waker", 
"bytes 1.6.0", "fnv", "futures-core", "futures-sink", - "futures-util", "http 1.1.0", "indexmap 2.2.6", "slab", @@ -1235,9 +1248,9 @@ checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" [[package]] name = "hashbrown" -version = "0.14.3" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ "ahash", "allocator-api2", @@ -1376,7 +1389,7 @@ dependencies = [ "markup5ever 0.12.1", "proc-macro2", "quote", - "syn 2.0.59", + "syn 2.0.68", ] [[package]] @@ -1434,12 +1447,12 @@ dependencies = [ [[package]] name = "http-body-util" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0475f8b2ac86659c21b64320d5d653f9efe42acd2a4e560073ec61a155a34f1d" +checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" dependencies = [ "bytes 1.6.0", - "futures-core", + "futures-util", "http 1.1.0", "http-body 1.0.0", "pin-project-lite 0.2.14", @@ -1447,9 +1460,9 @@ dependencies = [ [[package]] name = "httparse" -version = "1.8.0" +version = "1.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" +checksum = "0fcc0b4a115bf80b728eb8ea024ad5bd707b615bfed49e0665b6e0f86fd082d9" [[package]] name = "httpdate" @@ -1510,7 +1523,7 @@ dependencies = [ "httpdate 1.0.3", "itoa 1.0.11", "pin-project-lite 0.2.14", - "socket2 0.5.6", + "socket2 0.5.7", "tokio 1.38.0", "tower-service", "tracing", @@ -1526,7 +1539,7 @@ dependencies = [ "bytes 1.6.0", "futures-channel", "futures-util", - "h2 0.4.4", + "h2 0.4.5", "http 1.1.0", "http-body 1.0.0", "httparse", @@ -1539,15 +1552,16 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.26.0" +version = "0.27.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0bea761b46ae2b24eb4aef630d8d1c398157b6fc29e6350ecf090a0b70c952c" +checksum = "5ee4be2c948921a1a5320b629c4193916ed787a7f7f293fd3f7f5a6c9de74155" dependencies = [ "futures-util", "http 1.1.0", "hyper 1.3.1", "hyper-util", "rustls", + "rustls-native-certs", "rustls-pki-types", "tokio 1.38.0", "tokio-rustls", @@ -1598,9 +1612,9 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.3" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa" +checksum = "7b875924a60b96e5d7b9ae7b066540b1dd1cbd90d1828f54c92e02a283351c56" dependencies = [ "bytes 1.6.0", "futures-channel", @@ -1609,7 +1623,7 @@ dependencies = [ "http-body 1.0.0", "hyper 1.3.1", "pin-project-lite 0.2.14", - "socket2 0.5.6", + "socket2 0.5.7", "tokio 1.38.0", "tower", "tower-service", @@ -1672,7 +1686,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" dependencies = [ "equivalent", - "hashbrown 0.14.3", + "hashbrown 0.14.5", ] [[package]] @@ -1708,6 +1722,12 @@ version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" +[[package]] +name = "is_terminal_polyfill" +version = "1.70.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" + [[package]] name = "itertools" version = "0.13.0" @@ -1778,7 +1798,7 @@ dependencies = [ "ollama-rs 0.2.0", "readability", "regex", - "reqwest 0.12.4", + "reqwest 0.12.5", "reqwest-eventsource", "scraper 0.19.0", "secrecy", @@ -1796,15 +1816,15 @@ dependencies = [ [[package]] name = "lazy_static" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.153" +version = "0.2.155" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" +checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" [[package]] name = "libsecp256k1" @@ -1862,15 +1882,15 @@ checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" [[package]] name = "linux-raw-sys" -version = "0.4.13" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" [[package]] name = "lock_api" -version = "0.4.11" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" dependencies = [ "autocfg", "scopeguard", @@ -1878,9 +1898,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.21" +version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" [[package]] name = "lopdf" @@ -1988,9 +2008,9 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" [[package]] name = "memchr" -version = "2.7.2" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "mime" @@ -2000,9 +2020,9 @@ 
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" [[package]] name = "mime_guess" -version = "2.0.4" +version = "2.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef" +checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e" dependencies = [ "mime", "unicase", @@ -2016,9 +2036,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.7.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7" +checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" dependencies = [ "adler", ] @@ -2086,11 +2106,10 @@ dependencies = [ [[package]] name = "native-tls" -version = "0.2.11" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" +checksum = "a8614eb2c83d59d1c8cc974dd3f920198647674a0a035e1af1fa58707e317466" dependencies = [ - "lazy_static", "libc", "log", "openssl", @@ -2162,9 +2181,9 @@ dependencies = [ [[package]] name = "object" -version = "0.32.2" +version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" +checksum = "081b846d1d56ddfc18fdf1a922e4f6e07a11768ea1b92dec44e42b72712ccfce" dependencies = [ "memchr", ] @@ -2177,7 +2196,7 @@ dependencies = [ "async-trait", "log", "regex", - "reqwest 0.12.4", + "reqwest 0.12.5", "scraper 0.19.0", "serde", "serde_json", @@ -2193,7 +2212,7 @@ checksum = "255252ec57e13d2d6ae074c7b7cd8c004d17dafb1e03f954ba2fd5cc226f8f49" dependencies = [ "async-trait", "log", - "reqwest 0.12.4", + "reqwest 0.12.5", "serde", "serde_json", "tokio 1.38.0", @@ -2217,7 +2236,7 @@ dependencies = [ 
"parking_lot", "rand 0.8.5", "regex", - "reqwest 0.12.4", + "reqwest 0.12.5", "scraper 0.19.0", "search_with_google", "serde", @@ -2233,6 +2252,10 @@ name = "once_cell" version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +dependencies = [ + "critical-section", + "portable-atomic", +] [[package]] name = "opaque-debug" @@ -2246,7 +2269,7 @@ version = "0.10.64" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95a0481286a310808298130d22dd1fef0fa571e05a8f44ec801801e84b216b1f" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "cfg-if 1.0.0", "foreign-types", "libc", @@ -2263,7 +2286,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.59", + "syn 2.0.68", ] [[package]] @@ -2286,9 +2309,9 @@ dependencies = [ [[package]] name = "parking_lot" -version = "0.12.2" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e4af0ca4f6caed20e900d564c242b8e5d4903fdacf31d3daf527b66fe6f42fb" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" dependencies = [ "lock_api", "parking_lot_core", @@ -2296,15 +2319,15 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.9" +version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if 1.0.0", "libc", "redox_syscall", "smallvec", - "windows-targets 0.48.5", + "windows-targets 0.52.5", ] [[package]] @@ -2427,7 +2450,7 @@ dependencies = [ "phf_shared 0.11.2", "proc-macro2", "quote", - "syn 2.0.59", + "syn 2.0.68", ] [[package]] @@ -2474,7 +2497,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" 
dependencies = [ "proc-macro2", "quote", - "syn 2.0.59", + "syn 2.0.68", ] [[package]] @@ -2522,6 +2545,12 @@ dependencies = [ "bstr", ] +[[package]] +name = "portable-atomic" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" + [[package]] name = "powerfmt" version = "0.2.0" @@ -2548,9 +2577,9 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" [[package]] name = "proc-macro2" -version = "1.0.80" +version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a56dea16b0a29e94408b9aa5e2940a4eedbd128a1ba20e8f7ae60fd3d465af0e" +checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" dependencies = [ "unicode-ident", ] @@ -2561,11 +2590,58 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8746739f11d39ce5ad5c2520a9b75285310dbfe78c541ccf832d38615765aec0" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "memchr", "unicase", ] +[[package]] +name = "quinn" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4ceeeeabace7857413798eb1ffa1e9c905a9946a57d81fb69b4b71c4d8eb3ad" +dependencies = [ + "bytes 1.6.0", + "pin-project-lite 0.2.14", + "quinn-proto", + "quinn-udp", + "rustc-hash", + "rustls", + "thiserror", + "tokio 1.38.0", + "tracing", +] + +[[package]] +name = "quinn-proto" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddf517c03a109db8100448a4be38d498df8a210a99fe0e1b9eaf39e78c640efe" +dependencies = [ + "bytes 1.6.0", + "rand 0.8.5", + "ring", + "rustc-hash", + "rustls", + "slab", + "thiserror", + "tinyvec", + "tracing", +] + +[[package]] +name = "quinn-udp" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9096629c45860fc7fb143e125eb826b5e721e10be3263160c7d60ca832cf8c46" 
+dependencies = [ + "libc", + "once_cell", + "socket2 0.5.7", + "tracing", + "windows-sys 0.52.0", +] + [[package]] name = "quote" version = "1.0.36" @@ -2692,18 +2768,18 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.4.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" +checksum = "c82cf8cff14456045f55ec4241383baeff27af886adb72ffb2162f99911de0fd" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.6.0", ] [[package]] name = "regex" -version = "1.10.4" +version = "1.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c" +checksum = "b91213439dad192326a0d7c6ee3955910425f441d7038e0d6933b0aec5c4517f" dependencies = [ "aho-corasick", "memchr", @@ -2713,9 +2789,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.6" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea" +checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" dependencies = [ "aho-corasick", "memchr", @@ -2724,9 +2800,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" +checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" [[package]] name = "reqwest" @@ -2791,7 +2867,7 @@ dependencies = [ "serde", "serde_json", "serde_urlencoded", - "sync_wrapper", + "sync_wrapper 0.1.2", "system-configuration", "tokio 1.38.0", "tokio-native-tls", @@ -2805,16 +2881,16 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.12.4" +version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"566cafdd92868e0939d3fb961bd0dc25fcfaaed179291093b3d43e6b3150ea10" +checksum = "c7d6d2a27d57148378eb5e111173f4276ad26340ecc5c49a4a2152167a2d6a37" dependencies = [ "base64 0.22.1", "bytes 1.6.0", "encoding_rs", "futures-core", "futures-util", - "h2 0.4.4", + "h2 0.4.5", "http 1.1.0", "http-body 1.0.0", "http-body-util", @@ -2831,6 +2907,7 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite 0.2.14", + "quinn", "rustls", "rustls-native-certs", "rustls-pemfile 2.1.2", @@ -2838,7 +2915,7 @@ dependencies = [ "serde", "serde_json", "serde_urlencoded", - "sync_wrapper", + "sync_wrapper 1.0.1", "system-configuration", "tokio 1.38.0", "tokio-native-tls", @@ -2865,7 +2942,7 @@ dependencies = [ "mime", "nom", "pin-project-lite 0.2.14", - "reqwest 0.12.4", + "reqwest 0.12.5", "thiserror", ] @@ -2886,9 +2963,9 @@ dependencies = [ [[package]] name = "rustc-demangle" -version = "0.1.23" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" [[package]] name = "rustc-hash" @@ -2907,11 +2984,11 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.32" +version = "0.38.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65e04861e65f21776e67888bfbea442b3642beaa0138fdb1dd7a84a52dffdb89" +checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "errno", "libc", "linux-raw-sys", @@ -2920,11 +2997,11 @@ dependencies = [ [[package]] name = "rustls" -version = "0.22.4" +version = "0.23.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf4ef73721ac7bcd79b2b315da7779d8fc09718c6b3d2d1b2d94850eb8c18432" +checksum = "05cff451f60db80f490f3c182b77c35260baace73209e9cdbbe526bfe3a4d402" dependencies = [ - "log", + "once_cell", "ring", 
"rustls-pki-types", "rustls-webpki", @@ -2989,9 +3066,9 @@ checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" [[package]] name = "ryu" -version = "1.0.17" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" [[package]] name = "schannel" @@ -3065,11 +3142,11 @@ dependencies = [ [[package]] name = "security-framework" -version = "2.10.0" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "770452e37cad93e0a50d5abc3990d2bc351c36d0328f86cefec2f2fb206eaef6" +checksum = "c627723fd09706bacdb5cf41499e95098555af3c3c29d014dc3c458ef6be11c0" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.6.0", "core-foundation", "core-foundation-sys", "libc", @@ -3078,9 +3155,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.10.0" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41f3cc463c0ef97e11c3461a9d3787412d30e8e7eb907c79180c4a57bf7c04ef" +checksum = "317936bbbd05227752583946b9e66d7ce3b489f84e11a94a510b4437fef407d7" dependencies = [ "core-foundation-sys", "libc", @@ -3123,7 +3200,7 @@ version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4eb30575f3638fc8f6815f448d50cb1a2e255b0897985c8c59f4d37b72a07b06" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "cssparser 0.31.2", "derive_more", "fxhash", @@ -3159,14 +3236,14 @@ checksum = "500cbc0ebeb6f46627f50f3f5811ccf6bf00643be300b4c3eabc0ef55dc5b5ba" dependencies = [ "proc-macro2", "quote", - "syn 2.0.59", + "syn 2.0.68", ] [[package]] name = "serde_json" -version = "1.0.117" +version = "1.0.118" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3" +checksum = 
"d947f6b3163d8857ea16c4fa0dd4840d52f3041039a85decd46867eb1abef2e4" dependencies = [ "itoa 1.0.11", "ryu", @@ -3255,9 +3332,9 @@ checksum = "fa42c91313f1d05da9b26f267f931cf178d4aba455b4c4622dd7355eb80c6640" [[package]] name = "simsimd" -version = "4.3.1" +version = "4.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc9b7427cabeed25b18b43cc7d7ec466d8d1953a13ed56c46dc414c99ca4754e" +checksum = "efc843bc8f12d9c8e6b734a0fe8918fc497b42f6ae0f347dbfdad5b5138ab9b4" dependencies = [ "cc", ] @@ -3296,9 +3373,9 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.6" +version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05ffd9c0a93b7543e062e759284fcf5f5e3b098501104bfbdde4d404db792871" +checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" dependencies = [ "libc", "windows-sys 0.52.0", @@ -3350,31 +3427,31 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "strum" -version = "0.26.2" +version = "0.26.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d8cec3501a5194c432b2b7976db6b7d10ec95c253208b45f83f7136aa985e29" +checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" dependencies = [ "strum_macros", ] [[package]] name = "strum_macros" -version = "0.26.3" +version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7993a8e3a9e88a00351486baae9522c91b123a088f76469e5bd5cc17198ea87" +checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" dependencies = [ "heck", "proc-macro2", "quote", "rustversion", - "syn 2.0.59", + "syn 2.0.68", ] [[package]] name = "subtle" -version = "2.5.0" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" +checksum = 
"13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" @@ -3389,9 +3466,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.59" +version = "2.0.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a6531ffc7b071655e4ce2e04bd464c4830bb585a61cabb96cf808f05172615a" +checksum = "901fa70d88b9d6c98022e23b4136f9f3e54e4662c3bc1bd1d84a42a9a0f0c1e9" dependencies = [ "proc-macro2", "quote", @@ -3404,6 +3481,12 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" +[[package]] +name = "sync_wrapper" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" + [[package]] name = "system-configuration" version = "0.5.1" @@ -3499,7 +3582,7 @@ checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" dependencies = [ "proc-macro2", "quote", - "syn 2.0.59", + "syn 2.0.68", ] [[package]] @@ -3561,9 +3644,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.6.0" +version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +checksum = "c55115c6fbe2d2bef26eb09ad74bde02d8255476fc0c7b515ef09fbb35742d82" dependencies = [ "tinyvec_macros", ] @@ -3605,7 +3688,7 @@ dependencies = [ "parking_lot", "pin-project-lite 0.2.14", "signal-hook-registry", - "socket2 0.5.6", + "socket2 0.5.7", "tokio-macros", "windows-sys 0.48.0", ] @@ -3618,7 +3701,7 @@ checksum = "5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.59", + "syn 2.0.68", ] [[package]] @@ -3633,9 +3716,9 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.25.0" +version = "0.26.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" +checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" dependencies = [ "rustls", "rustls-pki-types", @@ -3687,7 +3770,7 @@ dependencies = [ "futures-core", "futures-sink", "futures-util", - "hashbrown 0.14.3", + "hashbrown 0.14.5", "pin-project-lite 0.2.14", "tokio 1.38.0", ] @@ -3705,7 +3788,6 @@ dependencies = [ "tokio 1.38.0", "tower-layer", "tower-service", - "tracing", ] [[package]] @@ -3740,7 +3822,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.59", + "syn 2.0.68", ] [[package]] @@ -3812,9 +3894,9 @@ checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202" [[package]] name = "unicode-width" -version = "0.1.12" +version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68f5e5f3158ecfd4b8ff6fe086db7c8467a2dfdac97fe420f2b7c4aa97af66d6" +checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" [[package]] name = "universal-hash" @@ -3834,9 +3916,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.5.0" +version = "2.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" +checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" dependencies = [ "form_urlencoded", "idna", @@ -3863,15 +3945,15 @@ checksum = "86bd8d4e895da8537e5315b8254664e6b769c4ff3db18321b297a1e7004392e3" [[package]] name = "utf8parse" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" 
[[package]] name = "uuid" -version = "1.8.0" +version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a183cf7feeba97b4dd1c0d46788634f6221d87fa961b305bed08c851829efcc0" +checksum = "5de17fd2f7da591098415cff336e12965a28061ddace43b59cb3c430179c9439" dependencies = [ "getrandom 0.2.14", ] @@ -3938,7 +4020,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.59", + "syn 2.0.68", "wasm-bindgen-shared", ] @@ -3972,7 +4054,7 @@ checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.59", + "syn 2.0.68", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -4279,22 +4361,22 @@ checksum = "927da81e25be1e1a2901d59b81b37dd2efd1fc9c9345a55007f09bf5a2d3ee03" [[package]] name = "zerocopy" -version = "0.7.32" +version = "0.7.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be" +checksum = "ae87e3fcd617500e5d106f0380cf7b77f3c6092aae37191433159dda23cfb087" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.7.32" +version = "0.7.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" +checksum = "15e934569e47891f7d9411f1a451d947a60e000ab3bd24fbb970f000387d1b3b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.59", + "syn 2.0.68", ] [[package]] diff --git a/src/errors/mod.rs b/src/errors/mod.rs index ac20a70..97fa1d3 100644 --- a/src/errors/mod.rs +++ b/src/errors/mod.rs @@ -81,8 +81,8 @@ impl From for NodeError { } } -impl From for NodeError { - fn from(value: ecies::SecpError) -> Self { +impl From for NodeError { + fn from(value: libsecp256k1::Error) -> Self { Self { message: value.to_string(), source: "secp256k1".to_string(), diff --git a/tests/compute_test.rs b/tests/compute_test.rs index 116d099..21ede5c 100644 --- 
a/tests/compute_test.rs +++ b/tests/compute_test.rs @@ -79,8 +79,6 @@ async fn test_workflow() { let exe = Executor::new(Model::Phi3Mini); let mut memory = ProgramMemory::new(); - exe.execute(None, workflow, &mut memory).await; - - let result = memory.read(&"final_result".to_string()).unwrap(); + let result = exe.execute(None, workflow, &mut memory).await; println!("Result: {}", result); } From d5d6b216d697aba133e0c7889cecc7a4c909867d Mon Sep 17 00:00:00 2001 From: erhant Date: Thu, 4 Jul 2024 11:34:02 +0300 Subject: [PATCH 21/29] quick fixes --- README.md | 2 +- src/config/mod.rs | 5 ----- src/config/models.rs | 20 +++++++++++++++----- src/node.rs | 1 + src/waku/relay.rs | 15 +++++---------- src/workers/diagnostic.rs | 7 ++++--- 6 files changed, 26 insertions(+), 24 deletions(-) diff --git a/README.md b/README.md index 2beee1c..672496d 100644 --- a/README.md +++ b/README.md @@ -57,7 +57,7 @@ Dria Compute Node is mainly expected to be executed using `./start.sh`. To start git clone https://github.com/firstbatchxyz/dkn-compute-node ``` -2. **Prepare Environment Variables**: Dria Compute Node makes use of several environment variables, some of which used by Waku itself as well. First, prepare you environment variable as given in [.env.example](./.env.example). +2. **Prepare Environment Variables**: Dria Compute Node makes use of several environment variables, some of which used by Waku itself as well. Create a `.env` file, and prepare you environment variables as given in [.env.example](./.env.example). 3. **Fund an Ethereum Wallet with 0.1 Sepolia ETH (+ gas fees)**: Waku and Dria makes use of the same Ethereum wallet, and Waku uses RLN Relay protocol for further security within the network. If you have not registered to RLN protocol yet, register by running `./waku/register_rln.sh`. If you have already registered, you will have a `keystore.json` which you can place under `./waku/keystore/keystore.json` in this directory. 
Your secret key will be provided at `ETH_TESTNET_KEY` variable. You can set an optional password at `RLN_RELAY_CRED_PASSWORD` as well to encrypt the keystore file, or to decrypt it if you already have one. diff --git a/src/config/mod.rs b/src/config/mod.rs index e4b250e..da8ed69 100644 --- a/src/config/mod.rs +++ b/src/config/mod.rs @@ -71,11 +71,6 @@ impl DriaComputeNodeConfig { serde_json::to_string(&models).unwrap_or_default() ); - if models.is_empty() { - log::error!("No models were provided, you will not be able to get any tasks!"); - log::error!("Please restart with at least one model provided within DKN_MODELS."); - } - Self { admin_public_key, secret_key, diff --git a/src/config/models.rs b/src/config/models.rs index 92c6e41..edd7807 100644 --- a/src/config/models.rs +++ b/src/config/models.rs @@ -1,7 +1,7 @@ use ollama_workflows::{Model, ModelProvider}; pub fn parse_dkn_models(models_str: String) -> Vec<(ModelProvider, Model)> { - models_str + let providers_models = models_str .split(',') .filter_map(|s| { let s = s.trim().to_lowercase(); @@ -13,7 +13,16 @@ pub fn parse_dkn_models(models_str: String) -> Vec<(ModelProvider, Model)> { } } }) - .collect() + .collect::>(); + + if providers_models.is_empty() { + log::error!("No models were provided, defaulting."); + log::error!("Make sure to restart with at least one model provided within DKN_MODELS."); + + vec![(ModelProvider::OpenAI, Model::GPT3_5Turbo)] + } else { + providers_models + } } #[cfg(test)] @@ -21,13 +30,14 @@ mod tests { use super::*; #[test] - fn test_parser_1() { + fn test_parser_default() { let models = parse_dkn_models("idontexist,i dont either,i332287648762".to_string()); - assert_eq!(models.len(), 0); + assert_eq!(models.len(), 1); + assert!(models.contains(&(ModelProvider::OpenAI, Model::GPT3_5Turbo))); } #[test] - fn test_parser_2() { + fn test_parser_2_models() { let models = parse_dkn_models("phi3:3.8b,phi3:14b-medium-4k-instruct-q4_1".to_string()); assert_eq!(models.len(), 2); } diff 
--git a/src/node.rs b/src/node.rs index 32ec096..a617d69 100644 --- a/src/node.rs +++ b/src/node.rs @@ -127,6 +127,7 @@ impl DriaComputeNode { } else { log::error!("Error subscribing to {}: {}\nAborting.", topic, e); self.cancellation.cancel(); + return; } } diff --git a/src/waku/relay.rs b/src/waku/relay.rs index 3833170..5c82d80 100644 --- a/src/waku/relay.rs +++ b/src/waku/relay.rs @@ -1,4 +1,5 @@ use crate::{errors::NodeResult, utils::http::BaseClient}; +use serde_json::json; use urlencoding; use super::message::WakuMessage; @@ -7,7 +8,7 @@ use super::message::WakuMessage; /// /// The relay client is used to send and receive messages to Waku network. It works as follows: /// -/// 1. A node subscribes to a content topic +/// 1. A node subscribes to a content topic. /// 2. Nodes that are subscribed to the same content topic can send and receive messages via the network. /// 3. On termination, the node unsubscribes from the content topic. #[derive(Debug, Clone)] @@ -23,7 +24,7 @@ impl RelayClient { /// Send a message. 
pub async fn send_message(&self, message: WakuMessage) -> NodeResult<()> { log::info!("Sending: {}", message); - let message = serde_json::json!(message); + let message = json!(message); self.base.post("relay/v1/auto/messages", message).await?; Ok(()) @@ -52,10 +53,7 @@ impl RelayClient { pub async fn subscribe(&self, content_topic: &str) -> NodeResult<()> { log::debug!("Subscribing to {}", content_topic); self.base - .post( - "relay/v1/auto/subscriptions", - serde_json::json!(vec![content_topic]), - ) + .post("relay/v1/auto/subscriptions", json!(vec![content_topic])) .await?; Ok(()) @@ -65,10 +63,7 @@ impl RelayClient { pub async fn unsubscribe(&self, content_topic: &str) -> NodeResult<()> { log::debug!("Unsubscribing from {}", content_topic); self.base - .delete( - "relay/v1/auto/subscriptions", - serde_json::json!(vec![content_topic]), - ) + .delete("relay/v1/auto/subscriptions", json!(vec![content_topic])) .await?; Ok(()) diff --git a/src/workers/diagnostic.rs b/src/workers/diagnostic.rs index 2c27de0..e2c0bbd 100644 --- a/src/workers/diagnostic.rs +++ b/src/workers/diagnostic.rs @@ -15,8 +15,8 @@ pub fn diagnostic_worker( sleep_amount: Duration, ) -> tokio::task::JoinHandle<()> { tokio::spawn(async move { - let mut num_peers: usize = 0; - let mut num_checks: usize = 0; + let mut num_peers = 0; + let mut num_checks = NUM_CHECKS_INTERVAL; loop { tokio::select! 
{ @@ -34,8 +34,9 @@ pub fn diagnostic_worker( else if num_checks == NUM_CHECKS_INTERVAL { num_checks = 0; log::info!("Active number of peers: {}", num_peers); + } else { + num_checks += 1; } - num_checks += 1; }, Err(e) => { log::error!("Error getting peers: {}", e); From 7f863178fc89f218f7af8f6eed126b2183e314fe Mon Sep 17 00:00:00 2001 From: Erhan Date: Thu, 4 Jul 2024 16:01:55 +0300 Subject: [PATCH 22/29] update to waku v0.30, minor fixes (#61) * update to waku v0.30, minor fixes * test for nonce limit --- .env.example | 5 ++--- Cargo.lock | 2 +- Cargo.toml | 2 +- README.md | 50 ++++++++++++++++++++++++++++---------------- compose.yml | 9 ++++---- src/main.rs | 4 +++- src/utils/mod.rs | 4 ++++ start.sh | 7 +++---- tests/waku_test.rs | 20 +++++++++++------- waku/register_rln.sh | 27 +++++++++++++++++------- waku/run_node.sh | 15 ++++++++----- 11 files changed, 92 insertions(+), 53 deletions(-) diff --git a/.env.example b/.env.example index a4500a7..674f868 100644 --- a/.env.example +++ b/.env.example @@ -1,7 +1,7 @@ ## WAKU ## -ETH_CLIENT_ADDRESS="https://sepolia.infura.io/v3/123aa110320f4aec179150fba1e1b1b1" # RPC URL, this is dummy; use your own! -ETH_TESTNET_KEY= # Secret key of your compute node (32 byte, hexadecimal). +RLN_RELAY_ETH_CLIENT_ADDRESS="https://sepolia.infura.io/v3/123aa110320f4aec179150fba1e1b1b1" # RPC URL, this is dummy; use your own! RLN_RELAY_CRED_PASSWORD="" # Password for the RLN relay credentials. +ETH_TESTNET_KEY= # Secret key of your compute node (32 byte, hexadecimal). WAKU_URL="http://127.0.0.1:8645" # default WAKU_EXTRA_ARGS="" # anything extra for Waku here WAKU_LOG_LEVEL="DEBUG" # INFO | DEBUG | ERROR @@ -9,7 +9,6 @@ WAKU_LOG_LEVEL="DEBUG" # INFO | DEBUG | ERROR ## DRIA ## DKN_WALLET_SECRET_KEY=${ETH_TESTNET_KEY} # Dria uses the same key as Waku DKN_ADMIN_PUBLIC_KEY= # Public key of Dria (33-byte compressed, hexadecimal). -DKN_TASKS=synthesis # task1,task2,task3,... 
(comma separated, case-insensitive) DKN_MODELS=phi3:3.8b # model1,model2,model3,... (comma separated, case-insensitive) DKN_LOG_LEVEL=info # info | debug | error diff --git a/Cargo.lock b/Cargo.lock index 7776380..345d428 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -762,7 +762,7 @@ dependencies = [ [[package]] name = "dkn-compute" -version = "0.2.0" +version = "0.3.0" dependencies = [ "base64 0.22.1", "colored", diff --git a/Cargo.toml b/Cargo.toml index c453a72..8464b3a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "dkn-compute" -version = "0.2.0" +version = "0.3.0" edition = "2021" license = "Apache-2.0" readme = "README.md" diff --git a/README.md b/README.md index 672496d..40f357f 100644 --- a/README.md +++ b/README.md @@ -45,33 +45,39 @@ By default, there are no static peers, but you can specify them using duplicate WAKU_EXTRA_ARGS="--staticnode=/ip4/foobar/... --staticnode=/ip4/bazboo/..." ``` -## Usage - -Dria Compute Node is mainly expected to be executed using `./start.sh`. To start running a node, you must do the following: - -### Initial Setup +## Setup -1. **Clone the repo** +**Clone the repository**: This repository has the necessary setup to run the node, so start by cloning it using the command below: ```bash git clone https://github.com/firstbatchxyz/dkn-compute-node ``` -2. **Prepare Environment Variables**: Dria Compute Node makes use of several environment variables, some of which used by Waku itself as well. Create a `.env` file, and prepare you environment variables as given in [.env.example](./.env.example). +**Prepare Environment Variables**: Dria Compute Node makes use of several environment variables, some of which used by Waku itself as well. Create a `.env` file, and prepare you environment variables as given in [.env.example](./.env.example). -3. 
**Fund an Ethereum Wallet with 0.1 Sepolia ETH (+ gas fees)**: Waku and Dria makes use of the same Ethereum wallet, and Waku uses RLN Relay protocol for further security within the network. If you have not registered to RLN protocol yet, register by running `./waku/register_rln.sh`. If you have already registered, you will have a `keystore.json` which you can place under `./waku/keystore/keystore.json` in this directory. Your secret key will be provided at `ETH_TESTNET_KEY` variable. You can set an optional password at `RLN_RELAY_CRED_PASSWORD` as well to encrypt the keystore file, or to decrypt it if you already have one. +**Fund an Ethereum Wallet with 0.1 Sepolia ETH (+ gas fees)**: Waku and Dria makes use of the same Ethereum wallet. We require a bit of ether for the next step, so you should fund your wallet using a faucet such as [Infura](https://www.infura.io/faucet/sepolia) or [Alchemy](https://www.alchemy.com/faucets/ethereum-sepolia). Place your private key at `ETH_TESTNET_KEY` in `.env` without the 0x prefix. -4. **Ethereum Client RPC**: To communicate with Sepolia, you need an RPC URL. You can use [Infura](https://app.infura.io/) or [Alchemy](https://www.alchemy.com/). Your URL will be provided at `ETH_CLIENT_ADDRESS` variable. +**Ethereum Client RPC**: To communicate with Ethereum, you need an RPC URL. You can use [Infura](https://app.infura.io/) or [Alchemy](https://www.alchemy.com/) providers for this. Place your URL at the `RLN_RELAY_ETH_CLIENT_ADDRESS` variable in `.env`. -### Start the node +**Register to RLN Contract**: Waku uses Rate-Limiting Nullifier (RLN) for further security within the network. To register your wallet to RLN, first set a password at `RLN_RELAY_CRED_PASSWORD`. Then, register with the following commands which will create a file at `./waku/keystore/keystore.json`. 
-With all setup steps completed, you should be able to start a node with `./start.sh` +```sh +cd waku +./register.rln +``` + +> [!TIP] +> +> If you have already registered before, you will have a `keystore.json` which you can place under `./waku/keystore/keystore.json` in this directory. Note that the private key and RLN password must be the same so that this keystore file can be decrypted. + +These setup steps are all to be able to use Waku network. You can find it at [nwaku-compose](https://github.com/waku-org/nwaku-compose/). + +## Usage + +With all setup steps completed, you should be able to start a node with `./start.sh`. See the available commands with: ```sh -# Give exec permissions chmod +x start.sh - -# Check the available commands ./start.sh --help ``` @@ -86,7 +92,6 @@ Based on the resources of your machine, you must decide which models that you wi - `gpt-4o` (OpenAI) ```sh -# Run with models ./start.sh -m=llama3 -m=gpt-3.5-turbo ``` @@ -94,12 +99,21 @@ Based on the resources of your machine, you must decide which models that you wi > > Start script will run the containers in the background. You can check their logs either via the terminal or from [Docker Desktop](https://www.docker.com/products/docker-desktop/). -#### Using with Local Ollama +### Using Ollama + +> If you don't have Ollama installed, you can ignore this section. -With the `--local-ollama=true` option (default), the compute node will use the local Ollama server on the host machine. If the server is not running, the start script will initiate it with `ollama serve` and terminate it when stopping the node. +If you have Ollama installed already (e.g. via `brew install ollama`) then you must indicate that you will be using that Ollama, instead of a Docker container. + +To do this, we set the provide the argument `--local-ollama=true` which is `true` by default. With this, the compute node will use the Ollama server on your machine, instead of a Docker container. 
+ +If the Ollama server is not running, the start script will initiate it with `ollama serve` and terminate it when the node is being stopped. - If `--local-ollama=false` or the local Ollama server is reachable, the compute node will use a Docker Compose service for it. -- There are three Docker Compose Ollama options: `ollama-cpu`, `ollama-cuda`, and `ollama-rocm`. The start script will decide which option to use based on the host machine's GPU specifications. + +> [!TIP] +> +> There are three Docker Compose Ollama options: `ollama-cpu`, `ollama-cuda`, and `ollama-rocm`. The start script will decide which option to use based on the host machine's GPU specifications. ```sh # Run with local ollama diff --git a/compose.yml b/compose.yml index 9d50dd4..20ee3cc 100644 --- a/compose.yml +++ b/compose.yml @@ -6,10 +6,10 @@ x-logging: &logging max-size: 1000m # Environment variable definitions -x-eth-client-address: ð_client_address ${ETH_CLIENT_ADDRESS:-} # Add your ETH_CLIENT_ADDRESS after the "-" +x-rln-relay-eth-client-address: &rln_relay_eth_client_address ${RLN_RELAY_ETH_CLIENT_ADDRESS:-} x-rln-environment: &rln_env - RLN_RELAY_CONTRACT_ADDRESS: ${RLN_RELAY_CONTRACT_ADDRESS:-0xF471d71E9b1455bBF4b85d475afb9BB0954A29c4} + RLN_RELAY_CONTRACT_ADDRESS: ${RLN_RELAY_CONTRACT_ADDRESS:-0xCB33Aa5B38d79E3D9Fa8B10afF38AA201399a7e3} RLN_RELAY_CRED_PATH: ${RLN_RELAY_CRED_PATH:-} # Optional: Add your RLN_RELAY_CRED_PATH after the "-" RLN_RELAY_CRED_PASSWORD: ${RLN_RELAY_CRED_PASSWORD:-} # Optional: Add your RLN_RELAY_CRED_PASSWORD after the "-" @@ -23,7 +23,6 @@ services: WAKU_URL: "http://host.docker.internal:8645" OLLAMA_HOST: "http://host.docker.internal" OLLAMA_PORT: "11434" - OLLAMA_KEEP_ALIVE: "${OLLAMA_KEEP_ALIVE:-5m}" RUST_LOG: "${DKN_LOG_LEVEL:-info}" SEARCH_AGENT_URL: "http://host.docker.internal:5059" SEARCH_AGENT_MANAGER: true @@ -31,7 +30,7 @@ services: # Waku Node nwaku: - image: harbor.status.im/wakuorg/nwaku:v0.28.0 + image: harbor.status.im/wakuorg/nwaku:v0.30.1 
restart: on-failure ports: - 30304:30304/tcp @@ -45,7 +44,7 @@ services: - *logging environment: RLN_RELAY_CRED_PASSWORD: "${RLN_RELAY_CRED_PASSWORD}" - ETH_CLIENT_ADDRESS: *eth_client_address + RLN_RELAY_ETH_CLIENT_ADDRESS: *rln_relay_eth_client_address EXTRA_ARGS: "${WAKU_EXTRA_ARGS}" LOG_LEVEL: "${WAKU_LOG_LEVEL:-DEBUG}" <<: diff --git a/src/main.rs b/src/main.rs index 269c059..710855a 100644 --- a/src/main.rs +++ b/src/main.rs @@ -29,7 +29,9 @@ async fn main() -> Result<(), Box> { return Err(e.into()); } - log::info!("Starting workers..."); + // start workers after waiting a bit because Waku takes time to launch + log::info!("Starting workers in 5 seconds..."); + tokio::time::sleep(tokio::time::Duration::from_secs(5)).await; let tracker = TaskTracker::new(); tracker.spawn(diagnostic_worker( diff --git a/src/utils/mod.rs b/src/utils/mod.rs index a2779ea..9717a91 100644 --- a/src/utils/mod.rs +++ b/src/utils/mod.rs @@ -29,6 +29,10 @@ pub async fn wait_for_termination(cancellation: CancellationToken) -> std::io::R tokio::select! { _ = sigterm.recv() => log::warn!("Recieved SIGTERM"), _ = sigint.recv() => log::warn!("Recieved SIGINT"), + _ = cancellation.cancelled() => { + // no need to wait if cancelled anyways + return Ok(()); + } }; cancellation.cancel(); diff --git a/start.sh b/start.sh index ad185c3..4e54baa 100755 --- a/start.sh +++ b/start.sh @@ -4,7 +4,7 @@ docs() { echo " start.sh starts the compute node with given environment and parameters using docker-compose. Loads the .env file as base environment and creates a .env.compose file for final environment to run with docker-compose. - Required environment variables in .env file; ETH_CLIENT_ADDRESS, ETH_TESTNET_KEY, RLN_RELAY_CRED_PASSWORD + Required environment variables in .env file; RLN_RELAY_ETH_CLIENT_ADDRESS, ETH_TESTNET_KEY, RLN_RELAY_CRED_PASSWORD Description of command-line arguments: -m | --model: Indicates the model to be used within the compute node. 
Argument can be given multiple times for multiple models. @@ -74,7 +74,7 @@ done check_required_env_vars() { local required_vars=( - "ETH_CLIENT_ADDRESS" + "RLN_RELAY_ETH_CLIENT_ADDRESS" "ETH_TESTNET_KEY" "RLN_RELAY_CRED_PASSWORD" "DKN_WALLET_SECRET_KEY" @@ -151,7 +151,7 @@ handle_compute_env waku_envs=() handle_waku_env() { waku_env_vars=( - "ETH_CLIENT_ADDRESS" + "RLN_RELAY_ETH_CLIENT_ADDRESS" "ETH_TESTNET_KEY" "RLN_RELAY_CRED_PASSWORD" "WAKU_URL" @@ -208,7 +208,6 @@ handle_ollama_env() { ollama_env_vars=( "OLLAMA_HOST" "OLLAMA_PORT" - "OLLAMA_KEEP_ALIVE" ) ollama_envs=($(as_pairs "${ollama_env_vars[@]}")) diff --git a/tests/waku_test.rs b/tests/waku_test.rs index 0f3a270..4192952 100644 --- a/tests/waku_test.rs +++ b/tests/waku_test.rs @@ -36,18 +36,22 @@ mod waku_tests { let _ = env_logger::try_init(); let node = DriaComputeNode::default(); - let topic = "test-topic-msr"; + let topic = "test-topic-dria"; node.subscribe_topic(topic).await; - let message = WakuMessage::new("hello world".to_string(), topic); + // this test checks if we get stuck at the nonce limit of RLNv2 + for i in 1..=20 { + println!("Sending message #{}", i); + let message = WakuMessage::new("hello world".to_string(), topic); - node.send_message(message) - .await - .expect("Should send message"); + node.send_message(message) + .await + .expect("Should send message"); - // wait a bit for the message - tokio::time::sleep(tokio::time::Duration::from_secs(1)).await; + // wait a bit for the message + tokio::time::sleep(tokio::time::Duration::from_secs(1)).await; + } let messages = node .process_topic(topic, false) @@ -55,5 +59,7 @@ mod waku_tests { .expect("Should receive"); assert!(messages.len() > 0, "Should have received message"); + + node.unsubscribe_topic_ignored(topic).await; } } diff --git a/waku/register_rln.sh b/waku/register_rln.sh index bd9d594..ab660f1 100755 --- a/waku/register_rln.sh +++ b/waku/register_rln.sh @@ -1,20 +1,31 @@ #!/bin/sh -if test -f $(pwd)/keystore/keystore.json; 
then - echo "Keystore already exists. Use it instead of creating a new one." + +if test -f ./keystore/keystore.json; then + echo "keystore/keystore.json already exists. Use it instead of creating a new one." echo "Exiting" exit 1 fi -if test -f $(pwd)/../.env; then - echo "Using .env file" - . $(pwd)/../.env + +if test -f .env; then + echo "Using .env file" + . $(pwd)/.env +fi + +# TODO: Set nwaku release when ready instead of quay + +if test -n "${ETH_CLIENT_ADDRESS}"; then + echo "ETH_CLIENT_ADDRESS variable was renamed to RLN_RELAY_ETH_CLIENT_ADDRESS" + echo "Please update your .env file" + exit 1 fi -docker run -v $(pwd)/keystore:/keystore/:Z harbor.status.im/wakuorg/nwaku:v0.25.0 generateRlnKeystore \ ---rln-relay-eth-client-address=${ETH_CLIENT_ADDRESS} \ +docker run -v $(pwd)/keystore:/keystore/:Z harbor.status.im/wakuorg/nwaku:v0.30.1 generateRlnKeystore \ --rln-relay-eth-client-address=${RLN_RELAY_ETH_CLIENT_ADDRESS} \ --rln-relay-eth-private-key=${ETH_TESTNET_KEY} \ ---rln-relay-eth-contract-address=0xF471d71E9b1455bBF4b85d475afb9BB0954A29c4 \ +--rln-relay-eth-contract-address=0xCB33Aa5B38d79E3D9Fa8B10afF38AA201399a7e3 \ --rln-relay-cred-path=/keystore/keystore.json \ --rln-relay-cred-password="${RLN_RELAY_CRED_PASSWORD}" \ +--rln-relay-user-message-limit=20 \ --execute diff --git a/waku/run_node.sh b/waku/run_node.sh index bb4d4be..8ca07c2 100644 --- a/waku/run_node.sh +++ b/waku/run_node.sh @@ -2,7 +2,13 @@ echo "I am a nwaku node" -if [ -z "${ETH_CLIENT_ADDRESS}" ]; then +if test -n "${ETH_CLIENT_ADDRESS}"; then + echo "ETH_CLIENT_ADDRESS variable was renamed to RLN_RELAY_ETH_CLIENT_ADDRESS" + echo "Please update your .env file" + exit 1 +fi + +if [ -z "${RLN_RELAY_ETH_CLIENT_ADDRESS}" ]; then echo "Missing Eth client address, please refer to README.md for detailed instructions" exit 1 fi @@ -59,8 +65,6 @@ if [ -n "${STORAGE_SIZE}" ]; then STORE_RETENTION_POLICY=--store-message-retention-policy=size:"${STORAGE_SIZE}" fi -# we have disabled store
for DKN, using --store=false - exec /usr/bin/wakunode\ --relay=true\ --filter=true\ @@ -71,7 +75,7 @@ exec /usr/bin/wakunode\ --discv5-discovery=true\ --discv5-udp-port=9005\ --discv5-enr-auto-update=True\ - --log-level=${LOG_LEVEL}\ + --log-level=DEBUG\ --tcp-port=30304\ --metrics-server=True\ --metrics-server-port=8003\ @@ -81,10 +85,11 @@ exec /usr/bin/wakunode\ --rest-address=0.0.0.0\ --rest-port=8645\ --rest-allow-origin="waku-org.github.io"\ + --rest-allow-origin="localhost:*"\ --nat=extip:"${MY_EXT_IP}"\ --store=false\ --store-message-db-url="postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/postgres"\ - --rln-relay-eth-client-address="${ETH_CLIENT_ADDRESS}"\ + --rln-relay-eth-client-address="${RLN_RELAY_ETH_CLIENT_ADDRESS}"\ --rln-relay-tree-path="/etc/rln_tree"\ ${RLN_RELAY_CRED_PATH}\ ${RLN_RELAY_CRED_PASSWORD}\ From 256ca67c17c1254379b423ec3d6172addaf17bff Mon Sep 17 00:00:00 2001 From: erhant Date: Fri, 5 Jul 2024 10:58:17 +0300 Subject: [PATCH 23/29] update ollama workflows to accept host & port --- .env.example | 8 ++++++-- Cargo.lock | 2 +- Cargo.toml | 2 +- src/workers/workflow.rs | 27 +++++++++++++++++++++++++-- tests/mock_messages_test.rs | 1 - tests/threads_test.rs | 1 - 6 files changed, 33 insertions(+), 8 deletions(-) diff --git a/.env.example b/.env.example index 674f868..d2bd552 100644 --- a/.env.example +++ b/.env.example @@ -10,7 +10,11 @@ WAKU_LOG_LEVEL="DEBUG" # INFO | DEBUG | ERROR DKN_WALLET_SECRET_KEY=${ETH_TESTNET_KEY} # Dria uses the same key as Waku DKN_ADMIN_PUBLIC_KEY= # Public key of Dria (33-byte compressed, hexadecimal). DKN_MODELS=phi3:3.8b # model1,model2,model3,... 
(comma separated, case-insensitive) -DKN_LOG_LEVEL=info # info | debug | error +DKN_LOG_LEVEL=info # info | debug | error | none,dkn_compute=debug -## Open AI ## +## Open AI (if used) ## OPENAI_API_KEY= + +## Ollama (if used) ## +OLLAMA_HOST="http://127.0.0.1" +OLLAMA_PORT=11434 diff --git a/Cargo.lock b/Cargo.lock index 345d428..0cd788d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2223,7 +2223,7 @@ dependencies = [ [[package]] name = "ollama-workflows" version = "0.1.0" -source = "git+https://github.com/andthattoo/ollama-workflows?rev=58915f3#58915f372d2810588a689f5190e8da15946fd216" +source = "git+https://github.com/andthattoo/ollama-workflows?rev=4720e16#4720e16f5d48bd7c354af7d0b1aa9e407e375e70" dependencies = [ "async-trait", "colored", diff --git a/Cargo.toml b/Cargo.toml index 8464b3a..b70716d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -43,7 +43,7 @@ sha2 = "0.10.8" sha3 = "0.10.8" # llm stuff -ollama-workflows = { git = "https://github.com/andthattoo/ollama-workflows", rev = "58915f3" } +ollama-workflows = { git = "https://github.com/andthattoo/ollama-workflows", rev = "4720e16" } [dev-dependencies] colored = "2.1.0" diff --git a/src/workers/workflow.rs b/src/workers/workflow.rs index 9f9c778..a44eb75 100644 --- a/src/workers/workflow.rs +++ b/src/workers/workflow.rs @@ -1,4 +1,4 @@ -use ollama_workflows::{Entry, Executor, Model, ProgramMemory, Workflow}; +use ollama_workflows::{Entry, Executor, Model, ModelProvider, ProgramMemory, Workflow}; use serde::Deserialize; use std::sync::Arc; use std::time::Duration; @@ -19,6 +19,8 @@ pub fn workflow_worker( node: Arc, sleep_amount: Duration, ) -> tokio::task::JoinHandle<()> { + let (ollama_host, ollama_port) = get_ollama_config(); + tokio::spawn(async move { node.subscribe_topic(REQUEST_TOPIC).await; node.subscribe_topic(RESPONSE_TOPIC).await; @@ -59,9 +61,14 @@ pub fn workflow_worker( } }; log::info!("Using model {} for task {}", model, task.task_id); + let model_provider = ModelProvider::from(model.clone()); // 
execute workflow with cancellation - let executor = Executor::new(model); + let executor = if model_provider == ModelProvider::Ollama { + Executor::new_at(model, &ollama_host, ollama_port) + } else { + Executor::new(model) + }; let mut memory = ProgramMemory::new(); let entry: Option = task.input.prompt.map(|prompt| Entry::try_value_or_str(&prompt)); let result: Option; @@ -101,3 +108,19 @@ pub fn workflow_worker( node.unsubscribe_topic_ignored(RESPONSE_TOPIC).await; }) } + +fn get_ollama_config() -> (String, u16) { + const DEFAULT_OLLAMA_HOST: &str = "http://127.0.0.1"; + const DEFAULT_OLLAMA_PORT: u16 = 11434; + + let ollama_host = std::env::var("OLLAMA_HOST").unwrap_or(DEFAULT_OLLAMA_HOST.to_string()); + let ollama_port = std::env::var("OLLAMA_PORT") + .and_then(|port_str| { + port_str + .parse::() + .map_err(|_| std::env::VarError::NotPresent) + }) + .unwrap_or(DEFAULT_OLLAMA_PORT); + + (ollama_host, ollama_port) +} diff --git a/tests/mock_messages_test.rs b/tests/mock_messages_test.rs index 6fa779b..446766f 100644 --- a/tests/mock_messages_test.rs +++ b/tests/mock_messages_test.rs @@ -1,4 +1,3 @@ - use dkn_compute::{ node::DriaComputeNode, utils::payload::TaskRequestPayload, waku::message::WakuMessage, }; diff --git a/tests/threads_test.rs b/tests/threads_test.rs index 2c17894..f681099 100644 --- a/tests/threads_test.rs +++ b/tests/threads_test.rs @@ -1,4 +1,3 @@ - use parking_lot::RwLock; use std::sync::Arc; use tokio_util::task::TaskTracker; From 7c486dfed918f4469e8cf7281419b50d5e7b9d86 Mon Sep 17 00:00:00 2001 From: erhant Date: Fri, 5 Jul 2024 13:09:20 +0300 Subject: [PATCH 24/29] limit 20 to 100 --- waku/register_rln.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/waku/register_rln.sh b/waku/register_rln.sh index ab660f1..5eaf2aa 100755 --- a/waku/register_rln.sh +++ b/waku/register_rln.sh @@ -21,11 +21,12 @@ if test -n "${ETH_CLIENT_ADDRESS}"; then exit 1 fi +# changes here: rln-relay-user-message-limit=100 instead of 20 docker run 
-v $(pwd)/keystore:/keystore/:Z harbor.status.im/wakuorg/nwaku:v0.30.1 generateRlnKeystore \ --rln-relay-eth-client-address=${RLN_RELAY_ETH_CLIENT_ADDRESS} \ --rln-relay-eth-private-key=${ETH_TESTNET_KEY} \ --rln-relay-eth-contract-address=0xCB33Aa5B38d79E3D9Fa8B10afF38AA201399a7e3 \ --rln-relay-cred-path=/keystore/keystore.json \ --rln-relay-cred-password="${RLN_RELAY_CRED_PASSWORD}" \ ---rln-relay-user-message-limit=20 \ +--rln-relay-user-message-limit=100 \ --execute From 36e622b0436e969b67074f0ebe814b7e5ab4e54c Mon Sep 17 00:00:00 2001 From: erhant Date: Fri, 5 Jul 2024 14:14:03 +0300 Subject: [PATCH 25/29] .env path fix --- waku/register_rln.sh | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/waku/register_rln.sh b/waku/register_rln.sh index 5eaf2aa..cd78963 100755 --- a/waku/register_rln.sh +++ b/waku/register_rln.sh @@ -1,16 +1,14 @@ #!/bin/sh - -if test -f ./keystore/keystore.json; then - echo "keystore/keystore.json already exists. Use it instead of creating a new one." +if test -f $(pwd)/keystore/keystore.json; then + echo "Keystore already exists. Use it instead of creating a new one." echo "Exiting" exit 1 fi - -if test -f .env; then - echo "Using .env file" - . $(pwd)/.env +if test -f ../.env; then + echo "Using .env file from parent directory" + . $(pwd)/../.env fi # TODO: Set nwaku release when ready instead of quay From b1260225d332d9b2d7125900163c257448e211f0 Mon Sep 17 00:00:00 2001 From: erhant Date: Fri, 5 Jul 2024 15:01:22 +0300 Subject: [PATCH 26/29] more docs --- .env.example | 25 +++++++++++----- README.md | 85 +++++++++++++++++++++++++++++++++++++++------------- 2 files changed, 82 insertions(+), 28 deletions(-) diff --git a/.env.example b/.env.example index d2bd552..9af05d7 100644 --- a/.env.example +++ b/.env.example @@ -1,16 +1,25 @@ ## WAKU ## +# RPC URL to connect with Ethereum. 
RLN_RELAY_ETH_CLIENT_ADDRESS="https://sepolia.infura.io/v3/123aa110320f4aec179150fba1e1b1b1" # RPC URL, this is dummy; use your own! -RLN_RELAY_CRED_PASSWORD="" # Password for the RLN relay credentials. -ETH_TESTNET_KEY= # Secret key of your compute node (32 byte, hexadecimal). -WAKU_URL="http://127.0.0.1:8645" # default -WAKU_EXTRA_ARGS="" # anything extra for Waku here +# Password for the RLN relay credentials, used to encrypt `keystore.json`. +RLN_RELAY_CRED_PASSWORD="your-pwd-here" +# Secret key of your compute node (32 byte, hexadecimal, without 0x prefix). +ETH_TESTNET_KEY= +# Waku's default url. +WAKU_URL="http://127.0.0.1:8645" +# Extra arguments, don't change this. +WAKU_EXTRA_ARGS="--discv5-bootstrap-node=enr:-LW4QMtKZ2-EZ-SDoccv1eMJ7Fe1rXOC7zlDJ5R1cOkwZFzOKf4PomKKn-e51ncNj_SV8YO3maBUW57tATYIRISd-EIBgmlkgnY0gmlwhNRi2gqKbXVsdGlhZGRyc4CCcnOTAAEIAAAAAQACAAMABAAFAAYAB4lzZWNwMjU2azGhAyKWFm1eSXFdUhA_JQ4mCWyzIWDfOmrlRUSQxypW0zhyg3RjcIJ2YIN1ZHCCIy2Fd2FrdTIP --staticnode=/ip4/18.157.160.219/tcp/30304/p2p/16Uiu2HAkzpGa371EyzwfSo62GLLFerkAyLdgFnFh6D2bwmXcnNWR" WAKU_LOG_LEVEL="DEBUG" # INFO | DEBUG | ERROR ## DRIA ## -DKN_WALLET_SECRET_KEY=${ETH_TESTNET_KEY} # Dria uses the same key as Waku -DKN_ADMIN_PUBLIC_KEY= # Public key of Dria (33-byte compressed, hexadecimal). -DKN_MODELS=phi3:3.8b # model1,model2,model3,... (comma separated, case-insensitive) -DKN_LOG_LEVEL=info # info | debug | error | none,dkn_compute=debug +# Dria uses the same wallet as Waku. +DKN_WALLET_SECRET_KEY=${ETH_TESTNET_KEY} +# Public key of Dria Admin node (33-byte compressed, hexadecimal, without 0x prefix). +DKN_ADMIN_PUBLIC_KEY=0208ef5e65a9c656a6f92fb2c770d5d5e2ecffe02a6aade19207f75110be6ae658 +# model1,model2,model3,... 
(comma separated, case-insensitive) +DKN_MODELS=phi3:3.8b +# info | debug | error | none,dkn_compute=debug +DKN_LOG_LEVEL=info ## Open AI (if used) ## OPENAI_API_KEY= diff --git a/README.md b/README.md index 40f357f..a651e38 100644 --- a/README.md +++ b/README.md @@ -27,6 +27,8 @@ A **Dria Compute Node** is a unit of computation within the Dria Knowledge Network. It's purpose is to process tasks given by the **Dria Admin Node**, and receive rewards for providing correct results. These nodes are part of the [Waku](https://waku.org/) network, a privacy-preserving cencorship resistant peer-to-peer network. +To get started, [setup](#setup) your environment and then see [usage](#usage) to run the node. + ### Tasks Compute nodes can technically do any arbitrary task, from computing the square root of a given number to finding LLM outputs from a given prompt, or validating an LLM's output with respect to knowledge available on the web accessed via tools. @@ -47,41 +49,78 @@ WAKU_EXTRA_ARGS="--staticnode=/ip4/foobar/... --staticnode=/ip4/bazboo/..." ## Setup -**Clone the repository**: This repository has the necessary setup to run the node, so start by cloning it using the command below: +To be able to run a node, we need to make a few preparations. Follow the steps below one by one. + +> [!TIP] +> +> These setup steps are all to be able to use Waku network. You can find a similar setup under [nwaku-compose](https://github.com/waku-org/nwaku-compose/) as well. + +### 1. Clone the repository + +This repository has the necessary setup to run the node, so start by cloning it using the command below: ```bash git clone https://github.com/firstbatchxyz/dkn-compute-node ``` -**Prepare Environment Variables**: Dria Compute Node makes use of several environment variables, some of which used by Waku itself as well. Create a `.env` file, and prepare you environment variables as given in [.env.example](./.env.example). +### 2. 
Prepare Environment Variables + +Dria Compute Node makes use of several environment variables, some of which are used by Waku itself as well. Create a `.env` file, and prepare your environment variables as given in [.env.example](./.env.example). + +```sh +cp .env.example .env +``` + +### 3. Prepare Ethereum Wallet + +Waku and Dria make use of the same Ethereum wallet. In particular, we require a bit of **testnet ether** (0.1 ETH + gas fees) for the next step, so you should fund your wallet using a faucet such as [Infura](https://www.infura.io/faucet/sepolia) or [Alchemy](https://www.alchemy.com/faucets/ethereum-sepolia). + +Place your private key at `ETH_TESTNET_KEY` in `.env` without the 0x prefix. It should look something like: + +```sh +ETH_TESTNET_KEY=ac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80 +``` + +### 4. Prepare Ethereum Client RPC URL + +To communicate with Ethereum, you need an RPC URL. You can use [Infura](https://app.infura.io/) or [Alchemy](https://www.alchemy.com/) providers for this. -**Fund an Ethereum Wallet with 0.1 Sepolia ETH (+ gas fees)**: Waku and Dria makes use of the same Ethereum wallet. We require a bit of ether for the next step, so you should fund your wallet using a faucet such as [Infura](https://www.infura.io/faucet/sepolia) or [Alchemy](https://www.alchemy.com/faucets/ethereum-sepolia). Place your private key at `ETH_TESTNET_KEY` in `.env` without the 0x prefix. +Place your URL at the `RLN_RELAY_ETH_CLIENT_ADDRESS` variable in `.env`. It should look something like one of the below: + +```sh +# infura +RLN_RELAY_ETH_CLIENT_ADDRESS=https://sepolia.infura.io/v3/ +# alchemy +RLN_RELAY_ETH_CLIENT_ADDRESS=https://eth-sepolia.g.alchemy.com/v2/ ``` -**Ethereum Client RPC**: To communicate with Ethereum, you need an RPC URL. You can use [Infura](https://app.infura.io/) or [Alchemy](https://www.alchemy.com/) providers for this. Place your URL at the `RLN_RELAY_ETH_CLIENT_ADDRESS` variable in `.env`. +### 5. 
Register to RLN Contract -**Register to RLN Contract**: Waku uses Rate-Limiting Nullifier (RLN) for further security within the network. To register your wallet to RLN, first set a password at `RLN_RELAY_CRED_PASSWORD`. Then, register with the following commands which will create a file at `./waku/keystore/keystore.json`. +Waku uses Rate-Limiting Nullifier (RLN) for further security within the network. To register your wallet to RLN, first set a password at `RLN_RELAY_CRED_PASSWORD`. Then, register with the following commands which will create a file at `./waku/keystore/keystore.json`. ```sh cd waku ./register.rln ``` +If all goes well, you should be able to see your transaction at the block explorer under the [RLN contract](https://sepolia.etherscan.io/address/0xCB33Aa5B38d79E3D9Fa8B10afF38AA201399a7e3). + > [!TIP] > > If you have already registered before, you will have a `keystore.json` which you can place under `./waku/keystore/keystore.json` in this directory. Note that the private key and RLN password must be the same so that this keystore file can be decrypted. -These setup steps are all to be able to use Waku network. You can find it at [nwaku-compose](https://github.com/waku-org/nwaku-compose/). - ## Usage -With all setup steps completed, you should be able to start a node with `./start.sh`. See the available commands with: +With all setup steps above completed, we are ready to start a node! See the available commands with: ```sh chmod +x start.sh ./start.sh --help ``` -Based on the resources of your machine, you must decide which models that you will be running locally. For example, you can simple use OpenAI with theirs models, not running anything locally at all; or you can use Ollama with several models loaded to disk, and only one loaded to memory during its respective task. 
Available models (see [here](https://github.com/andthattoo/ollama-workflows/blob/main/src/program/atomics.rs#L269) for latest) are: +### Choose a Model + +Based on the resources of your machine, you must decide which models that you will be running locally. For example, you can use OpenAI with their models, not running anything locally at all; or you can use Ollama with several models loaded to disk, and only one loaded to memory during its respective task. Available models (see [here](https://github.com/andthattoo/ollama-workflows/blob/main/src/program/atomics.rs#L269) for latest) are: - `adrienbrault/nous-hermes2theta-llama3-8b:q8_0` (Ollama) - `phi3:14b-medium-4k-instruct-q4_1` (Ollama) @@ -91,13 +130,27 @@ Based on the resources of your machine, you must decide which models that you wi - `gpt-4-turbo` (OpenAI) - `gpt-4o` (OpenAI) +### Run Node + +It's time to run our compute node. After deciding the models that you want to run, simply run `./start.sh` with the model names provided, such as: + ```sh ./start.sh -m=llama3 -m=gpt-3.5-turbo ``` -> [!NOTE] -> -> Start script will run the containers in the background. You can check their logs either via the terminal or from [Docker Desktop](https://www.docker.com/products/docker-desktop/). +Start script will run the containers in the background. You can check their logs either via the terminal or from [Docker Desktop](https://www.docker.com/products/docker-desktop/). To print DEBUG-level logs for the compute node, you can add `--dev` argument: + +```sh +./start.sh -m= --dev +``` + +### Persistent Waku + +To persist your Waku session between runs, you can opt to run Waku elsewhere (such as with [nwaku-compose](https://github.com/waku-org/nwaku-compose/)) and then have the compute node connect to the existing Waku node. 
For such cases, we have `--waku-ext` flag (meaning Waku is externally hosted): + +```sh +./start.sh -m= --waku-ext +``` ### Using Ollama @@ -139,14 +192,6 @@ make debug # debug-level logs If you have a feature that you would like to add with respect to its respective issue, or a bug fix, feel free to fork & create a PR! See the sections below for development tips. -### Development Mode - -It is best if Waku is running externally already (such as nwaku-compose) and you simply run the compute node during development, in debug mode. Our start script provides the means for that: - -```sh -./start.sh -m= --dev --waku-ext -``` - ### Testing & Benchmarking Besides the unit tests, there are separate tests for Waku network, and for compute tasks such as Ollama. From 675c474ac0bb41248a217ce070d6197fe96f7697 Mon Sep 17 00:00:00 2001 From: erhant Date: Fri, 5 Jul 2024 15:54:29 +0300 Subject: [PATCH 27/29] requirements --- README.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/README.md b/README.md index a651e38..034fe1e 100644 --- a/README.md +++ b/README.md @@ -47,6 +47,15 @@ By default, there are no static peers, but you can specify them using duplicate WAKU_EXTRA_ARGS="--staticnode=/ip4/foobar/... --staticnode=/ip4/bazboo/..." ``` +## Requirements + +Your machine should have **at least 2GB** memory, along with a stable internet connection. + +You need the following applications to run compute node: + +- **Git**: We will use `git` to clone the repository from GitHub, and pull latest changes for updates later. +- **Docker**: Our services will make use of Docker so that the node can run on any machine. + ## Setup To be able to run a node, we need to make a few preparations. Follow the steps below one by one. 
From cfd35df0bfdc3e710a10145d3394a80809eb660a Mon Sep 17 00:00:00 2001 From: erhant Date: Fri, 5 Jul 2024 16:05:46 +0300 Subject: [PATCH 28/29] more docs --- README.md | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/README.md b/README.md index 034fe1e..f0c1e7d 100644 --- a/README.md +++ b/README.md @@ -153,6 +153,34 @@ Start script will run the containers in the background. You can check their logs ./start.sh -m= --dev ``` +### Looking at Logs + +To see your logs, you can go to [Docker Desktop](https://www.docker.com/products/docker-desktop/) and see the running containers and find `dkn-compute-node`. There, open the containers within the compose (click on `>` to the left) and click on any of the container to see its logs. + +Alternatively, you can use `docker compose logs` such as below: + +```sh +# follow Dria logs +docker compose logs -f compute + +# follow Waku logs +docker compose logs -f nwaku +``` + +Instead of following with `-f`, you can check the latest commands like: + +```sh +# logs from last 1 hour +docker compose logs --since=1h compute + +# logs from last 30 minutes +docker compose logs --since=30m compute +``` + +### Stopping the Node + +When you use `./start.sh`, it will wait for you in the same terminal to do CTRL+C before stopping. Once you do that, the containers will be stopped and removed. You can also kill the containers manually, doing CTRL+C afterwards will do nothing in such a case. + ### Persistent Waku To persist your Waku session between runs, you can opt to run Waku elsewhere (such as with [nwaku-compose](https://github.com/waku-org/nwaku-compose/)) and then have the compute node connect to the existing Waku node. 
For such cases, we have `--waku-ext` flag (meaning Waku is externally hosted): From e745b0420b9bc4e97775a8807fe02add1baff139 Mon Sep 17 00:00:00 2001 From: erhant Date: Fri, 5 Jul 2024 22:27:24 +0300 Subject: [PATCH 29/29] typo fixes --- README.md | 2 +- start.sh | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index f0c1e7d..7c21074 100644 --- a/README.md +++ b/README.md @@ -109,7 +109,7 @@ Waku uses Rate-Limiting Nullifier (RLN) for further security within the network. ```sh cd waku -./register.rln +./register_rln.sh ``` If all goes well, you should be able to see your transaction at the block explorer under the [RLN contract](https://sepolia.etherscan.io/address/0xCB33Aa5B38d79E3D9Fa8B10afF38AA201399a7e3). diff --git a/start.sh b/start.sh index 4e54baa..b58d64b 100755 --- a/start.sh +++ b/start.sh @@ -338,6 +338,8 @@ if [ $compose_exit_code -ne 0 ]; then exit $compose_exit_code fi +echo "All good! Compute node is up and running." + # background/foreground mode if [ "$START_MODE" == "FOREGROUND" ]; then echo "\nUse Control-C to exit"