diff --git a/Cargo.lock b/Cargo.lock index 01acb53..c6909c2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -96,6 +96,28 @@ version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" +[[package]] +name = "async-stream" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" +dependencies = [ + "async-stream-impl", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.66", +] + [[package]] name = "async-trait" version = "0.1.80" @@ -647,9 +669,7 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "groq-api-rs" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6f4be86a1658f2ed6e885f10afe3ef9268036de117ff3d5a7b06617950bd4e7" +version = "0.2.0" dependencies = [ "anyhow", "chrono", @@ -1140,6 +1160,7 @@ dependencies = [ "reqwest", "reqwest_cookie_store", "serde", + "serde_json", "serde_trim", "serde_yaml", "sha256", @@ -1195,14 +1216,16 @@ dependencies = [ [[package]] name = "ollama-rs" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53581ab78176ff3ae830a43236f485fc90d7f472d0081dddc45d8605e1301954" +version = "0.2.0" dependencies = [ + "async-stream", + "async-trait", + "log", "reqwest", "serde", "serde_json", "tokio", + "url", ] [[package]] @@ -1214,8 +1237,6 @@ checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "openai_api_rust" version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ea95675c72f6830941af53fe95f5cf6c037ccb6c28ed736b61d2af2c16feac59" dependencies = [ "log", "mime", @@ -1758,9 +1779,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.117" +version = "1.0.120" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3" +checksum = "4e0d21c9a8cae1235ad58a00c11cb40d4b1e5c784f1ef2c537876ed6ffd8b7c5" dependencies = [ "itoa", "ryu", diff --git a/Cargo.toml b/Cargo.toml index 42adcce..c2820d6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -15,22 +15,23 @@ name = "nerve" path = "src/main.rs" [dependencies] -ollama-rs = { version = "0.1.9", features = [ +ollama-rs = { path = "./crates/ollama-rs", features = [ "rustls", "tokio", "chat-history", ], default-features = false, optional = true } +openai_api_rust = { path = "./crates/openai_api_rust", optional = true } +groq-api-rs = { path = "./crates/groq-api-rs", optional = true } + anyhow = "1.0.86" async-trait = "0.1.80" chrono = "0.4.38" clap = { version = "4.5.6", features = ["derive"] } colored = "2.1.0" -groq-api-rs = { version = "0.1.0", optional = true } indexmap = "2.2.6" itertools = "0.13.0" lazy_static = "1.4.0" libc = "0.2.155" -openai_api_rust = { version = "0.1.9", optional = true } regex = "1.10.5" serde = { version = "1.0.203", features = ["derive", "serde_derive"] } serde_trim = "1.1.0" @@ -54,6 +55,7 @@ reqwest = { version = "0.12.5", default-features = false, features = [ ] } url = "2.5.2" reqwest_cookie_store = "0.8.0" +serde_json = "1.0.120" [features] default = ["ollama", "groq", "openai", "fireworks"] diff --git a/README.md b/README.md index 45e2549..077e170 100644 --- a/README.md +++ b/README.md @@ -35,7 +35,11 @@ While Nerve was inspired by other projects such as Autogen and Rigging, its main ## LLM Support -Nerve features integrations for any model accessible via the [ollama](https://github.com/ollama/ollama), [groq](https://groq.com), 
[OpenAI](https://openai.com/index/openai-api/) and [Fireworks](https://fireworks.ai/) APIs. You can specify which provider and which model to use via the `-G` (or `--generator`) argument: +Nerve features integrations for any model accessible via the [ollama](https://github.com/ollama/ollama), [groq](https://groq.com), [OpenAI](https://openai.com/index/openai-api/) and [Fireworks](https://fireworks.ai/) APIs. + +**The tool will automatically detect if the selected model natively supports function calling. If not, it will provide a compatibility layer that empowers older models to perform function calling anyway.** + +You can specify which provider and which model to use via the `-G` (or `--generator`) argument: For **Ollama**: diff --git a/crates/groq-api-rs/.github/workflows/publish.yml b/crates/groq-api-rs/.github/workflows/publish.yml new file mode 100644 index 0000000..86c2da5 --- /dev/null +++ b/crates/groq-api-rs/.github/workflows/publish.yml @@ -0,0 +1,69 @@ +name: Publish + +on: + push: + tags: + - '*' + +env: + CARGO_TERM_COLOR: always + +jobs: + build_and_test_crate: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Build + run: cargo build --verbose + - name: Run tests + run: GROQ_API_KEY=${{ secrets.GROQ_API_TOKEN}} cargo test --verbose + + # reference : https://github.com/azriel91/peace/blob/main/.github/workflows/publish.yml + crates_io_publish: + name: Publish (crates.io) + needs: + - build_and_test_crate + + runs-on: ubuntu-latest + timeout-minutes: 25 + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + + - name: cargo-release Cache + id: cargo_release_cache + uses: actions/cache@v3 + with: + path: ~/.cargo/bin/cargo-release + key: ${{ runner.os }}-cargo-release + + - run: cargo install cargo-release + if: steps.cargo_release_cache.outputs.cache-hit != 'true' + + - name: cargo login + run: cargo login ${{ secrets.CRATES_IO_API_TOKEN }} + + # allow-branch HEAD is because GitHub actions switches + # 
to the tag while building, which is a detached head + + # Publishing is currently messy, because: + # + # * `peace_rt_model_core` exports `NativeError` or `WebError` depending on the target. + # * `peace_rt_model_web` fails to build when publishing the workspace for a native target. + # * `peace_rt_model_web` still needs its dependencies to be published before it can be + # published. + # * `peace_rt_model_hack` needs `peace_rt_model_web` to be published before it can be + # published. + # + # We *could* pass through `--no-verify` so `cargo` doesn't build the crate before publishing, + # which is reasonable, since this job only runs after the Linux, Windows, and WASM builds + # have passed. + - name: "cargo release publish" + run: |- + cargo release \ + publish \ + --workspace \ + --all-features \ + --allow-branch HEAD \ + --no-confirm \ + --execute diff --git a/crates/groq-api-rs/.gitignore b/crates/groq-api-rs/.gitignore new file mode 100644 index 0000000..3e0c8cf --- /dev/null +++ b/crates/groq-api-rs/.gitignore @@ -0,0 +1,5 @@ +**/target +**/.env +**/.env.** +**/.cargo +**/.DS_Store diff --git a/crates/groq-api-rs/Cargo.lock b/crates/groq-api-rs/Cargo.lock new file mode 100644 index 0000000..c0c8a83 --- /dev/null +++ b/crates/groq-api-rs/Cargo.lock @@ -0,0 +1,1713 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "addr2line" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anyhow" +version = "1.0.86" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "autocfg" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" + +[[package]] +name = "backtrace" +version = "0.3.72" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17c6a35df3749d2e8bb1b7b21a976d82b15548788d2735b9d82f329268f71a11" +dependencies = [ + "addr2line", + "cc", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + 
+[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" + +[[package]] +name = "bumpalo" +version = "3.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" + +[[package]] +name = "bytes" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" + +[[package]] +name = "cc" +version = "1.0.99" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96c51067fd44124faa7f870b4b1c969379ad32b2ba805aa959430ceaa384f695" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "js-sys", + "num-traits", + "serde", + "wasm-bindgen", + "windows-targets 0.52.5", +] + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" + 
+[[package]] +name = "displaydoc" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "encoding_rs" +version = "0.8.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "errno" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "eventsource-stream" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74fef4569247a5f429d9156b9d0a2599914385dd189c539334c625d8099d90ab" +dependencies = [ + "futures-core", + "nom", + "pin-project-lite", +] + +[[package]] +name = "fastrand" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futures" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" + +[[package]] +name = "futures-executor" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" + +[[package]] +name = "futures-macro" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-sink" +version = "0.3.30" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" + +[[package]] +name = "futures-task" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" + +[[package]] +name = "futures-timer" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" + +[[package]] +name = "futures-util" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "gimli" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" + +[[package]] +name = "groq-api-rs" +version = "0.1.0" +dependencies = [ + "anyhow", + "chrono", + "futures", + "reqwest", + "reqwest-eventsource", + "serde", + "serde_json", + "tokio", +] + +[[package]] +name = "h2" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa82e28a107a8cc405f0839610bdc9b15f1e25ec7d696aa5cf173edbcb1486ab" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" + +[[package]] +name = "hermit-abi" +version = "0.3.9" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" + +[[package]] +name = "http" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" +dependencies = [ + "bytes", + "http", +] + +[[package]] +name = "http-body-util" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" +dependencies = [ + "bytes", + "futures-util", + "http", + "http-body", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f3935c160d00ac752e09787e6e6bfc26494c2183cc922f1bc678a60d4733bc2" + +[[package]] +name = "hyper" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe575dd17d0862a9a33781c8c4696a55c320909004a67a00fb286ba8b1bc496d" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7b875924a60b96e5d7b9ae7b066540b1dd1cbd90d1828f54c92e02a283351c56" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http", + "http-body", + "hyper", + "pin-project-lite", + "socket2", + "tokio", + "tower", + "tower-service", + "tracing", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "icu_collections" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locid" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_locid_transform" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_locid_transform_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + +[[package]] +name = 
"icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" + +[[package]] +name = "icu_properties" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f8ac670d7422d7f76b32e17a5db556510825b29ec9154f235977c9caba61036" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "idna" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4716a3a0933a1d01c2f72450e89596eb51dd34ef3c211ccd875acdf1f8fe47ed" +dependencies = [ + 
"icu_normalizer", + "icu_properties", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "indexmap" +version = "2.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" +dependencies = [ + "equivalent", + "hashbrown", +] + +[[package]] +name = "ipnet" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" + +[[package]] +name = "itoa" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" + +[[package]] +name = "js-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "libc" +version = "0.2.155" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" + +[[package]] +name = "linux-raw-sys" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" + +[[package]] +name = "litemap" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "643cb0b8d4fcc284004d5fd0d67ccf61dfffadb7f75e1e71bc420f4688a3a704" + +[[package]] +name = "lock_api" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" + 
+[[package]] +name = "memchr" +version = "2.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87dfd01fe195c66b572b37921ad8803d010623c0aca821bea2302239d155cdae" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" +dependencies = [ + "libc", + "wasi", + "windows-sys 0.48.0", +] + +[[package]] +name = "native-tls" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8614eb2c83d59d1c8cc974dd3f920198647674a0a035e1af1fa58707e317466" +dependencies = [ + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + +[[package]] +name = "num_cpus" +version = "1.16.0" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "object" +version = "0.35.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8ec7ab813848ba4522158d5517a6093db1ded27575b070f4177b8d12b41db5e" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" + +[[package]] +name = "openssl" +version = "0.10.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95a0481286a310808298130d22dd1fef0fa571e05a8f44ec801801e84b216b1f" +dependencies = [ + "bitflags 2.5.0", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-sys" +version = "0.9.102" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c597637d56fbc83893a35eb0dd04b2b8e7a50c91e64e9493e398b5df4fb45fa2" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "parking_lot" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = 
"0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets 0.52.5", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pin-project" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkg-config" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" + +[[package]] +name = "proc-macro2" +version = "1.0.85" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22244ce15aa966053a896d1accb3a6e68469b97c7f33f284b99f0d576879fc23" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "redox_syscall" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "469052894dcb553421e483e4209ee581a45100d31b4018de03e5a7ad86374a7e" +dependencies = [ + "bitflags 2.5.0", +] + +[[package]] +name = "reqwest" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "566cafdd92868e0939d3fb961bd0dc25fcfaaed179291093b3d43e6b3150ea10" +dependencies = [ + "base64", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-tls", + "hyper-util", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls-pemfile", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "system-configuration", + "tokio", + "tokio-native-tls", + "tokio-util", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "winreg", +] + +[[package]] +name = "reqwest-eventsource" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "632c55746dbb44275691640e7b40c907c16a2dc1a5842aa98aaec90da6ec6bde" +dependencies = [ + "eventsource-stream", + "futures-core", + "futures-timer", + "mime", + "nom", + "pin-project-lite", + "reqwest", + "thiserror", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" + +[[package]] +name = "rustix" +version = "0.38.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" +dependencies = [ + "bitflags 2.5.0", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.52.0", 
+] + +[[package]] +name = "rustls-pemfile" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29993a25686778eb88d4189742cd713c9bce943bc54251a33509dc63cbacf73d" +dependencies = [ + "base64", + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "976295e77ce332211c0d24d92c0e83e50f5c5f046d11082cea19f3df13a3562d" + +[[package]] +name = "ryu" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" + +[[package]] +name = "schannel" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "security-framework" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c627723fd09706bacdb5cf41499e95098555af3c3c29d014dc3c458ef6be11c0" +dependencies = [ + "bitflags 2.5.0", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "317936bbbd05227752583946b9e66d7ce3b489f84e11a94a510b4437fef407d7" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "serde" +version = "1.0.203" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7253ab4de971e72fb7be983802300c30b5a7f0c2e56fab8abfc6a214307c0094" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = 
"1.0.203" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "500cbc0ebeb6f46627f50f3f5811ccf6bf00643be300b4c3eabc0ef55dc5b5ba" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.117" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" +dependencies = [ + "libc", +] + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" + +[[package]] +name = "socket2" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] +name = "syn" +version = "2.0.66" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "synstructure" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "system-configuration" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "tempfile" +version = "3.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" +dependencies = [ + "cfg-if", + "fastrand", + "rustix", + "windows-sys 0.52.0", +] + +[[package]] +name = "thiserror" +version = "1.0.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tinystr" +version = 
"0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = "tokio" +version = "1.38.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba4f4a02a7a80d6f274636f0aa95c7e383b912d41fe721a31f29e29698585a4a" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "num_cpus", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.48.0", +] + +[[package]] +name = "tokio-macros" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "pin-project", + "pin-project-lite", + "tokio", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + +[[package]] +name = "tower-service" 
+version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] +name = "tracing" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +dependencies = [ + "pin-project-lite", + "tracing-core", +] + +[[package]] +name = "tracing-core" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +dependencies = [ + "once_cell", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name = "url" +version = "2.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7c25da092f0a868cdf09e8674cd3b7ef3a7d92a24253e663a2fb85e2496de56" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "utf16_iter" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "want" +version = "0.3.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" +dependencies = [ + "cfg-if", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" + +[[package]] +name = "wasm-streams" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b65dc4c90b63b118468cf747d8bf3566c1913ef60be765b5730ead9e0a3ba129" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "web-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "windows-core" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +dependencies = [ + "windows-targets 0.52.5", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.5", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" +dependencies = [ + "windows_aarch64_gnullvm 0.52.5", + "windows_aarch64_msvc 0.52.5", + "windows_i686_gnu 0.52.5", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.5", + "windows_x86_64_gnu 0.52.5", + "windows_x86_64_gnullvm 0.52.5", + "windows_x86_64_msvc 0.52.5", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" + +[[package]] +name = "winreg" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + 
+[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + +[[package]] +name = "yoke" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c5b1314b079b0930c31e3af543d8ee1757b1951ae1e1565ec704403a7240ca5" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerofrom" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91ec111ce797d0e0784a1116d0ddcdbea84322cd79e5d5ad173daeba4f93ab55" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerovec" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb2cc8827d6c0994478a15c53f374f46fbd41bea663d809b14744bc42e6b109c" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97cf56601ee5052b4417d90c8755c6683473c926039908196cf35d99f893ebe7" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] diff --git a/crates/groq-api-rs/Cargo.toml b/crates/groq-api-rs/Cargo.toml new file mode 100644 index 0000000..06d1725 --- /dev/null +++ b/crates/groq-api-rs/Cargo.toml @@ -0,0 +1,24 @@ +[package] 
+name = "groq-api-rs" +version = "0.2.0" +edition = "2021" +authors = ["ededddy "] +license = "MIT OR Apache-2.0" +readme = "README.md" +homepage = "https://github.com/ededddy/groq-api-rs" +documentation = "https://docs.rs/groq-api-rs/" +repository = "https://github.com/ededddy/groq-api-rs" +description = "Client For Groq Cloud LLM APIs" +keywords = ["groq-api", "groq", "llm", "reqwest"] +categories = ["api-bindings", "web-programming", "asynchronous"] + + +[dependencies] +reqwest = { version = "0.12", features = ["json"] } +serde = { version = "1.0.203", features = ["derive"] } +tokio = { version = "1", features = ["full"] } +serde_json = "1.0" +anyhow = "1.0.86" +chrono = { version = "0.4.38", features = ["serde"] } +reqwest-eventsource = "0.6.0" +futures = "0.3.30" diff --git a/crates/groq-api-rs/README.md b/crates/groq-api-rs/README.md new file mode 100644 index 0000000..5ba1a0b --- /dev/null +++ b/crates/groq-api-rs/README.md @@ -0,0 +1,96 @@ +# groq-api-rs + +Provides a simple client implementation for the [groq cloud API](https://console.groq.com/playground). 
+You can learn more about the API provided [API Documentation](https://console.groq.com/docs/quickstart) +This crate uses [`reqwest`], [`reqwest_eventsource`], [`tokio`], [`serde`], [`serde_json`], [`anyhow`], +[`chrono`],[`futures`] + +# MSRV + +1.78.0 + +# Usage + +```sh +cargo add groq-api-rs +``` + +## Example + +Request a completion object from Groq + +```rust +use groq_api_rs::completion::{client::Groq, message::Message, request::builder}; + +async fn create_completion() -> anyhow::Result<()> { + let messages = vec![Message::UserMessage { + role: Some("user".to_string()), + content: Some("Explain the importance of fast language models".to_string()), + name: None, + tool_call_id: None, + }]; + let request = builder::RequestBuilder::new("mixtral-8x7b-32768".to_string()); + let api_key = env!("GROQ_API_KEY"); + + let mut client = Groq::new(api_key); + client.add_messages(messages); + + let res = client.create(request).await; + assert!(res.is_ok()); + Ok(()) +} +``` + +Request a completion chunk object from Groq using stream option implemented with SSE + +```rust +use groq_api_rs::completion::{client::Groq, message::Message, request::builder}; +async fn create_stream_completion() -> anyhow::Result<()> { + let messages = vec![Message::UserMessage { + role: Some("user".to_string()), + content: Some("Explain the importance of fast language models".to_string()), + name: None, + tool_call_id: None, + }]; + let request = + builder::RequestBuilder::new("mixtral-8x7b-32768".to_string()).with_stream(true); + let api_key = env!("GROQ_API_KEY"); + + let mut client = Groq::new(api_key); + client.add_messages(messages); + + let res = client.create(request).await; + assert!(res.is_ok()); + Ok(()) +} +``` + +Example that the completion can return Error Object and augmented with HTTP status code. 
+ +```rust +use groq_api_rs::completion::{client::Groq, message::Message, request::builder}; +async fn error_does_return() -> anyhow::Result<()> { + let messages = vec![Message::UserMessage { + role: Some("user".to_string()), + content: Some("Explain the importance of fast language models".to_string()), + name: None, + tool_call_id: None, + }]; + let request = + builder::RequestBuilder::new("mixtral-8x7b-32768".to_string()).with_stream(true); + let api_key = ""; + + let mut client = Groq::new(api_key); + client.add_messages(messages); + + let res = client.create(request).await; + assert!(res.is_err()); + eprintln!("{}", res.unwrap_err()); + Ok(()) +} +``` + +# Contribute + +Feel free to open issues and PRs. I am still learning Rust, the design +and coding might not be good. diff --git a/crates/groq-api-rs/src/completion/client.rs b/crates/groq-api-rs/src/completion/client.rs new file mode 100644 index 0000000..b90c000 --- /dev/null +++ b/crates/groq-api-rs/src/completion/client.rs @@ -0,0 +1,333 @@ +use std::hash::{Hash, Hasher}; + +use super::{ + message::Message, + request, + response::{ErrorResponse, Response}, +}; +use crate::completion::response::StreamResponse; +use futures::StreamExt; +use reqwest::header; +use reqwest_eventsource::{Event, EventSource}; + +#[derive(Debug, Clone)] +/// The returned response from groq's completion API could either be a json with full llm response +/// or chunks of response sent via Server Sent Event(SSE) +pub enum CompletionOption { + NonStream(Response), + Stream(Vec), +} + +/// # Private Fields +/// - api_key, the API key used to authenticate with groq, +/// - client, the reqwest::Client with built in connection pool, +/// - tmp_messages, messages that stay there for only a single request. After the request they are cleared. 
+/// - messages, a Vec for containing messages send to the groq completion endpoint (historic messages will not clear after request) +#[derive(Debug, Clone)] +pub struct Groq { + api_key: String, + messages: Vec, + disposable_msgs: Vec, + client: reqwest::Client, +} + +impl Groq { + pub fn new(api_key: &str) -> Self { + //! Returns an instance of Groq struct. + //! ```ignore no_run + //! Self { + //! api_key: api_key.into(), // the API key used to authenticate with groq + //! client: reqwest::Client::new(), // the reqwest::Client with built in connection pool + //! messages: Vec::new() // a Vec for containing messages send to the groq completion endpoint (historic messages will not clear after request) + //! } + //! ``` + Self { + api_key: api_key.into(), + client: reqwest::Client::new(), + disposable_msgs: Vec::new(), + messages: Vec::new(), + } + } + + pub fn add_message(&mut self, msg: Message) { + //! Non Consuming + //! Adds a message to the internal message vector + self.messages.push(msg); + } + + pub fn add_messages(&mut self, msgs: Vec) { + //! Non Consuming + //! Add messages to the internal message vector + self.messages.extend(msgs); + } + + pub fn clear_messages(&mut self) { + //! Non Consuming + //! Clears the internal message vector. + //! And shrink the capacity to 3. + self.messages.clear(); + self.messages.shrink_to(3); + } + + /// Clears the internal tmp_messages vector. + /// # Note + /// Fucntion is created for internal use and is not recomended for external use. + pub fn clear_disposable_msgs_override(&mut self) { + //! Non Consuming + self.disposable_msgs.clear(); + } + + pub fn add_disposable_msgs(&mut self, msgs: Vec) { + //! Non Consuming + self.disposable_msgs.extend(msgs); + } + + pub fn add_disposable_msg(&mut self, msg: Message) { + //! 
Non Consuming + self.disposable_msgs.push(msg); + } + + #[allow(dead_code)] + fn get_disposable_msgs(&self) -> Option> { + if self.disposable_msgs.is_empty() { + None + } else { + Some(self.disposable_msgs.clone()) + } + } + + /// Outputs the request messages that should be passed onto the request. + /// Utility function created for easier logic internally. + /// # Returns + /// - Vec in the form of vec!**** + fn get_all_request_messages(&self) -> Vec { + if self.disposable_msgs.is_empty() { + self.messages.clone() + } else { + vec![self.messages.clone(), self.disposable_msgs.clone()].concat() + } + } + + /// Outputs the request messages that should be passed onto the request and clears the tmp messages. + /// Utility function created for easier logic internally. + fn get_request_messages_with_disposable_clear(&mut self) -> Vec { + let all = self.get_all_request_messages(); + self.clear_disposable_msgs_override(); + all + } + + async fn create_stream_completion( + &mut self, + req: request::builder::RequestBuilder, + ) -> anyhow::Result { + /* REMARK: + * https://github.com/jpopesculian/reqwest-eventsource/ + * https://parsec.cloud/en/how-the-reqwest-http-client-streams-responses-in-a-web-context/ + */ + let req = req + .with_messages(self.get_request_messages_with_disposable_clear())? 
+ .build(); + anyhow::ensure!( + req.is_stream(), + "'create_stream_completion' func must have the stream flag turned on in request body" + ); + let mut stream = EventSource::new( + self.client + .post("https://api.groq.com/openai/v1/chat/completions") + .header(header::AUTHORIZATION, format!("Bearer {}", self.api_key)) + .header(header::ACCEPT, "text/event-stream") + .json(&req), + )?; + let mut bufs: Vec = Vec::new(); + while let Some(event) = stream.next().await { + match event { + Ok(Event::Open) => println!("Connection Open!"), + Ok(Event::Message(message)) => { + if message.data == "[DONE]" { + break; + } + bufs.push(serde_json::from_str(&message.data)?); + } + Err(err) => { + stream.close(); + anyhow::bail!("Error: {}", err); + } + } + } + stream.close(); + + Ok(CompletionOption::Stream(bufs)) + } + + async fn create_non_stream_completion( + &mut self, + req: request::builder::RequestBuilder, + ) -> anyhow::Result { + let req = req + .with_messages(self.get_request_messages_with_disposable_clear())? 
+ .build(); + + // println!("req={}", serde_json::to_string_pretty(&req).unwrap()); + + let body = (self.client) + .post("https://api.groq.com/openai/v1/chat/completions") + .header(header::AUTHORIZATION, format!("Bearer {}", self.api_key)) + .json(&req) + .send() + .await?; + if body.status() == reqwest::StatusCode::OK { + let raw: String = body.text().await?; + let resp: Response = serde_json::from_str(&raw)?; + + Ok(CompletionOption::NonStream(resp)) + } else { + let statcode = body.status(); + let raw: String = body.text().await?; + + let mut error: ErrorResponse = serde_json::from_str(&raw)?; + error.code = statcode; + anyhow::bail!(error) + } + } + + pub async fn create( + &mut self, + req: request::builder::RequestBuilder, + ) -> anyhow::Result { + if !req.is_stream() { + self.create_non_stream_completion(req).await + } else { + self.create_stream_completion(req).await + } + } +} + +impl Hash for Groq { + fn hash(&self, state: &mut H) { + self.messages.hash(state); + self.api_key.hash(state); + } +} + +#[cfg(test)] +mod completion_test { + use std::hash::{DefaultHasher, Hash, Hasher}; + + use crate::completion::{client::Groq, message::Message, request::builder}; + + #[test] + fn test_eq_and_hash() { + let g1 = Groq::new("api_key").add_messages(vec![Message::UserMessage { + role: Some("user".to_string()), + content: Some("Explain the importance of fast language models".to_string()), + name: None, + tool_call_id: None, + }]); + + let g2 = Groq::new("api_key").add_messages(vec![Message::UserMessage { + role: Some("user".to_string()), + content: Some("Explain the importance of fast language models".to_string()), + name: None, + tool_call_id: None, + }]); + + let mut hasher = DefaultHasher::new(); + let mut hasher1 = DefaultHasher::new(); + + g1.hash(&mut hasher); + g2.hash(&mut hasher1); + let hash_string = hasher.finish(); + let hash_string1 = hasher1.finish(); + + assert_eq!(hash_string, hash_string1); + } + + #[tokio::test] + async fn create_completion() -> 
anyhow::Result<()> { + let messages = vec![Message::UserMessage { + role: Some("user".to_string()), + content: Some("Explain the importance of fast language models".to_string()), + name: None, + tool_call_id: None, + }]; + let request = builder::RequestBuilder::new("mixtral-8x7b-32768".to_string()); + let api_key = env!("GROQ_API_KEY"); + + let mut client = Groq::new(api_key); + client.add_messages(messages); + + let res = client.create(request).await; + assert!(res.is_ok()); + Ok(()) + } + + #[tokio::test] + async fn create_stream_completion() -> anyhow::Result<()> { + let messages = vec![Message::UserMessage { + role: Some("user".to_string()), + content: Some("Explain the importance of fast language models".to_string()), + name: None, + tool_call_id: None, + }]; + let request = + builder::RequestBuilder::new("mixtral-8x7b-32768".to_string()).with_stream(true); + let api_key = env!("GROQ_API_KEY"); + + let mut client = Groq::new(api_key); + client.add_messages(messages); + + let res = client.create(request).await; + assert!(res.is_ok()); + println!("{:?}", res.unwrap()); + Ok(()) + } + + #[tokio::test] + async fn error_does_return() -> anyhow::Result<()> { + let messages = vec![Message::UserMessage { + role: Some("user".to_string()), + content: Some("Explain the importance of fast language models".to_string()), + name: None, + tool_call_id: None, + }]; + let request = + builder::RequestBuilder::new("mixtral-8x7b-32768".to_string()).with_stream(true); + let api_key = ""; + + let mut client = Groq::new(api_key); + client.add_messages(messages); + + let res = client.create(request).await; + assert!(res.is_err()); + eprintln!("{}", res.unwrap_err()); + Ok(()) + } + + #[tokio::test] + async fn create_with_add_tmp_message() -> anyhow::Result<()> { + let messages = vec![Message::SystemMessage { + content: Some("I am a system message".to_string()), + name: None, + role: Some("system".to_string()), + tool_call_id: None, + }]; + let request = 
builder::RequestBuilder::new("mixtral-8x7b-32768".to_string()); + let api_key = env!("GROQ_API_KEY"); + + let client = Groq::new(api_key); + let mut client = client; + client.add_messages(messages); + client.add_disposable_msg(Message::UserMessage { + role: Some("user".to_string()), + content: Some("Explain the importance of fast language models".to_string()), + name: None, + tool_call_id: None, + }); + + assert!(client.get_disposable_msgs().is_some()); + let res = client.create(request).await; + assert!(!res.is_err()); + assert!(client.get_disposable_msgs().is_none()); + Ok(()) + } +} diff --git a/crates/groq-api-rs/src/completion/message.rs b/crates/groq-api-rs/src/completion/message.rs new file mode 100644 index 0000000..75fdc7c --- /dev/null +++ b/crates/groq-api-rs/src/completion/message.rs @@ -0,0 +1,69 @@ +use std::hash::Hash; + +use serde::{Deserialize, Serialize}; + +/// 1:1 Mapping for Message Object used in the `messages` field groq completion API. +/// +/// Refer to [the official documentations](https://console.groq.com/docs/api-reference#chat-create) +/// for more details +/// +#[derive(Debug, Serialize, Clone, Hash)] +#[serde(untagged)] +pub enum Message { + SystemMessage { + #[serde(skip_serializing_if = "Option::is_none")] + content: Option, + #[serde(skip_serializing_if = "Option::is_none")] + name: Option, + #[serde(skip_serializing_if = "Option::is_none")] + role: Option, + #[serde(skip_serializing_if = "Option::is_none")] + tool_call_id: Option, + }, + UserMessage { + #[serde(skip_serializing_if = "Option::is_none")] + content: Option, + #[serde(skip_serializing_if = "Option::is_none")] + name: Option, + #[serde(skip_serializing_if = "Option::is_none")] + role: Option, + #[serde(skip_serializing_if = "Option::is_none")] + tool_call_id: Option, + }, + AssistantMessage { + #[serde(skip_serializing_if = "Option::is_none")] + content: Option, + #[serde(skip_serializing_if = "Option::is_none")] + name: Option, + #[serde(skip_serializing_if = 
"Option::is_none")] + role: Option, + #[serde(skip_serializing_if = "Option::is_none")] + tool_calls: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + tool_call_id: Option, + }, + ToolMessage { + #[serde(skip_serializing_if = "Option::is_none")] + content: Option, + #[serde(skip_serializing_if = "Option::is_none")] + name: Option, + #[serde(skip_serializing_if = "Option::is_none")] + role: Option, + #[serde(skip_serializing_if = "Option::is_none")] + tool_call_id: Option, + }, +} + +#[derive(Debug, Deserialize, Serialize, Clone, Hash)] +pub struct ToolCall { + pub id: Option, + #[serde(rename(serialize = "type"))] + pub tool_type: Option, + pub function: AssistantFunc, +} + +#[derive(Debug, Deserialize, Serialize, Clone, Hash)] +pub struct AssistantFunc { + pub arguments: Option, + pub name: Option, +} diff --git a/crates/groq-api-rs/src/completion/mod.rs b/crates/groq-api-rs/src/completion/mod.rs new file mode 100644 index 0000000..4060596 --- /dev/null +++ b/crates/groq-api-rs/src/completion/mod.rs @@ -0,0 +1,4 @@ +pub mod client; +pub mod message; +pub mod request; +pub mod response; diff --git a/crates/groq-api-rs/src/completion/request/builder.rs b/crates/groq-api-rs/src/completion/request/builder.rs new file mode 100644 index 0000000..5caf66e --- /dev/null +++ b/crates/groq-api-rs/src/completion/request/builder.rs @@ -0,0 +1,452 @@ +use std::hash::Hash; + +use super::{Message, Request, ResponseFormat, StopEnum, Tool, ToolChoiceEnum}; +use serde_json::Value; + +/// Provides fluent api for building the request object for chat completion +/// +/// The field types, defaults and description could be found from [the official doc](https://console.groq.com/docs/api-reference#chat-create) +/// +/// Here and [Request](../../request/struct.Request.html) just a 1:1 mapping from it +#[derive(Debug)] +pub struct RequestBuilder { + // unused for openai integration only + logit_bias: Option, + // unused for openai integration only + logprobs: bool, // default 
false + frequency_penalty: f32, // defaults to 0 + max_tokens: Option, + messages: Vec, + model: String, + n: u32, // defaults to 1 + presence_penalty: f32, // defaults to 0 + response_format: ResponseFormat, // defaults to text, + seed: Option, + stop: Option, + stream: bool, // default false + temperature: f32, // defaults to 1 + tool_choice: Option, + tools: Option>, + top_logprobs: Option, + top_p: f32, // defaults to 1 + user: Option, +} + +impl Hash for RequestBuilder { + fn hash(&self, state: &mut H) { + self.logprobs.hash(state); + ((self.frequency_penalty) as i32).hash(state); + self.max_tokens.hash(state); + self.messages.hash(state); + self.model.hash(state); + self.n.hash(state); + ((self.presence_penalty) as i32).hash(state); + self.response_format.hash(state); + self.seed.hash(state); + self.stop.hash(state); + self.stream.hash(state); + ((self.temperature) as i32).hash(state); + self.tool_choice.hash(state); + self.tools.hash(state); + self.top_logprobs.hash(state); + ((self.top_p) as i32).hash(state); + self.user.hash(state); + } +} + +#[derive(Debug, PartialEq)] +pub struct BuilderConfig { + model: String, + logit_bias: Option, + logprobs: Option, + frequency_penalty: Option, + max_tokens: Option, + n: Option, + presence_penalty: Option, + response_format: Option, + seed: Option, + stop: Option, + stream: Option, + temperature: Option, + tool_choice: Option, + tools: Option>, + top_logprobs: Option, + top_p: Option, + user: Option, +} +impl Hash for BuilderConfig { + fn hash(&self, state: &mut H) { + self.model.hash(state); + self.logprobs.unwrap_or(false).hash(state); + (self.frequency_penalty.unwrap_or(0.0) as i32).hash(state); + self.max_tokens.hash(state); + self.n.hash(state); + (self.presence_penalty.unwrap_or(0.0) as i32).hash(state); + self.response_format.hash(state); + self.seed.hash(state); + self.stop.hash(state); + self.stream.hash(state); + (self.temperature.unwrap_or(1.0) as i32).hash(state); + self.tool_choice.hash(state); + 
self.tools.hash(state); + self.top_logprobs.hash(state); + (self.top_p.unwrap_or(1.0) as i32).hash(state); + self.user.hash(state); + } +} + +impl RequestBuilder { + pub fn with_config(cfg: &BuilderConfig) -> Self { + let mut builder_instance = Self::new(cfg.model.clone()); + + if let Some(lg_bias) = cfg.logit_bias.clone() { + builder_instance = builder_instance.with_logit_bias(lg_bias); + } + if let Some(log_probs) = cfg.logprobs { + builder_instance = builder_instance.with_logprobs(log_probs); + } + if let Some(freq_pen) = cfg.frequency_penalty { + builder_instance = builder_instance.with_frequency_penalty(freq_pen); + } + if let Some(max_tok) = cfg.max_tokens { + builder_instance = builder_instance.with_max_tokens(max_tok); + } + + if let Some(n) = cfg.n { + builder_instance = builder_instance.with_n(n); + } + + if let Some(presence_pen) = cfg.presence_penalty { + builder_instance = builder_instance.with_presence_penalty(presence_pen); + } + if let Some(response_fmt) = cfg.response_format.clone() { + builder_instance = builder_instance.with_response_fmt(response_fmt); + } + if let Some(sed) = cfg.seed { + builder_instance = builder_instance.with_seed(sed); + } + + if let Some(stop) = cfg.stop.clone() { + builder_instance = match stop { + StopEnum::Token(stp) => builder_instance.with_stop(&stp), + StopEnum::Tokens(stps) => builder_instance.with_stops(stps), + } + } + + if let Some(stream) = cfg.stream { + builder_instance = builder_instance.with_stream(stream); + } + if let Some(temp) = cfg.temperature { + builder_instance = builder_instance.with_temperature(temp); + } + + if let Some(tool_choice) = cfg.tool_choice.clone() { + builder_instance = match tool_choice { + ToolChoiceEnum::Str(tool_str) => { + builder_instance.with_tool_choice_string(tool_str).unwrap() + } + ToolChoiceEnum::Tool(tool_inst) => builder_instance.with_tool_choice(tool_inst), + } + } + + if let Some(tools) = cfg.tools.clone() { + builder_instance = builder_instance.with_tools(tools); + } + 
if let Some(top_logprobs) = cfg.top_logprobs { + builder_instance = builder_instance.with_top_logprobs(top_logprobs); + } + if let Some(top_p) = cfg.top_p { + builder_instance = builder_instance.with_top_p(top_p); + } + if let Some(user) = cfg.user.clone() { + builder_instance = builder_instance.with_user(&user); + } + builder_instance + } + + pub fn get_config(&self) -> BuilderConfig { + BuilderConfig { + model: self.model.clone(), + logit_bias: self.logit_bias.clone(), + logprobs: Some(self.logprobs), + frequency_penalty: Some(self.frequency_penalty), + max_tokens: self.max_tokens, + n: Some(self.n), + presence_penalty: Some(self.presence_penalty), + response_format: Some(self.response_format.clone()), + seed: self.seed, + stop: self.stop.clone(), + stream: Some(self.stream), + temperature: Some(self.temperature), + tool_choice: self.tool_choice.clone(), + tools: self.tools.clone(), + top_logprobs: self.top_logprobs, + top_p: Some(self.top_p), + user: self.user.clone(), + } + } + + pub fn from_builder(source: &RequestBuilder) -> Self { + //! 1 to 1 copy of another RequestBuilder + let mut builder = Self::with_config(&source.get_config()); + builder.messages.extend(source.messages.clone()); + builder + } + + pub fn new(model: String) -> Self { + //! # Important Note + //! The builder method of modifying messages filed is hidden because the reposibility is + //! shifted to the client struct. + //! such that the client struct can maintain the message history and can be reused. + //! + //! # Description + //! Instantiates a RequestBuilder struct with a set of default values for the request object of groq chat completion API. + //! ```ignore no_run + //! Self { + //! logit_bias: None, + //! logprobs: false, + //! frequency_penalty: 0.0, + //! max_tokens: None, + //! messages: Vec::new(), + //! model : "no default model".to_string(), + //! n: 1, + //! presence_penalty: 0.0, + //! response_format: ResponseFormat { + //! response_type: "text".into(), + //! }, + //! 
seed: None, + //! stop: None, + //! stream: false, + //! temperature: 1.0, + //! tool_choice: None, + //! tools: None, + //! top_logprobs: None, + //! top_p: 1.0, + //! user: None, + //!} + //!``` + Self { + logit_bias: None, + logprobs: false, + frequency_penalty: 0.0, + max_tokens: None, + messages: Vec::new(), + model, + n: 1, + presence_penalty: 0.0, + response_format: ResponseFormat { + response_type: "text".into(), + }, + seed: None, + stop: None, + stream: false, + temperature: 1.0, + tool_choice: None, + tools: None, + top_logprobs: None, + top_p: 1.0, + user: None, + } + } + + pub fn build(self) -> Request { + Request { + logit_bias: self.logit_bias, + logprobs: self.logprobs, + frequency_penalty: self.frequency_penalty, + max_tokens: self.max_tokens, + messages: self.messages, + model: self.model, + n: self.n, + presence_penalty: self.presence_penalty, + response_format: self.response_format, + seed: self.seed, + stop: self.stop, + stream: self.stream, + temperature: self.temperature, + tool_choice: self.tool_choice, + tools: self.tools, + top_logprobs: self.top_logprobs, + top_p: self.top_p, + user: self.user, + } + } + + pub fn with_logit_bias(mut self, logit_bias: Value) -> Self { + self.logit_bias = Some(logit_bias); + self + } + + pub fn with_logprobs(mut self, logprobs: bool) -> Self { + self.logprobs = logprobs; + self + } + + pub fn with_frequency_penalty(mut self, penalty: f32) -> Self { + self.frequency_penalty = penalty; + self + } + + pub fn with_max_tokens(mut self, n: u32) -> Self { + self.max_tokens = Some(n); + self + } + + pub(crate) fn with_messages(mut self, msgs: Vec) -> anyhow::Result { + anyhow::ensure!(msgs.len() > 0, "message cannot be empty"); + self.messages = msgs; + Ok(self) + } + + pub fn with_model(mut self, model: &str) -> Self { + self.model = model.into(); + self + } + + pub fn with_n(mut self, n: u32) -> Self { + self.n = n; + self + } + + pub fn with_presence_penalty(mut self, penalty: f32) -> Self { + 
self.presence_penalty = penalty; + self + } + + pub fn with_response_fmt(mut self, fmt: ResponseFormat) -> Self { + self.response_format = fmt; + self + } + + pub fn with_seed(mut self, seed: i32) -> Self { + self.seed = Some(seed); + self + } + + pub fn with_stop(mut self, stop: &str) -> Self { + self.stop = Some(StopEnum::Token(stop.into())); + self + } + + pub fn with_stops(mut self, stops: Vec) -> Self { + self.stop = Some(StopEnum::Tokens(stops)); + self + } + + pub fn with_stream(mut self, stream: bool) -> Self { + self.stream = stream; + self + } + + pub fn with_temperature(mut self, temp: f32) -> Self { + self.temperature = temp; + self + } + + pub fn with_tool_choice(mut self, tool: Tool) -> Self { + self.tool_choice = Some(ToolChoiceEnum::Tool(tool)); + self + } + pub fn with_auto_tool_choice(mut self) -> Self { + self.tool_choice = Some(ToolChoiceEnum::Str("auto".into())); + self + } + + pub fn with_tools(mut self, tools: Vec) -> Self { + self.tools = Some(tools); + self + } + + pub fn with_top_logprobs(mut self, prob: u8) -> Self { + self.top_logprobs = Some(prob); + self + } + + pub fn with_top_p(mut self, top_p: f32) -> Self { + self.top_p = top_p; + self + } + + pub fn with_user(mut self, user: &str) -> Self { + self.user = Some(user.into()); + self + } + + pub fn is_stream(&self) -> bool { + //! Check the request object is set to use stream for the completion response or not + //! - true if the stream flag is on + //! 
- false if the stream flag is off + self.stream + } + + pub fn with_tool_choice_string(mut self, tool: String) -> anyhow::Result { + anyhow::ensure!( + tool == "auto" || tool == "none", + "Tool choice of string only allows 'none' or 'auto'" + ); + + self.tool_choice = Some(ToolChoiceEnum::Str(tool)); + Ok(self) + } +} + +#[cfg(test)] +mod builder_test { + use std::hash::{DefaultHasher, Hash, Hasher}; + + use super::{BuilderConfig, RequestBuilder}; + + #[test] + fn can_return_init_config_and_cfg_hash_should_equal() -> anyhow::Result<()> { + let mut hasher = DefaultHasher::new(); + let mut hasher1 = DefaultHasher::new(); + let cfg = BuilderConfig { + model: "test".to_string(), + logit_bias: None, + logprobs: None, + frequency_penalty: None, + max_tokens: None, + n: None, + presence_penalty: None, + response_format: None, + seed: None, + stop: None, + stream: None, + temperature: None, + tool_choice: None, + tools: None, + top_logprobs: None, + top_p: None, + user: None, + }; + + let builder = RequestBuilder::with_config(&cfg); + let builder1 = RequestBuilder::new("test".into()); + + builder.get_config().hash(&mut hasher); + builder1.get_config().hash(&mut hasher1); + + let builder_hash = hasher.finish(); + let builder1_hash = hasher1.finish(); + assert_eq!(builder_hash, builder1_hash); + Ok(()) + } + + #[test] + fn copied_builder_should_have_eq_hash() -> anyhow::Result<()> { + let mut hasher = DefaultHasher::new(); + let mut hasher1 = DefaultHasher::new(); + + let builder = RequestBuilder::new("test".to_string()); + let builder1 = RequestBuilder::from_builder(&builder); + + builder.hash(&mut hasher); + builder1.hash(&mut hasher1); + + let builder_hash = hasher.finish(); + let builder1_hash = hasher1.finish(); + println!("{}\t{}", builder_hash, builder1_hash); + assert_eq!(hasher.finish(), hasher1.finish()); + Ok(()) + } +} diff --git a/crates/groq-api-rs/src/completion/request/mod.rs b/crates/groq-api-rs/src/completion/request/mod.rs new file mode 100644 index 
0000000..e49ae0f --- /dev/null +++ b/crates/groq-api-rs/src/completion/request/mod.rs @@ -0,0 +1,198 @@ +use std::hash::Hash; + +use super::message::Message; +use serde::Serialize; +pub mod builder; + +#[derive(Debug, Serialize)] +pub struct Request { + // unused for openai integration only + #[serde(skip_serializing_if = "Option::is_none")] + logit_bias: Option, + + // unused for openai integration only + logprobs: bool, // default false + frequency_penalty: f32, // defaults to 0 + // + #[serde(skip_serializing_if = "Option::is_none")] + max_tokens: Option, + + messages: Vec, + model: String, + + n: u32, // defaults to 1 + presence_penalty: f32, // defaults to 0 + response_format: ResponseFormat, // defaults to text, + + #[serde(skip_serializing_if = "Option::is_none")] + seed: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + stop: Option, + + stream: bool, // default false + temperature: f32, // defaults to 1 + + #[serde(skip_serializing_if = "Option::is_none")] + tool_choice: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + tools: Option>, + + #[serde(skip_serializing_if = "Option::is_none")] + top_logprobs: Option, + + top_p: f32, // defaults to 1 + + #[serde(skip_serializing_if = "Option::is_none")] + user: Option, +} + +impl Hash for Request { + fn hash(&self, state: &mut H) { + self.logprobs.hash(state); + ((self.frequency_penalty) as i32).hash(state); + self.max_tokens.hash(state); + self.messages.hash(state); + self.model.hash(state); + self.n.hash(state); + ((self.presence_penalty) as i32).hash(state); + self.response_format.hash(state); + self.seed.hash(state); + self.stop.hash(state); + self.stream.hash(state); + ((self.temperature) as i32).hash(state); + self.tool_choice.hash(state); + self.tools.hash(state); + self.top_logprobs.hash(state); + ((self.top_p) as i32).hash(state); + self.user.hash(state); + } +} + +impl Request { + pub fn is_stream(&self) -> bool { + self.stream + } +} + +#[derive(Debug, Serialize, Hash, 
Clone, PartialEq)] +#[serde(untagged)] +pub enum ToolChoiceEnum { + Str(String), + Tool(Tool), +} + +#[derive(Debug, Serialize, Hash, Clone, PartialEq)] +#[serde(untagged)] +pub enum StopEnum { + Token(String), + Tokens(Vec), +} + +#[derive(Debug, Serialize, Hash, Clone, PartialEq)] +pub struct Tool { + #[serde(rename(serialize = "type"))] + pub tool_type: String, + pub function: Function, +} + +#[derive(Debug, Serialize, Clone, PartialEq)] +pub struct Function { + pub description: Option, + pub name: Option, + pub parameters: Option, +} + +impl Hash for Function { + fn hash(&self, state: &mut H) { + self.description.hash(state); + self.name.hash(state); + } +} + +#[derive(Debug, Serialize, Hash, Clone, PartialEq)] +pub struct ResponseFormat { + #[serde(rename(serialize = "type"))] + pub response_type: String, +} + +#[cfg(test)] +mod request_test { + use crate::completion::request::*; + use anyhow::Context; + + #[test] + fn init_request() -> anyhow::Result<()> { + let target = Request { + logit_bias: None, + logprobs: false, + frequency_penalty: 0.0, + max_tokens: None, + messages: Vec::new(), + model: "".into(), + n: 1, + presence_penalty: 0.0, + response_format: ResponseFormat { + response_type: "text".into(), + }, + seed: None, + stop: None, + stream: false, + temperature: 1.0, + tool_choice: None, + tools: None, + top_logprobs: None, + top_p: 1.0, + user: None, + }; + let req2 = builder::RequestBuilder::new("".into()).build(); + + assert_eq!( + serde_json::to_string(&target).unwrap(), + serde_json::to_string(&req2).unwrap() + ); + Ok(()) + } + + #[test] + fn with_stop_enum() -> anyhow::Result<()> { + let mut target = Request { + logit_bias: None, + logprobs: false, + frequency_penalty: 0.0, + max_tokens: None, + messages: Vec::new(), + model: "".into(), + n: 1, + presence_penalty: 0.0, + response_format: ResponseFormat { + response_type: "text".into(), + }, + seed: None, + stop: Some(StopEnum::Token("endline".into())), + stream: false, + temperature: 1.0, + 
tool_choice: None, + tools: None, + top_logprobs: None, + top_p: 1.0, + user: None, + }; + let req2 = builder::RequestBuilder::new("".to_string()) + .with_stop("endline") + .build(); + + let out_json = serde_json::to_string(&req2).unwrap(); + assert_eq!(serde_json::to_string(&target).unwrap(), out_json); + + let stops = vec!["endline".to_string()]; + target.stop = Some(StopEnum::Tokens(stops.clone())); + let req2 = builder::RequestBuilder::new("".into()) + .with_stops(stops) + .build(); + let out_json = serde_json::to_string(&req2).unwrap(); + assert_eq!(serde_json::to_string(&target).unwrap(), out_json); + Ok(()) + } +} diff --git a/crates/groq-api-rs/src/completion/response.rs b/crates/groq-api-rs/src/completion/response.rs new file mode 100644 index 0000000..7235dd8 --- /dev/null +++ b/crates/groq-api-rs/src/completion/response.rs @@ -0,0 +1,161 @@ +use chrono::{serde::ts_seconds, Utc}; +use serde::Deserialize; +use std::{fmt::Display, hash::Hash}; + +use super::message::ToolCall; + +/// Response object responsible for representing error object returned +/// # Difference from groq's +/// - Added Status Code field for convenience +#[derive(Debug, Deserialize, Clone, Hash)] +pub struct ErrorResponse { + pub error: ErrorBody, + + #[serde(skip_deserializing)] + pub code: reqwest::StatusCode, +} + +impl Display for ErrorResponse { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + writeln!(f, "status_code : {}, error : {:?}", self.code, self.error) + } +} + +#[derive(Debug, Deserialize, Clone, Hash)] +pub struct ErrorBody { + #[serde(rename(deserialize = "type"))] + pub error_type: String, + pub message: String, +} + +/// Response object responsible for representing completion chunk object returned +/// # Difference from standard completion object +/// - The x_groq struct contains the server stream event ID and usage info at the last message +#[derive(Debug, Deserialize, Clone)] +pub struct StreamResponse { + pub id: String, + pub object: 
String, + #[serde(with = "ts_seconds")] + pub created: chrono::DateTime, + pub model: String, + + pub system_fingerprint: Option, + pub choices: Vec, + pub x_groq: Option, +} + +impl Hash for StreamResponse { + fn hash(&self, state: &mut H) { + self.id.hash(state); + self.object.hash(state); + self.created.hash(state); + self.model.hash(state); + self.system_fingerprint.hash(state); + self.choices.hash(state); + } +} + +#[derive(Debug, Deserialize, Clone)] +pub struct StreamChoice { + pub index: u32, + pub delta: ChoiceDelta, + pub logprobs: Option, + pub finish_reason: Option, +} + +impl Hash for StreamChoice { + fn hash(&self, state: &mut H) { + self.index.hash(state); + self.delta.hash(state); + self.finish_reason.hash(state); + + if self.logprobs.is_some() { + ((self.logprobs.unwrap()) as i32).hash(state); // I understand that this is a little weird, but the logic is that even if we can't hash a float, we can convert it to int and hash that at least. + } + } +} + +#[derive(Debug, Deserialize, Clone, Hash)] +pub struct ChoiceDelta { + role: Option, + content: Option, +} + +#[derive(Debug, Deserialize, Clone, Hash)] +pub struct XGroq { + pub id: String, + pub usage: Option, +} + +/// Response object responsible for representing completion object returned +#[derive(Debug, Deserialize, Clone)] +pub struct Response { + pub id: String, + pub object: String, + #[serde(with = "ts_seconds")] + pub created: chrono::DateTime, + pub model: String, + + pub system_fingerprint: Option, + pub choices: Vec, + pub usage: UsageInfo, +} + +impl Hash for Response { + fn hash(&self, state: &mut H) { + self.id.hash(state); + self.object.hash(state); + self.created.hash(state); + self.model.hash(state); + self.system_fingerprint.hash(state); + self.choices.hash(state); + self.usage.hash(state); + } +} + +#[derive(Debug, Deserialize, Clone)] +pub struct UsageInfo { + pub prompt_tokens: u32, + pub completion_tokens: u32, + pub total_tokens: u32, + pub prompt_time: f32, + pub 
completion_time: f32, + pub total_time: f32, +} + +impl Hash for UsageInfo { + fn hash(&self, state: &mut H) { + self.prompt_tokens.hash(state); + self.completion_tokens.hash(state); + self.total_tokens.hash(state); + ((self.prompt_time) as i32).hash(state); + ((self.completion_time) as i32).hash(state); + ((self.total_time) as i32).hash(state); + } +} + +#[derive(Debug, Deserialize, Clone)] +pub struct Choice { + pub index: u32, + pub message: ChoiceMessage, + pub finish_reason: String, + pub logprobs: Option, +} + +impl Hash for Choice { + fn hash(&self, state: &mut H) { + self.index.hash(state); + self.message.hash(state); + self.finish_reason.hash(state); + if self.logprobs.is_some() { + ((self.logprobs.unwrap()) as i32).hash(state); + } + } +} + +#[derive(Debug, Deserialize, Clone, Hash)] +pub struct ChoiceMessage { + pub role: String, + pub content: Option, + pub tool_calls: Option>, +} diff --git a/crates/groq-api-rs/src/lib.rs b/crates/groq-api-rs/src/lib.rs new file mode 100644 index 0000000..cfe0cf9 --- /dev/null +++ b/crates/groq-api-rs/src/lib.rs @@ -0,0 +1,85 @@ +//! Provides a simple client implementation for the [groq cloud API](https://console.groq.com/playground). +//! You can learn more about the API provided [API Documentation](https://console.groq.com/docs/quickstart) +//! This crate uses [`reqwest`], [`reqwest_eventsource`], [`tokio`], [`serde`], [`serde_json`], [`anyhow`], +//! [`chrono`],[`futures`] +//! +//! # MSRV +//! 1.78.0 +//! +//! # Usage +//! ```sh +//! cargo add groq-api-rs +//! ``` +//! +//! # Example +//! Request a completion object from Groq +//! ``` +//! use groq_api_rs::completion::{client::Groq, message::Message, request::builder}; +//! +//! async fn create_completion() -> anyhow::Result<()> { +//! let messages = vec![Message::UserMessage { +//! role: Some("user".to_string()), +//! content: Some("Explain the importance of fast language models".to_string()), +//! name: None, +//! tool_call_id: None, +//! }]; +//! 
let request = builder::RequestBuilder::new("mixtral-8x7b-32768".to_string()); +//! let api_key = env!("GROQ_API_KEY"); +//! +//! let mut client = Groq::new(api_key); +//! client.add_messages(messages); +//! +//! let res = client.create(request).await; +//! assert!(res.is_ok()); +//! Ok(()) +//! } +//! ``` +//! +//! Request a completion chunk object from Groq using stream option implemented with SSE +//! ``` +//! use groq_api_rs::completion::{client::Groq, message::Message, request::builder}; +//! async fn create_stream_completion() -> anyhow::Result<()> { +//! let messages = vec![Message::UserMessage { +//! role: Some("user".to_string()), +//! content: Some("Explain the importance of fast language models".to_string()), +//! name: None, +//! tool_call_id: None, +//! }]; +//! let request = +//! builder::RequestBuilder::new("mixtral-8x7b-32768".to_string()).with_stream(true); +//! let api_key = env!("GROQ_API_KEY"); +//! +//! let mut client = Groq::new(api_key); +//! client.add_messages(messages); +//! +//! let res = client.create(request).await; +//! assert!(res.is_ok()); +//! Ok(()) +//! } +//! ``` +//! +//! Example that the completion can return Error Object and augmented with HTTP status code. +//! ``` +//! use groq_api_rs::completion::{client::Groq, message::Message, request::builder}; +//! async fn error_does_return() -> anyhow::Result<()> { +//! let messages = vec![Message::UserMessage { +//! role: Some("user".to_string()), +//! content: Some("Explain the importance of fast language models".to_string()), +//! name: None, +//! tool_call_id: None, +//! }]; +//! let request = +//! builder::RequestBuilder::new("mixtral-8x7b-32768".to_string()).with_stream(true); +//! let api_key = ""; +//! +//! let mut client = Groq::new(api_key); +//! client.add_messages(messages); +//! +//! let res = client.create(request).await; +//! assert!(res.is_err()); +//! eprintln!("{}", res.unwrap_err()); +//! Ok(()) +//! } +//! 
``` + +pub mod completion; diff --git a/crates/ollama-rs/.github/workflows/rust.yml b/crates/ollama-rs/.github/workflows/rust.yml new file mode 100644 index 0000000..8ad0022 --- /dev/null +++ b/crates/ollama-rs/.github/workflows/rust.yml @@ -0,0 +1,48 @@ +name: CI + +on: + push: + branches: [ "master" ] + pull_request: + branches: [ "master" ] + +env: + CARGO_TERM_COLOR: always + +jobs: + fmt: + name: Formatting + runs-on: ubuntu-latest + steps: + - name: Checkout Actions Repository + uses: actions/checkout@v3 + + - name: Setup Rust + uses: actions-rust-lang/setup-rust-toolchain@v1.5.0 + with: + toolchain: "stable" + components: "rustfmt,clippy" + + - name: cargo fmt + run: cargo fmt --all -- --check + + - name: Install dependencies (Linux) + run: sudo apt-get update && sudo apt-get install -y libxcb-render0-dev libxcb-shape0-dev libxcb-xfixes0-dev libxkbcommon-dev libssl-dev libclang-dev libgtk-3-dev + + - name: Clippy + run: cargo clippy --workspace --no-deps --all-features --all-targets -- -D warnings + + build: + name: Build + runs-on: ubuntu-latest + steps: + - name: Checkout Actions Repository + uses: actions/checkout@v3 + + - name: Setup Rust + uses: actions-rust-lang/setup-rust-toolchain@v1.5.0 + with: + toolchain: "stable" + + - name: Build + run: cargo build --verbose diff --git a/crates/ollama-rs/.gitignore b/crates/ollama-rs/.gitignore new file mode 100644 index 0000000..83cdd23 --- /dev/null +++ b/crates/ollama-rs/.gitignore @@ -0,0 +1,3 @@ +/target +.vscode/settings.json +shell.nix diff --git a/crates/ollama-rs/Cargo.lock b/crates/ollama-rs/Cargo.lock new file mode 100644 index 0000000..15d1071 --- /dev/null +++ b/crates/ollama-rs/Cargo.lock @@ -0,0 +1,1899 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "addr2line" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "ahash" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" +dependencies = [ + "cfg-if", + "getrandom", + "once_cell", + "version_check", + "zerocopy", +] + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "async-stream" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" +dependencies = [ + "async-stream-impl", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "async-trait" +version = "0.1.80" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6fa2087f2753a7da8cc1c0dbfcf89579dd57458e36769de5ac750b4671737ca" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "auto_enums" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1899bfcfd9340ceea3533ea157360ba8fa864354eccbceab58e1006ecab35393" 
+dependencies = [ + "derive_utils", + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "backtrace" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" +dependencies = [ + "addr2line", + "cc", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "base64" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9475866fec1451be56a3c2400fd081ff546538961565ccb5b7142cbd22bc7a51" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" + +[[package]] +name = "bumpalo" +version = "3.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" + +[[package]] +name = "cc" +version = "1.0.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" +dependencies = [ + 
"libc", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "core-foundation" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" + +[[package]] +name = "cssparser" +version = "0.31.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b3df4f93e5fbbe73ec01ec8d3f68bba73107993a5b1e7519273c32db9b0d5be" +dependencies = [ + "cssparser-macros", + "dtoa-short", + "itoa", + "phf 0.11.2", + "smallvec", +] + +[[package]] +name = "cssparser-macros" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331" +dependencies = [ + "quote", + "syn 2.0.59", +] + +[[package]] +name = "derive_more" +version = "0.99.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "derive_utils" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61bb5a1014ce6dfc2a378578509abe775a5aa06bff584a547555d9efdb81b926" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "dtoa" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcbb2bf8e87535c23f7a8a321e364ce21462d0ff10cb6407820e8e96dfff6653" + +[[package]] +name = "dtoa-short" 
+version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbaceec3c6e4211c79e7b1800fb9680527106beb2f9c51904a3210c03a448c74" +dependencies = [ + "dtoa", +] + +[[package]] +name = "ego-tree" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a68a4904193147e0a8dec3314640e6db742afd5f6e634f428a6af230d9b3591" + +[[package]] +name = "either" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a47c1c47d2f5964e29c61246e81db715514cd532db6b5116a25ea3c03d6780a2" + +[[package]] +name = "errno" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3e13f66a2f95e32a39eaa81f6b95d42878ca0e1db0c7543723dfe12557e860" +dependencies = [ + "libc", + "windows-sys", +] + +[[package]] +name = "fastrand" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futf" +version = "0.1.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "df420e2e84819663797d1ec6544b13c5be84629e7bb00dc960d6917db2987843" +dependencies = [ + "mac", + "new_debug_unreachable", +] + +[[package]] +name = "futures-channel" +version = "0.3.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff4dd66668b557604244583e3e1e1eada8c5c2e96a6d0d6653ede395b78bbacb" +dependencies = [ + "futures-core", +] + +[[package]] +name = "futures-core" +version = "0.3.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb1d22c66e66d9d72e1758f0bd7d4fd0bee04cad842ee34587d68c07e45d088c" + +[[package]] +name = "futures-io" +version = "0.3.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8bf34a163b5c4c52d0478a4d757da8fb65cabef42ba90515efee0f6f9fa45aaa" + +[[package]] +name = "futures-macro" +version = "0.3.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53b153fd91e4b0147f4aced87be237c98248656bb01050b96bf3ee89220a8ddb" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "futures-sink" +version = "0.3.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e36d3378ee38c2a36ad710c5d30c2911d752cb941c00c72dbabfb786a7970817" + +[[package]] +name = "futures-task" +version = "0.3.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "efd193069b0ddadc69c46389b740bbccdd97203899b48d09c5f7969591d6bae2" + +[[package]] +name = "futures-util" +version = "0.3.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a19526d624e703a3179b3d322efec918b6246ea0fa51d41124525f00f1cc8104" +dependencies = [ + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "fxhash" +version = "0.2.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" +dependencies = [ + "byteorder", +] + +[[package]] +name = "getopts" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14dbbfd5c71d70241ecf9e6f13737f7b5ce823821063188d7e46c41d371eebd5" +dependencies = [ + "unicode-width", +] + +[[package]] +name = "getrandom" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "gimli" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "hermit-abi" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7" + +[[package]] +name = "html5ever" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bea68cab48b8459f17cf1c944c67ddc572d272d9f2b274140f223ecb1da4a3b7" +dependencies = [ + "log", + "mac", + "markup5ever", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "http" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" 
+dependencies = [ + "bytes", + "http", +] + +[[package]] +name = "http-body-util" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0475f8b2ac86659c21b64320d5d653f9efe42acd2a4e560073ec61a155a34f1d" +dependencies = [ + "bytes", + "futures-core", + "http", + "http-body", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" + +[[package]] +name = "hyper" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe575dd17d0862a9a33781c8c4696a55c320909004a67a00fb286ba8b1bc496d" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http", + "http-body", + "httparse", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0bea761b46ae2b24eb4aef630d8d1c398157b6fc29e6350ecf090a0b70c952c" +dependencies = [ + "futures-util", + "http", + "hyper", + "hyper-util", + "rustls", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tower-service", +] + +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http", + "http-body", + "hyper", + "pin-project-lite", + "socket2", + "tokio", + "tower", + "tower-service", 
+ "tracing", +] + +[[package]] +name = "idna" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "ipnet" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" + +[[package]] +name = "itertools" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" + +[[package]] +name = "js-sys" +version = "0.3.65" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54c0c35952f67de54bb584e9fd912b3023117cbafc0a77d8f3dee1fb5f572fe8" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.149" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" + +[[package]] +name = "linux-raw-sys" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f" + +[[package]] +name = "lock_api" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] 
+name = "log" +version = "0.4.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" + +[[package]] +name = "mac" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" + +[[package]] +name = "markup5ever" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2629bb1404f3d34c2e921f21fd34ba00b206124c81f65c50b43b6aaefeb016" +dependencies = [ + "log", + "phf 0.10.1", + "phf_codegen", + "string_cache", + "string_cache_codegen", + "tendril", +] + +[[package]] +name = "memchr" +version = "2.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "miniz_oxide" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3dce281c5e46beae905d4de1870d8b1509a9142b62eedf18b443b011ca8343d0" +dependencies = [ + "libc", + "wasi", + "windows-sys", +] + +[[package]] +name = "native-tls" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" +dependencies = [ + "lazy_static", + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = 
"new_debug_unreachable" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" + +[[package]] +name = "num_cpus" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "object" +version = "0.32.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cf5f9dd3933bd50a9e1f149ec995f39ae2c496d31fd772c1fd45ebc27e902b0" +dependencies = [ + "memchr", +] + +[[package]] +name = "ollama-rs" +version = "0.2.0" +dependencies = [ + "async-stream", + "async-trait", + "base64", + "log", + "ollama-rs", + "regex", + "reqwest", + "scraper", + "serde", + "serde_json", + "text-splitter", + "tokio", + "tokio-stream", + "url", +] + +[[package]] +name = "once_cell" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" + +[[package]] +name = "openssl" +version = "0.10.58" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9dfc0783362704e97ef3bd24261995a699468440099ef95d869b4d9732f829a" +dependencies = [ + "bitflags 2.4.1", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-sys" +version = "0.9.94" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f55da20b29f956fb01f0add8683eb26ee13ebe3ebd935e49898717c6b4b2830" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "parking_lot" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "phf" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fabbf1ead8a5bcbc20f5f8b939ee3f5b0f6f281b6ad3468b84656b658b455259" +dependencies = [ + "phf_shared 0.10.0", +] + +[[package]] +name = "phf" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" +dependencies = [ + "phf_macros", + "phf_shared 0.11.2", +] + +[[package]] +name = "phf_codegen" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb1c3a8bc4dd4e5cfce29b44ffc14bedd2ee294559a294e2a4d4c9e9a6a13cd" +dependencies = [ + "phf_generator 0.10.0", + "phf_shared 0.10.0", +] + +[[package]] +name = "phf_generator" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6" +dependencies = [ + "phf_shared 0.10.0", + "rand", 
+] + +[[package]] +name = "phf_generator" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" +dependencies = [ + "phf_shared 0.11.2", + "rand", +] + +[[package]] +name = "phf_macros" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3444646e286606587e49f3bcf1679b8cef1dc2c5ecc29ddacaffc305180d464b" +dependencies = [ + "phf_generator 0.11.2", + "phf_shared 0.11.2", + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "phf_shared" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" +dependencies = [ + "siphasher", +] + +[[package]] +name = "phf_shared" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" +dependencies = [ + "siphasher", +] + +[[package]] +name = "pin-project" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name 
= "pkg-config" +version = "0.3.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "precomputed-hash" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" + +[[package]] +name = "proc-macro2" +version = "1.0.81" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d1597b0c024618f09a9c3b8655b7e430397a36d23fdafec26d6965e9eec3eba" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "redox_syscall" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "regex" +version = "1.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" + +[[package]] +name = "reqwest" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "566cafdd92868e0939d3fb961bd0dc25fcfaaed179291093b3d43e6b3150ea10" +dependencies = [ + "base64", + "bytes", + "futures-core", + "futures-util", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-tls", + "hyper-util", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls", + "rustls-pemfile", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tokio-native-tls", + "tokio-rustls", + "tokio-util", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "webpki-roots", + "winreg", +] + +[[package]] +name = "ring" +version = "0.17.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "688c63d65483050968b2a8937f7995f443e27041a0f7700aa59b0822aedebb74" +dependencies = [ + "cc", + "getrandom", + "libc", + "spin", + "untrusted", + "windows-sys", +] + +[[package]] 
+name = "rustc-demangle" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" + +[[package]] +name = "rustix" +version = "0.38.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b426b0506e5d50a7d8dafcf2e81471400deb602392c7dd110815afb4eaf02a3" +dependencies = [ + "bitflags 2.4.1", + "errno", + "libc", + "linux-raw-sys", + "windows-sys", +] + +[[package]] +name = "rustls" +version = "0.22.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99008d7ad0bbbea527ec27bddbc0e432c5b87d8175178cee68d2eec9c4a1813c" +dependencies = [ + "log", + "ring", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-pemfile" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29993a25686778eb88d4189742cd713c9bce943bc54251a33509dc63cbacf73d" +dependencies = [ + "base64", + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecd36cc4259e3e4514335c4a138c6b43171a8d61d8f5c9348f9fc7529416f247" + +[[package]] +name = "rustls-webpki" +version = "0.102.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "faaa0a62740bedb9b2ef5afa303da42764c012f743917351dc9a237ea1663610" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "092474d1a01ea8278f69e6a358998405fae5b8b963ddaeb2b0b04a128bf1dfb0" + +[[package]] +name = "ryu" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" + +[[package]] +name = "schannel" +version = "0.1.22" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88" +dependencies = [ + "windows-sys", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "scraper" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b80b33679ff7a0ea53d37f3b39de77ea0c75b12c5805ac43ec0c33b3051af1b" +dependencies = [ + "ahash", + "cssparser", + "ego-tree", + "getopts", + "html5ever", + "once_cell", + "selectors", + "tendril", +] + +[[package]] +name = "security-framework" +version = "2.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "selectors" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4eb30575f3638fc8f6815f448d50cb1a2e255b0897985c8c59f4d37b72a07b06" +dependencies = [ + "bitflags 2.4.1", + "cssparser", + "derive_more", + "fxhash", + "log", + "new_debug_unreachable", + "phf 0.10.1", + "phf_codegen", + "precomputed-hash", + "servo_arc", + "smallvec", +] + +[[package]] +name = "serde" +version = "1.0.199" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c9f6e76df036c77cd94996771fb40db98187f096dd0b9af39c6c6e452ba966a" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = 
"1.0.199" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11bd257a6541e141e42ca6d24ae26f7714887b47e89aa739099104c7e4d3b7fc" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "serde_json" +version = "1.0.116" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e17db7126d17feb94eb3fad46bf1a96b034e8aacbc2e775fe81505f8b0b2813" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "servo_arc" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d036d71a959e00c77a63538b90a6c2390969f9772b096ea837205c6bd0491a44" +dependencies = [ + "stable_deref_trait", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" +dependencies = [ + "libc", +] + +[[package]] +name = "siphasher" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" + +[[package]] +name = "socket2" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" +dependencies = [ + "libc", + "windows-sys", +] + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" + +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] +name = "string_cache" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" +dependencies = [ + "new_debug_unreachable", + "once_cell", + "parking_lot", + "phf_shared 0.10.0", + "precomputed-hash", + "serde", +] + +[[package]] +name = "string_cache_codegen" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bb30289b722be4ff74a408c3cc27edeaad656e06cb1fe8fa9231fa59c728988" +dependencies = [ + "phf_generator 0.10.0", + "phf_shared 0.10.0", + "proc-macro2", + "quote", +] + +[[package]] +name = "strum" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d8cec3501a5194c432b2b7976db6b7d10ec95c253208b45f83f7136aa985e29" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6cf59daf282c0a494ba14fd21610a0325f9f90ec9d1231dea26bcb1d696c946" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "rustversion", + "syn 2.0.59", +] + +[[package]] +name = "subtle" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" + +[[package]] +name = "syn" +version = "1.0.109" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.59" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a6531ffc7b071655e4ce2e04bd464c4830bb585a61cabb96cf808f05172615a" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "tempfile" +version = "3.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5" +dependencies = [ + "cfg-if", + "fastrand", + "redox_syscall", + "rustix", + "windows-sys", +] + +[[package]] +name = "tendril" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d24a120c5fc464a3458240ee02c299ebcb9d67b5249c8848b09d639dca8d7bb0" +dependencies = [ + "futf", + "mac", + "utf-8", +] + +[[package]] +name = "text-splitter" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3634ff66852bfbf7e8e987735bac08168daa5d42dc39a0df7d05fc83eaa3fe4" +dependencies = [ + "ahash", + "auto_enums", + "either", + "itertools", + "once_cell", + "regex", + "strum", + "thiserror", + "unicode-segmentation", +] + +[[package]] +name = "thiserror" +version = "1.0.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "579e9083ca58dd9dcf91a9923bb9054071b9ebbd800b342194c9feb0ee89fc18" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e2470041c06ec3ac1ab38d0356a6119054dedaea53e12fbefc0de730a1c08524" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f38200e3ef7995e5ef13baec2f432a6da0aa9ac495b2c0e8f3b7eec2c92d653" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "num_cpus", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys", +] + +[[package]] +name = "tokio-macros" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" +dependencies = [ + "rustls", + "rustls-pki-types", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" +dependencies = [ + "futures-core", + 
"pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", + "tracing", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "pin-project", + "pin-project-lite", + "tokio", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + +[[package]] +name = "tower-service" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] +name = "tracing" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +dependencies = [ + "log", + "pin-project-lite", + "tracing-core", +] + +[[package]] +name = "tracing-core" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +dependencies = [ + "once_cell", +] + +[[package]] +name = "try-lock" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" + +[[package]] +name = "unicode-bidi" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" + +[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name = "unicode-normalization" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-segmentation" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202" + +[[package]] +name = "unicode-width" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68f5e5f3158ecfd4b8ff6fe086db7c8467a2dfdac97fe420f2b7c4aa97af66d6" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + 
+[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.88" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7daec296f25a1bae309c0cd5c29c4b260e510e6d813c286b19eaadf409d40fce" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.88" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e397f4664c0e4e428e8313a469aaa58310d302159845980fd23b0f22a847f217" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn 2.0.59", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9afec9963e3d0994cac82455b2b3502b81a7f40f9a0d32181f7528d9f4b43e02" +dependencies = [ + "cfg-if", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.88" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5961017b3b08ad5f3fe39f1e79877f8ee7c23c5e5fd5eb80de95abc41f1f16b2" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.88" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5353b8dab669f5e10f5bd76df26a9360c748f054f862ff5f3f8aae0c7fb3907" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = 
"0.2.88" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d046c5d029ba91a1ed14da14dca44b68bf2f124cfbaf741c54151fdb3e0750b" + +[[package]] +name = "wasm-streams" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b65dc4c90b63b118468cf747d8bf3566c1913ef60be765b5730ead9e0a3ba129" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "web-sys" +version = "0.3.65" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5db499c5f66323272151db0e666cd34f78617522fb0c1604d31a27c50c206a85" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki-roots" +version = "0.26.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3de34ae270483955a94f4b21bdaaeb83d508bb84a01435f393818edb0012009" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "winreg" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5" +dependencies = [ + "cfg-if", + "windows-sys", +] + +[[package]] +name = "zerocopy" +version = "0.7.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae87e3fcd617500e5d106f0380cf7b77f3c6092aae37191433159dda23cfb087" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15e934569e47891f7d9411f1a451d947a60e000ab3bd24fbb970f000387d1b3b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.59", +] + +[[package]] +name = "zeroize" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" diff --git a/crates/ollama-rs/Cargo.toml b/crates/ollama-rs/Cargo.toml new file mode 100644 index 0000000..7fa08cc --- /dev/null +++ b/crates/ollama-rs/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "ollama-rs" +version = "0.2.0" +edition = "2021" +license-file = "LICENSE.md" +description = "A Rust library for interacting with the Ollama API" +repository = "https://github.com/pepperoni21/ollama-rs" +readme = "README.md" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +reqwest = { version = "0.12.4", default-features = false, features = ["json"] } +serde = { version = "1", features = ["derive"] } +serde_json = "1" +tokio = { version = "1", features = ["full"], optional = true } +tokio-stream = { version = "0.1.15", optional = true } +async-trait = { version = "0.1.73" } +url = "2" +log = "0.4" +scraper = { version = "0.19.0", optional = true } +text-splitter = { version = "0.13.1", optional = true } +regex = { version = "1.9.3", optional = true } +async-stream = "0.3.5" + +[features] +default = ["reqwest/default-tls"] +stream = ["tokio-stream", "reqwest/stream", "tokio"] +rustls = ["reqwest/rustls-tls"] +chat-history = [] +function-calling = ["scraper", "text-splitter", "regex", "chat-history"] + +[dev-dependencies] +tokio = { version = "1", features = ["full"] } +ollama-rs = { path = ".", features = ["stream", "chat-history", "function-calling"] } +base64 = "0.22.0" + diff --git a/crates/ollama-rs/LICENSE.md b/crates/ollama-rs/LICENSE.md new file mode 100644 index 0000000..9ea8a4f --- /dev/null +++ b/crates/ollama-rs/LICENSE.md @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2023 pepperoni21 + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the 
rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/crates/ollama-rs/Makefile b/crates/ollama-rs/Makefile new file mode 100644 index 0000000..7fe3f41 --- /dev/null +++ b/crates/ollama-rs/Makefile @@ -0,0 +1,3 @@ +fmt: + cargo fmt + cargo clippy --fix --allow-dirty \ No newline at end of file diff --git a/crates/ollama-rs/README.md b/crates/ollama-rs/README.md new file mode 100644 index 0000000..d7272f8 --- /dev/null +++ b/crates/ollama-rs/README.md @@ -0,0 +1,210 @@ +# Ollama-rs + +### A simple and easy to use library for interacting with the Ollama API. + +It was made following the [Ollama API](https://github.com/jmorganca/ollama/blob/main/docs/api.md) documentation. 
+ +## Installation + +### Add ollama-rs to your Cargo.toml + +```toml +[dependencies] +ollama-rs = "0.2.0" +``` + +### Initialize Ollama + +```rust +// By default it will connect to localhost:11434 +let ollama = Ollama::default(); + +// For custom values: +let ollama = Ollama::new("http://localhost".to_string(), 11434); +``` + +## Usage + +Feel free to check the [Chatbot example](https://github.com/pepperoni21/ollama-rs/blob/0.2.0/examples/basic_chatbot.rs) that shows how to use the library to create a simple chatbot in less than 50 lines of code. +You can also check some [other examples](https://github.com/pepperoni21/ollama-rs/tree/0.2.0/examples). + +_These examples use poor error handling for simplicity, but you should handle errors properly in your code._ + +### Completion generation + +```rust +let model = "llama2:latest".to_string(); +let prompt = "Why is the sky blue?".to_string(); + +let res = ollama.generate(GenerationRequest::new(model, prompt)).await; + +if let Ok(res) = res { + println!("{}", res.response); +} +``` + +**OUTPUTS:** _The sky appears blue because of a phenomenon called Rayleigh scattering..._ + +### Completion generation (streaming) + +_Requires the `stream` feature._ + +```rust +let model = "llama2:latest".to_string(); +let prompt = "Why is the sky blue?".to_string(); + +let mut stream = ollama.generate_stream(GenerationRequest::new(model, prompt)).await.unwrap(); + +let mut stdout = tokio::io::stdout(); +while let Some(res) = stream.next().await { + let responses = res.unwrap(); + for resp in responses { + stdout.write(resp.response.as_bytes()).await.unwrap(); + stdout.flush().await.unwrap(); + } +} +``` + +Same output as above but streamed. 
+ +### Completion generation (passing options to the model) + +```rust +let model = "llama2:latest".to_string(); +let prompt = "Why is the sky blue?".to_string(); + +let options = GenerationOptions::default() + .temperature(0.2) + .repeat_penalty(1.5) + .top_k(25) + .top_p(0.25); + +let res = ollama.generate(GenerationRequest::new(model, prompt).options(options)).await; + +if let Ok(res) = res { + println!("{}", res.response); +} +``` + +**OUTPUTS:** _1. Sun emits white sunlight: The sun consists primarily ..._ + +### Chat mode +Description: _Every message sent and received will be stored in library's history._ +_Each time you want to store history, you have to provide an ID for a chat._ +_It can be uniq for each user or the same every time, depending on your need_ + +Example with history: +```rust +let model = "llama2:latest".to_string(); +let prompt = "Why is the sky blue?".to_string(); +let history_id = "USER_ID_OR_WHATEVER"; + +let res = ollama + .send_chat_messages_with_history( + ChatMessageRequest::new( + model, + vec![ChatMessage::user(prompt)], // <- You should provide only one message + ), + history_id // <- This entry save for us all the history + ).await; + +if let Ok(res) = res { +println!("{}", res.response); +} +``` + +Getting history for some ID: +```rust +let history_id = "USER_ID_OR_WHATEVER"; +let history = ollama.get_message_history(history_id); // <- Option> +// Act +``` + +Clear history if we no more need it: +```rust +// Clear history for an ID +let history_id = "USER_ID_OR_WHATEVER"; +ollama.clear_messages_for_id(history_id); +// Clear history for all chats +ollama.clear_all_messages(); +``` + +_Check chat with history examples for [default](https://github.com/pepperoni21/ollama-rs/blob/master/examples/chat_with_history.rs) and [stream](https://github.com/pepperoni21/ollama-rs/blob/master/examples/chat_with_history_stream.rs)_ + +### List local models + +```rust +let res = ollama.list_local_models().await.unwrap(); +``` + +_Returns a vector 
of `Model` structs._ + +### Show model information + +```rust +let res = ollama.show_model_info("llama2:latest".to_string()).await.unwrap(); +``` + +_Returns a `ModelInfo` struct._ + +### Create a model + +```rust +let res = ollama.create_model(CreateModelRequest::path("model".into(), "/tmp/Modelfile.example".into())).await.unwrap(); +``` + +_Returns a `CreateModelStatus` struct representing the final status of the model creation._ + +### Create a model (streaming) + +_Requires the `stream` feature._ + +```rust +let mut res = ollama.create_model_stream(CreateModelRequest::path("model".into(), "/tmp/Modelfile.example".into())).await.unwrap(); + +while let Some(res) = res.next().await { + let res = res.unwrap(); + // Handle the status +} +``` + +_Returns a `CreateModelStatusStream` that will stream every status update of the model creation._ + +### Copy a model + +```rust +let _ = ollama.copy_model("mario".into(), "mario_copy".into()).await.unwrap(); +``` + +### Delete a model + +```rust +let _ = ollama.delete_model("mario_copy".into()).await.unwrap(); +``` + +### Generate embeddings + +```rust +let prompt = "Why is the sky blue?".to_string(); +let res = ollama.generate_embeddings("llama2:latest".to_string(), prompt, None).await.unwrap(); +``` + +_Returns a `GenerateEmbeddingsResponse` struct containing the embeddings (a vector of floats)._ + +### Make a function call + +```rust +let tools = vec![Arc::new(Scraper::new())]; +let parser = Arc::new(NousFunctionCall::new()); +let message = ChatMessage::user("What is the current oil price?".to_string()); +let res = ollama.send_function_call( + FunctionCallRequest::new( + "adrienbrault/nous-hermes2pro:Q8_0".to_string(), + tools, + vec![message], + ), + parser, + ).await.unwrap(); +``` + +_Uses the given tools (such as searching the web) to find an answer, returns a `ChatMessageResponse` with the answer to the question._ diff --git a/crates/ollama-rs/examples/basic_chatbot.rs b/crates/ollama-rs/examples/basic_chatbot.rs new 
file mode 100644 index 0000000..2749e43 --- /dev/null +++ b/crates/ollama-rs/examples/basic_chatbot.rs @@ -0,0 +1,49 @@ +use ollama_rs::{ + generation::completion::{ + request::GenerationRequest, GenerationContext, GenerationResponseStream, + }, + Ollama, +}; +use tokio::io::{stdout, AsyncWriteExt}; +use tokio_stream::StreamExt; + +#[tokio::main] +async fn main() -> Result<(), Box> { + let ollama = Ollama::default(); + + let mut stdout = stdout(); + + let mut context: Option = None; + + loop { + stdout.write_all(b"\n> ").await?; + stdout.flush().await?; + + let mut input = String::new(); + std::io::stdin().read_line(&mut input)?; + + let input = input.trim_end(); + if input.eq_ignore_ascii_case("exit") { + break; + } + + let mut request = GenerationRequest::new("llama2:latest".into(), input.to_string()); + if let Some(context) = context.clone() { + request = request.context(context); + } + let mut stream: GenerationResponseStream = ollama.generate_stream(request).await?; + + while let Some(Ok(res)) = stream.next().await { + for ele in res { + stdout.write_all(ele.response.as_bytes()).await?; + stdout.flush().await?; + + if ele.context.is_some() { + context = ele.context; + } + } + } + } + + Ok(()) +} diff --git a/crates/ollama-rs/examples/chat_api_chatbot.rs b/crates/ollama-rs/examples/chat_api_chatbot.rs new file mode 100644 index 0000000..27700d5 --- /dev/null +++ b/crates/ollama-rs/examples/chat_api_chatbot.rs @@ -0,0 +1,52 @@ +use ollama_rs::{ + generation::chat::{request::ChatMessageRequest, ChatMessage, ChatMessageResponseStream}, + Ollama, +}; +use tokio::io::{stdout, AsyncWriteExt}; +use tokio_stream::StreamExt; + +#[tokio::main] +async fn main() -> Result<(), Box> { + let ollama = Ollama::default(); + + let mut stdout = stdout(); + + let mut messages: Vec = vec![]; + + loop { + stdout.write_all(b"\n> ").await?; + stdout.flush().await?; + + let mut input = String::new(); + std::io::stdin().read_line(&mut input)?; + + let input = input.trim_end(); + if 
input.eq_ignore_ascii_case("exit") { + break; + } + + let user_message = ChatMessage::user(input.to_string()); + messages.push(user_message); + + let mut stream: ChatMessageResponseStream = ollama + .send_chat_messages_stream(ChatMessageRequest::new( + "llama2:latest".to_string(), + messages.clone(), + )) + .await?; + + let mut response = String::new(); + while let Some(Ok(res)) = stream.next().await { + if let Some(assistant_message) = res.message { + stdout + .write_all(assistant_message.content.as_bytes()) + .await?; + stdout.flush().await?; + response += assistant_message.content.as_str(); + } + } + messages.push(ChatMessage::assistant(response)); + } + + Ok(()) +} diff --git a/crates/ollama-rs/examples/chat_with_history.rs b/crates/ollama-rs/examples/chat_with_history.rs new file mode 100644 index 0000000..69420e9 --- /dev/null +++ b/crates/ollama-rs/examples/chat_with_history.rs @@ -0,0 +1,43 @@ +use ollama_rs::{ + generation::chat::{request::ChatMessageRequest, ChatMessage}, + Ollama, +}; +use tokio::io::{stdout, AsyncWriteExt}; + +#[tokio::main] +async fn main() -> Result<(), Box> { + let mut ollama = Ollama::new_default_with_history(30); + + let mut stdout = stdout(); + + loop { + stdout.write_all(b"\n> ").await?; + stdout.flush().await?; + + let mut input = String::new(); + std::io::stdin().read_line(&mut input)?; + + let input = input.trim_end(); + if input.eq_ignore_ascii_case("exit") { + break; + } + + let user_message = ChatMessage::user(input.to_string()); + + let result = ollama + .send_chat_messages_with_history( + ChatMessageRequest::new("llama2:latest".to_string(), vec![user_message]), + "default", + ) + .await?; + + let assistant_message = result.message.unwrap().content; + stdout.write_all(assistant_message.as_bytes()).await?; + stdout.flush().await?; + } + + // Display whole history of messages + dbg!(&ollama.get_messages_history("default")); + + Ok(()) +} diff --git a/crates/ollama-rs/examples/chat_with_history_stream.rs 
b/crates/ollama-rs/examples/chat_with_history_stream.rs new file mode 100644 index 0000000..2d54a42 --- /dev/null +++ b/crates/ollama-rs/examples/chat_with_history_stream.rs @@ -0,0 +1,50 @@ +use ollama_rs::{ + generation::chat::{request::ChatMessageRequest, ChatMessage, ChatMessageResponseStream}, + Ollama, +}; +use tokio::io::{stdout, AsyncWriteExt}; +use tokio_stream::StreamExt; + +#[tokio::main] +async fn main() -> Result<(), Box> { + let mut ollama = Ollama::new_default_with_history(30); + + let mut stdout = stdout(); + + loop { + stdout.write_all(b"\n> ").await?; + stdout.flush().await?; + + let mut input = String::new(); + std::io::stdin().read_line(&mut input)?; + + let input = input.trim_end(); + if input.eq_ignore_ascii_case("exit") { + break; + } + + let mut stream: ChatMessageResponseStream = ollama + .send_chat_messages_with_history_stream( + ChatMessageRequest::new( + "llama2:latest".to_string(), + vec![ChatMessage::user(input.to_string())], + ), + "user".to_string(), + ) + .await?; + + let mut response = String::new(); + while let Some(Ok(res)) = stream.next().await { + if let Some(assistant_message) = res.message { + stdout + .write_all(assistant_message.content.as_bytes()) + .await?; + stdout.flush().await?; + response += assistant_message.content.as_str(); + } + } + dbg!(&ollama.get_messages_history("user")); + } + + Ok(()) +} diff --git a/crates/ollama-rs/examples/completion_with_options.rs b/crates/ollama-rs/examples/completion_with_options.rs new file mode 100644 index 0000000..a67e7a1 --- /dev/null +++ b/crates/ollama-rs/examples/completion_with_options.rs @@ -0,0 +1,26 @@ +use ollama_rs::{ + generation::{completion::request::GenerationRequest, options::GenerationOptions}, + Ollama, +}; + +#[tokio::main] +async fn main() -> Result<(), Box> { + let ollama = Ollama::default(); + let model = "llama2:latest".to_string(); + let prompt = "Why is the sky blue?".to_string(); + + let options = GenerationOptions::default() + .temperature(0.2) + 
.repeat_penalty(1.5) + .top_k(25) + .top_p(0.25); + + let res = ollama + .generate(GenerationRequest::new(model, prompt).options(options)) + .await; + + if let Ok(res) = res { + println!("{}", res.response); + } + Ok(()) +} diff --git a/crates/ollama-rs/examples/options_from_json.rs b/crates/ollama-rs/examples/options_from_json.rs new file mode 100644 index 0000000..58b3dc1 --- /dev/null +++ b/crates/ollama-rs/examples/options_from_json.rs @@ -0,0 +1,30 @@ +use ollama_rs::{ + generation::{completion::request::GenerationRequest, options::GenerationOptions}, + Ollama, +}; + +#[tokio::main] +async fn main() -> Result<(), Box> { + let ollama = Ollama::default(); + let model = "llama2:latest".to_string(); + let prompt = "Why is the sky blue?".to_string(); + + // Fetch the configuration from a file or from user request + // let options_str = fs::read_to_string("options.json").expect("The option file should be available") ; + let options_str = r#"{ + "temperature": 0.2, + "repeat_penalty": 1.5, + "top_k": 25, + "top_p": 0.25 + }"#; + let options: GenerationOptions = + serde_json::from_str(options_str).expect("JSON was not well-formatted"); + let res = ollama + .generate(GenerationRequest::new(model, prompt).options(options)) + .await; + + if let Ok(res) = res { + println!("{}", res.response); + } + Ok(()) +} diff --git a/crates/ollama-rs/src/error.rs b/crates/ollama-rs/src/error.rs new file mode 100644 index 0000000..436bc9d --- /dev/null +++ b/crates/ollama-rs/src/error.rs @@ -0,0 +1,52 @@ +use std::{ + error::Error, + fmt::{Debug, Display}, +}; + +use serde::Deserialize; + +/// A result type for ollama-rs. +pub type Result = std::result::Result; + +/// An error type for ollama-rs. 
+#[derive(Deserialize)] +pub struct OllamaError { + #[serde(rename = "error")] + pub(crate) message: String, +} + +impl Display for OllamaError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "An error occurred with ollama-rs: {}", self.message) + } +} + +impl Debug for OllamaError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "Ollama error: {}", self.message) + } +} + +impl Error for OllamaError {} + +impl From for OllamaError { + fn from(message: String) -> Self { + Self { message } + } +} + +impl From> for OllamaError { + fn from(error: Box) -> Self { + Self { + message: error.to_string(), + } + } +} + +impl From for OllamaError { + fn from(error: serde_json::Error) -> Self { + Self { + message: error.to_string(), + } + } +} diff --git a/crates/ollama-rs/src/generation.rs b/crates/ollama-rs/src/generation.rs new file mode 100644 index 0000000..a29c4ae --- /dev/null +++ b/crates/ollama-rs/src/generation.rs @@ -0,0 +1,8 @@ +pub mod chat; +pub mod completion; +pub mod embeddings; +#[cfg(feature = "function-calling")] +pub mod functions; +pub mod images; +pub mod options; +pub mod parameters; diff --git a/crates/ollama-rs/src/generation/chat/mod.rs b/crates/ollama-rs/src/generation/chat/mod.rs new file mode 100644 index 0000000..0076d5c --- /dev/null +++ b/crates/ollama-rs/src/generation/chat/mod.rs @@ -0,0 +1,315 @@ +use std::collections::HashMap; + +#[cfg(all(feature = "chat-history", feature = "stream"))] +use async_stream::stream; +use serde::{Deserialize, Serialize}; + +use crate::Ollama; +pub mod request; +use super::images::Image; +use request::ChatMessageRequest; + +#[cfg(feature = "chat-history")] +use crate::history::MessagesHistory; + +#[cfg(feature = "stream")] +/// A stream of `ChatMessageResponse` objects +pub type ChatMessageResponseStream = + std::pin::Pin> + Send>>; + +impl Ollama { + #[cfg(feature = "stream")] + /// Chat message generation with streaming. 
+ /// Returns a stream of `ChatMessageResponse` objects + pub async fn send_chat_messages_stream( + &self, + request: ChatMessageRequest, + ) -> crate::error::Result { + use tokio_stream::StreamExt; + + let mut request = request; + request.stream = true; + + let url = format!("{}api/chat", self.url_str()); + let serialized = serde_json::to_string(&request) + .map_err(|e| e.to_string()) + .unwrap(); + let res = self + .reqwest_client + .post(url) + .body(serialized) + .send() + .await + .map_err(|e| e.to_string())?; + + if !res.status().is_success() { + return Err(res.text().await.unwrap_or_else(|e| e.to_string()).into()); + } + + let stream = Box::new(res.bytes_stream().map(|res| match res { + Ok(bytes) => { + let res = serde_json::from_slice::(&bytes); + match res { + Ok(res) => Ok(res), + Err(e) => { + eprintln!("Failed to deserialize response: {}", e); + Err(()) + } + } + } + Err(e) => { + eprintln!("Failed to read response: {}", e); + Err(()) + } + })); + + Ok(std::pin::Pin::from(stream)) + } + + /// Chat message generation. 
+ /// Returns a `ChatMessageResponse` object + pub async fn send_chat_messages( + &self, + request: ChatMessageRequest, + ) -> crate::error::Result { + let mut request = request; + request.stream = false; + + let url = format!("{}api/chat", self.url_str()); + let serialized = serde_json::to_string(&request).map_err(|e| e.to_string())?; + + let res = self + .reqwest_client + .post(url) + .body(serialized) + .send() + .await + .map_err(|e| e.to_string())?; + + if !res.status().is_success() { + return Err(res.text().await.unwrap_or_else(|e| e.to_string()).into()); + } + + let bytes = res.bytes().await.map_err(|e| e.to_string())?; + let res = + serde_json::from_slice::(&bytes).map_err(|e| e.to_string())?; + + Ok(res) + } +} + +#[cfg(feature = "chat-history")] +impl Ollama { + #[cfg(feature = "stream")] + pub async fn send_chat_messages_with_history_stream( + &mut self, + mut request: ChatMessageRequest, + history_id: impl ToString, + ) -> crate::error::Result { + use async_stream::stream; + use tokio_stream::StreamExt; + let id_copy = history_id.to_string().clone(); + + let mut current_chat_messages = self.get_chat_messages_by_id(id_copy.clone()); + + if let Some(message) = request.messages.first() { + current_chat_messages.push(message.clone()); + } + + // The request is modified to include the current chat messages + request.messages.clone_from(¤t_chat_messages); + request.stream = true; + + let mut resp_stream: ChatMessageResponseStream = + self.send_chat_messages_stream(request.clone()).await?; + + let messages_history = self.messages_history.clone(); + + let s = stream! 
{ + let mut result = String::new(); + + while let Some(item) = resp_stream.try_next().await.unwrap() { + let msg_part = item.clone().message.unwrap().content; + + if item.done { + if let Some(history) = messages_history.clone() { + let mut inner = history.write().unwrap(); + // Message we sent to AI + if let Some(message) = request.messages.last() { + inner.add_message(id_copy.clone(), message.clone()); + } + + // AI's response + inner.add_message(id_copy.clone(), ChatMessage::assistant(result.clone())); + } + } else { + result.push_str(&msg_part); + } + + yield Ok(item); + } + }; + + Ok(Box::pin(s)) + } + + /// Chat message generation + /// Returns a `ChatMessageResponse` object + /// Manages the history of messages for the given `id` + pub async fn send_chat_messages_with_history( + &mut self, + mut request: ChatMessageRequest, + history_id: impl ToString, + ) -> crate::error::Result { + // The request is modified to include the current chat messages + let id_copy = history_id.to_string().clone(); + let mut current_chat_messages = self.get_chat_messages_by_id(id_copy.clone()); + + if let Some(message) = request.messages.first() { + current_chat_messages.push(message.clone()); + } + + // The request is modified to include the current chat messages + request.messages.clone_from(¤t_chat_messages); + + let result = self.send_chat_messages(request.clone()).await; + + if let Ok(result) = result { + // Message we sent to AI + if let Some(message) = request.messages.last() { + self.store_chat_message_by_id(id_copy.clone(), message.clone()); + } + // Store AI's response in the history + self.store_chat_message_by_id(id_copy, result.message.clone().unwrap()); + + return Ok(result); + } + + result + } + + /// Helper function to store chat messages by id + fn store_chat_message_by_id(&mut self, id: impl ToString, message: ChatMessage) { + if let Some(messages_history) = self.messages_history.as_mut() { + messages_history.write().unwrap().add_message(id, message); + } + } + + 
/// Returns a copy of the stored history for `history_id`, lazily creating
/// the history store and an empty bucket on first use.
///
/// Returning a copy (rather than a borrow) lets the caller assemble a request
/// without committing anything to the history if the request later fails.
fn get_chat_messages_by_id(&mut self, history_id: impl ToString) -> Vec<ChatMessage> {
    // Lazy init. The original borrowed a temporary (`&mut { ... }`) and
    // cloned the Arc needlessly; assign first, then borrow.
    if self.messages_history.is_none() {
        self.messages_history = Some(std::sync::Arc::new(std::sync::RwLock::new(
            MessagesHistory::default(),
        )));
    }
    let mut history_instance = self
        .messages_history
        .as_ref()
        .expect("messages_history was just initialized above")
        .write()
        .unwrap();
    history_instance
        .messages_by_id
        .entry(history_id.to_string())
        .or_default()
        .clone()
}
} // end of impl Ollama

/// A single (possibly streamed) chat completion response.
// NOTE(review): generic parameters below were stripped by the extraction and
// have been reconstructed — verify against upstream ollama-rs 0.2.0.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct ChatMessageResponse {
    /// The name of the model used for the completion.
    pub model: String,
    /// The creation time of the completion, in such format: `2023-08-04T08:52:19.385406455-07:00`.
    pub created_at: String,
    /// The generated chat message.
    pub message: Option<ChatMessage>,
    pub done: bool,
    /// The final data of the completion. This is only present if the completion is done.
    #[serde(flatten)]
    pub final_data: Option<ChatMessageFinalResponseData>,
}

#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct ChatMessageFinalResponseData {
    /// Time spent generating the response
    pub total_duration: u64,
    /// Number of tokens in the prompt
    pub prompt_eval_count: u16,
    /// Time spent in nanoseconds evaluating the prompt
    pub prompt_eval_duration: u64,
    /// Number of tokens in the response
    pub eval_count: u16,
    /// Time in nanoseconds spent generating the response
    pub eval_duration: u64,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolCallFunction {
    pub name: String,
    // NOTE(review): the inner generic type was lost in extraction; assumed to
    // be a JSON argument map — confirm against callers.
    pub arguments: Option<std::collections::HashMap<String, serde_json::Value>>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolCall {
    pub function: ToolCallFunction,
}

/// One turn of a conversation: a role plus its text content, with optional
/// attached images and tool calls.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatMessage {
    pub role: MessageRole,
    pub content: String,
    pub images: Option<Vec<Image>>,
    pub tool_calls: Option<Vec<ToolCall>>,
}

impl ChatMessage {
    pub fn new(role: MessageRole, content: String) -> Self {
        Self {
            role,
            content,
            images: None,
            tool_calls: None,
        }
    }

    /// Convenience constructor for a user-role message.
    pub fn user(content: String) -> Self {
        Self::new(MessageRole::User, content)
    }

    /// Convenience constructor for an assistant-role message.
    pub fn assistant(content: String) -> Self {
        Self::new(MessageRole::Assistant, content)
    }

    /// Convenience constructor for a system-role message.
    pub fn system(content: String) -> Self {
        Self::new(MessageRole::System, content)
    }

    /// Replaces the attached images wholesale.
    pub fn with_images(mut self, images: Vec<Image>) -> Self {
        self.images = Some(images);
        self
    }

    /// Appends one image, creating the list on first use.
    pub fn add_image(mut self, image: Image) -> Self {
        self.images.get_or_insert_with(Vec::new).push(image);
        self
    }
}

/// Role tags serialized exactly as the Ollama API expects.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub enum MessageRole {
    #[serde(rename = "user")]
    User,
    #[serde(rename = "assistant")]
    Assistant,
    #[serde(rename = "system")]
    System,
}
/dev/null +++ b/crates/ollama-rs/src/generation/chat/request.rs @@ -0,0 +1,90 @@ +use std::collections::HashMap; + +use serde::{Deserialize, Serialize}; + +use crate::generation::{options::GenerationOptions, parameters::FormatType}; + +use super::ChatMessage; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ToolFunctionParameterProperty { + #[serde(rename(serialize = "type", deserialize = "type"))] + pub the_type: String, + pub description: String, + // `enum` is optional and can be a list of strings + #[serde(rename(serialize = "enum", deserialize = "enum"))] + pub an_enum: Option>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ToolFunctionParameters { + #[serde(rename(serialize = "type", deserialize = "type"))] + pub the_type: String, + pub required: Vec, + pub properties: HashMap, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ToolFunction { + pub name: String, + pub description: String, + pub parameters: ToolFunctionParameters, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Tool { + #[serde(rename(serialize = "type", deserialize = "type"))] + pub the_type: String, + pub function: ToolFunction, +} + +/// A chat message request to Ollama. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ChatMessageRequest { + #[serde(rename = "model")] + pub model_name: String, + pub messages: Vec, + pub options: Option, + pub template: Option, + pub format: Option, + pub tools: Vec, + pub(crate) stream: bool, +} + +impl ChatMessageRequest { + pub fn new(model_name: String, messages: Vec) -> Self { + Self { + model_name, + messages, + options: None, + template: None, + format: None, + tools: vec![], + // Stream value will be overwritten by Ollama::send_chat_messages_stream() and Ollama::send_chat_messages() methods + stream: false, + } + } + + /// Additional model parameters listed in the documentation for the Modelfile + pub fn options(mut self, options: GenerationOptions) -> Self { + self.options = Some(options); + self + } + + /// The full prompt or prompt template (overrides what is defined in the Modelfile) + pub fn template(mut self, template: String) -> Self { + self.template = Some(template); + self + } + + pub fn tools(mut self, tools: Vec) -> Self { + self.tools = tools; + self + } + + // The format to return a response in. Currently the only accepted value is `json` + pub fn format(mut self, format: FormatType) -> Self { + self.format = Some(format); + self + } +} diff --git a/crates/ollama-rs/src/generation/completion/mod.rs b/crates/ollama-rs/src/generation/completion/mod.rs new file mode 100644 index 0000000..a7ba8cd --- /dev/null +++ b/crates/ollama-rs/src/generation/completion/mod.rs @@ -0,0 +1,118 @@ +use serde::{Deserialize, Serialize}; + +use crate::Ollama; + +use request::GenerationRequest; + +pub mod request; + +#[cfg(feature = "stream")] +/// A stream of `GenerationResponse` objects +pub type GenerationResponseStream = std::pin::Pin< + Box< + dyn tokio_stream::Stream> + Send, + >, +>; +pub type GenerationResponseStreamChunk = Vec; + +impl Ollama { + #[cfg(feature = "stream")] + /// Completion generation with streaming. 
+ /// Returns a stream of `GenerationResponse` objects + pub async fn generate_stream( + &self, + request: GenerationRequest, + ) -> crate::error::Result { + use tokio_stream::StreamExt; + + use crate::error::OllamaError; + + let mut request = request; + request.stream = true; + + let url = format!("{}api/generate", self.url_str()); + let serialized = serde_json::to_string(&request).map_err(|e| e.to_string())?; + let res = self + .reqwest_client + .post(url) + .body(serialized) + .send() + .await + .map_err(|e| e.to_string())?; + + if !res.status().is_success() { + return Err(res.text().await.unwrap_or_else(|e| e.to_string()).into()); + } + + let stream = Box::new(res.bytes_stream().map(|res| match res { + Ok(bytes) => { + let res = serde_json::Deserializer::from_slice(&bytes).into_iter(); + let res = res + .map(|res| res.map_err(|e| OllamaError::from(e.to_string()))) + .filter_map(Result::ok) // Filter out the errors + .collect::>(); + Ok(res) + } + Err(e) => Err(OllamaError::from(format!("Failed to read response: {}", e))), + })); + + Ok(std::pin::Pin::from(stream)) + } + + /// Completion generation with a single response. 
+ /// Returns a single `GenerationResponse` object + pub async fn generate( + &self, + request: GenerationRequest, + ) -> crate::error::Result { + let mut request = request; + request.stream = false; + + let url = format!("{}api/generate", self.url_str()); + let serialized = serde_json::to_string(&request).map_err(|e| e.to_string())?; + let res = self + .reqwest_client + .post(url) + .body(serialized) + .send() + .await + .map_err(|e| e.to_string())?; + + if !res.status().is_success() { + return Err(res.text().await.unwrap_or_else(|e| e.to_string()).into()); + } + + let res = res.bytes().await.map_err(|e| e.to_string())?; + let res = serde_json::from_slice::(&res).map_err(|e| e.to_string())?; + + Ok(res) + } +} + +/// An encoding of a conversation returned by Ollama after a completion request, this can be sent in a new request to keep a conversational memory. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct GenerationContext(pub Vec); + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct GenerationResponse { + /// The name of the model used for the completion. + pub model: String, + /// The creation time of the completion, in such format: `2023-08-04T08:52:19.385406455-07:00`. + pub created_at: String, + /// The response of the completion. This can be the entire completion or only a token if the completion is streaming. + pub response: String, + /// Whether the completion is done. If the completion is streaming, this will be false until the last response. 
+ pub done: bool, + /// An encoding of the conversation used in this response, this can be sent in the next request to keep a conversational memory + pub context: Option, + /// Time spent generating the response + pub total_duration: Option, + /// Number of tokens in the prompt + pub prompt_eval_count: Option, + /// Time spent in nanoseconds evaluating the prompt + pub prompt_eval_duration: Option, + /// Number of tokens in the response + pub eval_count: Option, + /// Time spent in nanoseconds generating the response + pub eval_duration: Option, +} diff --git a/crates/ollama-rs/src/generation/completion/request.rs b/crates/ollama-rs/src/generation/completion/request.rs new file mode 100644 index 0000000..d306d94 --- /dev/null +++ b/crates/ollama-rs/src/generation/completion/request.rs @@ -0,0 +1,91 @@ +use serde::Serialize; + +use crate::generation::{ + images::Image, + options::GenerationOptions, + parameters::{FormatType, KeepAlive}, +}; + +use super::GenerationContext; + +/// A generation request to Ollama. 
+#[derive(Debug, Clone, Serialize)] +pub struct GenerationRequest { + #[serde(rename = "model")] + pub model_name: String, + pub prompt: String, + pub images: Vec, + pub options: Option, + pub system: Option, + pub template: Option, + pub context: Option, + pub format: Option, + pub keep_alive: Option, + pub(crate) stream: bool, +} + +impl GenerationRequest { + pub fn new(model_name: String, prompt: String) -> Self { + Self { + model_name, + prompt, + images: Vec::new(), + options: None, + system: None, + template: None, + context: None, + format: None, + keep_alive: None, + // Stream value will be overwritten by Ollama::generate_stream() and Ollama::generate() methods + stream: false, + } + } + + /// A list of images to be used with the prompt + pub fn images(mut self, images: Vec) -> Self { + self.images = images; + self + } + + /// Add an image to be used with the prompt + pub fn add_image(mut self, image: Image) -> Self { + self.images.push(image); + self + } + + /// Additional model parameters listed in the documentation for the Modelfile + pub fn options(mut self, options: GenerationOptions) -> Self { + self.options = Some(options); + self + } + + /// System prompt to (overrides what is defined in the Modelfile) + pub fn system(mut self, system: String) -> Self { + self.system = Some(system); + self + } + + /// The full prompt or prompt template (overrides what is defined in the Modelfile) + pub fn template(mut self, template: String) -> Self { + self.template = Some(template); + self + } + + /// The context parameter returned from a previous request to /generate, this can be used to keep a short conversational memory + pub fn context(mut self, context: GenerationContext) -> Self { + self.context = Some(context); + self + } + + // The format to return a response in. 
Currently the only accepted value is `json` + pub fn format(mut self, format: FormatType) -> Self { + self.format = Some(format); + self + } + + /// Used to control how long a model stays loaded in memory, by default models are unloaded after 5 minutes of inactivity + pub fn keep_alive(mut self, keep_alive: KeepAlive) -> Self { + self.keep_alive = Some(keep_alive); + self + } +} diff --git a/crates/ollama-rs/src/generation/embeddings.rs b/crates/ollama-rs/src/generation/embeddings.rs new file mode 100644 index 0000000..f06eeb9 --- /dev/null +++ b/crates/ollama-rs/src/generation/embeddings.rs @@ -0,0 +1,60 @@ +use serde::{Deserialize, Serialize}; + +use crate::Ollama; + +use super::options::GenerationOptions; + +impl Ollama { + /// Generate embeddings from a model + /// * `model_name` - Name of model to generate embeddings from + /// * `prompt` - Prompt to generate embeddings for + pub async fn generate_embeddings( + &self, + model_name: String, + prompt: String, + options: Option, + ) -> crate::error::Result { + let request = GenerateEmbeddingsRequest { + model_name, + prompt, + options, + }; + + let url = format!("{}api/embeddings", self.url_str()); + let serialized = serde_json::to_string(&request).map_err(|e| e.to_string())?; + let res = self + .reqwest_client + .post(url) + .body(serialized) + .send() + .await + .map_err(|e| e.to_string())?; + + if !res.status().is_success() { + return Err(res.text().await.unwrap_or_else(|e| e.to_string()).into()); + } + + let res = res.bytes().await.map_err(|e| e.to_string())?; + let res = serde_json::from_slice::(&res) + .map_err(|e| e.to_string())?; + + Ok(res) + } +} + +/// An embeddings generation request to Ollama. +#[derive(Debug, Serialize)] +struct GenerateEmbeddingsRequest { + #[serde(rename = "model")] + model_name: String, + prompt: String, + options: Option, +} + +/// An embeddings generation response from Ollama. 
+#[derive(Debug, Deserialize, Clone)] +pub struct GenerateEmbeddingsResponse { + #[serde(rename = "embedding")] + #[allow(dead_code)] + pub embeddings: Vec, +} diff --git a/crates/ollama-rs/src/generation/functions/mod.rs b/crates/ollama-rs/src/generation/functions/mod.rs new file mode 100644 index 0000000..8fd78c5 --- /dev/null +++ b/crates/ollama-rs/src/generation/functions/mod.rs @@ -0,0 +1,106 @@ +pub mod pipelines; +pub mod request; +pub mod tools; + +pub use crate::generation::functions::pipelines::nous_hermes::request::NousFunctionCall; +pub use crate::generation::functions::pipelines::openai::request::OpenAIFunctionCall; +pub use crate::generation::functions::request::FunctionCallRequest; +pub use tools::DDGSearcher; +pub use tools::Scraper; +pub use tools::StockScraper; + +use crate::error::OllamaError; +use crate::generation::chat::request::ChatMessageRequest; +use crate::generation::chat::{ChatMessage, ChatMessageResponse}; +use crate::generation::functions::pipelines::RequestParserBase; +use crate::generation::functions::tools::Tool; +use std::sync::Arc; + +#[cfg(feature = "function-calling")] +impl crate::Ollama { + fn has_system_prompt(&self, messages: &[ChatMessage], system_prompt: &str) -> bool { + let system_message = messages.first().unwrap().clone(); + system_message.content == system_prompt + } + + fn has_system_prompt_history(&mut self) -> bool { + self.get_messages_history("default").is_some() + } + + #[cfg(feature = "chat-history")] + pub async fn send_function_call_with_history( + &mut self, + request: FunctionCallRequest, + parser: Arc, + id: String, + ) -> Result { + let mut request = request; + + if !self.has_system_prompt_history() { + let system_prompt = parser.get_system_message(&request.tools).await; + self.set_system_response(id.clone(), system_prompt.content); + + //format input + let formatted_query = ChatMessage::user( + parser.format_query(&request.chat.messages.first().unwrap().content), + ); + //replace with formatted_query 
with previous chat_message + request.chat.messages.remove(0); + request.chat.messages.insert(0, formatted_query); + } + + let tool_call_result = self + .send_chat_messages_with_history( + ChatMessageRequest::new(request.chat.model_name.clone(), request.chat.messages), + id.clone(), + ) + .await?; + + let tool_call_content: String = tool_call_result.message.clone().unwrap().content; + let result = parser + .parse( + &tool_call_content, + request.chat.model_name.clone(), + request.tools, + ) + .await; + + match result { + Ok(r) => { + self.add_assistant_response(id.clone(), r.message.clone().unwrap().content); + Ok(r) + } + Err(e) => { + self.add_assistant_response(id.clone(), e.message.clone().unwrap().content); + Err(OllamaError::from(e.message.unwrap().content)) + } + } + } + + pub async fn send_function_call( + &self, + request: FunctionCallRequest, + parser: Arc, + ) -> Result { + let mut request = request; + + request.chat.stream = false; + let system_prompt = parser.get_system_message(&request.tools).await; + let model_name = request.chat.model_name.clone(); + + //Make sure the first message in chat is the system prompt + if !self.has_system_prompt(&request.chat.messages, &system_prompt.content) { + request.chat.messages.insert(0, system_prompt); + } + let result = self.send_chat_messages(request.chat).await?; + let response_content: String = result.message.clone().unwrap().content; + + let result = parser + .parse(&response_content, model_name, request.tools) + .await; + match result { + Ok(r) => Ok(r), + Err(e) => Err(OllamaError::from(e.message.unwrap().content)), + } + } +} diff --git a/crates/ollama-rs/src/generation/functions/pipelines/mod.rs b/crates/ollama-rs/src/generation/functions/pipelines/mod.rs new file mode 100644 index 0000000..057e347 --- /dev/null +++ b/crates/ollama-rs/src/generation/functions/pipelines/mod.rs @@ -0,0 +1,26 @@ +use crate::error::OllamaError; +use crate::generation::chat::{ChatMessage, ChatMessageResponse}; +use 
crate::generation::functions::tools::Tool; +use async_trait::async_trait; +use std::sync::Arc; + +pub mod nous_hermes; +pub mod openai; + +#[async_trait] +pub trait RequestParserBase { + async fn parse( + &self, + input: &str, + model_name: String, + tools: Vec>, + ) -> Result; + fn format_query(&self, input: &str) -> String { + input.to_string() + } + fn format_response(&self, response: &str) -> String { + response.to_string() + } + async fn get_system_message(&self, tools: &[Arc]) -> ChatMessage; + fn error_handler(&self, error: OllamaError) -> ChatMessageResponse; +} diff --git a/crates/ollama-rs/src/generation/functions/pipelines/nous_hermes/mod.rs b/crates/ollama-rs/src/generation/functions/pipelines/nous_hermes/mod.rs new file mode 100644 index 0000000..23c05af --- /dev/null +++ b/crates/ollama-rs/src/generation/functions/pipelines/nous_hermes/mod.rs @@ -0,0 +1,4 @@ +pub mod prompts; +pub mod request; + +pub use prompts::DEFAULT_SYSTEM_TEMPLATE; diff --git a/crates/ollama-rs/src/generation/functions/pipelines/nous_hermes/prompts.rs b/crates/ollama-rs/src/generation/functions/pipelines/nous_hermes/prompts.rs new file mode 100644 index 0000000..fc0080b --- /dev/null +++ b/crates/ollama-rs/src/generation/functions/pipelines/nous_hermes/prompts.rs @@ -0,0 +1,67 @@ +pub const DEFAULT_SYSTEM_TEMPLATE: &str = r#" +Role: | + You are a function calling AI agent with self-recursion. + You can call only one function at a time and analyse data you get from function response. + You are provided with function signatures within XML tags. + The current date is: {date}. +Objective: | + You may use agentic frameworks for reasoning and planning to help with user query. + Please call a function and wait for function results to be provided to you in the next iteration. + Don't make assumptions about what values to plug into function arguments. + Once you have called a function, results will be fed back to you within XML tags. 
// System prompt for the Nous-Hermes function-calling dialect.
// NOTE(review): the extraction stripped every angle-bracketed span; the
// <tools>/<tool_call>/<tool_response> tags and <...> placeholders below are
// reconstructed — verify against upstream ollama-rs. Upstream typos fixed:
// "anlysis", "doulbe quotes", "adding the your analysis",
// "shouldn't not make up".
pub const DEFAULT_SYSTEM_TEMPLATE: &str = r#"
Role: |
  You are a function calling AI agent with self-recursion.
  You can call only one function at a time and analyse data you get from function response.
  You are provided with function signatures within <tools></tools> XML tags.
  The current date is: {date}.
Objective: |
  You may use agentic frameworks for reasoning and planning to help with user query.
  Please call a function and wait for function results to be provided to you in the next iteration.
  Don't make assumptions about what values to plug into function arguments.
  Once you have called a function, results will be fed back to you within <tool_response></tool_response> XML tags.
  Don't make assumptions about tool results if <tool_response> XML tags are not present since function hasn't been executed yet.
  Analyze the data once you get the results and call another function.
  At each iteration please continue adding your analysis to previous summary.
  Your final response should directly answer the user query with an analysis or summary of the results of function calls.
Tools: |
  Here are the available tools:
  <tools> {tools} </tools>
  If the provided function signatures don't have the function you must call, you may write executable rust code in markdown syntax and call code_interpreter() function as follows:
  <tool_call>
  {"arguments": {"code_markdown": <rust-code>, "name": "code_interpreter"}}
  </tool_call>
  Make sure that the json object above with code markdown block is parseable with json.loads() and the XML block with XML ElementTree.
Examples: |
  Here are some example usage of functions:
  [
    {
      "example": "```\nSYSTEM: You are a helpful assistant who has access to functions. Use them if required\n<tools>\n[\n {\n \"name\": \"calculate_distance\",\n \"description\": \"Calculate the distance between two locations\",\n \"parameters\": {\n \"type\": \"object\",\n \"properties\": {\n \"origin\": {\n \"type\": \"string\",\n \"description\": \"The starting location\"\n },\n \"destination\": {\n \"type\": \"string\",\n \"description\": \"The destination location\"\n },\n \"mode\": {\n \"type\": \"string\",\n \"description\": \"The mode of transportation\"\n }\n },\n \"required\": [\n \"origin\",\n \"destination\",\n \"mode\"\n ]\n }\n },\n {\n \"name\": \"generate_password\",\n \"description\": \"Generate a random password\",\n \"parameters\": {\n \"type\": \"object\",\n \"properties\": {\n \"length\": {\n \"type\": \"integer\",\n \"description\": \"The length of the password\"\n }\n },\n \"required\": [\n \"length\"\n ]\n }\n }\n]\n</tools>\n\nUSER: Hi, I need to know the distance from New York to Los Angeles by car.\nASSISTANT:\n<tool_call>\n{\"arguments\": {\"origin\": \"New York\",\n \"destination\": \"Los Angeles\", \"mode\": \"car\"}, \"name\": \"calculate_distance\"}\n</tool_call>\n```\n"
    },
    {
      "example": "```\nSYSTEM: You are a helpful assistant with access to functions. Use them if required\n<tools>\n[\n {\n \"name\": \"calculate_distance\",\n \"description\": \"Calculate the distance between two locations\",\n \"parameters\": {\n \"type\": \"object\",\n \"properties\": {\n \"origin\": {\n \"type\": \"string\",\n \"description\": \"The starting location\"\n },\n \"destination\": {\n \"type\": \"string\",\n \"description\": \"The destination location\"\n },\n \"mode\": {\n \"type\": \"string\",\n \"description\": \"The mode of transportation\"\n }\n },\n \"required\": [\n \"origin\",\n \"destination\",\n \"mode\"\n ]\n }\n },\n {\n \"name\": \"generate_password\",\n \"description\": \"Generate a random password\",\n \"parameters\": {\n \"type\": \"object\",\n \"properties\": {\n \"length\": {\n \"type\": \"integer\",\n \"description\": \"The length of the password\"\n }\n },\n \"required\": [\n \"length\"\n ]\n }\n }\n]\n</tools>\n\nUSER: Can you help me generate a random password with a length of 8 characters?\nASSISTANT:\n<tool_call>\n{\"arguments\": {\"length\": 8}, \"name\": \"generate_password\"}\n</tool_call>\n```"
    }
  ]
Schema: |
  Use the following pydantic model json schema for each tool call you will make:
  {
    "name": "tool name",
    "description": "tool description",
    "parameters": {
      "type": "object",
      "properties": {
        "parameter1": {
          "type": "string",
          "description": "parameter description"
        },
        "parameter2": {
          "type": "string",
          "description": "parameter description"
        }
      },
      "required": [
        "parameter1",
        "parameter2"
      ]
    }
  }
Instructions: |
  At the very first turn you don't have <tool_results> so you shouldn't make up the results.
  Please keep a running summary with analysis of previous function results and summaries from previous iterations.
  Do not stop calling functions until the task has been accomplished or you've reached max iteration of 10.
  Calling multiple functions at once can overload the system and increase cost so call one function at a time please.
  If you plan to continue with analysis, always call another function.
  For each function call return a valid json object (using double quotes) with function name and arguments within <tool_call></tool_call> XML tags as follows:
  <tool_call>
  {"arguments": <args-dict>, "name": <function-name>}
  </tool_call>
"#;
new() -> Self { + Self {} + } + + pub async fn function_call_with_history( + &self, + model_name: String, + tool_params: Value, + tool: Arc, + ) -> Result { + let result = tool.run(tool_params).await; + match result { + Ok(result) => Ok(ChatMessageResponse { + model: model_name.clone(), + created_at: "".to_string(), + message: Some(ChatMessage::assistant(self.format_tool_response(&result))), + done: true, + final_data: None, + }), + Err(e) => Err(self.error_handler(OllamaError::from(e))), + } + } + + pub fn format_tool_response(&self, function_response: &str) -> String { + format!("\n{}\n\n", function_response) + } + + pub fn extract_tool_call(&self, content: &str) -> Option { + let re = Regex::new(r"(?s)(.*?)").unwrap(); + if let Some(captures) = re.captures(content) { + if let Some(matched) = captures.get(1) { + let result = matched + .as_str() + .replace('\n', "") + .replace("{{", "{") + .replace("}}", "}"); + return Some(result); + } + } + None + } +} + +#[async_trait] +impl RequestParserBase for NousFunctionCall { + async fn parse( + &self, + input: &str, + model_name: String, + tools: Vec>, + ) -> Result { + //Extract between and + let tool_response = self.extract_tool_call(input); + match tool_response { + Some(tool_response_str) => { + let response_value: Result = + serde_json::from_str(&tool_response_str); + match response_value { + Ok(response) => { + if let Some(tool) = tools.iter().find(|t| t.name() == response.name) { + let tool_params = response.arguments; + let result = self + .function_call_with_history( + model_name.clone(), + tool_params.clone(), + tool.clone(), + ) + .await?; //Error is also returned as String for LLM feedback + return Ok(result); + } else { + return Err(self.error_handler(OllamaError::from( + "Tool name not found".to_string(), + ))); + } + } + Err(e) => return Err(self.error_handler(OllamaError::from(e))), + } + } + None => { + return Err(self.error_handler(OllamaError::from( + "Error while extracting tags.".to_string(), + ))) + 
} + } + } + + fn format_query(&self, input: &str) -> String { + format!( + "{}\nThis is the first turn and you don't have to analyze yet", + input + ) + } + + fn format_response(&self, response: &str) -> String { + format!("Agent iteration to assist with user query: {}", response) + } + + async fn get_system_message(&self, tools: &[Arc]) -> ChatMessage { + let tools_info: Vec = tools.iter().map(convert_to_openai_tool).collect(); + let tools_json = serde_json::to_string(&tools_info).unwrap(); + let system_message_content = DEFAULT_SYSTEM_TEMPLATE.replace("{tools}", &tools_json); + ChatMessage::system(system_message_content) + } + + fn error_handler(&self, error: OllamaError) -> ChatMessageResponse { + let error_message = format!( + "\nThere was an error parsing function calls\n Here's the error stack trace: {}\nPlease call the function again with correct syntax", + error + ); + + ChatMessageResponse { + model: "".to_string(), + created_at: "".to_string(), + message: Some(ChatMessage::assistant(error_message)), + done: true, + final_data: None, + } + } +} diff --git a/crates/ollama-rs/src/generation/functions/pipelines/openai/mod.rs b/crates/ollama-rs/src/generation/functions/pipelines/openai/mod.rs new file mode 100644 index 0000000..1c37516 --- /dev/null +++ b/crates/ollama-rs/src/generation/functions/pipelines/openai/mod.rs @@ -0,0 +1,4 @@ +pub mod prompts; +pub mod request; + +pub use prompts::{DEFAULT_RESPONSE_FUNCTION, DEFAULT_SYSTEM_TEMPLATE}; diff --git a/crates/ollama-rs/src/generation/functions/pipelines/openai/prompts.rs b/crates/ollama-rs/src/generation/functions/pipelines/openai/prompts.rs new file mode 100644 index 0000000..d497aa0 --- /dev/null +++ b/crates/ollama-rs/src/generation/functions/pipelines/openai/prompts.rs @@ -0,0 +1,32 @@ +pub const DEFAULT_SYSTEM_TEMPLATE: &str = r#" +You are a function calling AI agent with self-recursion. +You can call only one function at a time and analyse data you get from function response. 
+You have access to the following tools: + +{tools} + +Don't make assumptions about what values to plug into function arguments. +You must always select one of the above tools and respond with only a JSON object matching the following schema: + +{ + "name": , + "arguments": +} +"#; + +pub const DEFAULT_RESPONSE_FUNCTION: &str = r#" +{ + "name": "__conversational_response", + "description": "Respond conversationally if no other tools should be called for a given query.", + "parameters": { + "type": "object", + "properties": { + "response": { + "type": "string", + "description": "Conversational response to the user." + } + }, + "required": ["response"] + } +} +"#; diff --git a/crates/ollama-rs/src/generation/functions/pipelines/openai/request.rs b/crates/ollama-rs/src/generation/functions/pipelines/openai/request.rs new file mode 100644 index 0000000..4a42cd3 --- /dev/null +++ b/crates/ollama-rs/src/generation/functions/pipelines/openai/request.rs @@ -0,0 +1,113 @@ +use crate::error::OllamaError; +use crate::generation::chat::{ChatMessage, ChatMessageResponse}; +use crate::generation::functions::pipelines::openai::DEFAULT_SYSTEM_TEMPLATE; +use crate::generation::functions::pipelines::RequestParserBase; +use crate::generation::functions::tools::Tool; +use async_trait::async_trait; +use serde::{Deserialize, Serialize}; +use serde_json::{json, Map, Value}; +use std::collections::HashMap; +use std::sync::Arc; + +pub fn convert_to_openai_tool(tool: &Arc) -> Value { + let mut function = HashMap::new(); + function.insert("name".to_string(), Value::String(tool.name())); + function.insert("description".to_string(), Value::String(tool.description())); + function.insert("parameters".to_string(), tool.parameters()); + + let mut result = HashMap::new(); + result.insert("type".to_string(), Value::String("function".to_string())); + + let mapp: Map = function.into_iter().collect(); + result.insert("function".to_string(), Value::Object(mapp)); + + json!(result) +} + +#[derive(Debug, 
Deserialize, Serialize)] +pub struct OpenAIFunctionCallSignature { + pub name: String, //name of the tool + pub arguments: Value, +} + +pub struct OpenAIFunctionCall {} + +impl OpenAIFunctionCall { + pub async fn function_call_with_history( + &self, + model_name: String, + tool_params: Value, + tool: Arc, + ) -> Result { + let result = tool.run(tool_params).await; + match result { + Ok(result) => Ok(ChatMessageResponse { + model: model_name.clone(), + created_at: "".to_string(), + message: Some(ChatMessage::assistant(result.to_string())), + done: true, + final_data: None, + }), + Err(e) => Err(self.error_handler(OllamaError::from(e))), + } + } + + fn clean_tool_call(&self, json_str: &str) -> String { + json_str + .trim() + .trim_start_matches("```json") + .trim_end_matches("```") + .trim() + .to_string() + } +} + +#[async_trait] +impl RequestParserBase for OpenAIFunctionCall { + async fn parse( + &self, + input: &str, + model_name: String, + tools: Vec>, + ) -> Result { + let response_value: Result = + serde_json::from_str(&self.clean_tool_call(input)); + match response_value { + Ok(response) => { + if let Some(tool) = tools.iter().find(|t| t.name() == response.name) { + let tool_params = response.arguments; + let result = self + .function_call_with_history( + model_name.clone(), + tool_params.clone(), + tool.clone(), + ) + .await?; + return Ok(result); + } else { + return Err(self.error_handler(OllamaError::from("Tool not found".to_string()))); + } + } + Err(e) => { + return Err(self.error_handler(OllamaError::from(e))); + } + } + } + + async fn get_system_message(&self, tools: &[Arc]) -> ChatMessage { + let tools_info: Vec = tools.iter().map(convert_to_openai_tool).collect(); + let tools_json = serde_json::to_string(&tools_info).unwrap(); + let system_message_content = DEFAULT_SYSTEM_TEMPLATE.replace("{tools}", &tools_json); + ChatMessage::system(system_message_content) + } + + fn error_handler(&self, error: OllamaError) -> ChatMessageResponse { + 
ChatMessageResponse { + model: "".to_string(), + created_at: "".to_string(), + message: Some(ChatMessage::assistant(error.to_string())), + done: true, + final_data: None, + } + } +} diff --git a/crates/ollama-rs/src/generation/functions/request.rs b/crates/ollama-rs/src/generation/functions/request.rs new file mode 100644 index 0000000..44da42e --- /dev/null +++ b/crates/ollama-rs/src/generation/functions/request.rs @@ -0,0 +1,36 @@ +use crate::generation::chat::request::ChatMessageRequest; +use crate::generation::chat::ChatMessage; +use crate::generation::functions::Tool; +use crate::generation::{options::GenerationOptions, parameters::FormatType}; +use std::sync::Arc; + +#[derive(Clone)] +pub struct FunctionCallRequest { + pub chat: ChatMessageRequest, + pub tools: Vec>, +} + +impl FunctionCallRequest { + pub fn new(model_name: String, tools: Vec>, messages: Vec) -> Self { + let chat = ChatMessageRequest::new(model_name, messages); + Self { chat, tools } + } + + /// Additional model parameters listed in the documentation for the Modelfile + pub fn options(mut self, options: GenerationOptions) -> Self { + self.chat.options = Some(options); + self + } + + /// The full prompt or prompt template (overrides what is defined in the Modelfile) + pub fn template(mut self, template: String) -> Self { + self.chat.template = Some(template); + self + } + + // The format to return a response in. 
Currently the only accepted value is `json` + pub fn format(mut self, format: FormatType) -> Self { + self.chat.format = Some(format); + self + } +} diff --git a/crates/ollama-rs/src/generation/functions/tools/finance.rs b/crates/ollama-rs/src/generation/functions/tools/finance.rs new file mode 100644 index 0000000..9161af4 --- /dev/null +++ b/crates/ollama-rs/src/generation/functions/tools/finance.rs @@ -0,0 +1,98 @@ +use crate::generation::functions::tools::Tool; +use async_trait::async_trait; +use reqwest::Client; +use scraper::{Html, Selector}; +use serde_json::{json, Value}; +use std::collections::HashMap; +use std::error::Error; + +pub struct StockScraper { + base_url: String, + language: String, +} + +impl Default for StockScraper { + fn default() -> Self { + Self::new() + } +} + +impl StockScraper { + pub fn new() -> Self { + StockScraper { + base_url: "https://www.google.com/finance".to_string(), + language: "en".to_string(), + } + } + + // Changed to an async function + pub async fn scrape( + &self, + exchange: &str, + ticker: &str, + ) -> Result, Box> { + let target_url = format!( + "{}/quote/{}:{}?hl={}", + self.base_url, ticker, exchange, self.language + ); + let client = Client::new(); + let response = client.get(&target_url).send().await?; // Make the request asynchronously + let content = response.text().await?; // Asynchronously get the text of the response + let document = Html::parse_document(&content); + + let items_selector = Selector::parse("div.gyFHrc").unwrap(); + let desc_selector = Selector::parse("div.mfs7Fc").unwrap(); + let value_selector = Selector::parse("div.P6K39c").unwrap(); + + let mut stock_description = HashMap::new(); + + for item in document.select(&items_selector) { + if let Some(item_description) = item.select(&desc_selector).next() { + if let Some(item_value) = item.select(&value_selector).next() { + stock_description.insert( + item_description.text().collect::>().join(""), + item_value.text().collect::>().join(""), + ); + 
} + } + } + + Ok(stock_description) + } +} + +#[async_trait] +impl Tool for StockScraper { + fn name(&self) -> String { + "Stock Scraper".to_string() + } + + fn description(&self) -> String { + "Scrapes stock information from Google Finance.".to_string() + } + + fn parameters(&self) -> Value { + json!({ + "type": "object", + "properties": { + "exchange": { + "type": "string", + "description": "The stock exchange market identifier code (MIC)" + }, + "ticker": { + "type": "string", + "description": "The ticker symbol of the stock" + } + }, + "required": ["exchange", "ticker"] + }) + } + + async fn run(&self, input: Value) -> Result> { + let exchange = input["exchange"].as_str().ok_or("Exchange is required")?; + let ticker = input["ticker"].as_str().ok_or("Ticker is required")?; + + let result = self.scrape(exchange, ticker).await?; + Ok(serde_json::to_string(&result)?) + } +} diff --git a/crates/ollama-rs/src/generation/functions/tools/mod.rs b/crates/ollama-rs/src/generation/functions/tools/mod.rs new file mode 100644 index 0000000..b5f4223 --- /dev/null +++ b/crates/ollama-rs/src/generation/functions/tools/mod.rs @@ -0,0 +1,59 @@ +pub mod finance; +pub mod scraper; +pub mod search_ddg; + +pub use self::finance::StockScraper; +pub use self::scraper::Scraper; +pub use self::search_ddg::DDGSearcher; + +use async_trait::async_trait; +use serde_json::{json, Value}; +use std::error::Error; +use std::string::String; + +#[async_trait] +pub trait Tool: Send + Sync { + /// Returns the name of the tool. + fn name(&self) -> String; + + /// Provides a description of what the tool does and when to use it. + fn description(&self) -> String; + + /// Returns the parameters for OpenAI-like function call. 
+ fn parameters(&self) -> Value { + json!({ + "type": "object", + "properties": { + "input": { + "type": "string", + "description": self.description() + } + }, + "required": ["input"] + }) + } + + /// Processes an input string and executes the tool's functionality, returning a `Result`. + async fn call(&self, input: &str) -> Result> { + let input = self.parse_input(input).await; + self.run(input).await + } + + /// Executes the core functionality of the tool. + async fn run(&self, input: Value) -> Result>; + + /// Parses the input string. + async fn parse_input(&self, input: &str) -> Value { + log::info!("Using default implementation: {}", input); + match serde_json::from_str::(input) { + Ok(input) => { + if input["input"].is_string() { + Value::String(input["input"].as_str().unwrap().to_string()) + } else { + Value::String(input.to_string()) + } + } + Err(_) => Value::String(input.to_string()), + } + } +} diff --git a/crates/ollama-rs/src/generation/functions/tools/scraper.rs b/crates/ollama-rs/src/generation/functions/tools/scraper.rs new file mode 100644 index 0000000..2398e72 --- /dev/null +++ b/crates/ollama-rs/src/generation/functions/tools/scraper.rs @@ -0,0 +1,63 @@ +use crate::generation::functions::tools::Tool; +use async_trait::async_trait; +use reqwest::Client; +use scraper::{Html, Selector}; +use serde_json::{json, Value}; +use std::error::Error; + +pub struct Scraper {} + +impl Default for Scraper { + fn default() -> Self { + Self::new() + } +} + +impl Scraper { + pub fn new() -> Self { + Self {} + } +} + +#[async_trait] +impl Tool for Scraper { + fn name(&self) -> String { + "Website Scraper".to_string() + } + + fn description(&self) -> String { + "Scrapes text content from websites and splits it into manageable chunks.".to_string() + } + + fn parameters(&self) -> Value { + json!({ + "type": "object", + "properties": { + "website": { + "type": "string", + "description": "The URL of the website to scrape" + } + }, + "required": ["website"] + }) + } + + 
async fn run(&self, input: Value) -> Result> { + let website = input["website"].as_str().ok_or("Website URL is required")?; + let client = Client::new(); + let response = client.get(website).send().await?.text().await?; + + let document = Html::parse_document(&response); + let selector = Selector::parse("p, h1, h2, h3, h4, h5, h6").unwrap(); + let elements: Vec = document + .select(&selector) + .map(|el| el.text().collect::>().join(" ")) + .collect(); + let body = elements.join(" "); + + let sentences: Vec = body.split(". ").map(|s| s.to_string()).collect(); + let formatted_content = sentences.join("\n\n"); + + Ok(formatted_content) + } +} diff --git a/crates/ollama-rs/src/generation/functions/tools/search_ddg.rs b/crates/ollama-rs/src/generation/functions/tools/search_ddg.rs new file mode 100644 index 0000000..6793bb2 --- /dev/null +++ b/crates/ollama-rs/src/generation/functions/tools/search_ddg.rs @@ -0,0 +1,127 @@ +use reqwest; + +use scraper::{Html, Selector}; +use std::error::Error; + +use crate::generation::functions::tools::Tool; +use async_trait::async_trait; +use serde::{Deserialize, Serialize}; +use serde_json::{json, Value}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SearchResult { + title: String, + link: String, + snippet: String, +} + +pub struct DDGSearcher { + pub client: reqwest::Client, + pub base_url: String, +} + +impl Default for DDGSearcher { + fn default() -> Self { + Self::new() + } +} + +impl DDGSearcher { + pub fn new() -> Self { + DDGSearcher { + client: reqwest::Client::new(), + base_url: "https://duckduckgo.com".to_string(), + } + } + + pub async fn search(&self, query: &str) -> Result, Box> { + let url = format!("{}/html/?q={}", self.base_url, query); + let resp = self.client.get(&url).send().await?; + let body = resp.text().await?; + let document = Html::parse_document(&body); + + let result_selector = Selector::parse(".web-result").unwrap(); + let result_title_selector = Selector::parse(".result__a").unwrap(); + 
let result_url_selector = Selector::parse(".result__url").unwrap(); + let result_snippet_selector = Selector::parse(".result__snippet").unwrap(); + + let results = document + .select(&result_selector) + .map(|result| { + let title = result + .select(&result_title_selector) + .next() + .unwrap() + .text() + .collect::>() + .join(""); + let link = result + .select(&result_url_selector) + .next() + .unwrap() + .text() + .collect::>() + .join("") + .trim() + .to_string(); + let snippet = result + .select(&result_snippet_selector) + .next() + .unwrap() + .text() + .collect::>() + .join(""); + + SearchResult { + title, + link, + //url: String::from(url.value().attr("href").unwrap()), + snippet, + } + }) + .collect::>(); + + Ok(results) + } +} + +#[async_trait] +impl Tool for DDGSearcher { + fn name(&self) -> String { + "DDG Searcher".to_string() + } + + fn description(&self) -> String { + "Searches the web using DuckDuckGo's HTML interface.".to_string() + } + + fn parameters(&self) -> Value { + json!({ + "description": "This tool lets you search the web using DuckDuckGo. 
The input should be a search query.", + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "The search query to send to DuckDuckGo" + } + }, + "required": ["query"] + }) + } + + async fn call(&self, input: &str) -> Result> { + let input_value = self.parse_input(input).await; + self.run(input_value).await + } + + async fn run(&self, input: Value) -> Result> { + let query = input["query"].as_str().unwrap(); + let results = self.search(query).await?; + let results_json = serde_json::to_string(&results)?; + Ok(results_json) + } + + async fn parse_input(&self, input: &str) -> Value { + Tool::parse_input(self, input).await + } +} diff --git a/crates/ollama-rs/src/generation/images.rs b/crates/ollama-rs/src/generation/images.rs new file mode 100644 index 0000000..7a670f8 --- /dev/null +++ b/crates/ollama-rs/src/generation/images.rs @@ -0,0 +1,10 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Image(String); + +impl Image { + pub fn from_base64(base64: &str) -> Self { + Self(base64.to_string()) + } +} diff --git a/crates/ollama-rs/src/generation/options.rs b/crates/ollama-rs/src/generation/options.rs new file mode 100644 index 0000000..7054a2a --- /dev/null +++ b/crates/ollama-rs/src/generation/options.rs @@ -0,0 +1,120 @@ +use serde::{Deserialize, Serialize}; + +/// Options for generation requests to Ollama. 
+#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct GenerationOptions { + pub(super) mirostat: Option, + pub(super) mirostat_eta: Option, + pub(super) mirostat_tau: Option, + pub(super) num_ctx: Option, + pub(super) num_gqa: Option, + pub(super) num_gpu: Option, + pub(super) num_thread: Option, + pub(super) repeat_last_n: Option, + pub(super) repeat_penalty: Option, + pub(super) temperature: Option, + pub(super) seed: Option, + pub(super) stop: Option>, + pub(super) tfs_z: Option, + pub(super) num_predict: Option, + pub(super) top_k: Option, + pub(super) top_p: Option, +} + +impl GenerationOptions { + /// Enable Mirostat sampling for controlling perplexity. (default: 0, 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0) + pub fn mirostat(mut self, mirostat: u8) -> Self { + self.mirostat = Some(mirostat); + self + } + + /// Influences how quickly the algorithm responds to feedback from the generated text. A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive. (Default: 0.1) + pub fn mirostat_eta(mut self, mirostat_eta: f32) -> Self { + self.mirostat_eta = Some(mirostat_eta); + self + } + + /// Controls the balance between coherence and diversity of the output. A lower value will result in more focused and coherent text. (Default: 5.0) + pub fn mirostat_tau(mut self, mirostat_tau: f32) -> Self { + self.mirostat_tau = Some(mirostat_tau); + self + } + + /// Sets the size of the context window used to generate the next token. (Default: 2048) + pub fn num_ctx(mut self, num_ctx: u32) -> Self { + self.num_ctx = Some(num_ctx); + self + } + + /// The number of GQA groups in the transformer layer. Required for some models, for example it is 8 for llama2:70b + pub fn num_gqa(mut self, num_gqa: u32) -> Self { + self.num_gqa = Some(num_gqa); + self + } + + /// The number of layers to send to the GPU(s). On macOS it defaults to 1 to enable metal support, 0 to disable. 
+ pub fn num_gpu(mut self, num_gpu: u32) -> Self { + self.num_gpu = Some(num_gpu); + self + } + + /// Sets the number of threads to use during computation. By default, Ollama will detect this for optimal performance. It is recommended to set this value to the number of physical CPU cores your system has (as opposed to the logical number of cores). + pub fn num_thread(mut self, num_thread: u32) -> Self { + self.num_thread = Some(num_thread); + self + } + + /// Sets how far back for the model to look back to prevent repetition. (Default: 64, 0 = disabled, -1 = num_ctx) + pub fn repeat_last_n(mut self, repeat_last_n: i32) -> Self { + self.repeat_last_n = Some(repeat_last_n); + self + } + + /// Sets how strongly to penalize repetitions. A higher value (e.g., 1.5) will penalize repetitions more strongly, while a lower value (e.g., 0.9) will be more lenient. (Default: 1.1) + pub fn repeat_penalty(mut self, repeat_penalty: f32) -> Self { + self.repeat_penalty = Some(repeat_penalty); + self + } + + /// The temperature of the model. Increasing the temperature will make the model answer more creatively. (Default: 0.8) + pub fn temperature(mut self, temperature: f32) -> Self { + self.temperature = Some(temperature); + self + } + + /// Sets the random number seed to use for generation. Setting this to a specific number will make the model generate the same text for the same prompt. (Default: 0) + pub fn seed(mut self, seed: i32) -> Self { + self.seed = Some(seed); + self + } + + /// Sets the stop sequences to use. When this pattern is encountered the LLM will stop generating text and return. Multiple stop patterns may be set by specifying multiple separate `stop` parameters in a modelfile. + pub fn stop(mut self, stop: Vec) -> Self { + self.stop = Some(stop); + self + } + + /// Tail free sampling is used to reduce the impact of less probable tokens from the output. A higher value (e.g., 2.0) will reduce the impact more, while a value of 1.0 disables this setting. 
(default: 1) + pub fn tfs_z(mut self, tfs_z: f32) -> Self { + self.tfs_z = Some(tfs_z); + self + } + + /// Maximum number of tokens to predict when generating text. (Default: 128, -1 = infinite generation, -2 = fill context) + pub fn num_predict(mut self, num_predict: i32) -> Self { + self.num_predict = Some(num_predict); + self + } + + /// Reduces the probability of generating nonsense. A higher value (e.g. 100) will give more diverse answers, while a lower value (e.g. 10) will be more conservative. (Default: 40) + pub fn top_k(mut self, top_k: u32) -> Self { + self.top_k = Some(top_k); + self + } + + /// Works together with top-k. A higher value (e.g., 0.95) will lead to more diverse text, while a lower value (e.g., 0.5) will generate more focused and conservative text. (Default: 0.9) + pub fn top_p(mut self, top_p: f32) -> Self { + self.top_p = Some(top_p); + self + } +} diff --git a/crates/ollama-rs/src/generation/parameters.rs b/crates/ollama-rs/src/generation/parameters.rs new file mode 100644 index 0000000..53f53cf --- /dev/null +++ b/crates/ollama-rs/src/generation/parameters.rs @@ -0,0 +1,51 @@ +use serde::{Deserialize, Serialize}; + +/// The format to return a response in. 
Currently the only accepted value is `json` +#[derive(Debug, Serialize, Deserialize, Clone)] +#[serde(rename_all = "lowercase")] +pub enum FormatType { + Json, +} + +/// Used to control how long a model stays loaded in memory, by default models are unloaded after 5 minutes of inactivity +#[derive(Debug, Clone)] +pub enum KeepAlive { + Indefinitely, + UnloadOnCompletion, + Until { time: u64, unit: TimeUnit }, +} + +impl Serialize for KeepAlive { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + match self { + KeepAlive::Indefinitely => serializer.serialize_i8(-1), + KeepAlive::UnloadOnCompletion => serializer.serialize_i8(0), + KeepAlive::Until { time, unit } => { + let mut s = String::new(); + s.push_str(&time.to_string()); + s.push_str(unit.to_symbol()); + serializer.serialize_str(&s) + } + } + } +} + +#[derive(Debug, Clone)] +pub enum TimeUnit { + Seconds, + Minutes, + Hours, +} + +impl TimeUnit { + pub fn to_symbol(&self) -> &'static str { + match self { + TimeUnit::Seconds => "s", + TimeUnit::Minutes => "m", + TimeUnit::Hours => "hr", + } + } +} diff --git a/crates/ollama-rs/src/history.rs b/crates/ollama-rs/src/history.rs new file mode 100644 index 0000000..2fe3eea --- /dev/null +++ b/crates/ollama-rs/src/history.rs @@ -0,0 +1,162 @@ +use std::collections::HashMap; + +use crate::{ + generation::chat::{ChatMessage, MessageRole}, + Ollama, +}; + +#[derive(Debug, Clone, Default)] +pub struct MessagesHistory { + pub(crate) messages_by_id: HashMap>, + pub(crate) messages_number_limit: u16, +} + +pub type WrappedMessageHistory = std::sync::Arc>; + +/// Store for messages history +impl MessagesHistory { + /// Generate a MessagesHistory + pub fn new(messages_number_limit: u16) -> WrappedMessageHistory { + std::sync::Arc::new(std::sync::RwLock::new(Self { + messages_by_id: HashMap::new(), + messages_number_limit: messages_number_limit.max(2), + })) + } + + /// Add message for entry even no history exists for an entry + pub fn 
add_message(&mut self, entry_id: impl ToString, message: ChatMessage) { + if message.content.is_empty() && message.images.is_none() { + return; + } + + let messages = self.messages_by_id.entry(entry_id.to_string()).or_default(); + + // Replacing the oldest message if the limit is reached + // The oldest message is the first one, unless it's a system message + if messages.len() >= self.messages_number_limit as usize { + let index_to_remove = messages + .first() + .map(|m| if m.role == MessageRole::System { 1 } else { 0 }) + .unwrap_or(0); + + messages.remove(index_to_remove); + } + + if message.role == MessageRole::System { + messages.insert(0, message); + } else { + messages.push(message); + } + } + + /// Get Option with list of ChatMessage + pub fn get_messages(&self, entry_id: impl ToString) -> Option<&Vec> { + self.messages_by_id.get(&entry_id.to_string()) + } + + /// Clear history for an entry + pub fn clear_messages_for_id(&mut self, entry_id: impl ToString) { + self.messages_by_id.remove(&entry_id.to_string()); + } + + /// Remove a whole history + pub fn clear_all_messages(&mut self) { + self.messages_by_id = HashMap::new(); + } +} + +impl Ollama { + /// Create default instance with chat history + pub fn new_default_with_history(messages_number_limit: u16) -> Self { + Self { + messages_history: Some(MessagesHistory::new(messages_number_limit)), + ..Default::default() + } + } + + /// Create new instance with chat history + /// + /// # Panics + /// + /// Panics if the host is not a valid URL or if the URL cannot have a port. + pub fn new_with_history( + host: impl crate::IntoUrl, + port: u16, + messages_number_limit: u16, + ) -> Self { + let mut url = host.into_url().unwrap(); + url.set_port(Some(port)).unwrap(); + Self::new_with_history_from_url(url, messages_number_limit) + } + + /// Create new instance with chat history from a [`url::Url`]. 
+ #[inline] + pub fn new_with_history_from_url(url: url::Url, messages_number_limit: u16) -> Self { + Self { + url, + ..Ollama::new_default_with_history(messages_number_limit) + } + } + + #[inline] + pub fn try_new_with_history( + url: impl crate::IntoUrl, + messages_number_limit: u16, + ) -> Result { + Ok(Self { + url: url.into_url()?, + ..Ollama::new_default_with_history(messages_number_limit) + }) + } + + /// Add AI's message to a history + pub fn add_assistant_response(&mut self, entry_id: impl ToString, message: impl ToString) { + self.add_history_message(entry_id, ChatMessage::assistant(message.to_string())); + } + + /// Add user's message to a history + pub fn add_user_response(&mut self, entry_id: impl ToString, message: impl ToString) { + self.add_history_message(entry_id, ChatMessage::user(message.to_string())); + } + + /// Set system prompt for chat history + pub fn set_system_response(&mut self, entry_id: impl ToString, message: impl ToString) { + self.add_history_message(entry_id, ChatMessage::system(message.to_string())); + } + + /// Helper for message add to history + fn add_history_message(&mut self, entry_id: impl ToString, message: ChatMessage) { + if let Some(messages_history) = self.messages_history.as_mut() { + messages_history + .write() + .unwrap() + .add_message(entry_id, message); + } + } + + /// For tests purpose + /// Getting list of messages in a history + pub fn get_messages_history(&mut self, entry_id: impl ToString) -> Option> { + self.messages_history.clone().map(|message_history| { + message_history + .write() + .unwrap() + .get_messages(entry_id) + .cloned() + })? 
+ } + + /// Clear history for an entry + pub fn clear_messages_for_id(&mut self, entry_id: impl ToString) { + if let Some(history) = self.messages_history.clone() { + history.write().unwrap().clear_messages_for_id(entry_id) + } + } + + /// Remove a whole history + pub fn clear_all_messages(&mut self) { + if let Some(history) = self.messages_history.clone() { + history.write().unwrap().clear_all_messages() + } + } +} diff --git a/crates/ollama-rs/src/lib.rs b/crates/ollama-rs/src/lib.rs new file mode 100644 index 0000000..6e3369c --- /dev/null +++ b/crates/ollama-rs/src/lib.rs @@ -0,0 +1,152 @@ +#[cfg(feature = "chat-history")] +use crate::history::WrappedMessageHistory; +use url::Url; + +pub mod error; +pub mod generation; +#[cfg(feature = "chat-history")] +pub mod history; +pub mod models; + +/// A trait to try to convert some type into a [`Url`]. +/// +/// This trait is "sealed", such that only types within ollama-rs can +/// implement it. +pub trait IntoUrl: IntoUrlSealed {} + +impl IntoUrl for Url {} +impl IntoUrl for String {} +impl<'a> IntoUrl for &'a str {} +impl<'a> IntoUrl for &'a String {} + +pub trait IntoUrlSealed { + fn into_url(self) -> Result; + + fn as_str(&self) -> &str; +} + +impl IntoUrlSealed for Url { + fn into_url(self) -> Result { + Ok(self) + } + + fn as_str(&self) -> &str { + self.as_str() + } +} + +impl<'a> IntoUrlSealed for &'a str { + fn into_url(self) -> Result { + Url::parse(self)?.into_url() + } + + fn as_str(&self) -> &str { + self + } +} + +impl<'a> IntoUrlSealed for &'a String { + fn into_url(self) -> Result { + (&**self).into_url() + } + + fn as_str(&self) -> &str { + self.as_ref() + } +} + +impl IntoUrlSealed for String { + fn into_url(self) -> Result { + (&*self).into_url() + } + + fn as_str(&self) -> &str { + self.as_ref() + } +} + +#[derive(Debug, Clone)] +pub struct Ollama { + pub(crate) url: Url, + pub(crate) reqwest_client: reqwest::Client, + #[cfg(feature = "chat-history")] + pub(crate) messages_history: Option, +} + +impl 
Ollama { + /// # Panics + /// + /// Panics if the host is not a valid URL or if the URL cannot have a port. + pub fn new(host: impl IntoUrl, port: u16) -> Self { + let mut url: Url = host.into_url().unwrap(); + url.set_port(Some(port)).unwrap(); + + Self::from_url(url) + } + + /// Tries to create new instance by converting `url` into [`Url`]. + #[inline] + pub fn try_new(url: impl IntoUrl) -> Result { + Ok(Self::from_url(url.into_url()?)) + } + + /// Create new instance from a [`Url`]. + #[inline] + pub fn from_url(url: Url) -> Self { + Self { + url, + ..Default::default() + } + } + + /// Returns the http URI of the Ollama instance + /// + /// # Panics + /// + /// Panics if the URL does not have a host. + #[inline] + pub fn uri(&self) -> String { + self.url.host().unwrap().to_string() + } + + /// Returns the URL of the Ollama instance as a [`Url`]. + pub fn url(&self) -> &Url { + &self.url + } + + /// Returns the URL of the Ollama instance as a [str]. + /// + /// Syntax in pseudo-BNF: + /// + /// ```bnf + /// url = scheme ":" [ hierarchical | non-hierarchical ] [ "?" query ]? [ "#" fragment ]? + /// non-hierarchical = non-hierarchical-path + /// non-hierarchical-path = /* Does not start with "/" */ + /// hierarchical = authority? hierarchical-path + /// authority = "//" userinfo? host [ ":" port ]? + /// userinfo = username [ ":" password ]? "@" + /// hierarchical-path = [ "/" path-segment ]+ + /// ``` + #[inline] + pub fn url_str(&self) -> &str { + self.url.as_str() + } +} + +impl From for Ollama { + fn from(url: Url) -> Self { + Self::from_url(url) + } +} + +impl Default for Ollama { + /// Returns a default Ollama instance with the host set to `http://127.0.0.1:11434`. 
+ fn default() -> Self { + Self { + url: Url::parse("http://127.0.0.1:11434").unwrap(), + reqwest_client: reqwest::Client::new(), + #[cfg(feature = "chat-history")] + messages_history: None, + } + } +} diff --git a/crates/ollama-rs/src/models.rs b/crates/ollama-rs/src/models.rs new file mode 100644 index 0000000..b7cf06e --- /dev/null +++ b/crates/ollama-rs/src/models.rs @@ -0,0 +1,30 @@ +pub mod copy; +pub mod create; +pub mod delete; +pub mod list_local; +pub mod pull; +pub mod push; +pub mod show_info; + +use serde::{Deserialize, Serialize}; + +/// A local model pulled from Ollama. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct LocalModel { + pub name: String, + pub modified_at: String, + pub size: u64, +} + +/// A model's info. Some fields may be empty if the model does not have them. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ModelInfo { + #[serde(default = "String::new")] + pub license: String, + #[serde(default = "String::new")] + pub modelfile: String, + #[serde(default = "String::new")] + pub parameters: String, + #[serde(default = "String::new")] + pub template: String, +} diff --git a/crates/ollama-rs/src/models/copy.rs b/crates/ollama-rs/src/models/copy.rs new file mode 100644 index 0000000..65b7c16 --- /dev/null +++ b/crates/ollama-rs/src/models/copy.rs @@ -0,0 +1,40 @@ +use serde::Serialize; + +use crate::Ollama; + +impl Ollama { + /// Copy a model. Creates a model with another name from an existing model. 
+ pub async fn copy_model( + &self, + source: String, + destination: String, + ) -> crate::error::Result<()> { + let request = CopyModelRequest { + source, + destination, + }; + + let url = format!("{}api/copy", self.url_str()); + let serialized = serde_json::to_string(&request).map_err(|e| e.to_string())?; + let res = self + .reqwest_client + .post(url) + .body(serialized) + .send() + .await + .map_err(|e| e.to_string())?; + + if res.status().is_success() { + Ok(()) + } else { + Err(res.text().await.unwrap_or_else(|e| e.to_string()).into()) + } + } +} + +/// A copy model request to Ollama. +#[derive(Serialize)] +struct CopyModelRequest { + source: String, + destination: String, +} diff --git a/crates/ollama-rs/src/models/create.rs b/crates/ollama-rs/src/models/create.rs new file mode 100644 index 0000000..eb580a7 --- /dev/null +++ b/crates/ollama-rs/src/models/create.rs @@ -0,0 +1,124 @@ +use serde::{Deserialize, Serialize}; + +use crate::Ollama; + +/// A stream of `CreateModelStatus` objects +#[cfg(feature = "stream")] +pub type CreateModelStatusStream = std::pin::Pin< + Box> + Send>, +>; + +impl Ollama { + #[cfg(feature = "stream")] + /// Create a model with streaming, meaning that each new status will be streamed. 
+ pub async fn create_model_stream( + &self, + mut request: CreateModelRequest, + ) -> crate::error::Result { + use tokio_stream::StreamExt; + + use crate::error::OllamaError; + + request.stream = true; + + let url = format!("{}api/create", self.url_str()); + let serialized = serde_json::to_string(&request).map_err(|e| e.to_string())?; + let res = self + .reqwest_client + .post(url) + .body(serialized) + .send() + .await + .map_err(|e| e.to_string())?; + + if !res.status().is_success() { + return Err(res.text().await.unwrap_or_else(|e| e.to_string()).into()); + } + + let stream = Box::new(res.bytes_stream().map(|res| match res { + Ok(bytes) => { + let res = serde_json::from_slice::(&bytes); + match res { + Ok(res) => Ok(res), + Err(e) => { + let err = serde_json::from_slice::(&bytes); + match err { + Ok(err) => Err(err), + Err(_) => Err(OllamaError::from(format!( + "Failed to deserialize response: {}", + e + ))), + } + } + } + } + Err(e) => Err(OllamaError::from(format!("Failed to read response: {}", e))), + })); + + Ok(std::pin::Pin::from(stream)) + } + + /// Create a model with a single response, only the final status will be returned. + pub async fn create_model( + &self, + request: CreateModelRequest, + ) -> crate::error::Result { + let url = format!("{}api/create", self.url_str()); + let serialized = serde_json::to_string(&request).map_err(|e| e.to_string())?; + let res = self + .reqwest_client + .post(url) + .body(serialized) + .send() + .await + .map_err(|e| e.to_string())?; + + if !res.status().is_success() { + return Err(res.text().await.unwrap_or_else(|e| e.to_string()).into()); + } + + let res = res.bytes().await.map_err(|e| e.to_string())?; + let res = serde_json::from_slice::(&res).map_err(|e| e.to_string())?; + + Ok(res) + } +} + +/// A create model request to Ollama. 
+#[derive(Serialize)] +pub struct CreateModelRequest { + #[serde(rename = "name")] + model_name: String, + path: Option, + modelfile: Option, + stream: bool, +} + +impl CreateModelRequest { + /// Create a model described in the Modelfile at `path`. + pub fn path(model_name: String, path: String) -> Self { + Self { + model_name, + path: Some(path), + modelfile: None, + stream: false, + } + } + + /// Create a model described by the Modelfile contents passed to `modelfile`. + pub fn modelfile(model_name: String, modelfile: String) -> Self { + Self { + model_name, + path: None, + modelfile: Some(modelfile), + stream: false, + } + } +} + +/// A create model status response from Ollama. +#[derive(Deserialize, Debug)] +pub struct CreateModelStatus { + #[serde(rename = "status")] + pub message: String, +} diff --git a/crates/ollama-rs/src/models/delete.rs b/crates/ollama-rs/src/models/delete.rs new file mode 100644 index 0000000..ad25d76 --- /dev/null +++ b/crates/ollama-rs/src/models/delete.rs @@ -0,0 +1,33 @@ +use serde::Serialize; + +use crate::Ollama; + +impl Ollama { + /// Delete a model and its data. + pub async fn delete_model(&self, model_name: String) -> crate::error::Result<()> { + let request = DeleteModelRequest { model_name }; + + let url = format!("{}api/delete", self.url_str()); + let serialized = serde_json::to_string(&request).map_err(|e| e.to_string())?; + let res = self + .reqwest_client + .delete(url) + .body(serialized) + .send() + .await + .map_err(|e| e.to_string())?; + + if res.status().is_success() { + Ok(()) + } else { + Err(res.text().await.unwrap_or_else(|e| e.to_string()).into()) + } + } +} + +/// A delete model request to Ollama. 
+#[derive(Serialize)] +struct DeleteModelRequest { + #[serde(rename = "name")] + model_name: String, +} diff --git a/crates/ollama-rs/src/models/list_local.rs b/crates/ollama-rs/src/models/list_local.rs new file mode 100644 index 0000000..f0c86db --- /dev/null +++ b/crates/ollama-rs/src/models/list_local.rs @@ -0,0 +1,33 @@ +use serde::Deserialize; + +use crate::Ollama; + +use super::LocalModel; + +impl Ollama { + pub async fn list_local_models(&self) -> crate::error::Result> { + let url = format!("{}api/tags", self.url_str()); + let res = self + .reqwest_client + .get(url) + .send() + .await + .map_err(|e| e.to_string())?; + + if !res.status().is_success() { + return Err(res.text().await.unwrap_or_else(|e| e.to_string()).into()); + } + + let res = res.bytes().await.map_err(|e| e.to_string())?; + let res = + serde_json::from_slice::(&res).map_err(|e| e.to_string())?; + + Ok(res.models) + } +} + +/// A response from Ollama containing a list of local models. +#[derive(Debug, Clone, Deserialize)] +struct ListLocalModelsResponse { + models: Vec, +} diff --git a/crates/ollama-rs/src/models/pull.rs b/crates/ollama-rs/src/models/pull.rs new file mode 100644 index 0000000..b764a19 --- /dev/null +++ b/crates/ollama-rs/src/models/pull.rs @@ -0,0 +1,121 @@ +use serde::{Deserialize, Serialize}; + +use crate::Ollama; + +/// A stream of `PullModelStatus` objects. +#[cfg(feature = "stream")] +pub type PullModelStatusStream = std::pin::Pin< + Box> + Send>, +>; + +impl Ollama { + #[cfg(feature = "stream")] + /// Pull a model with streaming, meaning that each new status will be streamed. + /// - `model_name` - The name of the model to pull. + /// - `allow_insecure` - Allow insecure connections to the library. Only use this if you are pulling from your own library during development. 
+ pub async fn pull_model_stream( + &self, + model_name: String, + allow_insecure: bool, + ) -> crate::error::Result { + use tokio_stream::StreamExt; + + use crate::error::OllamaError; + + let request = PullModelRequest { + model_name, + allow_insecure, + stream: true, + }; + + let url = format!("{}api/pull", self.url_str()); + let serialized = serde_json::to_string(&request).map_err(|e| e.to_string())?; + let res = self + .reqwest_client + .post(url) + .body(serialized) + .send() + .await + .map_err(|e| e.to_string())?; + + if !res.status().is_success() { + return Err(res.text().await.unwrap_or_else(|e| e.to_string()).into()); + } + + let stream = Box::new(res.bytes_stream().map(|res| match res { + Ok(bytes) => { + let res = serde_json::from_slice::(&bytes); + match res { + Ok(res) => Ok(res), + Err(e) => { + let err = serde_json::from_slice::(&bytes); + match err { + Ok(err) => Err(err), + Err(_) => Err(OllamaError::from(format!( + "Failed to deserialize response: {}", + e + ))), + } + } + } + } + Err(e) => Err(OllamaError::from(format!("Failed to read response: {}", e))), + })); + + Ok(std::pin::Pin::from(stream)) + } + + /// Pull a model with a single response, only the final status will be returned. + /// - `model_name` - The name of the model to pull. + /// - `allow_insecure` - Allow insecure connections to the library. Only use this if you are pulling from your own library during development. 
+ pub async fn pull_model( + &self, + model_name: String, + allow_insecure: bool, + ) -> crate::error::Result { + let request = PullModelRequest { + model_name, + allow_insecure, + stream: false, + }; + + let url = format!("{}api/pull", self.url_str()); + let serialized = serde_json::to_string(&request).map_err(|e| e.to_string())?; + let res = self + .reqwest_client + .post(url) + .body(serialized) + .send() + .await + .map_err(|e| e.to_string())?; + + if !res.status().is_success() { + return Err(res.text().await.unwrap_or_else(|e| e.to_string()).into()); + } + + let res = res.bytes().await.map_err(|e| e.to_string())?; + let res = serde_json::from_slice::(&res).map_err(|e| e.to_string())?; + + Ok(res) + } +} + +/// A pull model request to Ollama. +#[derive(Debug, Clone, Serialize)] +struct PullModelRequest { + #[serde(rename = "name")] + model_name: String, + #[serde(rename = "insecure")] + allow_insecure: bool, + stream: bool, +} + +/// A pull model status response from Ollama. +#[derive(Debug, Clone, Deserialize)] +pub struct PullModelStatus { + #[serde(rename = "status")] + pub message: String, + pub digest: Option, + pub total: Option, + pub completed: Option, +} diff --git a/crates/ollama-rs/src/models/push.rs b/crates/ollama-rs/src/models/push.rs new file mode 100644 index 0000000..9ec592e --- /dev/null +++ b/crates/ollama-rs/src/models/push.rs @@ -0,0 +1,121 @@ +use serde::{Deserialize, Serialize}; + +use crate::Ollama; + +/// A stream of `PushModelStatus` objects. +#[cfg(feature = "stream")] +pub type PushModelStatusStream = std::pin::Pin< + Box> + Send>, +>; + +impl Ollama { + #[cfg(feature = "stream")] + /// Upload a model to a model library. Requires registering for ollama.ai and adding a public key first. + /// Push a model with streaming, meaning that each new status will be streamed. + /// - `model_name` - The name of the model to push in the form of `/:`. + /// - `allow_insecure` - Allow insecure connections to the library. 
Only use this if you are pushing to your library during development. + pub async fn push_model_stream( + &self, + model_name: String, + allow_insecure: bool, + ) -> crate::error::Result { + use crate::error::OllamaError; + use tokio_stream::StreamExt; + + let request = PushModelRequest { + model_name, + allow_insecure, + stream: true, + }; + + let url = format!("{}api/push", self.url_str()); + let serialized = serde_json::to_string(&request).map_err(|e| e.to_string())?; + let res = self + .reqwest_client + .post(url) + .body(serialized) + .send() + .await + .map_err(|e| e.to_string())?; + + if !res.status().is_success() { + return Err(res.text().await.unwrap_or_else(|e| e.to_string()).into()); + } + + let stream = Box::new(res.bytes_stream().map(|res| match res { + Ok(bytes) => { + let res = serde_json::from_slice::(&bytes); + match res { + Ok(res) => Ok(res), + Err(e) => { + let err = serde_json::from_slice::(&bytes); + match err { + Ok(err) => Err(err), + Err(_) => Err(OllamaError::from(format!( + "Failed to deserialize response: {}", + e + ))), + } + } + } + } + Err(e) => Err(OllamaError::from(format!("Failed to read response: {}", e))), + })); + + Ok(std::pin::Pin::from(stream)) + } + + /// Upload a model to a model library. Requires registering for ollama.ai and adding a public key first. + /// Push a model with a single response, only the final status will be returned. + /// - `model_name` - The name of the model to push in the form of `/:`. + /// - `allow_insecure` - Allow insecure connections to the library. Only use this if you are pushing to your library during development. 
+ pub async fn push_model( + &self, + model_name: String, + allow_insecure: bool, + ) -> crate::error::Result { + let request = PushModelRequest { + model_name, + allow_insecure, + stream: false, + }; + + let url = format!("{}api/push", self.url_str()); + let serialized = serde_json::to_string(&request).map_err(|e| e.to_string())?; + let res = self + .reqwest_client + .post(url) + .body(serialized) + .send() + .await + .map_err(|e| e.to_string())?; + + if !res.status().is_success() { + return Err(res.text().await.unwrap_or_else(|e| e.to_string()).into()); + } + + let res = res.bytes().await.map_err(|e| e.to_string())?; + let res = serde_json::from_slice::(&res).map_err(|e| e.to_string())?; + + Ok(res) + } +} + +/// A push model request to Ollama. +#[derive(Debug, Clone, Serialize)] +struct PushModelRequest { + #[serde(rename = "name")] + model_name: String, + #[serde(rename = "insecure")] + allow_insecure: bool, + stream: bool, +} + +/// A push model status response from Ollama. +#[derive(Debug, Clone, Deserialize)] +pub struct PushModelStatus { + #[serde(rename = "status")] + pub message: String, + pub digest: Option, + pub total: Option, +} diff --git a/crates/ollama-rs/src/models/show_info.rs b/crates/ollama-rs/src/models/show_info.rs new file mode 100644 index 0000000..e32083f --- /dev/null +++ b/crates/ollama-rs/src/models/show_info.rs @@ -0,0 +1,37 @@ +use serde::Serialize; + +use crate::Ollama; + +use super::ModelInfo; + +impl Ollama { + /// Show details about a model including modelfile, template, parameters, license, and system prompt. 
+ pub async fn show_model_info(&self, model_name: String) -> crate::error::Result { + let url = format!("{}api/show", self.url_str()); + let serialized = + serde_json::to_string(&ModelInfoRequest { model_name }).map_err(|e| e.to_string())?; + let res = self + .reqwest_client + .post(url) + .body(serialized) + .send() + .await + .map_err(|e| e.to_string())?; + + if !res.status().is_success() { + return Err(res.text().await.unwrap_or_else(|e| e.to_string()).into()); + } + + let res = res.bytes().await.map_err(|e| e.to_string())?; + let res = serde_json::from_slice::(&res).map_err(|e| e.to_string())?; + + Ok(res) + } +} + +/// A show model info request to Ollama. +#[derive(Serialize)] +struct ModelInfoRequest { + #[serde(rename = "name")] + model_name: String, +} diff --git a/crates/ollama-rs/tests/chat_history_management.rs b/crates/ollama-rs/tests/chat_history_management.rs new file mode 100644 index 0000000..cd98d03 --- /dev/null +++ b/crates/ollama-rs/tests/chat_history_management.rs @@ -0,0 +1,90 @@ +use ollama_rs::Ollama; + +#[test] +fn test_chat_history_saved_as_should() { + let mut ollama = Ollama::new_default_with_history(30); + let chat_id = "default"; + + ollama.add_user_response(chat_id, "Hello"); + ollama.add_assistant_response(chat_id, "Hi"); + + ollama.add_user_response(chat_id, "Tell me 'hi' again"); + ollama.add_assistant_response(chat_id, "Hi again"); + + let history = ollama.get_messages_history(chat_id).unwrap(); + + assert_eq!(history.len(), 4); + + let last = history.last(); + assert!(last.is_some()); + assert_eq!(last.unwrap().content, "Hi again".to_string()); +} + +#[test] +fn chat_history_not_stored_if_no_content() { + let mut ollama = Ollama::new_default_with_history(30); + let chat_id = "default"; + + ollama.add_user_response(chat_id, "Hello"); + ollama.add_assistant_response(chat_id, ""); + + ollama.add_user_response(chat_id, ""); + ollama.add_assistant_response(chat_id, "Hi again"); + + let history = 
ollama.get_messages_history(chat_id).unwrap(); + + assert_eq!(history.len(), 2); + + let last = history.last(); + assert!(last.is_some()); + assert_eq!(last.unwrap().content, "Hi again".to_string()); +} + +#[test] +fn clear_chat_history_for_one_id_only() { + let mut ollama = Ollama::new_default_with_history(30); + let first_chat_id = "default"; + + ollama.add_user_response(first_chat_id, "Hello"); + + let another_chat_id = "not_default"; + + ollama.add_user_response(another_chat_id, "Hello"); + + assert_eq!(ollama.get_messages_history(first_chat_id).unwrap().len(), 1); + assert_eq!( + ollama.get_messages_history(another_chat_id).unwrap().len(), + 1 + ); + + ollama.clear_messages_for_id(first_chat_id); + + assert!(ollama.get_messages_history(first_chat_id).is_none()); + assert_eq!( + ollama.get_messages_history(another_chat_id).unwrap().len(), + 1 + ); +} + +#[test] +fn clear_chat_history_for_all() { + let mut ollama = Ollama::new_default_with_history(30); + let first_chat_id = "default"; + + ollama.add_user_response(first_chat_id, "Hello"); + + let another_chat_id = "not_default"; + + ollama.add_user_response(another_chat_id, "Hello"); + + assert_eq!(ollama.get_messages_history(first_chat_id).unwrap().len(), 1); + assert_eq!( + ollama.get_messages_history(another_chat_id).unwrap().len(), + 1 + ); + + ollama.clear_all_messages(); + + assert!(ollama.get_messages_history(first_chat_id).is_none()); + assert!(ollama.get_messages_history(another_chat_id).is_none()); +} diff --git a/crates/ollama-rs/tests/copy_model.rs b/crates/ollama-rs/tests/copy_model.rs new file mode 100644 index 0000000..c512013 --- /dev/null +++ b/crates/ollama-rs/tests/copy_model.rs @@ -0,0 +1,10 @@ +#[tokio::test] +/// This test needs a model named "mario" to work +async fn test_copy_model() { + let ollama = ollama_rs::Ollama::default(); + + ollama + .copy_model("mario".into(), "mario_copy".into()) + .await + .unwrap(); +} diff --git a/crates/ollama-rs/tests/create_model.rs 
b/crates/ollama-rs/tests/create_model.rs new file mode 100644 index 0000000..a278a73 --- /dev/null +++ b/crates/ollama-rs/tests/create_model.rs @@ -0,0 +1,47 @@ +use ollama_rs::{models::create::CreateModelRequest, Ollama}; +use tokio_stream::StreamExt; + +#[tokio::test] +/// This test needs a Modelfile at /tmp to work +async fn test_create_model_stream() { + let ollama = Ollama::default(); + + let mut res = ollama + .create_model_stream(CreateModelRequest::path( + "model".into(), + "/tmp/Modelfile.example".into(), + )) + .await + .unwrap(); + + let mut done = false; + while let Some(res) = res.next().await { + match res { + Ok(res) => { + dbg!(&res.message); + if res.message.eq("success") { + done = true; + } + } + Err(e) => panic!("{:?}", e), + } + } + + assert!(done); +} + +#[tokio::test] +/// This test needs a Modelfile at /tmp to work +async fn test_create_model() { + let ollama = Ollama::default(); + + let res = ollama + .create_model(CreateModelRequest::path( + "model".into(), + "/tmp/Modelfile.example".into(), + )) + .await + .unwrap(); + + assert!(res.message.eq("success")); +} diff --git a/crates/ollama-rs/tests/delete_model.rs b/crates/ollama-rs/tests/delete_model.rs new file mode 100644 index 0000000..cf5cbc3 --- /dev/null +++ b/crates/ollama-rs/tests/delete_model.rs @@ -0,0 +1,9 @@ +use ollama_rs::Ollama; + +#[tokio::test] +/// This test needs a model named "mario_copy" to work +async fn test_delete_model() { + let ollama = Ollama::default(); + + ollama.delete_model("mario_copy".into()).await.unwrap(); +} diff --git a/crates/ollama-rs/tests/embeddings_generation.rs b/crates/ollama-rs/tests/embeddings_generation.rs new file mode 100644 index 0000000..d71546a --- /dev/null +++ b/crates/ollama-rs/tests/embeddings_generation.rs @@ -0,0 +1,15 @@ +use ollama_rs::Ollama; + +#[tokio::test] +async fn test_embeddings_generation() { + let ollama = Ollama::default(); + + let prompt = "Why is the sky blue?".to_string(); + + let res = ollama + 
.generate_embeddings("llama2:latest".to_string(), prompt, None) + .await + .unwrap(); + + dbg!(res); +} diff --git a/crates/ollama-rs/tests/function_call.rs b/crates/ollama-rs/tests/function_call.rs new file mode 100644 index 0000000..034139c --- /dev/null +++ b/crates/ollama-rs/tests/function_call.rs @@ -0,0 +1,95 @@ +#![cfg(feature = "function-calling")] + +use ollama_rs::{ + generation::chat::ChatMessage, + generation::functions::tools::{DDGSearcher, Scraper, StockScraper}, + generation::functions::{FunctionCallRequest, NousFunctionCall}, + Ollama, +}; +use std::sync::Arc; + +#[tokio::test] +async fn test_send_function_call() { + /// Model to be used, make sure it is tailored towards "function calling", such as: + /// - OpenAIFunctionCall: not model specific, degraded performance + /// - NousFunctionCall: adrienbrault/nous-hermes2pro:Q8_0 + const MODEL: &str = "adrienbrault/nous-hermes2pro:Q8_0"; + + const PROMPT: &str = "Aside from the Apple Remote, what other device can control the program Apple Remote was originally designed to interact with?"; + let user_message = ChatMessage::user(PROMPT.to_string()); + + let scraper_tool = Arc::new(Scraper::new()); + let ddg_search_tool = Arc::new(DDGSearcher::new()); + let parser = Arc::new(NousFunctionCall::new()); + + let ollama = Ollama::default(); + let result = ollama + .send_function_call( + FunctionCallRequest::new( + MODEL.to_string(), + vec![scraper_tool, ddg_search_tool], + vec![user_message], + ), + parser, + ) + .await + .unwrap(); + + assert!(result.done); +} + +#[tokio::test] +async fn test_send_function_call_with_history() { + /// Model to be used, make sure it is tailored towards "function calling", such as: + /// - OpenAIFunctionCall: not model specific, degraded performance + /// - NousFunctionCall: adrienbrault/nous-hermes2pro:Q8_0 + const MODEL: &str = "adrienbrault/nous-hermes2pro:Q8_0"; + + const PROMPT: &str = "Aside from the Apple Remote, what other device can control the program Apple Remote was 
originally designed to interact with?"; + let user_message = ChatMessage::user(PROMPT.to_string()); + + let scraper_tool = Arc::new(Scraper::new()); + let ddg_search_tool = Arc::new(DDGSearcher::new()); + let parser = Arc::new(NousFunctionCall::new()); + + let mut ollama = Ollama::new_default_with_history(30); + let result = ollama + .send_function_call_with_history( + FunctionCallRequest::new( + MODEL.to_string(), + vec![scraper_tool, ddg_search_tool], + vec![user_message], + ), + parser, + "default".to_string(), + ) + .await + .unwrap(); + + assert!(result.done); +} + +#[tokio::test] +async fn test_send_function_call_finance() { + /// Model to be used, make sure it is tailored towards "function calling", such as: + /// - OpenAIFunctionCall: not model specific, degraded performance + /// - NousFunctionCall: adrienbrault/nous-hermes2pro:Q8_0 + const MODEL: &str = "adrienbrault/nous-hermes2pro:Q8_0"; + + const PROMPT: &str = "What are the current risk factors to $APPL?"; + let user_message = ChatMessage::user(PROMPT.to_string()); + + let stock_scraper = Arc::new(StockScraper::new()); + let parser = Arc::new(NousFunctionCall::new()); + + let ollama = Ollama::default(); + let result = ollama + .send_function_call( + FunctionCallRequest::new(MODEL.to_string(), vec![stock_scraper], vec![user_message]), + parser, + ) + .await + .unwrap(); + + assert!(result.done); +} diff --git a/crates/ollama-rs/tests/generation.rs b/crates/ollama-rs/tests/generation.rs new file mode 100644 index 0000000..d35e94d --- /dev/null +++ b/crates/ollama-rs/tests/generation.rs @@ -0,0 +1,83 @@ +#![allow(unused_imports)] +use base64::Engine; +use ollama_rs::{ + generation::{ + completion::{request::GenerationRequest, GenerationResponseStream}, + images::Image, + }, + Ollama, +}; +use tokio::io::AsyncWriteExt; +use tokio_stream::StreamExt; + +#[allow(dead_code)] +const PROMPT: &str = "Why is the sky blue?"; + +#[tokio::test] +async fn test_generation_stream() { + let ollama = Ollama::default(); + 
+ let mut res: GenerationResponseStream = ollama + .generate_stream(GenerationRequest::new( + "llama2:latest".to_string(), + PROMPT.into(), + )) + .await + .unwrap(); + + let mut done = false; + while let Some(res) = res.next().await { + let res = res.unwrap(); + for ele in res { + dbg!(&ele); + if ele.done { + done = true; + break; + } + } + } + + assert!(done); +} + +#[tokio::test] +async fn test_generation() { + let ollama = Ollama::default(); + + let res = ollama + .generate(GenerationRequest::new( + "llama2:latest".to_string(), + PROMPT.into(), + )) + .await + .unwrap(); + dbg!(res); +} + +const IMAGE_URL: &str = "https://images.pexels.com/photos/1054655/pexels-photo-1054655.jpeg"; + +#[tokio::test] +async fn test_generation_with_images() { + let ollama = Ollama::default(); + + let bytes = reqwest::get(IMAGE_URL) + .await + .unwrap() + .bytes() + .await + .unwrap(); + let base64 = base64::engine::general_purpose::STANDARD.encode(&bytes); + let image = Image::from_base64(&base64); + + let res = ollama + .generate( + GenerationRequest::new( + "llava:latest".to_string(), + "What can we see in this image?".to_string(), + ) + .add_image(image), + ) + .await + .unwrap(); + dbg!(res); +} diff --git a/crates/ollama-rs/tests/list_local_models.rs b/crates/ollama-rs/tests/list_local_models.rs new file mode 100644 index 0000000..6bd2e44 --- /dev/null +++ b/crates/ollama-rs/tests/list_local_models.rs @@ -0,0 +1,8 @@ +#[tokio::test] +async fn test_list_local_models() { + let ollama = ollama_rs::Ollama::default(); + + let models = ollama.list_local_models().await.unwrap(); + + dbg!(models); +} diff --git a/crates/ollama-rs/tests/pull_model.rs b/crates/ollama-rs/tests/pull_model.rs new file mode 100644 index 0000000..4983206 --- /dev/null +++ b/crates/ollama-rs/tests/pull_model.rs @@ -0,0 +1,19 @@ +use ollama_rs::Ollama; +use tokio_stream::StreamExt; + +#[tokio::test] +async fn test_pull_model() { + let ollama = Ollama::default(); + + let mut res = ollama + 
.pull_model_stream("llama2:latest".into(), false) + .await + .unwrap(); + + while let Some(res) = res.next().await { + match res { + Ok(res) => println!("{:?}", res), + Err(e) => panic!("{:?}", e), + } + } +} diff --git a/crates/ollama-rs/tests/push_model.rs b/crates/ollama-rs/tests/push_model.rs new file mode 100644 index 0000000..0609393 --- /dev/null +++ b/crates/ollama-rs/tests/push_model.rs @@ -0,0 +1,20 @@ +use ollama_rs::Ollama; +use tokio_stream::StreamExt; + +#[tokio::test] +/// This test needs a local model named `test_model:latest` to work, and requires registering for ollama.ai and adding a public key first. +async fn test_push_model() { + let ollama = Ollama::default(); + + let mut res = ollama + .push_model_stream("test_model:latest".into(), false) + .await + .unwrap(); + + while let Some(res) = res.next().await { + match res { + Ok(res) => println!("{:?}", res), + Err(e) => panic!("{:?}", e), + } + } +} diff --git a/crates/ollama-rs/tests/send_chat_messages.rs b/crates/ollama-rs/tests/send_chat_messages.rs new file mode 100644 index 0000000..ccd72c3 --- /dev/null +++ b/crates/ollama-rs/tests/send_chat_messages.rs @@ -0,0 +1,218 @@ +use base64::Engine; +use tokio_stream::StreamExt; + +use ollama_rs::{ + generation::{ + chat::{request::ChatMessageRequest, ChatMessage}, + images::Image, + }, + Ollama, +}; + +#[allow(dead_code)] +const PROMPT: &str = "Why is the sky blue?"; + +#[tokio::test] +async fn test_send_chat_messages_stream() { + let ollama = Ollama::default(); + + let messages = vec![ChatMessage::user(PROMPT.to_string())]; + let mut res = ollama + .send_chat_messages_stream(ChatMessageRequest::new( + "llama2:latest".to_string(), + messages, + )) + .await + .unwrap(); + + let mut done = false; + while let Some(res) = res.next().await { + let res = res.unwrap(); + dbg!(&res); + if res.done { + done = true; + break; + } + } + + assert!(done); +} + +#[tokio::test] +async fn test_send_chat_messages() { + let ollama = Ollama::default(); + + let 
messages = vec![ChatMessage::user(PROMPT.to_string())]; + let res = ollama + .send_chat_messages(ChatMessageRequest::new( + "llama2:latest".to_string(), + messages, + )) + .await + .unwrap(); + dbg!(&res); + + assert!(res.done); +} + +#[tokio::test] +async fn test_send_chat_messages_with_history_stream() { + let mut ollama = Ollama::new_default_with_history(30); + let id = "default"; + + let messages = vec![ChatMessage::user(PROMPT.to_string())]; + + let mut done = false; + + let mut res = ollama + .send_chat_messages_with_history_stream( + ChatMessageRequest::new("llama2:latest".to_string(), messages), + id, + ) + .await + .unwrap(); + + while let Some(res) = res.next().await { + let res = res.unwrap(); + + if res.done { + done = true; + break; + } + } + + assert!(done); + // Should have user's message as well as AI's response + dbg!(&ollama.get_messages_history(id).unwrap()); + assert_eq!(ollama.get_messages_history(id).unwrap().len(), 2); +} + +#[tokio::test] +async fn test_send_chat_messages_with_history() { + let mut ollama = Ollama::new_default_with_history(30); + let id = "default".to_string(); + let second_message = vec![ChatMessage::user("Second message".to_string())]; + + let messages = vec![ChatMessage::user(PROMPT.to_string())]; + let res = ollama + .send_chat_messages_with_history( + ChatMessageRequest::new("llama2:latest".to_string(), messages.clone()), + &id, + ) + .await + .unwrap(); + + dbg!(&res); + assert!(res.done); + // Should have user's message as well as AI's response + assert_eq!(ollama.get_messages_history(&id).unwrap().len(), 2); + + let res = ollama + .send_chat_messages_with_history( + ChatMessageRequest::new("llama2:latest".to_string(), second_message.clone()), + &id, + ) + .await + .unwrap(); + + dbg!(&res); + assert!(res.done); + + let history = ollama.get_messages_history(&id).unwrap(); + // Should now have 2 user messages as well as AI's responses + assert_eq!(history.len(), 4); + + let second_user_message_in_history = 
history.get(2); + + assert!(second_user_message_in_history.is_some()); + assert_eq!( + second_user_message_in_history.unwrap().content, + "Second message".to_string() + ); +} + +#[tokio::test] +async fn test_send_chat_messages_remove_old_history_with_limit_less_than_min() { + // Setting history length to 1 but the minimum is 2 + let mut ollama = Ollama::new_default_with_history(1); + let id = "default".to_string(); + + let messages = vec![ChatMessage::user(PROMPT.to_string())]; + let res = ollama + .send_chat_messages_with_history( + ChatMessageRequest::new("llama2:latest".to_string(), messages.clone()), + &id, + ) + .await + .unwrap(); + + dbg!(&res); + assert!(res.done); + // Minimal history length is 2 + assert_eq!(ollama.get_messages_history(&id).unwrap().len(), 2); +} + +#[tokio::test] +async fn test_send_chat_messages_remove_old_history() { + let mut ollama = Ollama::new_default_with_history(3); + let id = "default".to_string(); + + let messages = vec![ChatMessage::user(PROMPT.to_string())]; + let res = ollama + .send_chat_messages_with_history( + ChatMessageRequest::new("llama2:latest".to_string(), messages.clone()), + &id, + ) + .await + .unwrap(); + + dbg!(&res); + + assert!(res.done); + + assert_eq!(ollama.get_messages_history(&id).unwrap().len(), 2); + + // Duplicate to check that we have 3 messages stored + let res = ollama + .send_chat_messages_with_history( + ChatMessageRequest::new("llama2:latest".to_string(), messages), + &id, + ) + .await + .unwrap(); + + dbg!(&res); + + assert!(res.done); + + assert_eq!(ollama.get_messages_history(&id).unwrap().len(), 3); +} + +const IMAGE_URL: &str = "https://images.pexels.com/photos/1054655/pexels-photo-1054655.jpeg"; + +#[tokio::test] +async fn test_send_chat_messages_with_images() { + let ollama = Ollama::default(); + + let bytes = reqwest::get(IMAGE_URL) + .await + .unwrap() + .bytes() + .await + .unwrap(); + let base64 = base64::engine::general_purpose::STANDARD.encode(&bytes); + let image = 
Image::from_base64(&base64); + + let messages = + vec![ChatMessage::user("What can we see in this image?".to_string()).add_image(image)]; + let res = ollama + .send_chat_messages(ChatMessageRequest::new( + "llava:latest".to_string(), + messages, + )) + .await + .unwrap(); + dbg!(&res); + + assert!(res.done); +} diff --git a/crates/ollama-rs/tests/show_model_info.rs b/crates/ollama-rs/tests/show_model_info.rs new file mode 100644 index 0000000..a58d8b2 --- /dev/null +++ b/crates/ollama-rs/tests/show_model_info.rs @@ -0,0 +1,11 @@ +#[tokio::test] +async fn test_show_model_info() { + let ollama = ollama_rs::Ollama::default(); + + let model_info = ollama + .show_model_info("llama2:latest".to_string()) + .await + .unwrap(); + + dbg!(model_info); +} diff --git a/crates/openai_api_rust/.github/workflows/rust.yml b/crates/openai_api_rust/.github/workflows/rust.yml new file mode 100644 index 0000000..af0cdb7 --- /dev/null +++ b/crates/openai_api_rust/.github/workflows/rust.yml @@ -0,0 +1,20 @@ +name: Rust + +on: + push: + branches: [ "main" ] + pull_request: + branches: [ "main" ] + +env: + CARGO_TERM_COLOR: always + +jobs: + build: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + - name: Build + run: cargo build --verbose diff --git a/crates/openai_api_rust/.gitignore b/crates/openai_api_rust/.gitignore new file mode 100644 index 0000000..ea8c4bf --- /dev/null +++ b/crates/openai_api_rust/.gitignore @@ -0,0 +1 @@ +/target diff --git a/crates/openai_api_rust/.rustfmt.toml b/crates/openai_api_rust/.rustfmt.toml new file mode 100644 index 0000000..66ef1db --- /dev/null +++ b/crates/openai_api_rust/.rustfmt.toml @@ -0,0 +1,10 @@ +edition = "2021" +hard_tabs = true +max_width = 100 +tab_spaces = 4 +reorder_imports = true +match_block_trailing_comma = true +newline_style = "Unix" +reorder_modules = true +use_field_init_shorthand = true +use_small_heuristics = "Max" diff --git a/crates/openai_api_rust/Cargo.lock b/crates/openai_api_rust/Cargo.lock new file 
mode 100644 index 0000000..65d4ee9 --- /dev/null +++ b/crates/openai_api_rust/Cargo.lock @@ -0,0 +1,489 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + +[[package]] +name = "bumpalo" +version = "3.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" + +[[package]] +name = "cc" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "crc32fast" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "flate2" +version = "1.0.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "form_urlencoded" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "getrandom" +version = "0.2.8" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "idna" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "itoa" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6" + +[[package]] +name = "js-sys" +version = "0.3.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "445dde2150c55e483f3d8416706b97ec8e8237c307e5b7b4b8dd15e6af2a0730" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "libc" +version = "0.2.140" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99227334921fae1a979cf0bfdfcc6b3e5ce376ef57e16fb6fb3ea2ed6095f80c" + +[[package]] +name = "log" +version = "0.4.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "miniz_oxide" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa" +dependencies = [ + "adler", +] + +[[package]] +name = "once_cell" +version = "1.17.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" + +[[package]] +name = "openai_api_rust" +version = "0.1.9" 
+dependencies = [ + "log", + "mime", + "rand", + "serde", + "serde_json", + "ureq", +] + +[[package]] +name = "percent-encoding" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e" + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "proc-macro2" +version = "1.0.53" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba466839c78239c09faf015484e5cc04860f88242cff4d03eb038f04b4699b73" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin", + "untrusted", + "web-sys", 
+ "winapi", +] + +[[package]] +name = "rustls" +version = "0.20.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fff78fc74d175294f4e83b28343315ffcfb114b156f0185e9741cb5570f50e2f" +dependencies = [ + "log", + "ring", + "sct", + "webpki", +] + +[[package]] +name = "ryu" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041" + +[[package]] +name = "sct" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "serde" +version = "1.0.158" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "771d4d9c4163ee138805e12c710dd365e4f44be8be0503cb1bb9eb989425d9c9" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.158" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e801c1712f48475582b7696ac71e0ca34ebb30e09338425384269d9717c62cad" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.5", +] + +[[package]] +name = "serde_json" +version = "1.0.94" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c533a59c9d8a93a09c6ab31f0fd5e5f4dd1b8fc9434804029839884765d04ea" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "89c2d1c76a26822187a1fbb5964e3fff108bc208f02e820ab9dac1234f6b388a" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "unicode-bidi" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" + +[[package]] +name = "unicode-ident" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4" + +[[package]] +name = "unicode-normalization" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + +[[package]] +name = "ureq" +version = "2.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "338b31dd1314f68f3aabf3ed57ab922df95ffcd902476ca7ba3c4ce7b908c46d" +dependencies = [ + "base64", + "flate2", + "log", + "once_cell", + "rustls", + "serde", + "serde_json", + "url", + "webpki", + "webpki-roots", +] + +[[package]] +name = "url" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn 1.0.109", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d" + +[[package]] +name = "web-sys" +version = "0.3.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e33b99f4b23ba3eec1a53ac264e35a755f00e966e0065077d6027c0f575b0b97" +dependencies = [ + 
"js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "webpki-roots" +version = "0.22.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" +dependencies = [ + "webpki", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/crates/openai_api_rust/Cargo.toml b/crates/openai_api_rust/Cargo.toml new file mode 100644 index 0000000..fd0ad8b --- /dev/null +++ b/crates/openai_api_rust/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "openai_api_rust" +version = "0.1.9" +authors = ["i@gt.email","craig@mayhew.io"] +edition = "2021" +license = "MIT" +repository = "https://github.com/openai-rs/openai-api" +description = "A very simple Rust library for OpenAI API, free from complex async operations and redundant dependencies." 
+keywords = ["openai_api", "openai", "openai-api"] +categories = ["api-bindings"] + +[dependencies] +ureq = { version = "^2.6", features = ["json"] } +serde = { version = "^1.0", features = ["derive"] } +serde_json = "^1.0" +log = "^0.4" +mime = "^0.3.16" +rand = "0.8.5" diff --git a/crates/openai_api_rust/LICENSE b/crates/openai_api_rust/LICENSE new file mode 100644 index 0000000..0b37cae --- /dev/null +++ b/crates/openai_api_rust/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) [2023] [i@guantong.dev] + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/crates/openai_api_rust/README.md b/crates/openai_api_rust/README.md new file mode 100644 index 0000000..c1979bf --- /dev/null +++ b/crates/openai_api_rust/README.md @@ -0,0 +1,93 @@ +# OpenAI API for Rust + +[![GitHub Workflow Status](https://img.shields.io/github/actions/workflow/status/openai-rs/openai-api/rust.yml?style=flat-square)](https://github.com/openai-rs/openai-api/actions) +[![Crates.io](https://img.shields.io/crates/v/openai_api_rust?style=flat-square)](https://crates.io/crates/openai_api_rust/versions) +[![Crates.io](https://img.shields.io/crates/d/openai_api_rust?style=flat-square)](https://crates.io/crates/openai_api_rust) +[![GitHub](https://img.shields.io/github/license/openai-rs/openai-api?style=flat-square)](https://github.com/openai-rs/openai-api/blob/main/LICENSE) + +A community-maintained library provides a simple and convenient way to interact with the OpenAI API. +No complex async and redundant dependencies. + +## API + +check [official API reference](https://platform.openai.com/docs/api-reference) +|API|Support| +|---|---| +|Models|✔️| +|Completions|✔️| +|Chat|✔️| +|Images|✔️| +|Embeddings|✔️| +|Audio|✔️| +|Files|❌| +|Fine-tunes|❌| +|Moderations|❌| +|Engines|❌| +___ + +## Usage + +Add the following to your Cargo.toml file: + +```toml +openai_api_rust = "0.1.9" +``` + +Export your API key into the environment variables + +```bash +export OPENAI_API_KEY= +``` + +Then use the crate in your Rust code: + +```rust +use openai_api_rust::*; +use openai_api_rust::chat::*; +use openai_api_rust::completions::*; + +fn main() { + // Load API key from environment OPENAI_API_KEY. + // You can also hadcode through `Auth::new()`, but it is not recommended. 
+ let auth = Auth::from_env().unwrap(); + let openai = OpenAI::new(auth, "https://api.openai.com/v1/"); + let body = ChatBody { + model: "gpt-3.5-turbo".to_string(), + max_tokens: Some(7), + temperature: Some(0_f32), + top_p: Some(0_f32), + n: Some(2), + stream: Some(false), + stop: None, + presence_penalty: None, + frequency_penalty: None, + logit_bias: None, + user: None, + messages: vec![Message { role: Role::User, content: "Hello!".to_string() }], + }; + let rs = openai.chat_completion_create(&body); + let choice = rs.unwrap().choices; + let message = &choice[0].message.as_ref().unwrap(); + assert!(message.content.contains("Hello")); +} +``` + +### Use proxy + +Load proxy from env + +```rust +let openai = OpenAI::new(auth, "https://api.openai.com/v1/") + .use_env_proxy(); +``` + +Set the proxy manually + +```rust +let openai = OpenAI::new(auth, "https://api.openai.com/v1/") + .set_proxy("http://127.0.0.1:1080"); +``` + +## License + +This library is distributed under the terms of the MIT license. See [LICENSE](LICENSE) for details. diff --git a/crates/openai_api_rust/src/apis/audio.rs b/crates/openai_api_rust/src/apis/audio.rs new file mode 100644 index 0000000..302124f --- /dev/null +++ b/crates/openai_api_rust/src/apis/audio.rs @@ -0,0 +1,143 @@ +// Learn how to turn audio into text. + +//! Audio API + +use std::fs::File; + +use crate::mpart::Mpart as Multipart; +use serde::{Deserialize, Serialize}; + +use crate::requests::Requests; +use crate::*; + +use super::{AUDIO_TRANSCRIPTION_CREATE, AUDIO_TRANSLATIONS_CREATE}; + +#[derive(Debug)] +pub struct AudioBody { + /// The audio file to transcribe, + /// in one of these formats: mp3, mp4, mpeg, mpga, m4a, wav, or webm. + pub file: File, + /// ID of the model to use. Only whisper-1 is currently available. + pub model: String, + /// An optional text to guide the model's style or continue a previous audio segment. + /// The prompt should match the audio language. 
+ pub prompt: Option, + /// The format of the transcript output, in one of these options: json, text, srt, verbose_json, or vtt. + pub response_format: Option, + /// The sampling temperature, between 0 and 1. + /// Higher values like 0.8 will make the output more random, + /// while lower values like 0.2 will make it more focused and deterministic. If set to 0, + /// the model will use log probability to automatically increase the temperature until certain thresholds are hit. + pub temperature: Option, + /// The language of the input audio. Supplying the input language in ISO-639-1 format will improve accuracy and latency. + /// ISO-639-1: https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes + pub language: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct Audio { + pub text: Option, +} + +pub trait AudioApi { + /// Transcribes audio into the input language. + fn audio_transcription_create(&self, audio_body: AudioBody) -> ApiResult