From 3a4ad66cb7329ea3429bea0274b0106f022e99d2 Mon Sep 17 00:00:00 2001 From: Christos KK Loverdos Date: Mon, 30 Sep 2024 15:43:28 +0300 Subject: [PATCH] Initial commit --- .editorconfig | 17 + .github/actions/fetch-sui-cli/action.yml | 21 + .github/workflows/python.yml | 13 + .github/workflows/talus-agentic-framework.yml | 160 + .gitignore | 164 + LICENSE | 38 + README.md | 131 + TROUBLESHOOTING.md | 25 + e2e_tests/.gitignore | 3 + e2e_tests/Cargo.lock | 7184 +++++++++++++++++ e2e_tests/Cargo.toml | 34 + e2e_tests/README.md | 63 + e2e_tests/oneclick-test.sh | 162 + e2e_tests/src/completion.rs | 207 + e2e_tests/src/main.rs | 280 + e2e_tests/src/ollama_mock.rs | 38 + e2e_tests/src/prelude.rs | 12 + e2e_tests/src/prompt.rs | 125 + e2e_tests/src/setup.rs | 508 ++ examples/LICENSE | 176 + examples/README.md | 121 + examples/cli_cluster.py | 226 + examples/example.just | 26 + examples/ig_post_planner.py | 206 + examples/main.py | 269 + examples/requirements.txt | 7 + examples/trip_planner.py | 222 + justfile | 148 + nexus_sdk/LICENSE | 176 + nexus_sdk/README.md | 4 + nexus_sdk/requirements.txt | 2 + nexus_sdk/setup.py | 17 + nexus_sdk/src/nexus_sdk/__init__.py | 21 + nexus_sdk/src/nexus_sdk/cluster.py | 213 + nexus_sdk/src/nexus_sdk/model.py | 63 + nexus_sdk/src/nexus_sdk/node.py | 21 + nexus_sdk/src/nexus_sdk/utils.py | 17 + offchain/LICENSE | 38 + offchain/README.md | 7 + offchain/events/.gitignore | 150 + offchain/events/LICENSE | 38 + offchain/events/README.md | 32 + offchain/events/pyproject.toml | 26 + offchain/events/src/nexus_events/__init__.py | 0 offchain/events/src/nexus_events/offchain.py | 52 + offchain/events/src/nexus_events/sui_event.py | 266 + offchain/tools/.flake8 | 3 + offchain/tools/.gitignore | 148 + offchain/tools/LICENSE | 38 + offchain/tools/Modelfile | 13 + offchain/tools/README.md | 63 + offchain/tools/pyproject.toml | 67 + offchain/tools/src/nexus_tools/__init__.py | 0 .../tools/src/nexus_tools/server/__init__.py | 0 .../server/controllers/__init__.py | 0 .../server/controllers/inference.py | 20 + .../server/crew/talus_chat_ollama.py | 71 + .../nexus_tools/server/crew/talus_ollama.py | 169 + offchain/tools/src/nexus_tools/server/main.py | 258 + .../src/nexus_tools/server/models/__init__.py | 0 .../src/nexus_tools/server/models/agents.py | 30 + .../nexus_tools/server/models/completion.py | 30 + .../src/nexus_tools/server/models/error.py | 23 + .../nexus_tools/server/models/extra_models.py | 7 + .../src/nexus_tools/server/models/model.py | 23 + .../src/nexus_tools/server/models/prompt.py | 42 + .../src/nexus_tools/server/security_api.py | 19 + .../src/nexus_tools/server/tools/__init__.py | 0 .../src/nexus_tools/server/tools/tools.py | 323 + offchain/tools/tests/__init__.py | 0 offchain/tools/tests/conftest.py | 17 + offchain/tools/tests/test_agent.py | 102 + offchain/tools/tests/test_default_api.py | 32 + offchain/tools/tests/test_ollama.py | 58 + offchain/tools/tests/test_tool.py | 187 + onchain/LICENSE | 38 + onchain/Move.toml | 39 + onchain/README.md | 118 + onchain/Suibase.toml | 5 + onchain/sources/agent.move | 305 + onchain/sources/cluster.move | 678 ++ onchain/sources/consts.move | 14 + onchain/sources/model.move | 296 + onchain/sources/node.move | 62 + onchain/sources/prompt.move | 72 + onchain/sources/task.move | 169 + onchain/sources/tests/cluster_tests.move | 195 + onchain/sources/tests/node_tests.move | 36 + onchain/sources/tests/prompt_tests.move | 97 + onchain/sources/tool.move | 31 + rustfmt.toml | 18 + 91 files changed, 15645 insertions(+) create mode 
100644 .editorconfig create mode 100644 .github/actions/fetch-sui-cli/action.yml create mode 100644 .github/workflows/python.yml create mode 100644 .github/workflows/talus-agentic-framework.yml create mode 100644 .gitignore create mode 100644 LICENSE create mode 100644 README.md create mode 100644 TROUBLESHOOTING.md create mode 100644 e2e_tests/.gitignore create mode 100644 e2e_tests/Cargo.lock create mode 100644 e2e_tests/Cargo.toml create mode 100644 e2e_tests/README.md create mode 100755 e2e_tests/oneclick-test.sh create mode 100644 e2e_tests/src/completion.rs create mode 100644 e2e_tests/src/main.rs create mode 100644 e2e_tests/src/ollama_mock.rs create mode 100644 e2e_tests/src/prelude.rs create mode 100644 e2e_tests/src/prompt.rs create mode 100644 e2e_tests/src/setup.rs create mode 100644 examples/LICENSE create mode 100644 examples/README.md create mode 100644 examples/cli_cluster.py create mode 100644 examples/example.just create mode 100644 examples/ig_post_planner.py create mode 100644 examples/main.py create mode 100644 examples/requirements.txt create mode 100644 examples/trip_planner.py create mode 100644 justfile create mode 100644 nexus_sdk/LICENSE create mode 100644 nexus_sdk/README.md create mode 100644 nexus_sdk/requirements.txt create mode 100644 nexus_sdk/setup.py create mode 100644 nexus_sdk/src/nexus_sdk/__init__.py create mode 100644 nexus_sdk/src/nexus_sdk/cluster.py create mode 100644 nexus_sdk/src/nexus_sdk/model.py create mode 100644 nexus_sdk/src/nexus_sdk/node.py create mode 100644 nexus_sdk/src/nexus_sdk/utils.py create mode 100644 offchain/LICENSE create mode 100644 offchain/README.md create mode 100644 offchain/events/.gitignore create mode 100644 offchain/events/LICENSE create mode 100755 offchain/events/README.md create mode 100644 offchain/events/pyproject.toml create mode 100644 offchain/events/src/nexus_events/__init__.py create mode 100644 offchain/events/src/nexus_events/offchain.py create mode 100755 offchain/events/src/nexus_events/sui_event.py create mode 100644 offchain/tools/.flake8 create mode 100644 offchain/tools/.gitignore create mode 100644 offchain/tools/LICENSE create mode 100644 offchain/tools/Modelfile create mode 100644 offchain/tools/README.md create mode 100644 offchain/tools/pyproject.toml create mode 100755 offchain/tools/src/nexus_tools/__init__.py create mode 100755 offchain/tools/src/nexus_tools/server/__init__.py create mode 100755 offchain/tools/src/nexus_tools/server/controllers/__init__.py create mode 100644 offchain/tools/src/nexus_tools/server/controllers/inference.py create mode 100644 offchain/tools/src/nexus_tools/server/crew/talus_chat_ollama.py create mode 100644 offchain/tools/src/nexus_tools/server/crew/talus_ollama.py create mode 100755 offchain/tools/src/nexus_tools/server/main.py create mode 100755 offchain/tools/src/nexus_tools/server/models/__init__.py create mode 100644 offchain/tools/src/nexus_tools/server/models/agents.py create mode 100644 offchain/tools/src/nexus_tools/server/models/completion.py create mode 100644 offchain/tools/src/nexus_tools/server/models/error.py create mode 100644 offchain/tools/src/nexus_tools/server/models/extra_models.py create mode 100644 offchain/tools/src/nexus_tools/server/models/model.py create mode 100644 offchain/tools/src/nexus_tools/server/models/prompt.py create mode 100644 offchain/tools/src/nexus_tools/server/security_api.py create mode 100644 offchain/tools/src/nexus_tools/server/tools/__init__.py create mode 100644 offchain/tools/src/nexus_tools/server/tools/tools.py 
create mode 100755 offchain/tools/tests/__init__.py create mode 100644 offchain/tools/tests/conftest.py create mode 100644 offchain/tools/tests/test_agent.py create mode 100644 offchain/tools/tests/test_default_api.py create mode 100644 offchain/tools/tests/test_ollama.py create mode 100755 offchain/tools/tests/test_tool.py create mode 100644 onchain/LICENSE create mode 100755 onchain/Move.toml create mode 100644 onchain/README.md create mode 100644 onchain/Suibase.toml create mode 100644 onchain/sources/agent.move create mode 100644 onchain/sources/cluster.move create mode 100644 onchain/sources/consts.move create mode 100644 onchain/sources/model.move create mode 100644 onchain/sources/node.move create mode 100644 onchain/sources/prompt.move create mode 100644 onchain/sources/task.move create mode 100644 onchain/sources/tests/cluster_tests.move create mode 100644 onchain/sources/tests/node_tests.move create mode 100644 onchain/sources/tests/prompt_tests.move create mode 100755 onchain/sources/tool.move create mode 100644 rustfmt.toml diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..5f00523 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,17 @@ +root = true + +[*] +charset = utf-8 +end_of_line = lf +indent_style = space +insert_final_newline = true +trim_trailing_whitespace = true + +[*.move] +indent_size = 4 + +[*.yml] +indent_size = 2 + +[*.rs] +indent_size = 4 diff --git a/.github/actions/fetch-sui-cli/action.yml b/.github/actions/fetch-sui-cli/action.yml new file mode 100644 index 0000000..36c6ff8 --- /dev/null +++ b/.github/actions/fetch-sui-cli/action.yml @@ -0,0 +1,21 @@ +name: "Setup Sui CLI" +description: "Downloads and sets up the Sui CLI" +inputs: + sui_ref: + description: "Sui version to download from the Sui's Github release page" + required: true +runs: + using: "composite" + steps: + - run: wget "https://github.com/MystenLabs/sui/releases/download/${{ inputs.sui_ref }}/sui-${{ inputs.sui_ref }}-ubuntu-x86_64.tgz" + shell: bash + - run: tar -xvf "sui-${{ inputs.sui_ref }}-ubuntu-x86_64.tgz" + shell: bash + - run: mkdir -p /home/runner/.local/bin + shell: bash + - run: mv sui /home/runner/.local/bin/sui + shell: bash + - run: sudo chmod +x /home/runner/.local/bin/sui + shell: bash + - run: sui --version + shell: bash diff --git a/.github/workflows/python.yml b/.github/workflows/python.yml new file mode 100644 index 0000000..b9b98f1 --- /dev/null +++ b/.github/workflows/python.yml @@ -0,0 +1,13 @@ +# Github workflow to check python code + +name: Python +on: [push] + +jobs: + # https://black.readthedocs.io/en/stable/integrations/github_actions.html + formatting-check: + name: Formatting Check + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: psf/black@stable diff --git a/.github/workflows/talus-agentic-framework.yml b/.github/workflows/talus-agentic-framework.yml new file mode 100644 index 0000000..965c1f1 --- /dev/null +++ b/.github/workflows/talus-agentic-framework.yml @@ -0,0 +1,160 @@ +# Github workflow to build and test the Talus Agentic Framework project + +name: Talus Agentic Framework +on: [push] + +env: + # defines what Sui version to install from the Sui's Github release page + # https://github.com/MystenLabs/sui/releases + SUI_REF: testnet-v1.26.1 + +jobs: + # 1. Get Sui CLI + # 2. Builds and tests talus framework package + build-agentic-framework: + name: (Move) Build Agentic Framework + runs-on: ubuntu-latest + steps: + - name: Check out repository code + uses: actions/checkout@v4 + + # 1. 
+ - name: Fetch Sui CLI + uses: ./.github/actions/fetch-sui-cli + with: + sui_ref: ${{ env.SUI_REF }} + + # 2. + - run: sui move build -p onchain + - run: sui move test -p onchain + + # We use nightly for formatting only because lots of nice format rules are + # not available in stable Rust yet. + # + # 1. Get nightly Rust toolchain + # 2. Check Rust formatting + check-e2e-tests-fmt: + name: (Rust) Check Formatting + runs-on: ubuntu-latest + steps: + - name: Check out repository code + uses: actions/checkout@v4 + + # 1. + - name: Set up Rust + uses: actions-rs/toolchain@v1 + with: + toolchain: nightly + profile: minimal + override: true + components: rustfmt + + # 2. + - run: cd e2e_tests && cargo fmt -- --check + + # 1. Get stable Rust toolchain + # 2. Set up caching + # 3. Build and check Rust binary + # 4. Upload Rust binary as artifact + build-e2e-tests: + name: (Rust) Build E2E Tests + runs-on: ubuntu-latest + steps: + - name: Check out repository code + uses: actions/checkout@v4 + + # 1. + - name: Set up Rust + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + profile: minimal + override: true + components: clippy + + # 2. + - name: Cache Rust dependencies + uses: actions/cache@v3 + with: + path: | + ~/.cargo/bin/ + ~/.cargo/registry/index/ + ~/.cargo/registry/cache/ + ~/.cargo/git/db/ + e2e_tests/target/ + key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-cargo- + + # 3. + - run: cd e2e_tests && cargo build + - run: cd e2e_tests && cargo clippy -- -D warnings + + # 4. + - name: Upload Rust binary + uses: actions/upload-artifact@v3 + with: + name: e2e-tests-binary # ARTIFACT NAME + path: e2e_tests/target/debug/e2e_tests_bin # FROM THIS PATH + retention-days: 1 # we only need this for the next job + + # 1. Get necessary files: code, Sui CLI, Rust binary. + # The Ollama APIs are mocked in the Rust e2e binary + # 2. Start Sui Localnet as a bg process with a fresh genesis and localnet wallet + # 3. Deploy Talus Pkg and export FW_PKG_ID env variable + # 4. Run E2E Tests binary with appropriate env variables + # 5. Shutdown the localnet to clean up + run-e2e-tests: + name: Run E2E Tests + runs-on: ubuntu-latest + needs: [build-agentic-framework, build-e2e-tests] + steps: + # 1. + - name: Check out repository code + uses: actions/checkout@v4 + - name: Fetch Sui CLI + uses: ./.github/actions/fetch-sui-cli + with: + sui_ref: ${{ env.SUI_REF }} + - name: Download Rust binary + uses: actions/download-artifact@v3 + with: + name: e2e-tests-binary + + # 2. + - name: Start Sui Localnet + run: | + sui genesis -f + nohup sui start & + echo $! > sui-localnet.pid & + sleep 5 + shell: bash + + # 3. + - name: Deploy Talus Pkg and export FW_PKG_ID + run: | + cd onchain + json=$(sui client publish --skip-dependency-verification --json) + + fw_pkg_id=$(echo $json | jq -cr '.objectChanges[] | select(.packageId) | .packageId') + if [ -z "$fw_pkg_id" ]; then + echo "Cannot get pkg ID from JSON \n\n${json}" + else + echo "Talus framework package ID: $fw_pkg_id" + fi + + echo "FW_PKG_ID=$(echo $fw_pkg_id)" >> $GITHUB_ENV + + # 4. + - name: Run E2E Tests binary + run: | + export SUI_WALLET_PATH=~/.sui/sui_config/client.yaml + export RUST_LOG=info,e2e_tests=debug + chmod +x e2e_tests_bin + ./e2e_tests_bin + + # 5. 
+ - name: Shutdown Sui Localnet + run: | + kill $(cat sui-localnet.pid) + shell: bash diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..4935560 --- /dev/null +++ b/.gitignore @@ -0,0 +1,164 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# Mac stuff +.DS_Store + +# python things +__pycache__ +__pypackages__ + +# Generator obsolete +.openapi-generator + +node_modules + +tmp.* +.wrk.local + +Move.lock +model_addresses.json + +# Bash tools +nohup.out + +# IDEs +.idea +*.iml +.vscode diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..e6ab15f --- /dev/null +++ b/LICENSE @@ -0,0 +1,38 @@ +Business Source License 1.1 +License text copyright (c) 2020 MariaDB Corporation Ab, All Rights Reserved. +“Business Source License” is a trademark of MariaDB Corporation Ab. +____________________________________________________________________________ + +Business Source License 1.1 +Parameters +Licensor: Talus Labs, Inc. +Licensed Work: Talus AI Agent Framework © 2024 Talus Labs, Inc. +Change Date: December 31, 2027 +Change License: Apache License, Version 2.0 +____________________________________________________________________________ + +Terms +The Licensor hereby grants you the right to copy, modify, create derivative works, redistribute, and make non-production use of the Licensed Work. The Licensor may make an Additional Use Grant, above, permitting limited production use. 
+ +Effective on the Change Date, or the fourth anniversary of the first publicly available distribution of a specific version of the Licensed Work under this License, whichever comes first, the Licensor hereby grants you rights under the terms of the Change License, and the rights granted in the paragraph above terminate. + +If your use of the Licensed Work does not comply with the requirements currently in effect as described in this License, you must purchase a commercial license from the Licensor, its affiliated entities, or authorized resellers, or you must refrain from using the Licensed Work. + +All copies of the original and modified Licensed Work, and derivative works of the Licensed Work, are subject to this License. This License applies separately for each version of the Licensed Work and the Change Date may vary for each version of the Licensed Work released by Licensor. + +You must conspicuously display this License on each original or modified copy of the Licensed Work. If you receive the Licensed Work in original or modified form from a third party, the terms and conditions set forth in this License apply to your use of that work. +Any use of the Licensed Work in violation of this License will automatically terminate your rights under this License for the current and all other versions of the Licensed Work. + +This License does not grant you any right in any trademark or logo of Licensor or its affiliates (provided that you may use a trademark or logo of Licensor as expressly required by this License). + +TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE LICENSED WORK IS PROVIDED ON AN “AS IS” BASIS. LICENSOR HEREBY DISCLAIMS ALL WARRANTIES AND CONDITIONS, EXPRESS OR IMPLIED, INCLUDING (WITHOUT LIMITATION) WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, AND TITLE. +_____________________________________________________________________________ +Covenants of Licensor +In consideration of the right to use this License’s text and the “Business Source License” name and trademark, Licensor covenants to MariaDB, and to all other recipients of the licensed work to be provided by Licensor: + +To specify as the Change License the GPL Version 2.0 or any later version, or a license that is compatible with GPL Version 2.0 or a later version, where “compatible” means that software provided under the Change License can be included in a program with software provided under GPL Version 2.0 or a later version. Licensor may specify additional Change Licenses without limitation. + +To either: (a) specify an additional grant of rights to use that does not impose any additional restriction on the right granted in this License, as the Additional Use Grant; or (b) insert the text “None” to specify a Change Date. Not to modify this License in any other way. +____________________________________________________________________________ +Notice +The Business Source License (this document, or the “License”) is not an Open Source license. However, the Licensed Work will eventually be made available under an Open Source License, as stated in this License. diff --git a/README.md b/README.md new file mode 100644 index 0000000..142ba25 --- /dev/null +++ b/README.md @@ -0,0 +1,131 @@ +# Nexus + +Nexus is the onchain Agentic Framework created by [Talus][talus]. +The current version, provided here, is a sneak preview of our first iteration. 
There are +[many ideas](#looking-forward) to develop it further, but in its current state it still +showcases what an onchain agentic framework is and works end-to-end. For more details about +Talus, please visit our [blog][blog] and read our [litepaper][litepaper]. + +- [Nexus](#nexus) + - [Architecture](#architecture) + - [What is provided](#what-is-provided) + - [Looking forward](#looking-forward) + - [Contributing](#contributing) + - [References](#references) + - [Acknowledgments](#acknowledgments) + - [License](#license) + +## Architecture + +Nexus is built on [Sui Move][sui_move] and consists of several key components: + +1. **Onchain logic**: The onchain core logic of Nexus is implemented in Sui Move smart + contracts in the [`onchain`][onchain] folder. +2. **SDK**: A Python SDK, in the [`nexus_sdk`][nexus_sdk] folder, which provides easy-to-use functions for setting up agents, interacting + with the smart contracts, and running agentic workflows. +3. **Offchain components**: Services that handle LLM inference and anything that runs offchain, + including supported tools. For more details, please see [`offchain`][offchain], where we + define two Python packages named `nexus_events` and `nexus_tools`. + +The high-level architecture is shown in the following diagram: + +```mermaid +graph TD + subgraph offchain["Offchain components"] + subgraph nexus_tools["nexus_tools"] + LLM_Inference["LLM Inference"] + Tools + end + nexus_events + end + + style offchain stroke-dasharray: 5 5, stroke-width:2px; + + subgraph onchain[""] + nexus_contracts["Nexus Contracts
(folder onchain)"] + blockchain["Blockchain"] + end + + style onchain stroke-dasharray: 5 5, stroke-width:2px; + + subgraph agent["Example Agent"] + agent_instructions["Instructions"] + agent_ui["UI"] + end + + style agent stroke-dasharray: 5 5, stroke-width:2px; + + nexus_sdk["nexus_sdk"] + + nexus_events --> nexus_tools + nexus_events --> blockchain + nexus_contracts --> blockchain + nexus_sdk --> onchain + + agent --> nexus_sdk +``` + +## What is provided + +- Nexus, an onchain agentic framework, made of the components described above. +- [Examples][examples] of agents implemented with Nexus. +- Complete instructions on how to set up a full environment, including the blockchain, smart + contracts, and the offchain components. + +## Looking forward + +Our first iteration focused on feasibility and served as an aid in exploring the design space. You +can build agents that work end-to-end. Here are some thoughts, which also give you an idea of +some of the things we are actively working on: + +- Develop and experiment with novel **pricing/payment mechanisms**. +- Implement a **slashing mechanism** to penalize misbehaving nodes and maintain network integrity. +- Expand support to include **multiple modalities** beyond LLMs. +- Enable **customization of tool parameters** by both users and agents for greater flexibility. +- Introduce better **error handling** for agent responses to improve reliability. +- Implement **parallel execution** capabilities to enhance task processing efficiency. +- Develop support for **advanced task flow features**, such as loops and backtracking, to handle more complex workflows. +- Provide **offchain storage** options to reduce onchain data storage needs. +- Introduce **privacy features** to allow for confidential or private data handling. + +Stay tuned! + +## Contributing + +If you find an issue setting up or running Nexus that is not covered by our documentation, +please open a [ticket][bugs] _and_ add the [`external`][label_external] label. + +## References + +- Talus [site][talus]. +- Talus [blog][blog]. +- Talus [litepaper][litepaper]. + +## Acknowledgments + +In designing this version of Nexus, we have taken inspiration from [crewAI][crewAI]. Concepts +like 'Tool', 'Task', etc. come from there. We also use crewAI tools in the implementation. + +## License + +- The Nexus agentic framework (in [`onchain`][onchain] and [`offchain`][offchain]) is + licensed under [BSL 1.1][Nexus_License]. +- The [Nexus SDK][nexus_sdk] is licensed under [Apache 2.0][SDK_License]. +- [Examples][examples] are licensed under [Apache 2.0][Examples_License]. + + + +[talus]: https://talus.network/ +[blog]: https://blog.talus.network/ +[litepaper]: https://talus.network/litepaper.pdf +[crewAI]: https://github.com/crewAIInc/crewAI +[sui_move]: https://docs.sui.io/concepts/sui-move-concepts +[onchain]: ./onchain/ +[offchain]: ./offchain/ +[nexus_sdk]: ./nexus_sdk/ +[examples]: ./examples/ +[bugs]: https://github.com/Talus-Network/nexus/issues +[label_external]: https://github.com/Talus-Network/nexus/labels/external +[Nexus_License]: ./LICENSE +[SDK_License]: ./nexus_sdk/LICENSE +[Examples_License]: ./examples/LICENSE diff --git a/TROUBLESHOOTING.md b/TROUBLESHOOTING.md new file mode 100644 index 0000000..a3aa030 --- /dev/null +++ b/TROUBLESHOOTING.md @@ -0,0 +1,25 @@ +# "Object is not available for consumption" + +You may encounter this error when working with Sui localnet.
+In a nutshell, this means that some previous operation using this object, usually a `Coin`, has not finished correctly. +Typically this happens when you turn off the localnet and start it again. + +To fix it, you can regenerate the localnet: + +```bash +localnet regen +``` + +# "This query type is not supported by the full node" + +Most likely you are using a very different version of localnet with Suibase. +You need to pin the version in your Suibase.yml file. +See the command `just suibase-setup`. + +# "Server returned an error status code: 500" + +Try regenerating the localnet: + +```bash +localnet regen +``` diff --git a/e2e_tests/.gitignore b/e2e_tests/.gitignore new file mode 100644 index 0000000..f456bd6 --- /dev/null +++ b/e2e_tests/.gitignore @@ -0,0 +1,3 @@ +# ! Don't ignore Cargo.lock so that we can cache the dependencies on CI + +target diff --git a/e2e_tests/Cargo.lock b/e2e_tests/Cargo.lock new file mode 100644 index 0000000..e12d908 --- /dev/null +++ b/e2e_tests/Cargo.lock @@ -0,0 +1,7184 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "addchain" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b2e69442aa5628ea6951fa33e24efe8313f4321a91bd729fc2f75bdfc858570" +dependencies = [ + "num-bigint 0.3.3", + "num-integer", + "num-traits", +] + +[[package]] +name = "addr2line" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5fb1d8e4442bd405fdfd1dacb42792696b0cf9cb15882e5d097b742a676d375" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler2" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" + +[[package]] +name = "aead" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0" +dependencies = [ + "crypto-common", + "generic-array", +] + +[[package]] +name = "aes" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" +dependencies = [ + "cfg-if", + "cipher", + "cpufeatures", +] + +[[package]] +name = "aes-gcm" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "831010a0f742e1209b3bcea8fab6a8e149051ba6099432c8cb2cc117dec3ead1" +dependencies = [ + "aead", + "aes", + "cipher", + "ctr", + "ghash", + "subtle", +] + +[[package]] +name = "ahash" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" +dependencies = [ + "getrandom", + "once_cell", + "version_check", +] + +[[package]] +name = "ahash" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" +dependencies = [ + "cfg-if", + "once_cell", + "version_check", + "zerocopy", +] + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "aliasable" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum =
"250f629c0161ad8107cf89319e990051fae62832fd343083bea452d93e2205fd" + +[[package]] +name = "alloc-no-stdlib" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" + +[[package]] +name = "alloc-stdlib" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" +dependencies = [ + "alloc-no-stdlib", +] + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anemo" +version = "0.0.0" +source = "git+https://github.com/mystenlabs/anemo.git?rev=26d415eb9aa6a2417be3c03c57d6e93c30bd1ad7#26d415eb9aa6a2417be3c03c57d6e93c30bd1ad7" +dependencies = [ + "anyhow", + "async-trait", + "bincode", + "bytes", + "ed25519", + "futures", + "hex", + "http 0.2.12", + "matchit 0.5.0", + "pin-project-lite", + "pkcs8 0.9.0", + "quinn", + "quinn-proto", + "rand", + "rcgen", + "ring 0.16.20", + "rustls 0.21.12", + "rustls-webpki 0.101.7", + "serde", + "serde_json", + "socket2", + "tap", + "thiserror", + "tokio", + "tokio-util", + "tower 0.4.13", + "tracing", + "x509-parser", +] + +[[package]] +name = "anstream" +version = "0.6.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1" + +[[package]] +name = "anstyle-parse" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8" +dependencies = [ + "anstyle", + "windows-sys 0.52.0", +] + +[[package]] +name = "anyhow" +version = "1.0.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6" +dependencies = [ + "backtrace", +] + +[[package]] +name = "ark-bls12-381" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c775f0d12169cba7aae4caeb547bb6a50781c7449a8aa53793827c9ec4abf488" +dependencies = [ + "ark-ec", + "ark-ff", + "ark-serialize", + "ark-std", +] + +[[package]] +name = "ark-bn254" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a22f4561524cd949590d78d7d4c5df8f592430d221f7f3c9497bbafd8972120f" +dependencies = [ + "ark-ec", + "ark-ff", + "ark-std", +] + +[[package]] +name = "ark-crypto-primitives" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3a13b34da09176a8baba701233fdffbaa7c1b1192ce031a3da4e55ce1f1a56" +dependencies = [ + "ark-ec", + "ark-ff", + "ark-relations", + "ark-serialize", + "ark-snark", + "ark-std", + "blake2", + "derivative", + "digest 0.10.7", + "sha2 0.10.8", +] + +[[package]] +name = "ark-ec" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "defd9a439d56ac24968cca0571f598a61bc8c55f71d50a89cda591cb750670ba" +dependencies = [ + "ark-ff", + "ark-poly", + "ark-serialize", + "ark-std", + "derivative", + "hashbrown 0.13.2", + "itertools 0.10.5", + "num-traits", + "zeroize", +] + +[[package]] +name = "ark-ff" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec847af850f44ad29048935519032c33da8aa03340876d351dfab5660d2966ba" +dependencies = [ + "ark-ff-asm", + "ark-ff-macros", + "ark-serialize", + "ark-std", + "derivative", + "digest 0.10.7", + "itertools 0.10.5", + "num-bigint 0.4.6", + "num-traits", + "paste", + "rustc_version", + "zeroize", +] + +[[package]] +name = "ark-ff-asm" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ed4aa4fe255d0bc6d79373f7e31d2ea147bcf486cba1be5ba7ea85abdb92348" +dependencies = [ + "quote 1.0.37", + "syn 1.0.109", +] + +[[package]] +name = "ark-ff-macros" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565" +dependencies = [ + "num-bigint 0.4.6", + "num-traits", + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 1.0.109", +] + +[[package]] +name = "ark-groth16" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20ceafa83848c3e390f1cbf124bc3193b3e639b3f02009e0e290809a501b95fc" +dependencies = [ + "ark-crypto-primitives", + "ark-ec", + "ark-ff", + "ark-poly", + "ark-relations", + "ark-serialize", + "ark-std", +] + +[[package]] +name = "ark-poly" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d320bfc44ee185d899ccbadfa8bc31aab923ce1558716e1997a1e74057fe86bf" +dependencies = [ + "ark-ff", + "ark-serialize", + "ark-std", + "derivative", + "hashbrown 0.13.2", +] + +[[package]] +name = "ark-relations" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00796b6efc05a3f48225e59cb6a2cda78881e7c390872d5786aaf112f31fb4f0" +dependencies = [ + "ark-ff", + "ark-std", + "tracing", +] + +[[package]] +name = "ark-secp256r1" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3975a01b0a6e3eae0f72ec7ca8598a6620fc72fa5981f6f5cca33b7cd788f633" +dependencies = [ + "ark-ec", + "ark-ff", + "ark-std", +] + +[[package]] +name = "ark-serialize" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adb7b85a02b83d2f22f89bd5cac66c9c89474240cb6207cb1efc16d098e822a5" +dependencies = [ + "ark-serialize-derive", + "ark-std", + "digest 0.10.7", + "num-bigint 0.4.6", +] + +[[package]] +name = "ark-serialize-derive" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae3281bc6d0fd7e549af32b52511e1302185bd688fd3359fa36423346ff682ea" 
+dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 1.0.109", +] + +[[package]] +name = "ark-snark" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84d3cc6833a335bb8a600241889ead68ee89a3cf8448081fb7694c0fe503da63" +dependencies = [ + "ark-ff", + "ark-relations", + "ark-serialize", + "ark-std", +] + +[[package]] +name = "ark-std" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" +dependencies = [ + "num-traits", + "rand", +] + +[[package]] +name = "arrayref" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb" + +[[package]] +name = "arrayvec" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" + +[[package]] +name = "asn1-rs" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f6fd5ddaf0351dff5b8da21b2fb4ff8e08ddd02857f0bf69c47639106c0fff0" +dependencies = [ + "asn1-rs-derive", + "asn1-rs-impl", + "displaydoc", + "nom", + "num-traits", + "rusticata-macros", + "thiserror", + "time", +] + +[[package]] +name = "asn1-rs-derive" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "726535892e8eae7e70657b4c8ea93d26b8553afb1ce617caee529ef96d7dee6c" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 1.0.109", + "synstructure", +] + +[[package]] +name = "asn1-rs-impl" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2777730b2039ac0f95f093556e61b6d26cebed5393ca6f152717777cec3a42ed" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 1.0.109", +] + +[[package]] +name = "async-compression" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "942c7cd7ae39e91bde4820d74132e9862e62c2f386c3aa90ccf55949f5bad63a" +dependencies = [ + "brotli", + "flate2", + "futures-core", + "memchr", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "async-lock" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" +dependencies = [ + "event-listener", +] + +[[package]] +name = "async-stream" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" +dependencies = [ + "async-stream-impl", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", +] + +[[package]] +name = "async-trait" +version = "0.1.82" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a27b8a3a6e1a44fa4c8baf1f653e4172e81486d4941f2237e20dc2d0cf4ddff1" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + 
+[[package]] +name = "auto_ops" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7460f7dd8e100147b82a63afca1a20eb6c231ee36b90ba7272e14951cb58af59" + +[[package]] +name = "autocfg" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" + +[[package]] +name = "axum" +version = "0.6.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" +dependencies = [ + "async-trait", + "axum-core 0.3.4", + "base64 0.21.7", + "bitflags 1.3.2", + "bytes", + "futures-util", + "headers", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.30", + "itoa", + "matchit 0.7.3", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sha1", + "sync_wrapper 0.1.2", + "tokio", + "tokio-tungstenite", + "tower 0.4.13", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f43644eed690f5374f1af436ecd6aea01cd201f6fbdf0178adaf6907afb2cec" +dependencies = [ + "async-trait", + "axum-core 0.4.4", + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.4.1", + "hyper-util", + "itoa", + "matchit 0.7.3", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "sync_wrapper 1.0.1", + "tokio", + "tower 0.5.1", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http 0.2.12", + "http-body 0.4.6", + "mime", + "rustversion", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e6b8ba012a258d63c9adfa28b9ddcf66149da6f986c5b5452e629d5ee64bf00" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "http-body-util", + "mime", + "pin-project-lite", + "rustversion", + "sync_wrapper 1.0.1", + "tower-layer", + "tower-service", +] + +[[package]] +name = "backtrace" +version = "0.3.74" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" +dependencies = [ + "addr2line", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", + "windows-targets 0.52.6", +] + +[[package]] +name = "base-x" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cbbc9d0964165b47557570cce6c952866c2678457aca742aafc9fb771d30270" + +[[package]] +name = "base16ct" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" + +[[package]] +name = "base16ct" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" + +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "base64ct" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" + +[[package]] +name = "bcs" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85b6598a2f5d564fb7855dc6b06fd1c38cff5a72bd8b863a4d021938497b440a" +dependencies = [ + "serde", + "thiserror", +] + +[[package]] +name = "bech32" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d86b93f97252c47b41663388e6d155714a9d0c398b99f1005cbc5f978b29f445" + +[[package]] +name = "beef" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1" +dependencies = [ + "serde", +] + +[[package]] +name = "bellpepper" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ae286c2cb403324ab644c7cc68dceb25fe52ca9429908a726d7ed272c1edf7b" +dependencies = [ + "bellpepper-core", + "byteorder", + "ff 0.13.0", +] + +[[package]] +name = "bellpepper-core" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d8abb418570756396d722841b19edfec21d4e89e1cf8990610663040ecb1aea" +dependencies = [ + "blake2s_simd", + "byteorder", + "ff 0.13.0", + "serde", + "thiserror", +] + +[[package]] +name = "better_any" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b359aebd937c17c725e19efcb661200883f04c49c53e7132224dac26da39d4a0" +dependencies = [ + "better_typeid_derive", +] + +[[package]] +name = "better_typeid_derive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3deeecb812ca5300b7d3f66f730cc2ebd3511c3d36c691dd79c165d5b19a26e3" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 1.0.109", +] + +[[package]] +name = "bincode" +version = "1.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" +dependencies = [ + "serde", +] + +[[package]] +name = "bip32" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b30ed1d6f8437a487a266c8293aeb95b61a23261273e3e02912cdb8b68bf798b" +dependencies = [ + "bs58", + "hmac", + "k256", + "once_cell", + "pbkdf2", + "rand_core", + "ripemd", + "sha2 0.10.8", + "subtle", + "zeroize", +] + +[[package]] +name = "bit-set" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" + +[[package]] +name = "bitcoin-private" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "73290177011694f38ec25e165d0387ab7ea749a4b81cd4c80dae5988229f7a57" + +[[package]] +name = "bitcoin_hashes" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d7066118b13d4b20b23645932dfb3a81ce7e29f95726c2036fa33cd7b092501" +dependencies = [ + "bitcoin-private", +] + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" + +[[package]] +name = "bitmaps" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2" +dependencies = [ + "typenum", +] + +[[package]] +name = "bitvec" +version = "0.20.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7774144344a4faa177370406a7ff5f1da24303817368584c6206c8303eb07848" +dependencies = [ + "funty 1.1.0", + "radium 0.6.2", + "tap", + "wyz 0.2.0", +] + +[[package]] +name = "bitvec" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" +dependencies = [ + "funty 2.0.0", + "radium 0.7.0", + "tap", + "wyz 0.5.1", +] + +[[package]] +name = "blake2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" +dependencies = [ + "digest 0.10.7", +] + +[[package]] +name = "blake2b_simd" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23285ad32269793932e830392f2fe2f83e26488fd3ec778883a93c8323735780" +dependencies = [ + "arrayref", + "arrayvec", + "constant_time_eq", +] + +[[package]] +name = "blake2s_simd" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94230421e395b9920d23df13ea5d77a20e1725331f90fbbf6df6040b33f756ae" +dependencies = [ + "arrayref", + "arrayvec", + "constant_time_eq", +] + +[[package]] +name = "block-buffer" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" +dependencies = [ + "generic-array", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "block-padding" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8894febbff9f758034a5b8e12d87918f56dfc64a8e1fe757d65e29041538d93" +dependencies = [ + "generic-array", +] + +[[package]] +name = "blst" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4378725facc195f1a538864863f6de233b500a8862747e7f165078a419d5e874" +dependencies = [ + "cc", + "glob", + "threadpool", + "zeroize", +] + +[[package]] +name = "blstrs" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a8a8ed6fefbeef4a8c7b460e4110e12c5e22a5b7cf32621aae6ad650c4dcf29" +dependencies = [ + "blst", + "byte-slice-cast", + "ff 0.13.0", + "group 
0.13.0", + "pairing", + "rand_core", + "serde", + "subtle", +] + +[[package]] +name = "brotli" +version = "3.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d640d25bc63c50fb1f0b545ffd80207d2e10a4c965530809b40ba3386825c391" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", + "brotli-decompressor", +] + +[[package]] +name = "brotli-decompressor" +version = "2.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e2e4afe60d7dd600fdd3de8d0f08c2b7ec039712e3b6137ff98b7004e82de4f" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", +] + +[[package]] +name = "bs58" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "771fe0050b883fcc3ea2359b1a96bcfbc090b7116eae7c3c512c7a083fdf23d3" +dependencies = [ + "sha2 0.9.9", +] + +[[package]] +name = "bstr" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40723b8fb387abc38f4f4a37c09073622e41dd12327033091ef8950659e6dc0c" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "bumpalo" +version = "3.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" + +[[package]] +name = "byte-slice-cast" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" + +[[package]] +name = "bytecount" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ce89b21cab1437276d2650d57e971f9d548a2d9037cc231abdc0562b97498ce" + +[[package]] +name = "bytemuck" +version = "1.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94bbb0ad554ad961ddc5da507a12a29b14e4ae5bda06b19f575a3e6079d2e2ae" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3" +dependencies = [ + "serde", +] + +[[package]] +name = "cbc" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26b52a9543ae338f279b96b0b9fed9c8093744685043739079ce85cd58f289a6" +dependencies = [ + "cipher", +] + +[[package]] +name = "cc" +version = "1.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07b1695e2c7e8fc85310cde85aeaab7e3097f593c91d209d3f9df76c928100f0" +dependencies = [ + "shlex", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "js-sys", + "num-traits", + "serde", + "wasm-bindgen", + "windows-targets 0.52.6", +] + +[[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +dependencies = [ + "crypto-common", + "inout", +] + 
+[[package]] +name = "clap" +version = "4.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0956a43b323ac1afaffc053ed5c4b7c1f1800bacd1683c353aabbb752515dd3" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d72166dd41634086d5803a47eb71ae740e61d84709c36f3c34110173db3961b" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim 0.11.1", + "terminal_size", +] + +[[package]] +name = "clap_derive" +version = "4.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" +dependencies = [ + "heck 0.5.0", + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", +] + +[[package]] +name = "clap_lex" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" + +[[package]] +name = "codespan" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3362992a0d9f1dd7c3d0e89e0ab2bb540b7a95fea8cd798090e758fda2899b5e" +dependencies = [ + "codespan-reporting", + "serde", +] + +[[package]] +name = "codespan-reporting" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e" +dependencies = [ + "serde", + "termcolor", + "unicode-width", +] + +[[package]] +name = "colorchoice" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" + +[[package]] +name = "colored" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbf2150cce219b664a8a70df7a1f933836724b503f8a413af9365b4dcc4d90b8" +dependencies = [ + "lazy_static", + "windows-sys 0.48.0", +] + +[[package]] +name = "consensus-config" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "fastcrypto", + "mysten-network", + "rand", + "serde", + "shared-crypto", +] + +[[package]] +name = "console" +version = "0.15.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" +dependencies = [ + "encode_unicode", + "lazy_static", + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "const-oid" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" + +[[package]] +name = "constant_time_eq" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" + +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "core2" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b49ba7ef1ad6107f8824dbe97de947cbaac53c44e7f9756a1fba0d37c1eec505" +dependencies = [ + "memchr", +] + +[[package]] +name = "cpufeatures" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "608697df725056feaccfa42cffdaeeec3fccc4ffc38358ecd19b243e716a78e0" +dependencies = [ + "libc", +] + +[[package]] +name = "crc32fast" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crunchy" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + +[[package]] +name = "crypto-bigint" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" +dependencies = [ + "generic-array", + "rand_core", + "subtle", + "zeroize", +] + +[[package]] +name = "crypto-bigint" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" +dependencies = [ + "generic-array", + "rand_core", + "subtle", + "zeroize", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "rand_core", + "typenum", +] + +[[package]] +name = "csv" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac574ff4d437a7b5ad237ef331c17ccca63c46479e5b5453eb8e10bb99a759fe" +dependencies = [ + "csv-core", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "csv-core" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5efa2b3d7902f4b634a20cae3c9c4e6209dc4779feb6863329607560143efa70" +dependencies = [ + "memchr", +] + +[[package]] +name = "ctr" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835" +dependencies = [ + "cipher", +] + +[[package]] +name = "curve25519-dalek-ng" +version = "4.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c359b7249347e46fb28804470d071c921156ad62b3eef5d34e2ba867533dec8" +dependencies = [ + "byteorder", + "digest 0.9.0", + "rand_core", + "subtle-ng", + "zeroize", +] + +[[package]] +name = "darling" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b750cb3417fd1b327431a470f388520309479ab0bf5e323505daf0290cd3850" +dependencies = [ + "darling_core 0.14.4", + "darling_macro 0.14.4", +] + +[[package]] +name = "darling" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" +dependencies = [ + "darling_core 0.20.10", + "darling_macro 0.20.10", +] + +[[package]] +name = "darling_core" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "109c1ca6e6b7f82cc233a97004ea8ed7ca123a9af07a8230878fcfda9b158bf0" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2 1.0.86", + "quote 1.0.37", + "strsim 0.10.0", + "syn 1.0.109", +] + +[[package]] +name = "darling_core" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2 1.0.86", + "quote 1.0.37", + "strsim 0.11.1", + "syn 2.0.77", +] + +[[package]] +name = "darling_macro" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4aab4dbc9f7611d8b55048a3a16d2d010c2c8334e46304b40ac1cc14bf3b48e" +dependencies = [ + "darling_core 0.14.4", + "quote 1.0.37", + "syn 1.0.109", +] + +[[package]] +name = "darling_macro" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" +dependencies = [ + "darling_core 0.20.10", + "quote 1.0.37", + "syn 2.0.77", +] + +[[package]] +name = "dashmap" +version = "5.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" +dependencies = [ + "cfg-if", + "hashbrown 0.14.5", + "lock_api", + "once_cell", + "parking_lot_core", +] + +[[package]] +name = "data-encoding" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8566979429cf69b49a5c740c60791108e86440e8be149bbea4fe54d2c32d6e2" + +[[package]] +name = "data-encoding-macro" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1559b6cba622276d6d63706db152618eeb15b89b3e4041446b05876e352e639" +dependencies = [ + "data-encoding", + "data-encoding-macro-internal", +] + +[[package]] +name = "data-encoding-macro-internal" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "332d754c0af53bc87c108fed664d121ecf59207ec4196041f04d6ab9002ad33f" +dependencies = [ + "data-encoding", + "syn 1.0.109", +] + +[[package]] +name = "der" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1a467a65c5e759bce6e65eaf91cc29f466cdc57cb65777bd646872a8a1fd4de" +dependencies = [ + "const-oid", + "pem-rfc7468 0.6.0", + "zeroize", +] + +[[package]] +name = "der" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" +dependencies = [ + "const-oid", + "pem-rfc7468 0.7.0", + "zeroize", +] + +[[package]] +name = "der-parser" +version = "8.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbd676fbbab537128ef0278adb5576cf363cff6aa22a7b24effe97347cfab61e" +dependencies = [ + "asn1-rs", + "displaydoc", + "nom", + "num-bigint 0.4.6", + "num-traits", + "rusticata-macros", +] + +[[package]] +name = "deranged" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +dependencies = [ + "powerfmt", + "serde", +] + +[[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 1.0.109", +] + 
+[[package]] +name = "derive-syn-parse" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e79116f119dd1dba1abf1f3405f03b9b0e79a27a3883864bfebded8a3dc768cd" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 1.0.109", +] + +[[package]] +name = "derive_more" +version = "0.99.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" +dependencies = [ + "convert_case", + "proc-macro2 1.0.86", + "quote 1.0.37", + "rustc_version", + "syn 2.0.77", +] + +[[package]] +name = "difference" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198" + +[[package]] +name = "digest" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" +dependencies = [ + "generic-array", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer 0.10.4", + "const-oid", + "crypto-common", + "subtle", +] + +[[package]] +name = "dirs" +version = "4.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3aa72a6f96ea37bbc5aa912f6788242832f75369bdfdadcb0e38423f100059" +dependencies = [ + "dirs-sys", +] + +[[package]] +name = "dirs-next" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" +dependencies = [ + "cfg-if", + "dirs-sys-next", +] + +[[package]] +name = "dirs-sys" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6" +dependencies = [ + "libc", + "redox_users", + "winapi", +] + +[[package]] +name = "dirs-sys-next" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" +dependencies = [ + "libc", + "redox_users", + "winapi", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", +] + +[[package]] +name = "doc-comment" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" + +[[package]] +name = "dotenvy" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" + +[[package]] +name = "dunce" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" + +[[package]] +name = "dyn-clone" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125" + +[[package]] +name = "e2e_tests" +version = "0.1.0" +dependencies = [ + "anyhow", + "axum 0.7.6", + "dotenvy", + "env_logger", + "futures-util", + 
"log", + "rand", + "reqwest 0.12.7", + "serde", + "serde_json", + "sui-keys", + "sui-sdk", + "tokio", +] + +[[package]] +name = "ecdsa" +version = "0.14.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413301934810f597c1d19ca71c8710e99a3f1ba28a0d2ebc01551a2daeea3c5c" +dependencies = [ + "der 0.6.1", + "elliptic-curve 0.12.3", + "rfc6979 0.3.1", + "signature 1.6.4", +] + +[[package]] +name = "ecdsa" +version = "0.16.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" +dependencies = [ + "der 0.7.9", + "digest 0.10.7", + "elliptic-curve 0.13.8", + "rfc6979 0.4.0", + "signature 2.2.0", + "spki 0.7.3", +] + +[[package]] +name = "ed25519" +version = "1.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91cff35c70bba8a626e3185d8cd48cc11b5437e1a5bcd15b9b5fa3c64b6dfee7" +dependencies = [ + "pkcs8 0.9.0", + "signature 1.6.4", + "zeroize", +] + +[[package]] +name = "ed25519-consensus" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c8465edc8ee7436ffea81d21a019b16676ee3db267aa8d5a8d729581ecf998b" +dependencies = [ + "curve25519-dalek-ng", + "hex", + "rand_core", + "serde", + "sha2 0.9.9", + "thiserror", + "zeroize", +] + +[[package]] +name = "either" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" + +[[package]] +name = "elliptic-curve" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7bb888ab5300a19b8e5bceef25ac745ad065f3c9f7efc6de1b91958110891d3" +dependencies = [ + "base16ct 0.1.1", + "crypto-bigint 0.4.9", + "der 0.6.1", + "digest 0.10.7", + "ff 0.12.1", + "generic-array", + "group 0.12.1", + "rand_core", + "sec1 0.3.0", + "subtle", + "zeroize", +] + +[[package]] +name = "elliptic-curve" +version = "0.13.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" +dependencies = [ + "base16ct 0.2.0", + "crypto-bigint 0.5.5", + "digest 0.10.7", + "ff 0.13.0", + "generic-array", + "group 0.13.0", + "pem-rfc7468 0.7.0", + "pkcs8 0.10.2", + "rand_core", + "sec1 0.7.3", + "subtle", + "zeroize", +] + +[[package]] +name = "encode_unicode" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" + +[[package]] +name = "encoding_rs" +version = "0.8.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "enum-compat-util" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "serde_yaml", +] + +[[package]] +name = "enum_dispatch" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa18ce2bc66555b3218614519ac839ddb759a7d6720732f979ef8d13be147ecd" +dependencies = [ + "once_cell", + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", +] + +[[package]] +name = "env_filter" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f2c92ceda6ceec50f43169f9ee8424fe2db276791afde7b2cd8bc084cb376ab" +dependencies = [ + 
"log", + "regex", +] + +[[package]] +name = "env_logger" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13fa619b91fb2381732789fc5de83b45675e882f66623b7d8cb4f643017018d" +dependencies = [ + "anstream", + "anstyle", + "env_filter", + "humantime", + "log", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "errno" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "ethnum" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b90ca2580b73ab6a1f724b76ca11ab632df820fd6040c336200d2c1df7b3c82c" + +[[package]] +name = "event-listener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + +[[package]] +name = "eyre" +version = "0.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec" +dependencies = [ + "indenter", + "once_cell", +] + +[[package]] +name = "fastcrypto" +version = "0.1.8" +source = "git+https://github.com/MystenLabs/fastcrypto?rev=4988a4744fcaf8bc7f60bf660d9a223ed0f54cc6#4988a4744fcaf8bc7f60bf660d9a223ed0f54cc6" +dependencies = [ + "aes", + "aes-gcm", + "ark-ec", + "ark-ff", + "ark-secp256r1", + "ark-serialize", + "auto_ops", + "base64ct", + "bech32", + "bincode", + "blake2", + "blst", + "bs58", + "cbc", + "ctr", + "curve25519-dalek-ng", + "derive_more", + "digest 0.10.7", + "ecdsa 0.16.9", + "ed25519-consensus", + "elliptic-curve 0.13.8", + "fastcrypto-derive", + "generic-array", + "hex", + "hex-literal", + "hkdf", + "lazy_static", + "num-bigint 0.4.6", + "once_cell", + "p256", + "rand", + "readonly", + "rfc6979 0.4.0", + "rsa", + "schemars", + "secp256k1", + "serde", + "serde_json", + "serde_with", + "sha2 0.10.8", + "sha3", + "signature 2.2.0", + "static_assertions", + "thiserror", + "tokio", + "typenum", + "zeroize", +] + +[[package]] +name = "fastcrypto-derive" +version = "0.1.3" +source = "git+https://github.com/MystenLabs/fastcrypto?rev=4988a4744fcaf8bc7f60bf660d9a223ed0f54cc6#4988a4744fcaf8bc7f60bf660d9a223ed0f54cc6" +dependencies = [ + "quote 1.0.37", + "syn 1.0.109", +] + +[[package]] +name = "fastcrypto-tbls" +version = "0.1.0" +source = "git+https://github.com/MystenLabs/fastcrypto?rev=4988a4744fcaf8bc7f60bf660d9a223ed0f54cc6#4988a4744fcaf8bc7f60bf660d9a223ed0f54cc6" +dependencies = [ + "bcs", + "digest 0.10.7", + "fastcrypto", + "hex", + "itertools 0.10.5", + "rand", + "serde", + "sha3", + "tap", + "tracing", + "typenum", +] + +[[package]] +name = "fastcrypto-zkp" +version = "0.1.3" +source = "git+https://github.com/MystenLabs/fastcrypto?rev=4988a4744fcaf8bc7f60bf660d9a223ed0f54cc6#4988a4744fcaf8bc7f60bf660d9a223ed0f54cc6" +dependencies = [ + "ark-bls12-381", + "ark-bn254", + "ark-ec", + "ark-ff", + "ark-groth16", + "ark-relations", + "ark-serialize", + "ark-snark", + "blst", + "byte-slice-cast", + "derive_more", + "fastcrypto", + "ff 0.13.0", + "im", + "itertools 0.12.1", + "lazy_static", + "neptune", + "num-bigint 0.4.6", + "once_cell", + "reqwest 0.11.27", + "schemars", + "serde", + "serde_json", + "typenum", 
+] + +[[package]] +name = "fastrand" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6" + +[[package]] +name = "ff" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" +dependencies = [ + "rand_core", + "subtle", +] + +[[package]] +name = "ff" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" +dependencies = [ + "bitvec 1.0.1", + "byteorder", + "ff_derive", + "rand_core", + "subtle", +] + +[[package]] +name = "ff_derive" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9f54704be45ed286151c5e11531316eaef5b8f5af7d597b806fdb8af108d84a" +dependencies = [ + "addchain", + "cfg-if", + "num-bigint 0.3.3", + "num-integer", + "num-traits", + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 1.0.109", +] + +[[package]] +name = "fixed-hash" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcf0ed7fe52a17a03854ec54a9f76d6d84508d1c0e66bc1793301c73fc8493c" +dependencies = [ + "byteorder", + "rand", + "rustc-hex", + "static_assertions", +] + +[[package]] +name = "fixedbitset" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37ab347416e802de484e4d03c7316c48f1ecb56574dfd4a46a80f173ce1de04d" + +[[package]] +name = "flate2" +version = "1.0.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "324a1be68054ef05ad64b861cc9eaf1d623d2d8cb25b4bf2cb9cdd902b4bf253" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "funty" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7" + +[[package]] +name = "funty" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" + +[[package]] +name = "futures" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.30" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" + +[[package]] +name = "futures-executor" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" + +[[package]] +name = "futures-macro" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", +] + +[[package]] +name = "futures-sink" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" + +[[package]] +name = "futures-task" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" + +[[package]] +name = "futures-timer" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" + +[[package]] +name = "futures-util" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "serde", + "typenum", + "version_check", + "zeroize", +] + +[[package]] +name = "getrandom" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "ghash" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0d8a4362ccb29cb0b265253fb0a2728f592895ee6854fd9bc13f2ffda266ff1" +dependencies = [ + "opaque-debug", + "polyval", +] + +[[package]] +name = "gimli" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32085ea23f3234fc7846555e85283ba4de91e21016dc0455a16286d87a292d64" + +[[package]] +name = "glob" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" + +[[package]] +name = "globset" +version = "0.4.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15f1ce686646e7f1e19bf7d5533fe443a45dbfb990e00629110797578b42fb19" +dependencies = [ + 
"aho-corasick", + "bstr", + "log", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "group" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" +dependencies = [ + "ff 0.12.1", + "rand_core", + "subtle", +] + +[[package]] +name = "group" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" +dependencies = [ + "ff 0.13.0", + "rand", + "rand_core", + "rand_xorshift", + "subtle", +] + +[[package]] +name = "h2" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http 0.2.12", + "indexmap 2.5.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "h2" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e8ac6999421f49a846c2d4411f337e53497d8ec55d67753beffa43c5d9205" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http 1.1.0", + "indexmap 2.5.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +dependencies = [ + "ahash 0.7.8", +] + +[[package]] +name = "hashbrown" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" +dependencies = [ + "ahash 0.8.11", +] + +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" + +[[package]] +name = "hdrhistogram" +version = "7.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "765c9198f173dd59ce26ff9f95ef0aafd0a0fe01fb9d72841bc5066a4c06511d" +dependencies = [ + "byteorder", + "num-traits", +] + +[[package]] +name = "headers" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06683b93020a07e3dbcf5f8c0f6d40080d725bea7936fc01ad345c01b97dc270" +dependencies = [ + "base64 0.21.7", + "bytes", + "headers-core", + "http 0.2.12", + "httpdate", + "mime", + "sha1", +] + +[[package]] +name = "headers-core" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7f66481bfee273957b1f20485a4ff3362987f85b2c236580d81b4eb7a326429" +dependencies = [ + "http 0.2.12", +] + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hermit-abi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" + +[[package]] +name = "hex" +version = "0.4.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" +dependencies = [ + "serde", +] + +[[package]] +name = "hex-literal" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fe2267d4ed49bc07b63801559be28c718ea06c4738b7a03c94df7386d2cde46" + +[[package]] +name = "hkdf" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" +dependencies = [ + "hmac", +] + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest 0.10.7", +] + +[[package]] +name = "hmac-sha512" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77e806677ce663d0a199541030c816847b36e8dc095f70dae4a4f4ad63da5383" + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http 0.2.12", + "pin-project-lite", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http 1.1.0", +] + +[[package]] +name = "http-body-util" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" +dependencies = [ + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "pin-project-lite", +] + +[[package]] +name = "http-range-header" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "add0ab9360ddbd88cfeb3bd9574a1d85cfdfa14db10b3e21d3700dbc4328758f" + +[[package]] +name = "httparse" +version = "1.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fcc0b4a115bf80b728eb8ea024ad5bd707b615bfed49e0665b6e0f86fd082d9" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + +[[package]] +name = "hyper" +version = "0.14.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a152ddd61dfaec7273fe8419ab357f33aee0d914c5f4efbf0d96fa749eea5ec9" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2 0.3.26", + "http 0.2.12", + "http-body 0.4.6", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + 
"socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "h2 0.4.6", + "http 1.1.0", + "http-body 1.0.1", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.23.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1788965e61b367cd03a62950836d5cd41560c3577d90e40e0819373194d1661c" +dependencies = [ + "http 0.2.12", + "hyper 0.14.30", + "log", + "rustls 0.20.9", + "rustls-native-certs", + "tokio", + "tokio-rustls 0.23.4", + "webpki-roots 0.22.6", +] + +[[package]] +name = "hyper-rustls" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +dependencies = [ + "futures-util", + "http 0.2.12", + "hyper 0.14.30", + "rustls 0.21.12", + "tokio", + "tokio-rustls 0.24.1", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" +dependencies = [ + "futures-util", + "http 1.1.0", + "hyper 1.4.1", + "hyper-util", + "rustls 0.23.13", + "rustls-pki-types", + "tokio", + "tokio-rustls 0.26.0", + "tower-service", +] + +[[package]] +name = "hyper-timeout" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +dependencies = [ + "hyper 0.14.30", + "pin-project-lite", + "tokio", + "tokio-io-timeout", +] + +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper 1.4.1", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da62f120a8a37763efb0cf8fdf264b884c7b8b9ac8660b900c8661030c00e6ba" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http 1.1.0", + "http-body 1.0.1", + "hyper 1.4.1", + "pin-project-lite", + "socket2", + "tokio", + "tower 0.4.13", + "tower-service", + "tracing", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "0.5.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "im" +version = "15.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0acd33ff0285af998aaf9b57342af478078f53492322fafc47450e09397e0e9" +dependencies = [ + "bitmaps", + "rand_core", + "rand_xoshiro", + "sized-chunks", + "typenum", + "version_check", +] + +[[package]] +name = "impl-codec" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "161ebdfec3c8e3b52bf61c4f3550a1eea4f9579d10dc1b936f3171ebdcd6c443" +dependencies = [ + "parity-scale-codec", +] + +[[package]] +name = "impl-serde" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4551f042f3438e64dbd6226b20527fc84a6e1fe65688b58746a2f53623f25f5c" +dependencies = [ + "serde", +] + +[[package]] +name = "impl-trait-for-tuples" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 1.0.109", +] + +[[package]] +name = "indenter" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", + "serde", +] + +[[package]] +name = "indexmap" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68b900aa2f7301e21c36462b170ee99994de34dff39a4a6a528e80e7376d07e5" +dependencies = [ + "equivalent", + "hashbrown 0.14.5", + "serde", +] + +[[package]] +name = "inout" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" +dependencies = [ + "block-padding", + "generic-array", +] + +[[package]] +name = "insta" +version = "1.40.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6593a41c7a73841868772495db7dc1e8ecab43bb5c0b6da2059246c4b506ab60" +dependencies = [ + "console", + "lazy_static", + "linked-hash-map", + "pest", + "pest_derive", + "serde", + "similar", +] + +[[package]] +name = "ipnet" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "187674a687eed5fe42285b40c6291f9a01517d415fad1c3cbc6a9f778af7fcd4" + +[[package]] +name = "iri-string" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f0f7638c1e223529f1bfdc48c8b133b9e0b434094d1d28473161ee48b235f78" +dependencies = [ + "nom", +] + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itertools" +version = "0.11.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +dependencies = [ + "either", +] + +[[package]] +name = "itertools" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" + +[[package]] +name = "js-sys" +version = "0.3.70" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1868808506b929d7b0cfa8f75951347aa71bb21144b7791bae35d9bccfcfe37a" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "json_to_table" +version = "0.6.0" +source = "git+https://github.com/zhiburt/tabled/?rev=e449317a1c02eb6b29e409ad6617e5d9eb7b3bd4#e449317a1c02eb6b29e409ad6617e5d9eb7b3bd4" +dependencies = [ + "serde_json", + "tabled", +] + +[[package]] +name = "jsonrpsee" +version = "0.16.2" +source = "git+https://github.com/wlmyng/jsonrpsee.git?rev=b1b300784795f6a64d0fcdf8f03081a9bc38bde8#b1b300784795f6a64d0fcdf8f03081a9bc38bde8" +dependencies = [ + "jsonrpsee-core", + "jsonrpsee-http-client", + "jsonrpsee-proc-macros", + "jsonrpsee-server", + "jsonrpsee-types", + "jsonrpsee-ws-client", + "tracing", +] + +[[package]] +name = "jsonrpsee-client-transport" +version = "0.16.2" +source = "git+https://github.com/wlmyng/jsonrpsee.git?rev=b1b300784795f6a64d0fcdf8f03081a9bc38bde8#b1b300784795f6a64d0fcdf8f03081a9bc38bde8" +dependencies = [ + "futures-util", + "http 0.2.12", + "jsonrpsee-core", + "jsonrpsee-types", + "pin-project", + "rustls-native-certs", + "soketto", + "thiserror", + "tokio", + "tokio-rustls 0.23.4", + "tokio-util", + "tracing", + "webpki-roots 0.22.6", +] + +[[package]] +name = "jsonrpsee-core" +version = "0.16.2" +source = "git+https://github.com/wlmyng/jsonrpsee.git?rev=b1b300784795f6a64d0fcdf8f03081a9bc38bde8#b1b300784795f6a64d0fcdf8f03081a9bc38bde8" +dependencies = [ + "anyhow", + "arrayvec", + "async-lock", + "async-trait", + "beef", + "futures-channel", + "futures-timer", + "futures-util", + "globset", + "hyper 0.14.30", + "jsonrpsee-types", + "parking_lot", + "rand", + "rustc-hash", + "serde", + "serde_json", + "soketto", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "jsonrpsee-http-client" +version = "0.16.2" +source = "git+https://github.com/wlmyng/jsonrpsee.git?rev=b1b300784795f6a64d0fcdf8f03081a9bc38bde8#b1b300784795f6a64d0fcdf8f03081a9bc38bde8" +dependencies = [ + "async-trait", + "hyper 0.14.30", + "hyper-rustls 0.23.2", + "jsonrpsee-core", + "jsonrpsee-types", + "rustc-hash", + "serde", + "serde_json", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "jsonrpsee-proc-macros" +version = "0.16.2" +source = "git+https://github.com/wlmyng/jsonrpsee.git?rev=b1b300784795f6a64d0fcdf8f03081a9bc38bde8#b1b300784795f6a64d0fcdf8f03081a9bc38bde8" +dependencies = [ + "heck 0.4.1", + "proc-macro-crate", + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 1.0.109", +] + +[[package]] +name = "jsonrpsee-server" +version = "0.16.2" +source = "git+https://github.com/wlmyng/jsonrpsee.git?rev=b1b300784795f6a64d0fcdf8f03081a9bc38bde8#b1b300784795f6a64d0fcdf8f03081a9bc38bde8" +dependencies = [ + "futures-channel", + "futures-util", + "http 0.2.12", + "hyper 0.14.30", + "jsonrpsee-core", + "jsonrpsee-types", + "serde", + "serde_json", + 
"soketto", + "tokio", + "tokio-stream", + "tokio-util", + "tower 0.4.13", + "tracing", +] + +[[package]] +name = "jsonrpsee-types" +version = "0.16.2" +source = "git+https://github.com/wlmyng/jsonrpsee.git?rev=b1b300784795f6a64d0fcdf8f03081a9bc38bde8#b1b300784795f6a64d0fcdf8f03081a9bc38bde8" +dependencies = [ + "anyhow", + "beef", + "serde", + "serde_json", + "thiserror", + "tracing", +] + +[[package]] +name = "jsonrpsee-ws-client" +version = "0.16.2" +source = "git+https://github.com/wlmyng/jsonrpsee.git?rev=b1b300784795f6a64d0fcdf8f03081a9bc38bde8#b1b300784795f6a64d0fcdf8f03081a9bc38bde8" +dependencies = [ + "http 0.2.12", + "jsonrpsee-client-transport", + "jsonrpsee-core", + "jsonrpsee-types", +] + +[[package]] +name = "k256" +version = "0.11.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72c1e0b51e7ec0a97369623508396067a486bd0cbed95a2659a4b863d28cfc8b" +dependencies = [ + "cfg-if", + "ecdsa 0.14.8", + "elliptic-curve 0.12.3", + "sha2 0.10.8", + "sha3", +] + +[[package]] +name = "keccak" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" +dependencies = [ + "cpufeatures", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +dependencies = [ + "spin 0.9.8", +] + +[[package]] +name = "leb128" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" + +[[package]] +name = "libc" +version = "0.2.158" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439" + +[[package]] +name = "libm" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" + +[[package]] +name = "libredox" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +dependencies = [ + "bitflags 2.6.0", + "libc", +] + +[[package]] +name = "linked-hash-map" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" + +[[package]] +name = "linux-raw-sys" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" + +[[package]] +name = "lock_api" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" +dependencies = [ + "serde", +] + +[[package]] +name = "lru" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "718e8fae447df0c7e1ba7f5189829e63fd536945c8988d61444c19039f16b670" +dependencies = [ + "hashbrown 0.13.2", +] + +[[package]] +name = "match_opt" +version = "0.1.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "405ba1524a1e6ae755334d6966380c60ec40157e0155f9032dd3c294b6384da9" + +[[package]] +name = "matchit" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73cbba799671b762df5a175adf59ce145165747bb891505c43d09aefbbf38beb" + +[[package]] +name = "matchit" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" + +[[package]] +name = "memchr" +version = "2.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "mime_guess" +version = "2.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e" +dependencies = [ + "mime", + "unicase", +] + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" +dependencies = [ + "adler2", +] + +[[package]] +name = "mio" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" +dependencies = [ + "hermit-abi", + "libc", + "wasi", + "windows-sys 0.52.0", +] + +[[package]] +name = "move-abstract-interpreter" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "move-binary-format", + "move-bytecode-verifier-meter", +] + +[[package]] +name = "move-abstract-stack" +version = "0.0.1" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" + +[[package]] +name = "move-binary-format" +version = "0.0.3" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "anyhow", + "enum-compat-util", + "move-core-types", + "move-proc-macros", + "ref-cast", + "serde", + "variant_count", +] + +[[package]] +name = "move-borrow-graph" +version = "0.0.1" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" + +[[package]] +name = "move-bytecode-source-map" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "anyhow", + "bcs", + "move-binary-format", + "move-command-line-common", + "move-core-types", + "move-ir-types", + "move-symbol-pool", + "serde", +] + +[[package]] +name = "move-bytecode-utils" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "anyhow", + "move-binary-format", + "move-core-types", + "petgraph", + "serde-reflection", +] + +[[package]] +name = "move-bytecode-verifier" +version = "0.1.0" +source = 
"git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "move-abstract-interpreter", + "move-abstract-stack", + "move-binary-format", + "move-borrow-graph", + "move-bytecode-verifier-meter", + "move-core-types", + "move-vm-config", + "petgraph", +] + +[[package]] +name = "move-bytecode-verifier-meter" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "move-binary-format", + "move-core-types", + "move-vm-config", +] + +[[package]] +name = "move-command-line-common" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "anyhow", + "difference", + "dirs-next", + "hex", + "move-core-types", + "num-bigint 0.4.6", + "once_cell", + "serde", + "sha2 0.9.9", + "vfs", + "walkdir", +] + +[[package]] +name = "move-compiler" +version = "0.0.1" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "anyhow", + "bcs", + "clap", + "codespan-reporting", + "dunce", + "hex", + "move-binary-format", + "move-borrow-graph", + "move-bytecode-source-map", + "move-bytecode-verifier", + "move-command-line-common", + "move-core-types", + "move-ir-to-bytecode", + "move-ir-types", + "move-proc-macros", + "move-symbol-pool", + "once_cell", + "petgraph", + "regex", + "serde", + "serde_json", + "similar", + "stacker", + "tempfile", + "vfs", +] + +[[package]] +name = "move-core-types" +version = "0.0.4" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "anyhow", + "bcs", + "enum-compat-util", + "ethnum", + "hex", + "leb128", + "move-proc-macros", + "num", + "once_cell", + "primitive-types", + "rand", + "ref-cast", + "serde", + "serde_bytes", + "thiserror", + "uint", +] + +[[package]] +name = "move-coverage" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "anyhow", + "bcs", + "clap", + "codespan", + "colored", + "move-abstract-interpreter", + "move-binary-format", + "move-bytecode-source-map", + "move-command-line-common", + "move-core-types", + "move-ir-types", + "petgraph", + "serde", +] + +[[package]] +name = "move-disassembler" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "anyhow", + "bcs", + "clap", + "colored", + "hex", + "move-abstract-interpreter", + "move-binary-format", + "move-bytecode-source-map", + "move-command-line-common", + "move-compiler", + "move-core-types", + "move-coverage", + "move-ir-types", +] + +[[package]] +name = "move-ir-to-bytecode" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "anyhow", + "codespan-reporting", + "log", + "move-binary-format", + "move-bytecode-source-map", + "move-command-line-common", + "move-core-types", + "move-ir-to-bytecode-syntax", + "move-ir-types", + "move-symbol-pool", + "ouroboros", +] + +[[package]] +name = "move-ir-to-bytecode-syntax" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "anyhow", + "hex", + "move-command-line-common", + "move-core-types", 
+ "move-ir-types", + "move-symbol-pool", +] + +[[package]] +name = "move-ir-types" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "hex", + "move-command-line-common", + "move-core-types", + "move-symbol-pool", + "once_cell", + "serde", +] + +[[package]] +name = "move-proc-macros" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "enum-compat-util", + "quote 1.0.37", + "syn 2.0.77", +] + +[[package]] +name = "move-symbol-pool" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "once_cell", + "phf", + "serde", +] + +[[package]] +name = "move-vm-config" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "move-binary-format", + "once_cell", +] + +[[package]] +name = "move-vm-profiler" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "move-vm-config", + "once_cell", + "serde", + "serde_json", + "tracing", +] + +[[package]] +name = "move-vm-test-utils" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "anyhow", + "move-binary-format", + "move-core-types", + "move-vm-profiler", + "move-vm-types", + "once_cell", + "serde", +] + +[[package]] +name = "move-vm-types" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "bcs", + "move-binary-format", + "move-core-types", + "move-vm-profiler", + "serde", + "smallvec", +] + +[[package]] +name = "msim-macros" +version = "0.1.0" +source = "git+https://github.com/MystenLabs/mysten-sim.git?rev=077b735b484cf33e79f9d621db1d0c3a5827b81e#077b735b484cf33e79f9d621db1d0c3a5827b81e" +dependencies = [ + "darling 0.14.4", + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 1.0.109", +] + +[[package]] +name = "multiaddr" +version = "0.17.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b36f567c7099511fa8612bbbb52dda2419ce0bdbacf31714e3a5ffdb766d3bd" +dependencies = [ + "arrayref", + "byteorder", + "data-encoding", + "log", + "multibase", + "multihash", + "percent-encoding", + "serde", + "static_assertions", + "unsigned-varint", + "url", +] + +[[package]] +name = "multibase" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b3539ec3c1f04ac9748a260728e855f261b4977f5c3406612c884564f329404" +dependencies = [ + "base-x", + "data-encoding", + "data-encoding-macro", +] + +[[package]] +name = "multihash" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "835d6ff01d610179fbce3de1694d007e500bf33a7f29689838941d6bf783ae40" +dependencies = [ + "core2", + "multihash-derive", + "unsigned-varint", +] + +[[package]] +name = "multihash-derive" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d6d4752e6230d8ef7adf7bd5d8c4b1f6561c1014c5ba9a37445ccefe18aa1db" +dependencies = [ + "proc-macro-crate", + "proc-macro-error", + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 1.0.109", + "synstructure", +] + +[[package]] +name = 
"mysten-metrics" +version = "0.7.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "async-trait", + "axum 0.6.20", + "dashmap", + "futures", + "once_cell", + "parking_lot", + "prometheus", + "prometheus-closure-metric", + "scopeguard", + "tap", + "tokio", + "tracing", + "uuid", +] + +[[package]] +name = "mysten-network" +version = "0.2.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "anemo", + "bcs", + "bytes", + "eyre", + "futures", + "http 0.2.12", + "multiaddr", + "pin-project-lite", + "serde", + "snap", + "tokio", + "tokio-stream", + "tonic", + "tonic-health", + "tower 0.4.13", + "tower-http", + "tracing", +] + +[[package]] +name = "mysten-util-mem" +version = "0.11.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "cfg-if", + "ed25519-consensus", + "fastcrypto", + "fastcrypto-tbls", + "hashbrown 0.12.3", + "impl-trait-for-tuples", + "indexmap 2.5.0", + "mysten-util-mem-derive", + "once_cell", + "parking_lot", + "roaring", + "smallvec", +] + +[[package]] +name = "mysten-util-mem-derive" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "proc-macro2 1.0.86", + "syn 1.0.109", + "synstructure", +] + +[[package]] +name = "narwhal-config" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "fastcrypto", + "match_opt", + "mysten-network", + "mysten-util-mem", + "narwhal-crypto", + "rand", + "serde", + "serde_json", + "thiserror", + "tracing", +] + +[[package]] +name = "narwhal-crypto" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "bcs", + "fastcrypto", + "serde", + "shared-crypto", +] + +[[package]] +name = "native-tls" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8614eb2c83d59d1c8cc974dd3f920198647674a0a035e1af1fa58707e317466" +dependencies = [ + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "neptune" +version = "13.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06626c9ac04c894e9a23d061ba1309f28506cdc5fe64156d28a15fb57fc8e438" +dependencies = [ + "bellpepper", + "bellpepper-core", + "blake2s_simd", + "blstrs", + "byteorder", + "ff 0.13.0", + "generic-array", + "log", + "pasta_curves", + "serde", + "trait-set", +] + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "nonempty" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "995defdca0a589acfdd1bd2e8e3b896b4d4f7675a31fd14c32611440c7f608e6" + +[[package]] +name = "num" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23" +dependencies = [ + "num-bigint 0.4.6", + "num-complex", + "num-integer", + "num-iter", + 
"num-rational", + "num-traits", +] + +[[package]] +name = "num-bigint" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6f7833f2cbf2360a6cfd58cd41a53aa7a90bd4c202f5b1c7dd2ed73c57b2c3" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-bigint" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" +dependencies = [ + "num-integer", + "num-traits", + "rand", +] + +[[package]] +name = "num-bigint-dig" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" +dependencies = [ + "byteorder", + "lazy_static", + "libm", + "num-integer", + "num-iter", + "num-traits", + "rand", + "smallvec", + "zeroize", +] + +[[package]] +name = "num-complex" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-iter" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-rational" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824" +dependencies = [ + "num-bigint 0.4.6", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", + "libm", +] + +[[package]] +name = "num_cpus" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "num_enum" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a015b430d3c108a207fd776d2e2196aaf8b1cf8cf93253e3a097ff3085076a1" +dependencies = [ + "num_enum_derive", +] + +[[package]] +name = "num_enum_derive" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6" +dependencies = [ + "proc-macro-crate", + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", +] + +[[package]] +name = "object" +version = "0.36.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "084f1a5821ac4c651660a94a7153d27ac9d8a53736203f58b31945ded098070a" +dependencies = [ + "memchr", +] + +[[package]] +name = "object_store" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f930c88a43b1c3f6e776dfe495b4afab89882dbc81530c632db2ed65451ebcb4" +dependencies = [ + "async-trait", + "base64 0.21.7", + "bytes", + "chrono", + "futures", + "humantime", + "hyper 0.14.30", + "itertools 0.11.0", + "parking_lot", + "percent-encoding", + "quick-xml", + "rand", + "reqwest 0.11.27", + "ring 0.16.20", + "rustls-pemfile 1.0.4", + "serde", + "serde_json", + "snafu", + "tokio", + "tracing", + "url", + "walkdir", +] + +[[package]] +name = "oid-registry" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9bedf36ffb6ba96c2eb7144ef6270557b52e54b20c0a8e1eb2ff99a6c6959bff" +dependencies = [ + "asn1-rs", +] + +[[package]] +name = "once_cell" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" + +[[package]] +name = "opaque-debug" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" + +[[package]] +name = "openssl" +version = "0.10.66" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9529f4786b70a3e8c61e11179af17ab6188ad8d0ded78c5529441ed39d4bd9c1" +dependencies = [ + "bitflags 2.6.0", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-sys" +version = "0.9.103" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f9e8deee91df40a943c71b917e5874b951d32a802526c85721ce3b776c929d6" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "ouroboros" +version = "0.17.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2ba07320d39dfea882faa70554b4bd342a5f273ed59ba7c1c6b4c840492c954" +dependencies = [ + "aliasable", + "ouroboros_macro", + "static_assertions", +] + +[[package]] +name = "ouroboros_macro" +version = "0.17.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec4c6225c69b4ca778c0aea097321a64c421cf4577b331c61b229267edabb6f8" +dependencies = [ + "heck 0.4.1", + "proc-macro-error", + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", +] + +[[package]] +name = "p256" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b" +dependencies = [ + "ecdsa 0.16.9", + "elliptic-curve 0.13.8", + "primeorder", + "sha2 0.10.8", +] + +[[package]] +name = "pairing" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81fec4625e73cf41ef4bb6846cafa6d44736525f442ba45e407c4a000a13996f" +dependencies = [ + "group 0.13.0", +] + +[[package]] +name = "papergrid" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae7891b22598926e4398790c8fe6447930c72a67d36d983a49d6ce682ce83290" +dependencies = [ + "bytecount", + "fnv", + "unicode-width", +] + 
+[[package]] +name = "parity-scale-codec" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "373b1a4c1338d9cd3d1fa53b3a11bdab5ab6bd80a20f7f7becd76953ae2be909" +dependencies = [ + "arrayvec", + "bitvec 0.20.4", + "byte-slice-cast", + "impl-trait-for-tuples", + "parity-scale-codec-derive", + "serde", +] + +[[package]] +name = "parity-scale-codec-derive" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1557010476e0595c9b568d16dcfb81b93cdeb157612726f5170d31aa707bed27" +dependencies = [ + "proc-macro-crate", + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 1.0.109", +] + +[[package]] +name = "parking_lot" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets 0.52.6", +] + +[[package]] +name = "pasta_curves" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3e57598f73cc7e1b2ac63c79c517b31a0877cd7c402cdcaa311b5208de7a095" +dependencies = [ + "blake2b_simd", + "ff 0.13.0", + "group 0.13.0", + "hex", + "lazy_static", + "rand", + "serde", + "static_assertions", + "subtle", +] + +[[package]] +name = "paste" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" + +[[package]] +name = "pbkdf2" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917" +dependencies = [ + "digest 0.10.7", +] + +[[package]] +name = "pem" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8835c273a76a90455d7344889b0964598e3316e2a79ede8e36f16bdcf2228b8" +dependencies = [ + "base64 0.13.1", +] + +[[package]] +name = "pem-rfc7468" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d159833a9105500e0398934e205e0773f0b27529557134ecfc51c27646adac" +dependencies = [ + "base64ct", +] + +[[package]] +name = "pem-rfc7468" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +dependencies = [ + "base64ct", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pest" +version = "2.7.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdbef9d1d47087a895abd220ed25eb4ad973a5e26f6a4367b038c25e28dfc2d9" +dependencies = [ + "memchr", + "thiserror", + "ucd-trie", +] + +[[package]] +name = "pest_derive" +version = "2.7.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d3a6e3394ec80feb3b6393c725571754c6188490265c61aaf260810d6b95aa0" +dependencies = [ + "pest", + "pest_generator", +] + +[[package]] +name = "pest_generator" +version = "2.7.13" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "94429506bde1ca69d1b5601962c73f4172ab4726571a59ea95931218cb0e930e" +dependencies = [ + "pest", + "pest_meta", + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", +] + +[[package]] +name = "pest_meta" +version = "2.7.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac8a071862e93690b6e34e9a5fb8e33ff3734473ac0245b27232222c4906a33f" +dependencies = [ + "once_cell", + "pest", + "sha2 0.10.8", +] + +[[package]] +name = "petgraph" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "467d164a6de56270bd7c4d070df81d07beace25012d5103ced4e9ff08d6afdb7" +dependencies = [ + "fixedbitset", + "indexmap 1.9.3", +] + +[[package]] +name = "phf" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" +dependencies = [ + "phf_macros", + "phf_shared", +] + +[[package]] +name = "phf_generator" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" +dependencies = [ + "phf_shared", + "rand", +] + +[[package]] +name = "phf_macros" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3444646e286606587e49f3bcf1679b8cef1dc2c5ecc29ddacaffc305180d464b" +dependencies = [ + "phf_generator", + "phf_shared", + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", +] + +[[package]] +name = "phf_shared" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" +dependencies = [ + "siphasher", +] + +[[package]] +name = "pin-project" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkcs1" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eff33bdbdfc54cc98a2eca766ebdec3e1b8fb7387523d5c9c9a2891da856f719" +dependencies = [ + "der 0.6.1", + "pkcs8 0.9.0", + "spki 0.6.0", + "zeroize", +] + +[[package]] +name = "pkcs8" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9eca2c590a5f85da82668fa685c09ce2888b9430e83299debf1f34b65fd4a4ba" +dependencies = [ + "der 0.6.1", + "spki 0.6.0", +] + +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der 0.7.9", + "spki 0.7.3", +] + +[[package]] +name = 
"pkg-config" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" + +[[package]] +name = "polyval" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25" +dependencies = [ + "cfg-if", + "cpufeatures", + "opaque-debug", + "universal-hash", +] + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "primeorder" +version = "0.13.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" +dependencies = [ + "elliptic-curve 0.13.8", +] + +[[package]] +name = "primitive-types" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05e4722c697a58a99d5d06a08c30821d7c082a4632198de1eaa5a6c22ef42373" +dependencies = [ + "fixed-hash", + "impl-codec", + "impl-serde", + "uint", +] + +[[package]] +name = "proc-macro-crate" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e17d47ce914bf4de440332250b0edd23ce48c005f59fab39d3335866b114f11a" +dependencies = [ + "thiserror", + "toml", +] + +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 1.0.109", + "version_check", +] + +[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "version_check", +] + +[[package]] +name = "proc-macro2" +version = "0.4.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759" +dependencies = [ + "unicode-xid 0.1.0", +] + +[[package]] +name = "proc-macro2" +version = "1.0.86" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "prometheus" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d33c28a30771f7f96db69893f78b857f7450d7e0237e9c8fc6427a81bae7ed1" +dependencies = [ + "cfg-if", + "fnv", + "lazy_static", + "memchr", + "parking_lot", + "protobuf", + "thiserror", +] + +[[package]] +name = "prometheus-closure-metric" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "anyhow", + "prometheus", + "protobuf", +] + +[[package]] +name = "proptest" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b4c2511913b88df1637da85cc8d96ec8e43a3f8bb8ccb71ee1ac240d6f3df58d" +dependencies = [ + "bit-set", + "bit-vec", + "bitflags 2.6.0", + "lazy_static", + "num-traits", + "rand", + "rand_chacha", + "rand_xorshift", + "regex-syntax", + "rusty-fork", + "tempfile", + "unarray", +] + +[[package]] +name = "proptest-derive" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90b46295382dc76166cb7cf2bb4a97952464e4b7ed5a43e6cd34e1fec3349ddc" +dependencies = [ + "proc-macro2 0.4.30", + "quote 0.6.13", + "syn 0.15.44", +] + +[[package]] +name = "prost" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "deb1435c188b76130da55f17a466d252ff7b1418b2ad3e037d127b94e3411f29" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-derive" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81bddcdb20abf9501610992b6759a4c888aef7d1a7247ef75e2404275ac24af1" +dependencies = [ + "anyhow", + "itertools 0.12.1", + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", +] + +[[package]] +name = "protobuf" +version = "2.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "106dd99e98437432fed6519dedecfade6a06a73bb7b2a1e019fdd2bee5778d94" +dependencies = [ + "bytes", +] + +[[package]] +name = "psm" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa37f80ca58604976033fae9515a8a2989fc13797d953f7c04fb8fa36a11f205" +dependencies = [ + "cc", +] + +[[package]] +name = "quick-error" +version = "1.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" + +[[package]] +name = "quick-xml" +version = "0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eff6510e86862b57b210fd8cbe8ed3f0d7d600b9c2863cd4549a2e033c66e956" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "quinn" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cc2c5017e4b43d5995dcea317bc46c1e09404c0a9664d2908f7f02dfe943d75" +dependencies = [ + "bytes", + "futures-io", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash", + "rustls 0.21.12", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "quinn-proto" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "141bf7dfde2fbc246bfd3fe12f2455aa24b0fbd9af535d8c86c7bd1381ff2b1a" +dependencies = [ + "bytes", + "rand", + "ring 0.16.20", + "rustc-hash", + "rustls 0.21.12", + "slab", + "thiserror", + "tinyvec", + "tracing", +] + +[[package]] +name = "quinn-udp" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "055b4e778e8feb9f93c4e439f71dc2156ef13360b432b799e179a8c4cdf0b1d7" +dependencies = [ + "bytes", + "libc", + "socket2", + "tracing", + "windows-sys 0.48.0", +] + +[[package]] +name = "quote" +version = "0.6.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1" +dependencies = [ + "proc-macro2 0.4.30", +] + +[[package]] +name = "quote" +version = "1.0.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" +dependencies = [ + "proc-macro2 1.0.86", +] + +[[package]] +name = "radium" 
+version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "643f8f41a8ebc4c5dc4515c82bb8abd397b527fc20fd681b7c011c2aee5d44fb" + +[[package]] +name = "radium" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "rand_xorshift" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" +dependencies = [ + "rand_core", +] + +[[package]] +name = "rand_xoshiro" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa" +dependencies = [ + "rand_core", +] + +[[package]] +name = "rcgen" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6413f3de1edee53342e6138e75b56d32e7bc6e332b3bd62d497b1929d4cfbcdd" +dependencies = [ + "pem", + "ring 0.16.20", + "time", + "yasna", +] + +[[package]] +name = "readonly" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a25d631e41bfb5fdcde1d4e2215f62f7f0afa3ff11e26563765bd6ea1d229aeb" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", +] + +[[package]] +name = "redox_syscall" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0884ad60e090bf1345b93da0a5de8923c93884cd03f40dfcfddd3b4bee661853" +dependencies = [ + "bitflags 2.6.0", +] + +[[package]] +name = "redox_users" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" +dependencies = [ + "getrandom", + "libredox", + "thiserror", +] + +[[package]] +name = "ref-cast" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf0a6f84d5f1d581da8b41b47ec8600871962f2a528115b542b362d4b744931" +dependencies = [ + "ref-cast-impl", +] + +[[package]] +name = "ref-cast-impl" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bcc303e793d3734489387d205e9b186fac9c6cfacedd98cbb2e8a5943595f3e6" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", +] + +[[package]] +name = "regex" +version = "1.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" + +[[package]] +name = "reqwest" +version = "0.11.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" +dependencies = [ + "base64 0.21.7", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2 0.3.26", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.30", + "hyper-rustls 0.24.2", + "ipnet", + "js-sys", + "log", + "mime", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls 0.21.12", + "rustls-pemfile 1.0.4", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper 0.1.2", + "system-configuration 0.5.1", + "tokio", + "tokio-rustls 0.24.1", + "tokio-util", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "webpki-roots 0.25.4", + "winreg", +] + +[[package]] +name = "reqwest" +version = "0.12.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8f4955649ef5c38cc7f9e8aa41761d48fb9677197daea9984dc54f56aad5e63" +dependencies = [ + "base64 0.22.1", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2 0.4.6", + "http 1.1.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.4.1", + "hyper-rustls 0.27.3", + "hyper-tls", + "hyper-util", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls-pemfile 2.1.3", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper 1.0.1", + "system-configuration 0.6.1", + "tokio", + "tokio-native-tls", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "windows-registry", +] + +[[package]] +name = "rfc6979" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7743f17af12fa0b03b803ba12cd6a8d9483a587e89c69445e3909655c0b9fabb" +dependencies = [ + "crypto-bigint 0.4.9", + "hmac", + "zeroize", +] + +[[package]] +name = "rfc6979" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" +dependencies = [ + "hmac", + "subtle", +] + +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin 0.5.2", + "untrusted 0.7.1", + "web-sys", + "winapi", +] + +[[package]] +name = "ring" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +dependencies = [ + "cc", + "cfg-if", + "getrandom", + "libc", + "spin 0.9.8", + "untrusted 0.9.0", + "windows-sys 0.52.0", +] + +[[package]] +name = "ripemd" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd124222d17ad93a644ed9d011a40f4fb64aa54275c08cc216524a9ea82fb09f" +dependencies = [ + "digest 0.10.7", +] + +[[package]] +name = "roaring" +version = "0.10.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f4b84ba6e838ceb47b41de5194a60244fac43d9fe03b71dbe8c5a201081d6d1" +dependencies = [ + "bytemuck", + "byteorder", +] + +[[package]] +name = "rsa" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55a77d189da1fee555ad95b7e50e7457d91c0e089ec68ca69ad2989413bbdab4" +dependencies = [ + "byteorder", + "digest 0.10.7", + "num-bigint-dig", + "num-integer", + "num-iter", + "num-traits", + "pkcs1", + "pkcs8 0.9.0", + "rand_core", + "sha2 0.10.8", + "signature 2.2.0", + "subtle", + "zeroize", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" + +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + +[[package]] +name = "rustc-hex" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" + +[[package]] +name = "rustc_version" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = [ + "semver", +] + +[[package]] +name = "rusticata-macros" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "faf0c4a6ece9950b9abdb62b1cfcf2a68b3b67a10ba445b3bb85be2a293d0632" +dependencies = [ + "nom", +] + +[[package]] +name = "rustix" +version = "0.38.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8acb788b847c24f28525660c4d7758620a7210875711f79e7f663cc152726811" +dependencies = [ + "bitflags 2.6.0", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustls" +version = "0.20.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" +dependencies = [ + "log", + "ring 0.16.20", + "sct", + "webpki", +] + +[[package]] +name = "rustls" +version = "0.21.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" +dependencies = [ + "log", + "ring 0.17.8", + "rustls-webpki 0.101.7", + "sct", +] + +[[package]] +name = "rustls" +version = "0.23.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2dabaac7466917e566adb06783a81ca48944c6898a1b08b9374106dd671f4c8" +dependencies = [ + "once_cell", + "rustls-pki-types", + "rustls-webpki 0.102.8", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-native-certs" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" +dependencies = [ + "openssl-probe", + "rustls-pemfile 1.0.4", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64 0.21.7", +] + +[[package]] +name = "rustls-pemfile" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"196fe16b00e106300d3e45ecfcb764fa292a535d7326a29a5875c579c7417425" +dependencies = [ + "base64 0.22.1", + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc0a2ce646f8655401bb81e7927b812614bd5d91dbc968696be50603510fcaf0" + +[[package]] +name = "rustls-webpki" +version = "0.101.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "rustls-webpki" +version = "0.102.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" +dependencies = [ + "ring 0.17.8", + "rustls-pki-types", + "untrusted 0.9.0", +] + +[[package]] +name = "rustversion" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" + +[[package]] +name = "rusty-fork" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f" +dependencies = [ + "fnv", + "quick-error", + "tempfile", + "wait-timeout", +] + +[[package]] +name = "ryu" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "schannel" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9aaafd5a2b6e3d657ff009d82fbd630b6bd54dd4eb06f21693925cdf80f9b8b" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "schemars" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09c024468a378b7e36765cd36702b7a90cc3cba11654f6685c8f233408e89e92" +dependencies = [ + "dyn-clone", + "either", + "schemars_derive", + "serde", + "serde_json", +] + +[[package]] +name = "schemars_derive" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1eee588578aff73f856ab961cd2f79e36bc45d7ded33a7562adba4667aecc0e" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "serde_derive_internals", + "syn 2.0.77", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "sec1" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928" +dependencies = [ + "base16ct 0.1.1", + "der 0.6.1", + "generic-array", + "subtle", + "zeroize", +] + +[[package]] +name = "sec1" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" +dependencies = [ + "base16ct 0.2.0", + "der 0.7.9", + "generic-array", + "pkcs8 0.10.2", + "subtle", + "zeroize", +] + +[[package]] +name = "secp256k1" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25996b82292a7a57ed3508f052cfff8640d38d32018784acd714758b43da9c8f" +dependencies = [ + "bitcoin_hashes", + "rand", + "secp256k1-sys", +] + +[[package]] +name = "secp256k1-sys" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70a129b9e9efbfb223753b9163c4ab3b13cff7fd9c7f010fbac25ab4099fa07e" +dependencies = [ + "cc", +] + +[[package]] +name = "security-framework" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags 2.6.0", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea4a292869320c0272d7bc55a5a6aafaff59b4f63404a003887b679a2e05b4b6" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" + +[[package]] +name = "serde" +version = "1.0.210" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde-name" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b5b14ebbcc4e4f2b3642fa99c388649da58d1dc3308c7d109f39f565d1710f0" +dependencies = [ + "serde", + "thiserror", +] + +[[package]] +name = "serde-reflection" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f05a5f801ac62a51a49d378fdb3884480041b99aced450b28990673e8ff99895" +dependencies = [ + "once_cell", + "serde", + "thiserror", +] + +[[package]] +name = "serde_bytes" +version = "0.11.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "387cc504cb06bb40a96c8e04e951fe01854cf6bc921053c954e4a606d9675c6a" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_derive" +version = "1.0.210" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", +] + +[[package]] +name = "serde_derive_internals" +version = "0.29.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", +] + +[[package]] +name = "serde_json" +version = "1.0.128" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8" +dependencies = [ + "indexmap 2.5.0", + "itoa", + "memchr", + "ryu", + "serde", +] + +[[package]] +name = "serde_path_to_error" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af99884400da37c88f5e9146b7f1fd0fbcae8f6eec4e9da38b67d05486f814a6" 
+dependencies = [ + "itoa", + "serde", +] + +[[package]] +name = "serde_repr" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_with" +version = "2.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07ff71d2c147a7b57362cead5e22f772cd52f6ab31cfcd9edcd7f6aeb2a0afbe" +dependencies = [ + "base64 0.13.1", + "chrono", + "hex", + "indexmap 1.9.3", + "serde", + "serde_json", + "serde_with_macros", + "time", +] + +[[package]] +name = "serde_with_macros" +version = "2.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "881b6f881b17d13214e5d494c939ebab463d01264ce1811e9d4ac3a882e7695f" +dependencies = [ + "darling 0.20.10", + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", +] + +[[package]] +name = "serde_yaml" +version = "0.8.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "578a7433b776b56a35785ed5ce9a7e777ac0598aac5a6dd1b4b18a307c7fc71b" +dependencies = [ + "indexmap 1.9.3", + "ryu", + "serde", + "yaml-rust", +] + +[[package]] +name = "sha-1" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6" +dependencies = [ + "block-buffer 0.9.0", + "cfg-if", + "cpufeatures", + "digest 0.9.0", + "opaque-debug", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest 0.10.7", +] + +[[package]] +name = "sha2" +version = "0.9.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" +dependencies = [ + "block-buffer 0.9.0", + "cfg-if", + "cpufeatures", + "digest 0.9.0", + "opaque-debug", +] + +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest 0.10.7", +] + +[[package]] +name = "sha3" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" +dependencies = [ + "digest 0.10.7", + "keccak", +] + +[[package]] +name = "shared-crypto" +version = "0.0.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "bcs", + "eyre", + "fastcrypto", + "serde", + "serde_repr", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" +dependencies = [ + "libc", +] + +[[package]] +name = "signature" +version = "1.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" +dependencies = [ + "digest 0.10.7", + "rand_core", +] + +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "digest 0.10.7", + "rand_core", +] + +[[package]] +name = "similar" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1de1d4f81173b03af4c0cbed3c898f6bff5b870e4a7f5d6f4057d62a7a4b686e" + +[[package]] +name = "siphasher" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" + +[[package]] +name = "sized-chunks" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e" +dependencies = [ + "bitmaps", + "typenum", +] + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + +[[package]] +name = "slip10_ed25519" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4be0ff28bf14f9610a342169084e87a4f435ad798ec528dc7579a3678fa9dc9a" +dependencies = [ + "hmac-sha512", +] + +[[package]] +name = "smallvec" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" + +[[package]] +name = "snafu" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4de37ad025c587a29e8f3f5605c00f70b98715ef90b9061a815b9e59e9042d6" +dependencies = [ + "doc-comment", + "snafu-derive", +] + +[[package]] +name = "snafu-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "990079665f075b699031e9c08fd3ab99be5029b96f3b78dc0709e8f77e4efebf" +dependencies = [ + "heck 0.4.1", + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 1.0.109", +] + +[[package]] +name = "snap" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b6b67fb9a61334225b5b790716f609cd58395f895b3fe8b328786812a40bc3b" + +[[package]] +name = "socket2" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "soketto" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d1c5305e39e09653383c2c7244f2f78b3bcae37cf50c64cb4789c9f5096ec2" +dependencies = [ + "base64 0.13.1", + "bytes", + "futures", + "http 0.2.12", + "httparse", + "log", + "rand", + "sha-1", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" + +[[package]] +name = "spki" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67cf02bbac7a337dc36e4f5a693db6c21e7863f45070f7064577eb4367a3212b" +dependencies = [ + "base64ct", + "der 0.6.1", +] + +[[package]] +name = "spki" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" +dependencies = [ + "base64ct", + "der 0.7.9", +] + +[[package]] +name = "stacker" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "799c883d55abdb5e98af1a7b3f23b9b6de8ecada0ecac058672d7635eb48ca7b" +dependencies = [ + "cc", + "cfg-if", + "libc", + "psm", + "windows-sys 0.59.0", +] + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "strum" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.24.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" +dependencies = [ + "heck 0.4.1", + "proc-macro2 1.0.86", + "quote 1.0.37", + "rustversion", + "syn 1.0.109", +] + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "subtle-ng" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "734676eb262c623cec13c3155096e08d1f8f29adce39ba17948b18dad1e54142" + +[[package]] +name = "sui-config" +version = "0.0.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "anemo", + "anyhow", + "bcs", + "clap", + "csv", + "dirs", + "fastcrypto", + "narwhal-config", + "object_store", + "once_cell", + "prometheus", + "rand", + "reqwest 0.11.27", + "serde", + "serde_with", + "serde_yaml", + "sui-keys", + "sui-protocol-config", + "sui-types", + "tracing", +] + +[[package]] +name = "sui-enum-compat-util" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "serde_yaml", +] + +[[package]] +name = "sui-json" +version = "0.0.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "anyhow", + "bcs", + "fastcrypto", + "move-binary-format", + "move-bytecode-utils", + "move-core-types", + "schemars", + "serde", + "serde_json", + "sui-types", +] + +[[package]] +name = "sui-json-rpc-api" +version = "0.0.0" +source = 
"git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "anyhow", + "fastcrypto", + "jsonrpsee", + "mysten-metrics", + "once_cell", + "prometheus", + "sui-json", + "sui-json-rpc-types", + "sui-open-rpc", + "sui-open-rpc-macros", + "sui-types", + "tap", + "tracing", +] + +[[package]] +name = "sui-json-rpc-types" +version = "0.0.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "anyhow", + "bcs", + "colored", + "enum_dispatch", + "fastcrypto", + "itertools 0.10.5", + "json_to_table", + "move-binary-format", + "move-bytecode-utils", + "move-core-types", + "mysten-metrics", + "schemars", + "serde", + "serde_json", + "serde_with", + "sui-enum-compat-util", + "sui-json", + "sui-macros", + "sui-package-resolver", + "sui-protocol-config", + "sui-types", + "tabled", + "tracing", +] + +[[package]] +name = "sui-keys" +version = "0.0.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "anyhow", + "bip32", + "fastcrypto", + "rand", + "regex", + "serde", + "serde_json", + "shared-crypto", + "signature 1.6.4", + "slip10_ed25519", + "sui-types", + "tiny-bip39", +] + +[[package]] +name = "sui-macros" +version = "0.7.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "futures", + "once_cell", + "sui-proc-macros", + "tracing", +] + +[[package]] +name = "sui-open-rpc" +version = "1.26.1" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "bcs", + "schemars", + "serde", + "serde_json", + "versions", +] + +[[package]] +name = "sui-open-rpc-macros" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "derive-syn-parse", + "itertools 0.10.5", + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 1.0.109", + "unescape", +] + +[[package]] +name = "sui-package-resolver" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "async-trait", + "bcs", + "eyre", + "lru", + "move-binary-format", + "move-command-line-common", + "move-core-types", + "serde", + "sui-rest-api", + "sui-types", + "thiserror", + "tokio", +] + +[[package]] +name = "sui-proc-macros" +version = "0.7.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "msim-macros", + "proc-macro2 1.0.86", + "quote 1.0.37", + "sui-enum-compat-util", + "syn 2.0.77", +] + +[[package]] +name = "sui-protocol-config" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "clap", + "insta", + "move-vm-config", + "schemars", + "serde", + "serde_with", + "sui-protocol-config-macros", + "tracing", +] + +[[package]] +name = "sui-protocol-config-macros" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 1.0.109", +] + +[[package]] +name = "sui-rest-api" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" 
+dependencies = [ + "anyhow", + "axum 0.6.20", + "bcs", + "fastcrypto", + "mime", + "rand", + "reqwest 0.11.27", + "serde", + "serde_json", + "serde_with", + "sui-types", + "tap", + "thiserror", +] + +[[package]] +name = "sui-sdk" +version = "1.26.1" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "anyhow", + "async-trait", + "base64 0.21.7", + "bcs", + "clap", + "colored", + "fastcrypto", + "futures", + "futures-core", + "jsonrpsee", + "move-core-types", + "reqwest 0.11.27", + "serde", + "serde_json", + "serde_with", + "shared-crypto", + "sui-config", + "sui-json", + "sui-json-rpc-api", + "sui-json-rpc-types", + "sui-keys", + "sui-transaction-builder", + "sui-types", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "sui-transaction-builder" +version = "0.0.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "anyhow", + "async-trait", + "bcs", + "futures", + "move-binary-format", + "move-core-types", + "sui-json", + "sui-json-rpc-types", + "sui-protocol-config", + "sui-types", +] + +[[package]] +name = "sui-types" +version = "0.1.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "anemo", + "anyhow", + "bcs", + "better_any", + "bincode", + "byteorder", + "chrono", + "consensus-config", + "derivative", + "derive_more", + "enum_dispatch", + "eyre", + "fastcrypto", + "fastcrypto-tbls", + "fastcrypto-zkp", + "im", + "indexmap 2.5.0", + "itertools 0.10.5", + "jsonrpsee", + "lru", + "move-binary-format", + "move-bytecode-utils", + "move-command-line-common", + "move-core-types", + "move-disassembler", + "move-ir-types", + "move-vm-profiler", + "move-vm-test-utils", + "move-vm-types", + "mysten-metrics", + "mysten-network", + "narwhal-config", + "narwhal-crypto", + "nonempty", + "num-bigint 0.4.6", + "num-traits", + "num_enum", + "once_cell", + "parking_lot", + "prometheus", + "proptest", + "proptest-derive", + "rand", + "roaring", + "schemars", + "serde", + "serde-name", + "serde_json", + "serde_with", + "shared-crypto", + "signature 1.6.4", + "static_assertions", + "strum", + "strum_macros", + "sui-enum-compat-util", + "sui-macros", + "sui-protocol-config", + "tap", + "thiserror", + "tonic", + "tracing", + "typed-store-error", +] + +[[package]] +name = "syn" +version = "0.15.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5" +dependencies = [ + "proc-macro2 0.4.30", + "quote 0.6.13", + "unicode-xid 0.1.0", +] + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f35bcdf61fd8e7be6caf75f429fdca8beb3ed76584befb503b1569faee373ed" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "sync_wrapper" +version = "1.0.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 1.0.109", + "unicode-xid 0.2.6", +] + +[[package]] +name = "system-configuration" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "system-configuration-sys 0.5.0", +] + +[[package]] +name = "system-configuration" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" +dependencies = [ + "bitflags 2.6.0", + "core-foundation", + "system-configuration-sys 0.6.0", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "tabled" +version = "0.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce69a5028cd9576063ec1f48edb2c75339fd835e6094ef3e05b3a079bf594a6" +dependencies = [ + "papergrid", + "tabled_derive", + "unicode-width", +] + +[[package]] +name = "tabled_derive" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99f688a08b54f4f02f0a3c382aefdb7884d3d69609f785bd253dc033243e3fe4" +dependencies = [ + "heck 0.4.1", + "proc-macro-error", + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 1.0.109", +] + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + +[[package]] +name = "tempfile" +version = "3.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64" +dependencies = [ + "cfg-if", + "fastrand", + "once_cell", + "rustix", + "windows-sys 0.59.0", +] + +[[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "terminal_size" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7" +dependencies = [ + "rustix", + "windows-sys 0.48.0", +] + +[[package]] +name = "thiserror" +version = "1.0.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.64" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", +] + +[[package]] +name = "threadpool" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa" +dependencies = [ + "num_cpus", +] + +[[package]] +name = "time" +version = "0.3.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" + +[[package]] +name = "time-macros" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tiny-bip39" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62cc94d358b5a1e84a5cb9109f559aa3c4d634d2b1b4de3d0fa4adc7c78e2861" +dependencies = [ + "anyhow", + "hmac", + "once_cell", + "pbkdf2", + "rand", + "rustc-hash", + "sha2 0.10.8", + "thiserror", + "unicode-normalization", + "wasm-bindgen", + "zeroize", +] + +[[package]] +name = "tinyvec" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.40.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.52.0", +] + +[[package]] +name = "tokio-io-timeout" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" +dependencies = [ + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-macros" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.23.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" +dependencies = [ + "rustls 0.20.9", + "tokio", + "webpki", +] + +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls 0.21.12", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" +dependencies = [ + "rustls 0.23.13", + "rustls-pki-types", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", + "tokio-util", +] + +[[package]] +name = "tokio-tungstenite" +version = "0.20.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c" +dependencies = [ + "futures-util", + "log", + "tokio", + "tungstenite", +] + +[[package]] +name = "tokio-util" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" +dependencies = [ + "bytes", + "futures-core", + "futures-io", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "toml" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" +dependencies = [ + "serde", +] + +[[package]] +name = "tonic" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76c4eb7a4e9ef9d4763600161f12f5070b92a578e1b634db88a6887844c91a13" +dependencies = [ + "async-stream", + "async-trait", + "axum 0.6.20", + "base64 0.21.7", + "bytes", + "h2 0.3.26", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.30", + "hyper-timeout", + "percent-encoding", + "pin-project", + "prost", + "tokio", + "tokio-stream", + "tower 0.4.13", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tonic-health" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2cef6e24bc96871001a7e48e820ab240b3de2201e59b517cf52835df2f1d2350" +dependencies = [ + "async-stream", + "prost", + "tokio", + "tokio-stream", + "tonic", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "hdrhistogram", + "indexmap 1.9.3", + "pin-project", + "pin-project-lite", + "rand", + "slab", + "tokio", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2873938d487c3cfb9aed7546dc9f2711d867c9f90c46b889989a2cb84eba6b4f" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper 0.1.2", + "tokio", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-http" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f873044bf02dd1e8239e9c1293ea39dad76dc594ec16185d0a1bf31d8dc8d858" +dependencies = [ + "async-compression", + "base64 0.13.1", + "bitflags 1.3.2", + "bytes", + "futures-core", + "futures-util", + "http 0.2.12", + "http-body 0.4.6", + 
"http-range-header", + "httpdate", + "iri-string", + "mime", + "mime_guess", + "percent-encoding", + "pin-project-lite", + "tokio", + "tokio-util", + "tower 0.4.13", + "tower-layer", + "tower-service", + "tracing", + "uuid", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +dependencies = [ + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", +] + +[[package]] +name = "tracing-core" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +dependencies = [ + "once_cell", +] + +[[package]] +name = "trait-set" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b79e2e9c9ab44c6d7c20d5976961b47e8f49ac199154daa514b77cd1ab536625" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 1.0.109", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "tungstenite" +version = "0.20.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9" +dependencies = [ + "byteorder", + "bytes", + "data-encoding", + "http 0.2.12", + "httparse", + "log", + "rand", + "sha1", + "thiserror", + "url", + "utf-8", +] + +[[package]] +name = "typed-store-error" +version = "0.4.0" +source = "git+https://github.com/mystenlabs/sui?tag=testnet-v1.26.1#540d384f226a3e3b4911f1afa995b9c60bcf0b2c" +dependencies = [ + "serde", + "thiserror", +] + +[[package]] +name = "typenum" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + +[[package]] +name = "ucd-trie" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" + +[[package]] +name = "uint" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76f64bba2c53b04fcab63c01a7d7427eadc821e3bc48c34dc9ba29c501164b52" +dependencies = [ + "byteorder", + "crunchy", + "hex", + "static_assertions", +] + +[[package]] +name = "unarray" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" + +[[package]] +name = "unescape" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ccb97dac3243214f8d8507998906ca3e2e0b900bf9bf4870477f125b82e68f6e" + +[[package]] +name = "unicase" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" +dependencies = [ + "version_check", +] + +[[package]] +name = "unicode-bidi" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" + +[[package]] +name = "unicode-ident" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" + +[[package]] +name = "unicode-normalization" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-width" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" + +[[package]] +name = "unicode-xid" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "universal-hash" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea" +dependencies = [ + "crypto-common", + "subtle", +] + +[[package]] +name = "unsigned-varint" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6889a77d49f1f013504cec6bf97a2c730394adedaeb1deb5ea08949a50541105" + +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "uuid" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314" +dependencies = [ + "getrandom", + "rand", +] + +[[package]] +name = "variant_count" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aae2faf80ac463422992abf4de234731279c058aaf33171ca70277c98406b124" +dependencies = [ + "quote 
1.0.37", + "syn 1.0.109", +] + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "versions" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee97e1d97bd593fb513912a07691b742361b3dd64ad56f2c694ea2dbfe0665d3" +dependencies = [ + "itertools 0.10.5", + "nom", +] + +[[package]] +name = "vfs" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e4fe92cfc1bad19c19925d5eee4b30584dbbdee4ff10183b261acccbef74e2d" + +[[package]] +name = "wait-timeout" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" +dependencies = [ + "libc", +] + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a82edfc16a6c469f5f44dc7b571814045d60404b55a0ee849f9bcfa2e63dd9b5" +dependencies = [ + "cfg-if", + "once_cell", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9de396da306523044d3302746f1208fa71d7532227f15e347e2d93e4145dd77b" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61e9300f63a621e96ed275155c108eb6f843b6a26d053f122ab69724559dc8ed" +dependencies = [ + "cfg-if", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "585c4c91a46b072c92e908d99cb1dcdf95c5218eeb6f3bf1efa991ee7a68cccf" +dependencies = [ + "quote 1.0.37", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c62a0a307cb4a311d3a07867860911ca130c3494e8c2719593806c08bc5d0484" + 
+[[package]] +name = "wasm-streams" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b65dc4c90b63b118468cf747d8bf3566c1913ef60be765b5730ead9e0a3ba129" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "web-sys" +version = "0.3.70" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26fdeaafd9bd129f65e7c031593c24d62186301e0c72c8978fa1678be7d532c0" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki" +version = "0.22.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "webpki-roots" +version = "0.22.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" +dependencies = [ + "webpki", +] + +[[package]] +name = "webpki-roots" +version = "0.25.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-core" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-registry" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0" +dependencies = [ + "windows-result", + "windows-strings", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-result" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-strings" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" +dependencies = [ + "windows-result", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + 
+[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = 
"windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "winreg" +version = "0.50.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "wyz" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214" + +[[package]] +name = "wyz" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" +dependencies = [ + "tap", +] + +[[package]] +name = "x509-parser" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0ecbeb7b67ce215e40e3cc7f2ff902f94a223acf44995934763467e7b1febc8" +dependencies = [ + "asn1-rs", + "base64 0.13.1", + "data-encoding", + "der-parser", + "lazy_static", + "nom", + "oid-registry", + "rusticata-macros", + "thiserror", + "time", +] + +[[package]] +name = "yaml-rust" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85" +dependencies = [ + "linked-hash-map", +] + +[[package]] +name = "yasna" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e17bb3549cc1321ae1296b9cdc2698e2b6cb1992adfa19a8c72e5b7a738f44cd" +dependencies = [ + "time", +] + +[[package]] +name = "zerocopy" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +dependencies = [ + "byteorder", + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", +] + +[[package]] +name = "zeroize" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" +dependencies = [ + "zeroize_derive", +] + +[[package]] +name = "zeroize_derive" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" +dependencies = [ + 
"proc-macro2 1.0.86", + "quote 1.0.37", + "syn 2.0.77", +] diff --git a/e2e_tests/Cargo.toml b/e2e_tests/Cargo.toml new file mode 100644 index 0000000..f4d99b8 --- /dev/null +++ b/e2e_tests/Cargo.toml @@ -0,0 +1,34 @@ +[package] +name = "e2e_tests" +version = "0.1.0" +edition = "2021" + +[[bin]] +name = "e2e_tests_bin" +path = "src/main.rs" + +[dependencies] +anyhow = "1.0" +axum = { version = "0.7", default-features = false, features = [ + "tokio", + "http2", +] } +dotenvy = "0.15" +env_logger = "0.11" +futures-util = "0.3" +log = "0.4" +rand = "0.8" +reqwest = { version = "0.12", features = ["json"] } +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +tokio = { version = "1", features = ["full"] } + +[dependencies.sui_sdk] +git = "https://github.com/mystenlabs/sui" +tag = "testnet-v1.26.1" +package = "sui-sdk" + +[dependencies.sui_keys] +git = "https://github.com/mystenlabs/sui" +tag = "testnet-v1.26.1" +package = "sui-keys" diff --git a/e2e_tests/README.md b/e2e_tests/README.md new file mode 100644 index 0000000..22e6a9a --- /dev/null +++ b/e2e_tests/README.md @@ -0,0 +1,63 @@ +This binary setups dummy node, model, agent and cluster. + +Then it creates a test prompt (and therefore cluster execution) and calls ollama (or a mocked API) to generate completions for submitted prompts. + +The purpose of this code is to test Nexus with specific scenarios. +Currently we use it in [CI](../.github/workflows/talus-agentic-framework.yml). + +# Run + +Check your active environment and make sure it's `localnet`. +For example + +```bash +$ sui client active-env +``` + +Have Sui localnet running. +For example, with the Sui CLI you can run + +```bash +$ sui start +``` + +Optionally, have Ollama running with `mistras` model. +For example + +```bash +$ ollama run mistral +``` + +Ollama http APIs are optional because they are mocked in the test binary itself if the HTTP endpoint is not provided. +This is useful for example if you want to test some change and are not interested in the completions themselves. +CI is using the mocked API as well. + +This test binary expects some env vars. +These can be directly set to the environment or set in a `.env` file. + +``` +RUST_LOG=info +FW_PKG_ID=... +SUI_WALLET_PATH=~/.sui/sui_config/client.yaml +OLLAMA_HTTP_API=http://localhost:11434/api/generate +``` + +`FW_PKG_ID` will be the package id of the framework package that you want to test. +It must be deployed in the Sui localnet. +For example, you can deploy the package with + +```bash +$ cd onchain +$ sui client publish --skip-dependency-verification +``` + +This command spits out package ID that you can set to that env var. + +`SUI_WALLET_PATH` must point to an existing yaml file with your local wallet. + +As mentioned before, `OLLAMA_HTTP_API` is optional and will be mocked if empty. + +Now you should be good to go to `cargo run`. + +There's a oneclick test script [`oneclick-test.sh`](./oneclick-test.sh) that sets up the environment and runs the test binary. +It emulates what happens in CI. diff --git a/e2e_tests/oneclick-test.sh b/e2e_tests/oneclick-test.sh new file mode 100755 index 0000000..e128c09 --- /dev/null +++ b/e2e_tests/oneclick-test.sh @@ -0,0 +1,162 @@ +#!/usr/bin/env bash + +# +# The purpose of this bash script is to run the end-to-end tests for the Talus +# package in the localnet environment with one command. 
+#
+# The script assumes:
+# - sui CLI, jq, cargo
+# - env var SUI_WALLET_PATH pointing to the wallet yaml,
+#   or alternatively it can be defined in a .env file (see README)
+# - pwd is a git repo
+#
+# 1. Assert that dependencies are installed
+# 2. Assert that the script is run from the correct directory
+# 3. Start sui _localnet_ node in the background
+# 4. Wait for sui RPC to be available
+# 5. Publish the Talus package to the sui node and get its package ID
+# 6. Run the E2E tests against the published package but with mocked ollama
+# 7. Kill the sui node
+#
+# This script has been tested on Ubuntu 22.04.
+#
+
+#
+# 1.
+#
+
+sui --version
+if [ $? -ne 0 ]; then
+    echo "Sui CLI is not installed"
+    exit 1
+fi
+jq --version
+if [ $? -ne 0 ]; then
+    echo "jq is not installed"
+    exit 1
+fi
+cargo --version
+if [ $? -ne 0 ]; then
+    echo "cargo is not installed"
+    exit 1
+fi
+
+#
+# 2.
+#
+
+# get the root dir, assuming we are under a git repo structure
+root_dir=$(git rev-parse --show-toplevel)
+if [ $? -ne 0 ]; then
+    echo "Not in a git repo"
+    exit 1
+fi
+pkg_path="${root_dir}/onchain"
+e2e_path="${root_dir}/e2e_tests"
+
+if [ ! -d "$pkg_path" ]; then
+    echo "Talus package path ${pkg_path} does not exist"
+    exit 1
+fi
+if [ ! -d "$e2e_path" ]; then
+    echo "E2E Rust tests path ${e2e_path} does not exist"
+    exit 1
+fi
+
+#
+# 3.
+#
+
+# assert that active env is localnet
+# we assume that you have set it up, e.g. using `sui genesis`
+# we first run 'sui client active-env' which prompts the user to create an
+# environment if it doesn't exist yet
+echo "Expecting active environment to be localnet at http://localhost:9000"
+sui client active-env || exit 1
+active_env=$(sui client active-env)
+if [ "${active_env}" != "localnet" ]; then
+    echo "Active environment is '${active_env}', expected 'localnet'"
+    exit 1
+fi
+
+# start the sui localnet node in the background, logging to sui.log
+sui start > sui.log 2>&1 &
+sui_pid=$! # will be used to kill sui process later
+kill_sui_localnet() {
+    echo "Killing sui node"
+    kill $sui_pid
+}
+
+# handle Control-C, which can be useful when running the script interactively
+trap 'kill_sui_localnet' INT
+
+#
+# 4.
+#
+
+echo "Waiting for sui to start"
+# retry sui client balance 10 times with a 2 second delay until it succeeds
+# or exit if it fails after 10 retries
+max_retries=10
+for i in $(seq 1 $max_retries); do
+    balance=$(sui client balance)
+    if [ $? -eq 0 ]; then
+        break
+    fi
+    if [ $i -eq $max_retries ]; then
+        echo "Failed to start sui."
+        echo "Try 'sui start' and see what might be the issue."
+        echo "You need 'sui genesis' if you haven't started the localnet yet"
+        # send exit signal just in case
+        kill_sui_localnet
+        exit 1
+    fi
+    sleep 2
+done
+
+# kill the sui node if the tests fail
+trap 'kill_sui_localnet' ERR
+
+#
+# 5.
+#
+
+echo "Publishing package"
+cd $pkg_path
+json=$(sui client publish --skip-dependency-verification --json)
+if [ $? -ne 0 ]; then
+    echo "Failed to publish package:"
+    echo
+    echo
+    echo $json
+    exit 1
+fi
+
+fw_pkg_id=$(echo $json | jq -cr '.objectChanges[] | select(.packageId) | .packageId')
+# assert fw_pkg_id starts with 0x as a valid object ID
+if [[ ! $fw_pkg_id =~ ^0x ]]; then
+    echo "Invalid package ID: ${fw_pkg_id}"
+    exit 1
+fi
+
+#
+# 6.
+#
+
+echo "Running E2E tests"
+cd $e2e_path
+# start with mocked ollama
+FW_PKG_ID="${fw_pkg_id}" \
+    OLLAMA_HTTP_API="" \
+    RUST_LOG="info" \
+    cargo run
+if [ $? -ne 0 ]; then
+    echo "E2E tests failed"
+    exit 1
+fi
+
+#
+# 7.
+#
+
+kill_sui_localnet
diff --git a/e2e_tests/src/completion.rs b/e2e_tests/src/completion.rs
new file mode 100644
index 0000000..ba80c6e
--- /dev/null
+++ b/e2e_tests/src/completion.rs
@@ -0,0 +1,207 @@
+//!
We listen to emitted events named [`REQ_FOR_COMPLETION_EVENT`] in the [`PROMPT_MODULE`]. +//! Then we run the LLM with the given input read from the event and wait for its completion. +//! Finally, we submit the completion result to the chain. + +use { + crate::{prelude::*, setup::TestsSetup, TestsContext}, + futures_util::StreamExt, + serde::Deserialize, + serde_json::json, + std::str::FromStr, + sui_sdk::{ + json::SuiJsonValue, + rpc_types::{EventFilter, SuiEvent}, + }, +}; + +const CLUSTER_MODULE: &str = "cluster"; +const CLUSTER_SUBMIT_COMPLETION_FUNCTION: &str = + "submit_completion_as_cluster_owner"; +const PROMPT_MODULE: &str = "prompt"; +const REQ_FOR_COMPLETION_EVENT: &str = "RequestForCompletionEvent"; + +/// This is the JSON that we expect to capture. +/// There are more fields on this event but for now we only care about these. +#[derive(Deserialize)] +struct RequestForCompletionEvent { + /// Cluster execution ID + cluster_execution: String, + /// Model ID + model: String, + /// The model name that should complete + model_name: String, + /// What the model should complete + prompt_contents: String, + /// If attached, we execute the tool and attach the output to the + /// LLM prompt. + tool: Option, +} + +/// As returned by Sui APIs. +#[derive(Deserialize)] +struct ToolObject { + fields: Tool, +} + +/// As defined in the smart contract. +#[derive(Deserialize)] +struct Tool { + name: String, + args: Vec, +} + +/// Starts listening to the [`REQ_FOR_COMPLETION_EVENT`] events and completes +/// them using the Ollama API in a separate task. +pub(crate) async fn spawn_task( + mut ctx: TestsContext, + resources: TestsSetup, +) -> Result<()> { + debug!("Creating stream of Sui events {REQ_FOR_COMPLETION_EVENT}"); + + // filter for the event type we are interested in + // ideally we'd also filter by the event's data, but this event filter is + // currently broken in the Sui SDK + let mut stream = ctx + .client() + .await? + .event_api() + .subscribe_event(EventFilter::MoveEventType(FromStr::from_str( + &format!( + "{pkg_id}::{PROMPT_MODULE}::{REQ_FOR_COMPLETION_EVENT}", + pkg_id = ctx.pkg_id, + ), + )?)) + .await?; + + tokio::spawn(async move { + while let Some(event_res) = stream.next().await { + let event_json = match event_res { + Ok(SuiEvent { parsed_json, .. }) => parsed_json, + Err(err) => { + error!( + "Error while listening to \ + {REQ_FOR_COMPLETION_EVENT}: {err}" + ); + break; + } + }; + + if let Err(err) = + submit_for_event(&mut ctx, &resources, event_json).await + { + error!("Error submitting completion: {err}"); + }; + } + }); + + Ok(()) +} + +async fn submit_for_event( + ctx: &mut TestsContext, + resources: &TestsSetup, + event_json: JsonValue, +) -> Result<()> { + let RequestForCompletionEvent { + cluster_execution, + model, + mut prompt_contents, + model_name, + tool, + } = serde_json::from_value(event_json.clone()).map_err(|err| { + anyhow!( + "Failed to parse {REQ_FOR_COMPLETION_EVENT} event\n\ + {event_json:#?}\n\nError: {err}" + ) + })?; + + let expected_model_id = resources.model.id; + if expected_model_id != ObjectID::from_str(&model)? { + // not an event that we are supposed to handle + return Ok(()); + } + + if let Some(ToolObject { + fields: Tool { name, args }, + .. 
+ }) = tool + { + match name.as_str() { + // here we could implement some tool execution + "some_tool" => { + prompt_contents += + &format!("\n\nInvoked some tool with args:\n{args:#?}",); + } + unknown_tool => warn!( + "Execution '{cluster_execution}' \ + asked for an unknown tool: {unknown_tool}" + ), + }; + } + + // talk to ollama via HTTP API + let client = reqwest::Client::new(); + let res = client + .post(ctx.ollama_http_api.clone()) + .json(&json!({ + "model": model_name, + "prompt": prompt_contents, + })) + .send() + .await?; + + if !res.status().is_success() { + error!( + "Failed to get completion from ollama: {}\n\n{:?}", + res.status(), + res.text().await + ); + return Ok(()); + } + + let mut completion = String::with_capacity(1024); + for line in res + .text() + .await? + .lines() + .map(|l| l.trim()) + .filter(|l| !l.is_empty()) + { + #[derive(Deserialize)] + struct OllamaResponseLine { + response: String, + done: bool, + } + + let OllamaResponseLine { response, done } = serde_json::from_str(line)?; + + if done { + break; + } + + completion.push_str(&response); + } + + let cluster_execution = ObjectID::from_str(&cluster_execution)?; + // this could happen in a separate task and the listener can run another + // completion meanwhile already + let resp = ctx + .move_call( + CLUSTER_MODULE, + CLUSTER_SUBMIT_COMPLETION_FUNCTION, + vec![ + SuiJsonValue::from_object_id(cluster_execution), + SuiJsonValue::from_object_id(resources.cluster.owner_cap), + SuiJsonValue::new(JsonValue::String(completion))?, + ], + ) + .await?; + + info!( + "Submitted completion for model '{model_name}' and cluster execution \ + '{cluster_execution}' in tx {}", + resp.digest + ); + + Ok(()) +} diff --git a/e2e_tests/src/main.rs b/e2e_tests/src/main.rs new file mode 100644 index 0000000..c29ec90 --- /dev/null +++ b/e2e_tests/src/main.rs @@ -0,0 +1,280 @@ +//! This is an executable that runs an e2e test for Nexus +//! Move package. +//! +//! It assumes +//! - Sui localnet running +//! - deployed Nexus package +//! - env var [`env_vars::FW_PKG_ID`] with the ID of the Nexus package +//! - env var [`env_vars::SUI_WALLET_PATH`] with a filesystem path to Sui wallet configuration +//! - Sui wallet configured to localnet +//! - at least 1 SUI in an owned `Coin` +//! +//! Optionally +//! - Env var [`env_vars::OLLAMA_HTTP_API`] with reachable URL of the Ollama HTTP API +//! following [this specs][ollama-http-api]. +//! If not provided then a mock server is started. +//! +//! We setup resources such as node, model, agent and cluster. +//! This happens in [setup]. +//! Then, we start listening for requests and connect them to the Ollama API. +//! This happens in [completion]. +//! And finally we regularly send prompts to the cluster. +//! This happens in [prompt]. +//! +//! The tools are not executed in this test. +//! One tool is set up for the test cluster and we parse the tool from the Sui +//! event that is listened to by the [completion] module. +//! However, as of now the tool is mocked. +//! +//! 
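+//! As a sketch (values are illustrative and `FW_PKG_ID` depends on your own
+//! publish, see the e2e_tests README), a matching `.env` file could look like:
+//!
+//! ```text
+//! RUST_LOG=info
+//! FW_PKG_ID=...
+//! SUI_WALLET_PATH=~/.sui/sui_config/client.yaml
+//! OLLAMA_HTTP_API=http://localhost:11434/api/generate
+//! ```
+//!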
[ollama-http-api]: https://github.com/ollama/ollama/blob/main/docs/api.md
+
+mod completion;
+mod ollama_mock;
+mod prelude;
+mod prompt;
+mod setup;
+
+use {
+    prelude::*,
+    std::{env, path::PathBuf, str::FromStr, sync::Arc},
+    sui_sdk::{
+        json::SuiJsonValue,
+        rpc_types::{
+            SuiObjectData,
+            SuiObjectDataOptions,
+            SuiTransactionBlockResponse,
+        },
+        types::{
+            base_types::{ObjectRef, SuiAddress},
+            transaction::{ProgrammableTransaction, TransactionData},
+        },
+    },
+    tokio::sync::Mutex,
+};
+
+const REQUIRED_SUI_ENV: &str = "localnet";
+const GAS_BUDGET: u64 = 10_000_000_000;
+
+mod env_vars {
+    pub(super) const SUI_WALLET_PATH: &str = "SUI_WALLET_PATH";
+    pub(super) const FW_PKG_ID: &str = "FW_PKG_ID";
+    pub(super) const OLLAMA_HTTP_API: &str = "OLLAMA_HTTP_API";
+}
+
+#[derive(Clone)]
+struct TestsContext {
+    wallet: Arc<Mutex<WalletContext>>,
+    pkg_id: ObjectID,
+    me: SuiAddress,
+    ollama_http_api: reqwest::Url,
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+    dotenvy::dotenv().ok();
+
+    env_logger::init();
+
+    let mut ctx = TestsContext::from_env().await?;
+
+    let resources = setup::test(&mut ctx).await?;
+
+    info!(
+        "\
+        Resources successfully set up\n\
+        Node : {node}\n\
+        Model : {model}\n\
+        Agent : {agent}\n\
+        Cluster : {cluster}\n\
+        ",
+        node = resources.node,
+        model = resources.model.id,
+        agent = resources.agent.id,
+        cluster = resources.cluster.id,
+    );
+
+    completion::spawn_task(ctx.clone(), resources.clone()).await?;
+
+    prompt::send_and_expect_answer(&mut ctx, &resources).await?;
+
+    info!("All done");
+    Ok(())
+}
+
+impl TestsContext {
+    async fn from_env() -> Result<Self> {
+        let ollama_http_api = if let Some(http_api) =
+            env::var(env_vars::OLLAMA_HTTP_API)
+                .ok()
+                .filter(|s| !s.is_empty())
+        {
+            FromStr::from_str(&http_api)?
+        } else {
+            info!(
+                "Env var {} missing, starting mock server",
+                env_vars::OLLAMA_HTTP_API
+            );
+            ollama_mock::start().await?
+        };
+
+        let pkg_id = ObjectID::from_str(
+            &env::var(env_vars::FW_PKG_ID).map_err(|_| {
+                anyhow!(
+                    "Env var {} missing.\n\
+                    You need to publish the Sui package \
+                    and set the resulting ID to this env var.\n\
+                    You can also store the ID in an .env file",
+                    env_vars::FW_PKG_ID
+                )
+            })?,
+        )?;
+
+        let wallet_path = PathBuf::from(
+            env::var(env_vars::SUI_WALLET_PATH).map_err(|_| {
+                anyhow!(
+                    "Env var {} missing.\n\
+                    You must provide a path to the Sui wallet configuration, \
+                    typically at ~/.sui/sui_config/client.yaml.\n\
+                    If you don't have this file, you can create it by running \
+                    'sui genesis'.\n\
+                    You can also store the path in an .env file",
+                    env_vars::SUI_WALLET_PATH
+                )
+            })?,
+        );
+        let mut wallet = WalletContext::new(&wallet_path, None, None)?;
+
+        if wallet.config.active_env != Some(REQUIRED_SUI_ENV.to_string()) {
+            anyhow::bail!("This test requires {REQUIRED_SUI_ENV}");
+        }
+        let active_env = wallet
+            .config
+            .envs
+            .iter_mut()
+            .find(|env| env.alias == REQUIRED_SUI_ENV)
+            .ok_or_else(|| anyhow!("No env with alias {REQUIRED_SUI_ENV}"))?;
+        if active_env.ws.is_none() {
+            // if the WS endpoint is missing, we assume it, as in localnet it's
+            // pretty much the same as RPC with just the protocol changed
+            active_env.ws = Some(active_env.rpc.replace("http", "ws"));
+        }
+
+        Ok(Self {
+            me: wallet.active_address()?,
+            wallet: Arc::new(Mutex::new(wallet)),
+            pkg_id,
+            ollama_http_api,
+        })
+    }
+
+    async fn client(&self) -> Result<SuiClient> {
+        self.wallet.lock().await.get_client().await
+    }
+
+    async fn find_sui_coin(&self) -> Result<ObjectRef> {
+        let sui_coin = None; // the default is SUI
+        let two_sui = 2_000_000_000;
+        let coins = self
+            .client()
+            .await?
+            .coin_read_api()
+            .select_coins(self.me, sui_coin, two_sui, vec![])
+            .await?;
+
+        coins
+            .first()
+            .map(|c| (c.coin_object_id, c.version, c.digest))
+            .ok_or_else(|| anyhow!("No SUI coins found for the addr"))
+    }
+
+    async fn move_call(
+        &self,
+        module: &str,
+        function: &str,
+        call_args: Vec<SuiJsonValue>,
+    ) -> Result<SuiTransactionBlockResponse> {
+        let tx = self
+            .client()
+            .await?
+            .transaction_builder()
+            .move_call(
+                self.me,
+                self.pkg_id,
+                module,
+                function,
+                vec![],
+                call_args,
+                None, // auto pick coin
+                GAS_BUDGET,
+                None,
+            )
+            .await?;
+
+        let wallet = self.wallet.lock().await;
+        let signed_tx = wallet.sign_transaction(&tx);
+        let resp = wallet.execute_transaction_must_succeed(signed_tx).await;
+
+        debug!("Call to {module}::{function}: {}", resp.digest);
+
+        Ok(resp)
+    }
+
+    async fn execute_ptx(
+        &mut self,
+        tx: ProgrammableTransaction,
+    ) -> Result<SuiTransactionBlockResponse> {
+        let coin = self.find_sui_coin().await?;
+
+        let gas_price = self
+            .client()
+            .await?
+            .read_api()
+            .get_reference_gas_price()
+            .await?;
+        let tx = TransactionData::new_programmable(
+            self.me,
+            vec![coin],
+            tx,
+            GAS_BUDGET,
+            gas_price,
+        );
+
+        let wallet = self.wallet.lock().await;
+        let signed_tx = wallet.sign_transaction(&tx);
+        let resp = wallet.execute_transaction_must_succeed(signed_tx).await;
+
+        debug!("Call to programmable tx: {}", resp.digest);
+
+        Ok(resp)
+    }
+
+    /// Returns [`ObjectRef`] for the object with the given ID.
+    /// That's useful for programmable txs.
+    async fn get_object_ref(&self, id: ObjectID) -> Result<ObjectRef> {
+        let SuiObjectData {
+            version, digest, ..
+        } = self
+            .client()
+            .await?
+            .read_api()
+            .get_object_with_options(id, SuiObjectDataOptions::full_content())
+            .await?
+ .data + .ok_or_else(|| anyhow!("Object {id} not found"))?; + + Ok((id, version, digest)) + } +} + +trait SuiJsonValueExt +where + Self: Sized, +{ + fn from_str_to_string(s: &str) -> Result; +} + +impl SuiJsonValueExt for SuiJsonValue { + fn from_str_to_string(s: &str) -> Result { + SuiJsonValue::new(JsonValue::String(s.to_string())) + } +} diff --git a/e2e_tests/src/ollama_mock.rs b/e2e_tests/src/ollama_mock.rs new file mode 100644 index 0000000..0915428 --- /dev/null +++ b/e2e_tests/src/ollama_mock.rs @@ -0,0 +1,38 @@ +//! If ollama http API env var is not provided, we spawn a simple HTTP server +//! to mock those APIs that return a static response. + +use { + crate::prelude::*, + axum::{routing::post, Router}, + reqwest::Url, + std::str::FromStr, +}; + +pub(crate) async fn start() -> Result { + let app = Router::new().route("/", post(mocked_model_response)); + + let addr = "0.0.0.0:3000"; + let listener = tokio::net::TcpListener::bind(addr).await?; + + tokio::spawn(async move { + if let Err(err) = axum::serve(listener, app).await { + error!("Failed to start mock ollama HTTP server: {err}"); + } + }); + + Ok(FromStr::from_str(&format!("http://{addr}"))?) +} + +/// Prints "This is a mock LLM response." and is done. +async fn mocked_model_response() -> &'static str { + r#"{"model":"mistral","created_at":"2024-08-07T10:08:39.33050386Z","response":" \"","done":false} +{"model":"mistral","created_at":"2024-08-07T10:08:39.330506894Z","response":"This","done":false} +{"model":"mistral","created_at":"2024-08-07T10:08:39.333717178Z","response":" is","done":false} +{"model":"mistral","created_at":"2024-08-07T10:08:39.346571008Z","response":" a","done":false} +{"model":"mistral","created_at":"2024-08-07T10:08:39.359445086Z","response":" mock","done":false} +{"model":"mistral","created_at":"2024-08-07T10:08:39.372366904Z","response":" LL","done":false} +{"model":"mistral","created_at":"2024-08-07T10:08:39.385213658Z","response":"M","done":false} +{"model":"mistral","created_at":"2024-08-07T10:08:39.39866316Z","response":" response","done":false} +{"model":"mistral","created_at":"2024-08-07T10:08:39.411548254Z","response":".\"","done":false} +{"model":"mistral","created_at":"2024-08-07T10:08:39.425126938Z","response":"","done":true,"done_reason":"stop","context":[3,29473,2066,16650,3095,29515,1619,1117,1032,9743,9582,29487,3667,4,1027,1113,4028,1117,1032,9743,17472,29523,3667,1379],"total_duration":183077547,"load_duration":3397703,"prompt_eval_count":16,"prompt_eval_duration":20677000,"eval_count":10,"eval_duration":117239000}"# +} diff --git a/e2e_tests/src/prelude.rs b/e2e_tests/src/prelude.rs new file mode 100644 index 0000000..b07c499 --- /dev/null +++ b/e2e_tests/src/prelude.rs @@ -0,0 +1,12 @@ +pub(crate) use { + anyhow::anyhow, + log::{debug, error, info, warn}, + serde_json::Value as JsonValue, + sui_sdk::{ + types::base_types::ObjectID, + wallet_context::WalletContext, + SuiClient, + }, +}; + +pub(crate) type Result = std::result::Result; diff --git a/e2e_tests/src/prompt.rs b/e2e_tests/src/prompt.rs new file mode 100644 index 0000000..d06c782 --- /dev/null +++ b/e2e_tests/src/prompt.rs @@ -0,0 +1,125 @@ +//! Runs a simple test: sends a prompt to the cluster and waits for the response. +//! The status of the cluster execution is checked every [`SLEEP_BETWEEN_CHECKS_FOR_SUCCESS`]. +//! If the execution is not done after [`MAX_WAIT_FOR_SUCCESS`], the test fails. +//! Being done is defined as having a status [`STATUS_DONE`] and a non-empty `cluster_response`. 
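+//!
+//! For illustration only (the field names are taken from the checks below,
+//! all other fields of the execution object are omitted), a finished
+//! execution is expected to look roughly like:
+//!
+//! ```text
+//! { "status": "SUCCESS", "cluster_response": "..." }
+//! ```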
+ +use { + crate::{prelude::*, setup::TestsSetup, SuiJsonValueExt, TestsContext}, + std::{str::FromStr, time::Duration}, + sui_sdk::{ + json::SuiJsonValue, + rpc_types::{SuiObjectDataOptions, SuiParsedData}, + }, + tokio::time::Instant, +}; + +const CLUSTER_EXECUTION_CREATED_EVENT: &str = "ClusterExecutionCreatedEvent"; +const SLEEP_BETWEEN_CHECKS_FOR_SUCCESS: Duration = Duration::from_secs(1); +/// This test will wait after 2 minutes without the execution being done. +const MAX_WAIT_FOR_SUCCESS: Duration = Duration::from_secs(120); +const CLUSTER_MODULE: &str = "cluster"; +const STATUS_DONE: &str = "SUCCESS"; +const CLUSTER_EXECUTE_FUNCTION: &str = "execute"; + +pub async fn send_and_expect_answer( + ctx: &mut TestsContext, + resources: &TestsSetup, +) -> Result<()> { + let events = ctx + .move_call( + CLUSTER_MODULE, + CLUSTER_EXECUTE_FUNCTION, + vec![ + SuiJsonValue::from_object_id(resources.cluster.id), + SuiJsonValue::from_str_to_string( + "Write a poem about sleep or there lack of", + )?, + ], + ) + .await? + .events + .ok_or_else(|| anyhow!("No events in response"))? + .data; + + // extract the execution ID from the tx response + let Some(execution_id) = events + .into_iter() + .find(|event| { + event.type_.name.to_string() == CLUSTER_EXECUTION_CREATED_EVENT + }) + .map(|event| event.parsed_json["execution"].clone()) + else { + anyhow::bail!( + "No {CLUSTER_EXECUTION_CREATED_EVENT}.execution event in response", + ); + }; + let execution_id = execution_id.as_str().ok_or_else(|| { + anyhow!("{CLUSTER_EXECUTION_CREATED_EVENT}.execution is not a string") + })?; + let execution_id = ObjectID::from_str(execution_id)?; + info!("Sent a new prompt, execution: {execution_id}"); + + wait_for_all_tasks_to_be_done(ctx, execution_id).await?; + + Ok(()) +} + +/// A better approach here would be to wait for the final event of this +/// execution perhaps, but this is simple enough. +async fn wait_for_all_tasks_to_be_done( + ctx: &mut TestsContext, + execution_id: ObjectID, +) -> Result<()> { + let started_at = Instant::now(); + loop { + let object_data = ctx + .client() + .await? + .read_api() + .get_object_with_options( + execution_id, + SuiObjectDataOptions::full_content(), + ) + .await? + .data + .ok_or_else(|| anyhow!("No data in response for {execution_id}"))?; + + let Some(SuiParsedData::MoveObject(object_data)) = object_data.content + else { + anyhow::bail!("No MoveObject in response for {execution_id}"); + }; + let json = object_data.fields.to_json_value(); + let status = json["status"].as_str().ok_or_else(|| { + anyhow!("No status in response for {execution_id}") + })?; + + if status == STATUS_DONE { + let response = + json["cluster_response"].as_str().ok_or_else(|| { + anyhow!( + "No cluster_response in response for {execution_id}" + ) + })?; + + if response.is_empty() { + anyhow::bail!( + "Prompt {execution_id} is done, but cluster_response is empty.\ + Last execution object state: {json:#?}" + ); + } + + info!("Prompt {execution_id} is done:\n\n\n{response}\n\n\n"); + break; + } else if started_at.elapsed() > MAX_WAIT_FOR_SUCCESS { + anyhow::bail!( + "Prompt {execution_id} is not done after \ + {MAX_WAIT_FOR_SUCCESS:?}. \ + Last execution object state: {json:#?}" + ); + } + + tokio::time::sleep(SLEEP_BETWEEN_CHECKS_FOR_SUCCESS).await; + } + + Ok(()) +} diff --git a/e2e_tests/src/setup.rs b/e2e_tests/src/setup.rs new file mode 100644 index 0000000..a57c4a2 --- /dev/null +++ b/e2e_tests/src/setup.rs @@ -0,0 +1,508 @@ +//! 
The first step of the tests is to setup all required resources on chain. +//! The resources are defined in the [`TestsSetup`] struct. + +use { + crate::{prelude::*, SuiJsonValueExt, TestsContext}, + std::str::FromStr, + sui_sdk::{ + json::SuiJsonValue, + types::{ + programmable_transaction_builder::ProgrammableTransactionBuilder, + transaction::ObjectArg, + Identifier, + }, + }, +}; + +const AGENT_CREATE_AND_SHARE_FUNCTION: &str = "create_and_share"; +const AGENT_CREATED_EVENT: &str = "AgentCreatedEvent"; +const AGENT_GET_BLUEPRINT_FUNCTION: &str = "get_blueprint"; +const AGENT_MODULE: &str = "agent"; +const AGENT_NAME: &str = "my-agent-1"; +const CLUSTER_ADD_AGENT_FUNCTION: &str = "add_agent"; +const CLUSTER_ADD_TASK_ENTRY_FUNCTION: &str = "add_task_entry"; +const CLUSTER_ATTACH_TASK_TOOL_FUNCTION: &str = "attach_tool_to_task_entry"; +const CLUSTER_CREATE_FUNCTION: &str = "create"; +const CLUSTER_CREATED_EVENT: &str = "ClusterCreatedEvent"; +const CLUSTER_MODULE: &str = "cluster"; +const MODEL_CREATE_FUNCTION: &str = "create"; +const MODEL_CREATED_EVENT: &str = "ModelCreatedEvent"; +const MODEL_GET_INFO_FUNCTION: &str = "get_info"; +const MODEL_MODULE: &str = "model"; +const MODEL_NAME: &str = "mistral"; +const NODE_CREATE_FUNCTION: &str = "create"; +const NODE_CREATED_EVENT: &str = "NodeCreatedEvent"; +const NODE_MODULE: &str = "node"; + +#[derive(Debug, Clone)] +pub(crate) struct TestsSetup { + pub(crate) node: ObjectID, + pub(crate) model: TestModel, + pub(crate) agent: TestAgent, + pub(crate) cluster: TestCluster, +} + +#[derive(Debug, Clone)] +pub(crate) struct TestModel { + pub(crate) id: ObjectID, + pub(crate) owner_cap: ObjectID, +} + +#[derive(Debug, Clone)] +pub(crate) struct TestAgent { + pub(crate) id: ObjectID, + pub(crate) owner_cap: ObjectID, +} + +#[derive(Debug, Clone)] +pub(crate) struct TestCluster { + pub(crate) id: ObjectID, + pub(crate) owner_cap: ObjectID, +} + +/// Tests setup functionality of the Talus framework. +/// The resulting test resources will be used later in the tests. +pub(crate) async fn test(ctx: &mut TestsContext) -> Result { + info!("Setting up a node"); + let node = create_node(ctx).await?; + + info!("Setting up a model"); + let model = create_model_with_owner_cap(ctx, node).await?; + + info!("Setting up an agent"); + let agent = create_agent(ctx, &model).await?; + + info!("Setting up a cluster"); + let cluster = create_cluster_with_owner_cap(ctx).await?; + + info!("Adding agent to the cluster"); + add_agent_to_cluster(ctx, &agent, &cluster).await?; + + info!("Adding tasks to the cluster"); + // tasks as defined in the `cluster_tests.move` + add_task_to_cluster( + ctx, + &cluster, + TaskDefinition { + task_name: "Analyze Poem Request", + agent_name: AGENT_NAME, + description: "Analyze the user's request for poem creation", + expected_output: "A structured analysis of the poem request", + prompt: "Analyze the user's input for poem style and subject. \ + If either is missing, prepare an error message.", + tool: Some(ToolDefinition { + tool_name: "some_tool", + args: &["arg1", "arg2"], + }), + ..Default::default() + }, + ) + .await?; + add_task_to_cluster( + ctx, + &cluster, + TaskDefinition { + task_name: "Create Poem", + agent_name: AGENT_NAME, + description: "Create a poem based on the analyzed request", + expected_output: "A poem matching the user's requirements", + prompt: "Create a poem based on the provided style and subject. 
\ + Be creative and inspiring.", + ..Default::default() + }, + ) + .await?; + + Ok(TestsSetup { + node, + model, + agent, + cluster, + }) +} + +/// Sets up a new dummy node. +/// This node will be an owned object of the wallet. +async fn create_node(ctx: &mut TestsContext) -> Result { + let events = ctx + .move_call( + NODE_MODULE, + NODE_CREATE_FUNCTION, + vec![ + // name: String + SuiJsonValue::from_str_to_string("my-node-1")?, + // node_type: String, + SuiJsonValue::from_str_to_string("some-type")?, + // gpu_memory: u64, + SuiJsonValue::from_str_to_string("16")?, + // image_hash: vector, + SuiJsonValue::new(JsonValue::Array(vec![]))?, + // external_arguments: vector, + SuiJsonValue::new(JsonValue::Array(vec![]))?, + ], + ) + .await? + .events + .ok_or_else(|| { + anyhow!( + "No events found in the response of create node transaction" + ) + })? + .data; + + let Some(node_id) = events + .into_iter() + .find(|event| event.type_.name.to_string() == NODE_CREATED_EVENT) + .map(|event| event.parsed_json["node"].clone()) + else { + anyhow::bail!( + "No {NODE_CREATED_EVENT}.node found \ + in the response of create node transaction" + ) + }; + + let node_id_str = node_id + .as_str() + .ok_or_else(|| anyhow!("{NODE_CREATED_EVENT}.node is not a string"))?; + + Ok(ObjectID::from_str(node_id_str)?) +} + +/// Sets up a new dummy model. +/// Returns model ID and model owner capability ID. +async fn create_model_with_owner_cap( + ctx: &mut TestsContext, + node: ObjectID, +) -> Result { + let events = ctx + .move_call( + MODEL_MODULE, + MODEL_CREATE_FUNCTION, + vec![ + // node: &Node + SuiJsonValue::from_object_id(node), + // name: String + SuiJsonValue::from_str_to_string(MODEL_NAME)?, + // model_hash: vector + SuiJsonValue::new(JsonValue::Array(vec![JsonValue::Number( + 1.into(), + )]))?, + // url: String + SuiJsonValue::from_str_to_string("https://example.com")?, + // token_price: u64 + SuiJsonValue::from_str_to_string("100")?, + // capacity: u64 + SuiJsonValue::from_str_to_string("1")?, + // num_params: u64 + SuiJsonValue::from_str_to_string("1")?, + // description: String + SuiJsonValue::from_str_to_string("This is my test model")?, + // max_context_length: u64 + SuiJsonValue::from_str_to_string("2048")?, + // is_fine_tuned: bool + SuiJsonValue::new(JsonValue::Bool(true))?, + // family: String + SuiJsonValue::from_str_to_string("my-family")?, + // vendor: String + SuiJsonValue::from_str_to_string("my-vendor")?, + // is_open_source: bool + SuiJsonValue::new(JsonValue::Bool(true))?, + // datasets: vector + SuiJsonValue::new(JsonValue::Array(vec![]))?, + ], + ) + .await? + .events + .ok_or_else(|| { + anyhow!( + "No events found in the response of create model transaction" + ) + })? + .data; + + let Some(event_json) = events + .into_iter() + .find(|event| event.type_.name.to_string() == MODEL_CREATED_EVENT) + .map(|event| event.parsed_json.clone()) + else { + anyhow::bail!( + "No {MODEL_CREATED_EVENT} found in \ + the response of create model transaction" + ) + }; + + let model_id_str = event_json["model"].as_str().ok_or_else(|| { + anyhow!("{MODEL_CREATED_EVENT}.model is not a string") + })?; + let model_owner_cap_id_str = + event_json["owner_cap"].as_str().ok_or_else(|| { + anyhow!("{MODEL_CREATED_EVENT}.owner_cap is not a string") + })?; + + Ok(TestModel { + id: ObjectID::from_str(model_id_str)?, + owner_cap: ObjectID::from_str(model_owner_cap_id_str)?, + }) +} + +/// Sets up a new dummy agent. 
+/// Since we already created a model that we _own_ we can create the agent +/// without a model inference promise. +async fn create_agent( + ctx: &mut TestsContext, + model: &TestModel, +) -> Result { + let owner_cap_ref = ctx.get_object_ref(model.owner_cap).await?; + let (_, model_version, _) = ctx.get_object_ref(model.id).await?; + + // In this programmable tx we will first get the ModelInfo with our + // owner cap. + // Then we use the model info to create an agent. + let mut ptb = ProgrammableTransactionBuilder::new(); + + let model_arg = ptb.obj(ObjectArg::SharedObject { + id: model.id, + initial_shared_version: model_version, + mutable: false, + })?; + + let model_owner_cap_arg = + ptb.obj(ObjectArg::ImmOrOwnedObject(owner_cap_ref))?; + + let model_info_arg = ptb.programmable_move_call( + ctx.pkg_id, + Identifier::new(MODEL_MODULE)?, + Identifier::new(MODEL_GET_INFO_FUNCTION)?, + vec![], + vec![model_arg, model_owner_cap_arg], + ); + + // agent as defined in the `cluster_tests.move` + let agent_name_arg = ptb.pure(AGENT_NAME)?; + let agent_role_arg = ptb.pure("AI Poet")?; + let agent_goal_arg = ptb.pure("Create beautiful poems")?; + let agent_backstory_arg = + ptb.pure("An AI trained to create poetic masterpieces")?; + + ptb.programmable_move_call( + ctx.pkg_id, + Identifier::new(AGENT_MODULE)?, + Identifier::new(AGENT_CREATE_AND_SHARE_FUNCTION)?, + vec![], + vec![ + agent_name_arg, + agent_role_arg, + agent_goal_arg, + agent_backstory_arg, + model_info_arg, + ], + ); + + let events = ctx + .execute_ptx(ptb.finish()) + .await? + .events + .ok_or_else(|| { + anyhow!("No events found in the response of create agent ptx") + })? + .data; + + let Some(event_json) = events + .into_iter() + .find(|event| event.type_.name.to_string() == AGENT_CREATED_EVENT) + .map(|event| event.parsed_json.clone()) + else { + anyhow::bail!( + "No {AGENT_CREATED_EVENT} found in \ + the response of create agent ptx" + ) + }; + + let agent_id_str = event_json["agent"].as_str().ok_or_else(|| { + anyhow!("{AGENT_CREATED_EVENT}.agent is not a string") + })?; + let agent_owner_cap_id_str = + event_json["owner_cap"].as_str().ok_or_else(|| { + anyhow!("{AGENT_CREATED_EVENT}.owner_cap is not a string") + })?; + + Ok(TestAgent { + id: ObjectID::from_str(agent_id_str)?, + owner_cap: ObjectID::from_str(agent_owner_cap_id_str)?, + }) +} + +/// Sets up a new dummy cluster. +/// Returns cluster ID and cluster owner capability ID. +async fn create_cluster_with_owner_cap( + ctx: &mut TestsContext, +) -> Result { + let events = ctx + .move_call( + CLUSTER_MODULE, + CLUSTER_CREATE_FUNCTION, + vec![ + // name: String + SuiJsonValue::from_str_to_string("my-cluster-1")?, + // description: String + SuiJsonValue::from_str_to_string("Poet cluster")?, + ], + ) + .await? + .events + .ok_or_else(|| { + anyhow!( + "No events found in the response of create cluster transaction" + ) + })? 
+ .data; + + let Some(event_json) = events + .into_iter() + .find(|event| event.type_.name.to_string() == CLUSTER_CREATED_EVENT) + .map(|event| event.parsed_json.clone()) + else { + anyhow::bail!( + "No {CLUSTER_CREATED_EVENT} found in \ + the response of create cluster transaction" + ) + }; + + let cluster_id_str = event_json["cluster"].as_str().ok_or_else(|| { + anyhow!("{CLUSTER_CREATED_EVENT}.cluster is not a string") + })?; + let cluster_owner_cap_id_str = + event_json["owner_cap"].as_str().ok_or_else(|| { + anyhow!("{CLUSTER_CREATED_EVENT}.owner_cap is not a string") + })?; + + Ok(TestCluster { + id: ObjectID::from_str(cluster_id_str)?, + owner_cap: ObjectID::from_str(cluster_owner_cap_id_str)?, + }) +} + +async fn add_agent_to_cluster( + ctx: &mut TestsContext, + agent: &TestAgent, + cluster: &TestCluster, +) -> Result<()> { + let cluster_ref = ctx.get_object_ref(cluster.id).await?; + let cluster_owner_cap_ref = ctx.get_object_ref(cluster.owner_cap).await?; + + let agent_ref = ctx.get_object_ref(agent.id).await?; + let agent_owner_cap_ref = ctx.get_object_ref(agent.owner_cap).await?; + + let mut ptb = ProgrammableTransactionBuilder::new(); + + // talus::agent::get_blueprint(&Agent, &OwnerCap) + let agent_arg = ptb.obj(ObjectArg::SharedObject { + id: agent.id, + initial_shared_version: agent_ref.1, + mutable: false, + })?; + let agent_owner_cap_arg = + ptb.obj(ObjectArg::ImmOrOwnedObject(agent_owner_cap_ref))?; + let agent_blueprint_arg = ptb.programmable_move_call( + ctx.pkg_id, + Identifier::new(AGENT_MODULE)?, + Identifier::new(AGENT_GET_BLUEPRINT_FUNCTION)?, + vec![], + vec![agent_arg, agent_owner_cap_arg], + ); + + // talus::cluster::add_agent(&mut Cluster, &OwnerCap, &AgentBlueprint) + let cluster_arg = ptb.obj(ObjectArg::SharedObject { + id: cluster.id, + initial_shared_version: cluster_ref.1, + mutable: true, + })?; + let cluster_owner_cap_arg = + ptb.obj(ObjectArg::ImmOrOwnedObject(cluster_owner_cap_ref))?; + ptb.programmable_move_call( + ctx.pkg_id, + Identifier::new(CLUSTER_MODULE)?, + Identifier::new(CLUSTER_ADD_AGENT_FUNCTION)?, + vec![], + vec![cluster_arg, cluster_owner_cap_arg, agent_blueprint_arg], + ); + + ctx.execute_ptx(ptb.finish()).await?; + + Ok(()) +} + +#[derive(Default)] +struct TaskDefinition { + task_name: &'static str, + agent_name: &'static str, + description: &'static str, + expected_output: &'static str, + prompt: &'static str, + context: &'static str, + tool: Option, +} + +#[derive(Default)] +struct ToolDefinition { + tool_name: &'static str, + args: &'static [&'static str], +} + +async fn add_task_to_cluster( + ctx: &mut TestsContext, + cluster: &TestCluster, + task: TaskDefinition, +) -> Result<()> { + ctx.move_call( + CLUSTER_MODULE, + CLUSTER_ADD_TASK_ENTRY_FUNCTION, + vec![ + // cluster: &mut Cluster + SuiJsonValue::from_object_id(cluster.id), + // owner_cap: &ClusterOwnerCap + SuiJsonValue::from_object_id(cluster.owner_cap), + // task_name: String + SuiJsonValue::from_str_to_string(task.task_name)?, + // agent_name: String + SuiJsonValue::from_str_to_string(task.agent_name)?, + // description: String + SuiJsonValue::from_str_to_string(task.description)?, + // expected_output: String + SuiJsonValue::from_str_to_string(task.expected_output)?, + // prompt: String + SuiJsonValue::from_str_to_string(task.prompt)?, + // context: String + SuiJsonValue::from_str_to_string(task.context)?, + ], + ) + .await?; + + if let Some(ToolDefinition { tool_name, args }) = task.tool { + info!("Attaching tool {tool_name} to task {}", task.task_name); + ctx.move_call( 
+ CLUSTER_MODULE, + CLUSTER_ATTACH_TASK_TOOL_FUNCTION, + vec![ + // cluster: &mut Cluster + SuiJsonValue::from_object_id(cluster.id), + // owner_cap: &ClusterOwnerCap + SuiJsonValue::from_object_id(cluster.owner_cap), + // task_name: String + SuiJsonValue::from_str_to_string(task.task_name)?, + // tool_name: String + SuiJsonValue::from_str_to_string(tool_name)?, + // args: vector + SuiJsonValue::new(JsonValue::Array( + args.iter() + .map(ToOwned::to_owned) + .map(From::from) + .collect(), + ))?, + ], + ) + .await?; + } + + Ok(()) +} diff --git a/examples/LICENSE b/examples/LICENSE new file mode 100644 index 0000000..f0cd2d9 --- /dev/null +++ b/examples/LICENSE @@ -0,0 +1,176 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+
+   END OF TERMS AND CONDITIONS
\ No newline at end of file
diff --git a/examples/README.md b/examples/README.md
new file mode 100644
index 0000000..9a1120a
--- /dev/null
+++ b/examples/README.md
@@ -0,0 +1,121 @@
+# Examples
+
+We have built a few examples to showcase Nexus agents.
+
+Before you can use the examples or build your own agents, you need to install a few things first,
+as shown in the next section.
+
+When you run any of the examples, you will be prompted to start two services:
+
+- [`tools`][tools_README], which you need to start only once for all examples, and
+- [`events`][events_README], which you need to start anew for each example and can stop once the example has finished.
+
+You will be given exact instructions on how to start and stop these services when you run an example.
+
+- [Examples](#examples)
+  - [Environment setup](#environment-setup)
+    - [Operating System](#operating-system)
+    - [Helper tools](#helper-tools)
+    - [Operating System packages](#operating-system-packages)
+    - [Python and virtual environment](#python-and-virtual-environment)
+    - [Suibase](#suibase)
+    - [`PATH`](#path)
+    - [Ollama](#ollama)
+  - [Example: Instagram Post Planner](#example-instagram-post-planner)
+  - [Example: Trip Planner](#example-trip-planner)
+  - [Example: CLI Cluster](#example-cli-cluster)
+  - [Tools](#tools)
+
+## Environment setup
+
+### Operating System
+
+We assume Ubuntu `22.04 LTS`.
+
+### Helper tools
+
+You need to install the following tools by following their official installation instructions:
+
+- [`cargo`][cargo]
+- [`just`][just] (on Linux, install it via the "Pre-Built Binaries" instructions rather than with `apt`, which ships an outdated version)
+- [`uv`][uv]
+
+We use `just` as a general command runner, and `uv` to manage Python and virtual environments
+(`.venv`). The [`justfile`][justfile] contains installation instructions for everything we
+describe here, and you can run `just` from this folder, as it will automatically discover the
+`justfile` at the top level.
+
+### Operating System packages
+
+You can install dependencies with `just apt-setup`.
+
+### Python and virtual environment
+
+We install and use Python `3.10`.
+
+From inside the working copy of the repository, run `just venv-setup` to:
+
+- install Python,
+- create the necessary `.venv`,
+- install all the needed dependencies in the `.venv`.
+
+### Suibase
+
+Talus smart contracts are written in Sui Move. Until our testnet is ready, we use a
+Sui Move-compatible chain based on [`Suibase`][suibase].
+
+You can download and install Suibase with `just suibase-setup`.
+
+> [!NOTE]
+> Our setup script pins localnet to a particular version in `~/suibase/workdirs/localnet/suibase.yaml`.
+
+### `PATH`
+
+Make sure `~/.local/bin` is in your `PATH`. Suibase requires this, as it installs its
+executables there.
+
+### Ollama
+
+For the LLM component, we install and use [Ollama][ollama] with the `llama3.1` model. You can
+install both with `just ollama-setup`.
+
+## Example: Instagram Post Planner
+
+This [example][ig_post_planner] demonstrates how to create an Instagram post planner agent using
+the Nexus SDK.
+
+Run with `just example ig-post-planner`.
+
+## Example: Trip Planner
+
+This [example][trip_planner] demonstrates how to create a trip planner agent using the Nexus
+SDK.
+
+Run with `just example trip-planner`.
+
+## Example: CLI Cluster
+
+This [example][cli_cluster] prompts the user to create a [cluster][design_cluster] by describing
+agents and tasks on the command line.
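+
+For orientation, here is a rough sketch of the SDK calls that these examples make under the
+hood. It is illustrative only: it assumes you already have a `client`, a published `package_id`,
+and a `model_id`/`model_owner_cap_id` (the `main.py` runner prepares these for you), and the
+cluster, agent, and task values below are made up.
+
+```python
+from nexus_sdk import (
+    create_cluster,
+    create_agent_for_cluster,
+    create_task,
+    execute_cluster,
+    get_cluster_execution_response,
+)
+
+# `client`, `package_id`, `model_id`, and `model_owner_cap_id` are assumed to
+# have been prepared beforehand, e.g. by `examples/main.py`.
+cluster_id, cluster_owner_cap_id = create_cluster(
+    client, package_id, "demo-cluster", "A tiny demo cluster"
+)
+
+create_agent_for_cluster(
+    client, package_id, cluster_id, cluster_owner_cap_id,
+    model_id, model_owner_cap_id,
+    "poet", "AI Poet", "Write short poems", "An AI that loves verse",
+)
+
+create_task(
+    client=client,
+    package_id=package_id,
+    cluster_id=cluster_id,
+    cluster_owner_cap_id=cluster_owner_cap_id,
+    name="write_poem",
+    agent_name="poet",
+    description="Write a short poem about the user's topic",
+    expected_output="A four-line poem",
+    prompt="Write a four-line poem about the given topic.",
+    context="",
+)
+
+# Execute the cluster with the user's input and poll for the final response.
+execution_id = execute_cluster(client, package_id, cluster_id, "the sea")
+if execution_id is not None:
+    print(get_cluster_execution_response(client, execution_id))
+```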
+ +Run with `just example cli-cluster`. + +## Tools + +Agents can use tools to enhance their capabilities. Please refer to the [`tools` README][tools_README] +for a list of available tools, and instructions on how to add new ones. + + + +[cargo]: https://doc.rust-lang.org/cargo/getting-started/installation.html +[just]: https://github.com/casey/just +[uv]: https://github.com/astral-sh/uv +[suibase]: https://suibase.io/ +[ollama]: https://ollama.com/ +[tools_README]: ../offchain/tools/README.md +[events_README]: ../offchain/events/README.md +[ig_post_planner]: ./ig_post_planner.py +[trip_planner]: ./trip_planner.py +[cli_cluster]: ./cli_cluster.py +[justfile]: ../justfile +[design_cluster]: ../onchain/README.md#cluster diff --git a/examples/cli_cluster.py b/examples/cli_cluster.py new file mode 100644 index 0000000..0fc54d7 --- /dev/null +++ b/examples/cli_cluster.py @@ -0,0 +1,226 @@ +# Runs an example that prompts the user to define a cluster, agents, tasks, and tools. + +from nexus_sdk import ( + create_cluster, + create_agent_for_cluster, + create_task, + execute_cluster, + get_cluster_execution_response, +) +from pysui.sui.sui_txn.sync_transaction import SuiTransaction +from pysui.sui.sui_types.scalars import ObjectID, SuiString +from pysui.sui.sui_types.collections import SuiArray + + +def get_user_input_for_cluster(): + cluster_name = input("Enter Cluster name: ") + cluster_description = input("Enter Cluster description: ") + return cluster_name, cluster_description + + +def get_user_input_for_agent(): + agent_name = input("Enter Agent name: ") + agent_role = input("Enter Agent role: ") + agent_goal = input("Enter Agent goal: ") + agent_backstory = input("Enter Agent backstory: ") + return { + "name": agent_name, + "role": agent_role, + "goal": agent_goal, + "backstory": agent_backstory, + } + + +def get_user_input_for_task(): + task_name = input("Enter Task name: ") + agent_name = input("Enter Agent name for this task: ") + task_description = input("Enter Task description: ") + task_expected_output = input("Enter Task expected output: ") + task_prompt = input("Enter Task prompt: ") + task_context = input("Enter Task context: ") + return { + "name": task_name, + "agent_name": agent_name, + "description": task_description, + "expected_output": task_expected_output, + "prompt": task_prompt, + "context": task_context, + } + + +def get_user_input_for_tool(): + task_name = input("Enter Task name for this tool: ") + tool_name = input("Enter Tool name: ") + tool_args = input("Enter Tool args (separated by commas, no spaces): ") + return {"task_name": task_name, "tool_name": tool_name, "tool_args": tool_args} + + +class CliCluster: + def __init__( + self, + client, + package_id, + model_id, + model_owner_cap_id, + cluster_name, + cluster_description, + agents, + tasks, + tools, + ): + self.client = client + self.package_id = package_id + self.model_id = model_id + self.model_owner_cap_id = model_owner_cap_id + + self.cluster_name = cluster_name + self.cluster_description = cluster_description + self.agents = agents + self.tasks = tasks + self.tools = tools + + def setup_cluster(self): + cluster_id, cluster_owner_cap_id = create_cluster( + self.client, + self.package_id, + self.cluster_name, + self.cluster_description, + ) + return cluster_id, cluster_owner_cap_id + + def setup_agents(self, cluster_id, cluster_owner_cap_id): + for agent in self.agents: + create_agent_for_cluster( + self.client, + self.package_id, + cluster_id, + cluster_owner_cap_id, + self.model_id, + 
self.model_owner_cap_id, + agent["name"], + agent["role"], + agent["goal"], + agent["backstory"], + ) + + def setup_tasks(self, cluster_id, cluster_owner_cap_id): + for task in self.tasks: + create_task( + client=self.client, + package_id=self.package_id, + cluster_id=cluster_id, + cluster_owner_cap_id=cluster_owner_cap_id, + name=task["name"], + agent_name=task["agent_name"], + description=task["description"], + expected_output=task["expected_output"], + prompt=task["prompt"], + context=task["context"], + ) + + def setup_tools(self, cluster_id, cluster_owner_cap_id): + for tool in self.tools: + self.attach_tool_to_task( + cluster_id=cluster_id, + cluster_owner_cap_id=cluster_owner_cap_id, + task_name=tool["task_name"], + tool_name=tool["tool_name"], + tool_args=tool["tool_args"], + ) + + def attach_tool_to_task( + self, + cluster_id, + cluster_owner_cap_id, + task_name, + tool_name, + tool_args, + ): + txn = SuiTransaction(client=self.client) + + try: + result = txn.move_call( + target=f"{self.package_id}::cluster::attach_tool_to_task_entry", + arguments=[ + ObjectID(cluster_id), + ObjectID(cluster_owner_cap_id), + SuiString(task_name), + SuiString(tool_name), + SuiArray([SuiString(arg) for arg in tool_args]), + ], + ) + except Exception as e: + print(f"Error in attach_task_to_tool: {e}") + return None + + result = txn.execute(gas_budget=10000000) + + if result.is_ok(): + if result.result_data.effects.status.status == "success": + print(f"Task attached to Tool") + return True + else: + error_message = result.result_data.effects.status.error + print(f"Transaction failed: {error_message}") + return None + return None + + def run(self, user_input): + cluster_id, cluster_owner_cap_id = self.setup_cluster() + self.setup_agents(cluster_id, cluster_owner_cap_id) + self.setup_tasks(cluster_id, cluster_owner_cap_id) + + execution_id = execute_cluster( + self.client, + self.package_id, + cluster_id, + user_input, + ) + + if execution_id is None: + return "Cluster execution failed" + + print(f"Cluster execution started with ID: {execution_id}") + return get_cluster_execution_response(self.client, execution_id) + + +# Runs the CLI agent example using the provided Nexus package ID. +def run_cli_cluster_example(client, package_id, model_id, mode_owner_cap): + cluster_name, cluster_description = get_user_input_for_cluster() + + num_agents = int(input("How many agents would you like to define? ")) + num_tasks = int(input("How many tasks would you like to define? ")) + num_tools = int(input("How many tools would you like to define? 
")) + + agents = [] + for i in range(num_agents): + print(f"\nEnter details for Agent {i+1}:") + agent = get_user_input_for_agent() + agents.append(agent) + + tasks = [] + for i in range(num_tasks): + print(f"\nEnter details for Task {i+1}:") + task = get_user_input_for_task() + tasks.append(task) + + tools = [] + for i in range(num_tools): + print(f"\nEnter details for Tool {i+1}:") + tool = get_user_input_for_tool() + tools.append(tool) + + cluster = CliCluster( + client, + package_id, + model_id, + mode_owner_cap, + cluster_name, + cluster_description, + agents, + tasks, + tools, + ) + + print("Enter some text to start the execution with:") + cluster.run(input()) diff --git a/examples/example.just b/examples/example.just new file mode 100644 index 0000000..d9acd1f --- /dev/null +++ b/examples/example.just @@ -0,0 +1,26 @@ +# Examples assume that the dependencies are already installed in venv + +[private] +default: + @just -l example + +# Runs an example that prompts the user for a description of their post. +[no-cd] +ig-post-planner: + #!/usr/bin/env bash + source .venv/bin/activate + python3 examples/main.py "ig_post_planner" + +# Runs an example that prompts the user for description of their trip. +[no-cd] +trip-planner: + #!/usr/bin/env bash + source .venv/bin/activate + python3 examples/main.py "trip_planner" + +# Runs an example that prompts the user for description of their cluster. +[no-cd] +cli-cluster: + #!/usr/bin/env bash + source .venv/bin/activate + python3 examples/main.py "cli_cluster" diff --git a/examples/ig_post_planner.py b/examples/ig_post_planner.py new file mode 100644 index 0000000..4266b1c --- /dev/null +++ b/examples/ig_post_planner.py @@ -0,0 +1,206 @@ +# Use [run_ig_post_planner_example] to run the Instagram Post Planner example. +# It's a blocking function that takes a client and package ID as arguments +# and then prompts the user for input to describe what product they want to market. 
+ +from nexus_sdk import ( + create_cluster, + create_agent_for_cluster, + create_task, + execute_cluster, + get_cluster_execution_response, +) + + +class InstagramPostPlanner: + def __init__( + self, + client, + package_id, + model_id, + model_owner_cap_id, + product_website, + product_details, + ): + self.client = client + self.package_id = package_id + self.model_id = model_id + self.model_owner_cap_id = model_owner_cap_id + + self.product_website = product_website + self.product_details = product_details + + def setup_cluster(self): + cluster_id, cluster_owner_cap_id = create_cluster( + self.client, + self.package_id, + "Instagram Post Planning Cluster", + "A cluster for creating Instagram marketing content", + ) + return cluster_id, cluster_owner_cap_id + + def setup_agents(self, cluster_id, cluster_owner_cap_id): + agent_configs = [ + ( + "product_competitor", + "Lead Market Analyst", + "Conduct amazing analysis of products and competitors", + ), + ( + "strategy_planner", + "Chief Marketing Strategist", + "Synthesize insights to formulate incredible marketing strategies", + ), + ( + "creative_content", + "Creative Content Creator", + "Develop compelling content for social media campaigns", + ), + ( + "senior_photographer", + "Senior Photographer", + "Take amazing photographs for Instagram ads", + ), + ( + "chief_creative_director", + "Chief Creative Director", + "Oversee and approve the final content", + ), + ] + + for agent_name, role, goal in agent_configs: + create_agent_for_cluster( + self.client, + self.package_id, + cluster_id, + cluster_owner_cap_id, + self.model_id, + self.model_owner_cap_id, + agent_name, + role, + goal, + f"An AI agent specialized in {role.lower()} for Instagram marketing.", + ) + + def setup_tasks(self, cluster_id, cluster_owner_cap_id): + tasks = [ + ( + "product_analysis", + "product_competitor", + f""" + Analyze the product website: {self.product_website}. + Extra details: {self.product_details}. + Identify unique features, benefits, and overall narrative. + Report on key selling points, market appeal, and suggestions for enhancement. + """, + ), + ( + "competitor_analysis", + "product_competitor", + f""" + Explore competitors of: {self.product_website}. + Identify top 3 competitors and analyze their strategies and positioning. + Provide a detailed comparison to the competitors. + """, + ), + ( + "campaign_development", + "strategy_planner", + f""" + Create a targeted marketing campaign for: {self.product_website}. + Develop a strategy and creative content ideas that will resonate with the audience. + Include all context about the product and customer. + """, + ), + ( + "instagram_ad_copy", + "creative_content", + """ + Craft 3 engaging Instagram post copy options. + Make them punchy, captivating, and concise. + Align with the product marketing strategy and highlight unique selling points. + Encourage viewers to take action (visit website, make purchase, learn more). + """, + ), + ( + "take_photograph", + "senior_photographer", + f""" + Describe 3 amazing photo options for an Instagram post. + Use the product details: {self.product_details}. + Each description should be a paragraph, focusing on capturing audience attention. + Don't show the actual product in the photo. + """, + ), + ( + "review_photo", + "chief_creative_director", + f""" + Review the 3 photo options from the senior photographer. + Ensure they align with the product goals: {self.product_website}. + Approve, ask clarifying questions, or suggest improvements. 
+                Provide 3 reviewed and improved photo descriptions.
+                """,
+            ),
+        ]
+
+        task_ids = []
+        for task_name, agent_id, description in tasks:
+            task_id = create_task(
+                self.client,
+                self.package_id,
+                cluster_id,
+                cluster_owner_cap_id,
+                task_name,
+                agent_id,
+                description,
+                f"Complete {task_name} for Instagram post",
+                description,
+                "",
+            )
+            task_ids.append(task_id)
+
+        return task_ids
+
+    def run(self):
+        cluster_id, cluster_owner_cap_id = self.setup_cluster()
+        self.setup_agents(cluster_id, cluster_owner_cap_id)
+        self.setup_tasks(cluster_id, cluster_owner_cap_id)
+
+        execution_id = execute_cluster(
+            self.client,
+            self.package_id,
+            cluster_id,
+            f"""
+            Create an Instagram post for the product: {self.product_website}
+            Additional details: {self.product_details}
+            Provide both ad copy options and photo descriptions.
+            """,
+        )
+
+        if execution_id is None:
+            return "Cluster execution failed"
+
+        print(f"Cluster execution started with ID: {execution_id}")
+        return get_cluster_execution_response(self.client, execution_id)
+
+
+# Runs the Instagram Post Planner example using the provided Nexus package ID.
+def run_ig_post_planner_example(client, package_id, model_id, model_owner_cap):
+    print("## Welcome to the Instagram Post Planner")
+    print("-------------------------------")
+    product_website = input(
+        "What is the product website you want a marketing strategy for? "
+    )
+    product_details = input(
+        "Any extra details about the product and/or the Instagram post you want? "
+    )
+
+    planner = InstagramPostPlanner(
+        client, package_id, model_id, model_owner_cap, product_website, product_details
+    )
+    result = planner.run()
+
+    print("\n\n########################")
+    print("## Here is the result")
+    print("########################\n")
+    print(result)
diff --git a/examples/main.py b/examples/main.py
new file mode 100644
index 0000000..e87c3ef
--- /dev/null
+++ b/examples/main.py
@@ -0,0 +1,269 @@
+#
+# This script accepts one argument: the name of the example to run.
+#
+# Available examples:
+# - trip_planner
+# - ig_post_planner
+# - cli_cluster
+#
+# ```bash
+# python main.py ${EXAMPLE_NAME}
+# ```
+#
+# # Requirements
+#
+# - Suibase "localnet" CLI
+# - Nexus SDK installed
+# - `offchain/tools` installed
+# - `offchain/events` installed
+#
+# # Steps
+#
+# This script prepares all resources necessary to run an example.
+# 1. Starts localnet with Suibase
+# 2. Publishes the talus package
+# 3. Gets the Sui keypair from the local keystore and airdrops SUI
+# 4. Creates a node and a model
+# 5. Asks the user to start the Talus services
+# 6. Runs the example that the user selected
+
+import json
+import os
+import re
+import subprocess
+import sys
+from cli_cluster import run_cli_cluster_example
+from colorama import init as colorama_init
+from ig_post_planner import run_ig_post_planner_example
+from nexus_sdk import get_sui_client, create_node, create_model
+from pathlib import Path
+from trip_planner import run_trip_planner_example
+
+# we know that this script is located in the ./examples directory, so we go
+# one level up to get the root directory of the repository
+repo_root_dir = Path(__file__).resolve().parent.parent
+
+
+# Maps example name to a function that runs it.
+# In essence, this is the source of truth for supported examples.
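+# A hypothetical additional example would be wired up the same way, by importing
+# its runner and adding it to this mapping (the module and function below do not
+# exist in this repository; they are for illustration only):
+#
+#   from my_example import run_my_example
+#   EXAMPLES["my_example"] = run_my_example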
+EXAMPLES = { + "trip_planner": run_trip_planner_example, + "ig_post_planner": run_ig_post_planner_example, + "cli_cluster": run_cli_cluster_example, +} + + +def main(): + colorama_init() + + example_name = sys.argv[1] + if example_name not in EXAMPLES: + raise ValueError( + f"Unknown example name: {example_name}. Available examples: {EXAMPLES.keys()}" + ) + + # 1. + print("Starting localnet...") + start_localnet() + + # 2. + print("Publishing Talus package...") + package_id = publish_talus_package() + + # 3. + print("Preparing Sui address...") + sui_address = get_sui_address() + airdrop_sui(sui_address) + private_key = get_sui_address_private_key(sui_address) + client = get_sui_client(private_key) + + # 4. + print("Creating node and model...") + node_id = create_example_node(client, package_id) + llama_id, llama_owner_cap_id = create_llama_model(client, package_id, node_id) + + # 5. + ask_user_to_start_talus_services(private_key, package_id, llama_owner_cap_id) + + # 6. + try: + print() + EXAMPLES[example_name](client, package_id, llama_id, llama_owner_cap_id) + print() + print(f"Example {example_name} finished") + except Exception as e: + print(f"Failed to run example {example_name}: {e}") + + +def start_localnet(): + run_command("localnet start") + + status_output = run_command("localnet status") + + # "OK" is printed in green color, so we cannot do a simple string comparison + ansi_escape = re.compile(r"\x1B[@-_][0-?]*[ -/]*[@-~]") + clean_status_output = ansi_escape.sub("", status_output) + if "localnet OK" not in clean_status_output: + print() + print("Output of localnet status:") + print(status_output) + raise Exception("Failed to start localnet. Try `$ localnet regen`") + + +# Assumes localnet being started and Suibase installed in the default path. +def publish_talus_package(): + # TODO: https://github.com/Talus-Network/TAF/issues/9 + run_command( + "localnet publish --skip-dependency-verification", + cwd=repo_root_dir / "onchain", + ) + + package_id = None + published_data_path = os.path.expanduser( + "~/suibase/workdirs/localnet/published-data/talus/most-recent/package-id.json" + ) + if not os.path.exists(published_data_path): + raise FileNotFoundError( + f"Published data file not found at {published_data_path}. Please ensure the Talus package has been published." + ) + with open(published_data_path) as f: + data = json.load(f) + if not data: + raise ValueError( + "Published data file is empty. Please check your Talus package publication." + ) + package_id = data[0] + + if not package_id: + raise ValueError("Failed to extract PACKAGE_ID from the published data file.") + + return package_id + + +# Uses the suibase CLI to get the currently active address +def get_sui_address(): + return run_command("lsui client active-address").strip() + + +# Airdrops some SUI to the localnet faucet address and returns the address. +def airdrop_sui(address): + # trims whitespaces and new lines + run_command(f"localnet faucet {address}") + + +# Reads the private key for the given address from the Sui keystore. +def get_sui_address_private_key(for_address): + all_addresses_json = run_command("lsui client addresses --json") + # Find the position of the address in the list of .addresses. 
+ # Each address is a two-element list: [method, public_key] + all_addresses = json.loads(all_addresses_json) + position = None + for i, [_, address] in enumerate(all_addresses["addresses"]): + if address == for_address: + position = i + break + + if position is None: + raise ValueError(f"Address '{for_address}' not found in client addresses") + + keystore_path = os.path.expanduser( + "~/suibase/workdirs/localnet/config/sui.keystore" + ) + if not os.path.exists(keystore_path): + raise FileNotFoundError( + f"Sui client file not found at {keystore_path}. Please ensure Sui is properly set up." + ) + + with open(keystore_path) as f: + keys = json.load(f) + if not keys: + raise ValueError( + "Sui keystore file is empty. Please check your Sui configuration." + ) + return keys[position] + + +# Creates a new node owned object. +def create_example_node(client, package_id): + node_id = create_node(client, package_id, "LocalNode", "CPU", 16) + if not node_id: + raise Exception("Failed to create node") + + return node_id + + +# Creates llama model representation on chain. +# +# Returns the model ID and the model owner capability ID. +def create_llama_model(client, package_id, node_id): + model_id, model_owner_cap_id = create_model( + client=client, + package_id=package_id, + node_id=node_id, + name="llama3.1", + model_hash=b"llama3.1_8B_hash", + url="http://localhost:11434", + token_price=1000, + capacity=1000000, + num_params=1000000000, + description="Llama3.1 8B", + max_context_length=8192, + is_fine_tuned=False, + family="Llama3.1", + vendor="Meta", + is_open_source=True, + datasets=["test"], + ) + if not model_id: + raise Exception("Failed to create model") + + return model_id, model_owner_cap_id + + +# Prints how to start Talus services. +def ask_user_to_start_talus_services(private_key, package_id, model_owner_cap_id): + print() + print("You need to start the Talus services.") + print("Open a new terminal for both the LLM Assistant and the Event Listener.") + print() + + # check if something is running on port 8080 + llm_assistant_cmd = f""" + just start-tools + """ + print("First you need to start the LLM assistant unless it's running already.") + print("Start the LLM Assistant with the following command:") + print(llm_assistant_cmd) + + input("Press enter when ready...") + print() + + inferenced_cmd = f""" + just start-events \\ + --packageid {package_id} \\ + --privkey {private_key} \\ + --modelownercapid {model_owner_cap_id} + """ + print( + "Next, let's start the Event Listener for this example with the following command:" + ) + print() + print(inferenced_cmd) + + input("Press enter when ready...") + + +# Runs given command and returns the output. 
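+# The command runs through the shell with its output captured; on a non-zero exit
+# status the captured stdout/stderr are printed and an exception is raised.
+# Examples of calls made elsewhere in this script: run_command("localnet status")
+# and run_command("localnet publish --skip-dependency-verification", cwd=repo_root_dir / "onchain").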
+def run_command(command, cwd=None): + result = subprocess.run( + command, cwd=cwd, shell=True, capture_output=True, text=True + ) + if result.returncode != 0: + print(f"Error executing command: {command}") + print(f"Error output: {result.stdout}\n\n{result.stderr}") + raise Exception(f"Command failed: {command}") + return result.stdout + + +if __name__ == "__main__": + main() diff --git a/examples/requirements.txt b/examples/requirements.txt new file mode 100644 index 0000000..67e5e91 --- /dev/null +++ b/examples/requirements.txt @@ -0,0 +1,7 @@ +# Last version before JSON RPC is marked as deprecated +pysui==0.52.0 +asyncio +# Get a decent python shell for exploring the SDK and evertyhing +ptpython +# For colorized output +colorama diff --git a/examples/trip_planner.py b/examples/trip_planner.py new file mode 100644 index 0000000..9bdfc04 --- /dev/null +++ b/examples/trip_planner.py @@ -0,0 +1,222 @@ +# Use [run_trip_planner_example] to run the Trip Planner example. +# It's a blocking function that takes a client and package ID as arguments +# and then prompts the user for input to describe their trip details. + +import textwrap +from colorama import Fore, Style +from nexus_sdk import ( + create_cluster, + create_agent_for_cluster, + create_task, + execute_cluster, + get_cluster_execution_response, +) + + +class TripPlanner: + def __init__( + self, + client, + package_id, + model_id, + model_owner_cap_id, + origin, + cities, + date_range, + interests, + ): + self.client = client + self.package_id = package_id + self.model_id = model_id + self.model_owner_cap_id = model_owner_cap_id + + self.origin = origin + self.cities = cities + self.date_range = date_range + self.interests = interests + + def setup_cluster(self): + # Create a cluster (equivalent to Crew in CrewAI) + cluster_id, cluster_owner_cap_id = create_cluster( + self.client, + self.package_id, + "Trip Planning Cluster", + "A cluster for planning the perfect trip", + ) + return cluster_id, cluster_owner_cap_id + + def setup_agents(self, cluster_id, cluster_owner_cap_id): + # Create agents (assuming we have model_ids and model_owner_cap_ids) + agent_configs = [ + ( + "city_selector", + "City Selection Expert", + "Select the best city based on weather, season, and prices", + ), + ( + "local_expert", + "Local Expert", + "Provide the BEST insights about the selected city", + ), + ( + "travel_concierge", + "Travel Concierge", + "Create amazing travel itineraries with budget and packing suggestions", + ), + ] + + for agent_name, role, goal in agent_configs: + create_agent_for_cluster( + self.client, + self.package_id, + cluster_id, + cluster_owner_cap_id, + self.model_id, + self.model_owner_cap_id, + agent_name, + role, + goal, + f"An AI agent specialized in {role.lower()} for trip planning.", + ) + + def setup_tasks(self, cluster_id, cluster_owner_cap_id): + tasks = [ + ( + "identify_city", + "city_selector", + f""" + Analyze and select the best city for the trip based on specific criteria. + Consider weather patterns, seasonal events, and travel costs. + Compare multiple cities, factoring in current weather conditions, + upcoming events, and overall travel expenses. + Provide a detailed report on the chosen city, including flight costs, + weather forecast, and attractions. + Origin: {self.origin} + City Options: {self.cities} + Trip Date: {self.date_range} + Traveler Interests: {self.interests} + """, + ), + ( + "gather_info", + "local_expert", + f""" + As a local expert, compile an in-depth guide for the selected city. 
+ Include key attractions, local customs, special events, and daily activity recommendations. + Highlight hidden gems and local favorites. + Provide a comprehensive overview of the city's offerings, including cultural insights, + must-visit landmarks, weather forecasts, and high-level costs. + Trip Date: {self.date_range} + Traveling from: {self.origin} + Traveler Interests: {self.interests} + """, + ), + ( + "plan_itinerary", + "travel_concierge", + f""" + Create a full 7-day travel itinerary with detailed per-day plans. + Include weather forecasts, places to eat, packing suggestions, and a budget breakdown. + Suggest specific places to visit, hotels to stay at, and restaurants to try. + Cover all aspects of the trip from arrival to departure. + Format the plan as markdown, including a daily schedule, anticipated weather conditions, + recommended clothing and items to pack, and a detailed budget. + Explain why each place was chosen and what makes them special. + Trip Date: {self.date_range} + Traveling from: {self.origin} + Traveler Interests: {self.interests} + """, + ), + ] + + task_ids = [] + for task_name, agent_id, description in tasks: + task_id = create_task( + self.client, + self.package_id, + cluster_id, + cluster_owner_cap_id, + task_name, + agent_id, + description, + f"Complete {task_name} for trip planning", + description, + "", # No specific context provided in this example + ) + task_ids.append(task_id) + + return task_ids + + def run(self): + cluster_id, cluster_owner_cap_id = self.setup_cluster() + self.setup_agents(cluster_id, cluster_owner_cap_id) + self.setup_tasks(cluster_id, cluster_owner_cap_id) + + execution_id = execute_cluster( + self.client, + self.package_id, + cluster_id, + f""" + Plan a trip from {self.origin} to one of these cities: {self.cities}. + Travel dates: {self.date_range} + Traveler interests: {self.interests} + """, + ) + + if execution_id is None: + return "Cluster execution failed" + + print(f"Cluster execution started with ID: {execution_id}") + return get_cluster_execution_response(self.client, execution_id) + + +# Runs the Trip Planner example using the provided Nexus package ID. +def run_trip_planner_example(client, package_id, model_id, mode_owner_cap): + print(f"{Fore.CYAN}## Welcome to Trip Planner using Nexus{Style.RESET_ALL}") + print(f"{Fore.YELLOW}-------------------------------{Style.RESET_ALL}") + + origin = input(f"{Fore.GREEN}Where will you be traveling from? {Style.RESET_ALL}") + cities = input( + f"{Fore.GREEN}Which cities are you interested in visiting? {Style.RESET_ALL}" + ) + date_range = input( + f"{Fore.GREEN}What is your preferred date range for travel? {Style.RESET_ALL}" + ) + interests = input( + f"{Fore.GREEN}What are your main interests or hobbies? {Style.RESET_ALL}" + ) + + planner = TripPlanner( + client, + package_id, + model_id, + mode_owner_cap, + origin, + cities, + date_range, + interests, + ) + + print() + result = planner.run() + + print(f"\n\n{Fore.CYAN}########################{Style.RESET_ALL}") + print(f"{Fore.CYAN}## Here is your Trip Plan{Style.RESET_ALL}") + print(f"{Fore.CYAN}########################\n{Style.RESET_ALL}") + + paginate_output(result) + + +# Helper function to paginate the result output +def paginate_output(text, width=80): + lines = text.split("\n") + + for i, line in enumerate(lines, 1): + wrapped_line = textwrap.fill(line, width) + print(wrapped_line) + + # It's nice when this equals the number of lines in the terminal, using + # default value 32 for now. 
+ pause_every_n_lines = 32 + if i % pause_every_n_lines == 0: + input(f"{Fore.YELLOW}-- Press Enter to continue --{Style.RESET_ALL}") diff --git a/justfile b/justfile new file mode 100644 index 0000000..85d5673 --- /dev/null +++ b/justfile @@ -0,0 +1,148 @@ +python_version := "3.10" +llama_version := "llama3.1" +sui_tag := "testnet-v1.28.3" + +[private] +default: + @just -l + +# Commands for running examples +mod example 'examples/example.just' + +# Installs `uv`. +uv-setup: + #!/usr/bin/env bash + set -eu + + # See: https://github.com/astral-sh/uv + if ! command -v uv; then + curl -LsSf https://astral.sh/uv/install.sh | sh + fi + + uv --version + +# Installs python using `uv`. +python-setup: uv-setup + #!/usr/bin/env bash + set -eu + export RUST_LOG=warn + + uv python install {{ python_version }} + +# Creates a `.venv` and installs all the dependencies. +venv-setup: python-setup + #!/usr/bin/env bash + set -eu + export RUST_LOG=warn + + # Create the venv + uv venv -p {{ python_version }} + + # Install everything + uv pip install ./nexus_sdk/ + uv pip install ./offchain/events + uv pip install ./offchain/tools + + uv pip install -r ./examples/requirements.txt + + +# lightweight check to see if .venv exists, instead of using `venv-setup` +[private] +venv-exists: + @test -d .venv || (echo "Please run 'just venv-setup' first" && exit 1) + +# Starts a ptpython shell with the `.venv` activated. +python-shell: venv-exists + #!/usr/bin/env bash + source .venv/bin/activate + ptpython + +# Installs ollama. +ollama-setup: + curl -fsSL https://ollama.com/install.sh | sh + ollama pull {{ llama_version }} + +# Installs OS-level dependencies. +apt-setup: + #!/usr/bin/env bash + + # These should already be installed ... + sudo apt install -y git-all curl wget python3 + + sudo apt install -y cmake libssl-dev pkg-config lsof + +# below is from christos PR (https://github.com/Talus-Network/protochain/pull/19): +# Installs `suibase` and sets up `localnet`. +suibase-setup: + #!/usr/bin/env bash + set -euo pipefail + + # Suibase installs everything in ~/.local/bin. + # So this must be in the PATH. + # We abort if it is not because other scripts depend on it. + if [[ ":$PATH:" != *":$HOME/.local/bin:"* ]]; then + echo '=======================================' + echo 'ERROR: ~/.local/bin is NOT in your PATH' + echo 'Suibase installs everything in ~/.local/bin and heavily relies on it.' + echo 'Please add it to your PATH and try again.' + echo '=======================================' + exit 1 + fi + + # install suibase + if [[ ! -d ~/suibase ]]; then + echo Installing suibase + echo + git clone https://github.com/sui-base/suibase.git ~/suibase + cd ~/suibase + ./install + + localnet create + # Pin Sui version to minimum supported by Suibase. + # This ought to match the talus package version as close as possible. + config=~/suibase/workdirs/localnet/suibase.yaml + echo '' >> $config + echo 'force_tag: "{{ sui_tag }}"' >> $config + localnet update + else + echo ~/suibase exists + fi + +# Starts LLM and other tools in an uvicorn server on port 8080. +start-tools: + #!/usr/bin/env bash + source .venv/bin/activate + uvicorn offchain.tools.src.nexus_tools.server.main:app --host 0.0.0.0 --port 8080 + +# Starts Sui event listener that invokes tools and submits completions. +# See `offchain/events` for more information about flags/envs. 
+start-events +args: + #!/usr/bin/env bash + source .venv/bin/activate + python3 offchain/events/src/nexus_events/sui_event.py {{args}} + +############################################ +## devnet +############################################ +# Sets up `devnet` (which is `localnet` from suibase) +devnet-setup: suibase-setup + echo + type lsui localnet + localnet set-active + +devnet-status: devnet-setup + echo + localnet status + echo + localnet links || true + +devnet-start: devnet-setup + #!/usr/bin/env bash + echo + RUST_LOG=warn localnet start + + + + + + diff --git a/nexus_sdk/LICENSE b/nexus_sdk/LICENSE new file mode 100644 index 0000000..f0cd2d9 --- /dev/null +++ b/nexus_sdk/LICENSE @@ -0,0 +1,176 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS \ No newline at end of file diff --git a/nexus_sdk/README.md b/nexus_sdk/README.md new file mode 100644 index 0000000..c2afb6b --- /dev/null +++ b/nexus_sdk/README.md @@ -0,0 +1,4 @@ +This SDK provides utility functions for setting up and interacting with Nexus. +It includes functionality for environment setup, node and model registration, and contract management. + +See [`examples`](../examples) to understand how you can use the SDK to build agents. diff --git a/nexus_sdk/requirements.txt b/nexus_sdk/requirements.txt new file mode 100644 index 0000000..83f1bab --- /dev/null +++ b/nexus_sdk/requirements.txt @@ -0,0 +1,2 @@ +# Last version before JSON RPC is marked as deprecated +pysui==0.52.0 diff --git a/nexus_sdk/setup.py b/nexus_sdk/setup.py new file mode 100644 index 0000000..a0ac7ad --- /dev/null +++ b/nexus_sdk/setup.py @@ -0,0 +1,17 @@ +from setuptools import setup, find_packages + +setup( + name="nexus_sdk", + version="0.1.2", + packages=find_packages(where="src"), + package_dir={"": "src"}, + install_requires=["pysui==0.52.0", "setuptools"], + description="Talus Nexus SDK", + long_description=open("README.md").read(), + long_description_content_type="text/markdown", + classifiers=[ + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + "Programming Language :: Python :: 3.10", + ], +) diff --git a/nexus_sdk/src/nexus_sdk/__init__.py b/nexus_sdk/src/nexus_sdk/__init__.py new file mode 100644 index 0000000..cbf0083 --- /dev/null +++ b/nexus_sdk/src/nexus_sdk/__init__.py @@ -0,0 +1,21 @@ +from .node import create_node +from .model import create_model +from .utils import get_sui_client +from .cluster import ( + create_cluster, + create_agent_for_cluster, + create_task, + execute_cluster, + get_cluster_execution_response, +) + +__all__ = [ + "create_agent_for_cluster", + "create_cluster", + "create_model", + "create_node", + "create_task", + "execute_cluster", + "get_cluster_execution_response", + "get_sui_client", +] diff --git a/nexus_sdk/src/nexus_sdk/cluster.py b/nexus_sdk/src/nexus_sdk/cluster.py new file mode 100644 index 0000000..01b21fc --- /dev/null +++ b/nexus_sdk/src/nexus_sdk/cluster.py @@ -0,0 +1,213 @@ +from pysui.sui.sui_builders.get_builders import GetObject +from pysui.sui.sui_txn.sync_transaction import SuiTransaction +from pysui.sui.sui_types.scalars import ObjectID, SuiString +import time +import ast +import traceback + +# Equal to 1 SUI which should be enough for most transactions. +GAS_BUDGET = 1000000000 + + +# Creates an empty cluster object to which agents and tasks can be added. +# See functions [create_agent_for_cluster] and [create_task]. +# +# Returns the cluster ID and the cluster owner capability ID. 
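+#
+# Example usage (a sketch, not part of the API surface; assumes a client from
+# [get_sui_client] and an already published `package_id`):
+#
+#   client = get_sui_client("<private_key>")
+#   ids = create_cluster(client, package_id, "my_cluster", "Example cluster")
+#   if ids is not None:
+#       cluster_id, cluster_owner_cap_id = ids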
+def create_cluster(client, package_id, name, description, gas_budget=GAS_BUDGET): + txn = SuiTransaction(client=client) + + try: + result = txn.move_call( + target=f"{package_id}::cluster::create", + arguments=[SuiString(name), SuiString(description)], + ) + result = txn.execute(gas_budget=gas_budget) + if result.is_ok(): + if result.result_data.effects.status.status == "success": + # just because it says "parsed_json" doesn't mean it's actually valid JSON apparently + not_json = result.result_data.events[0].parsed_json + created_event = ast.literal_eval(not_json.replace("\n", "\\n")) + cluster_id = created_event["cluster"] + cluster_owner_cap_id = created_event["owner_cap"] + + return cluster_id, cluster_owner_cap_id + print(f"Failed to create Cluster: {result.result_string}") + return None + except Exception as e: + print(f"Error in create_cluster: {e}") + return None + + +# Creates a new agent for the given cluster. +# This means that the agent does not live on-chain as a standalone object that +# other clusters could reference. +def create_agent_for_cluster( + client, + package_id, + cluster_id, + cluster_owner_cap_id, + model_id, + model_owner_cap_id, + name, + role, + goal, + backstory, + gas_budget=GAS_BUDGET, +): + txn = SuiTransaction(client=client) + + try: + result = txn.move_call( + target=f"{package_id}::cluster::add_agent_entry", + arguments=[ + ObjectID(cluster_id), + ObjectID(cluster_owner_cap_id), + ObjectID(model_id), + ObjectID(model_owner_cap_id), + SuiString(name), + SuiString(role), + SuiString(goal), + SuiString(backstory), + ], + ) + result = txn.execute(gas_budget=gas_budget) + if result.is_ok(): + return True + print(f"Failed to add Agent: {result.result_string}") + return False + except Exception as e: + print(f"Error in create_agent: {e}") + return False + + +# Creates a new task for the given cluster. +# Each task must be executed by an agent that is part of the cluster. +def create_task( + client, + package_id, + cluster_id, + cluster_owner_cap_id, + name, + agent_name, + description, + expected_output, + prompt, + context, + gas_budget=GAS_BUDGET, +): + txn = SuiTransaction(client=client) + + try: + result = txn.move_call( + target=f"{package_id}::cluster::add_task_entry", + arguments=[ + ObjectID(cluster_id), + ObjectID(cluster_owner_cap_id), + SuiString(name), + SuiString(agent_name), + SuiString(description), + SuiString(expected_output), + SuiString(prompt), + SuiString(context), + ], + ) + result = txn.execute(gas_budget=gas_budget) + if result.is_ok(): + return True + print(f"Failed to add Task: {result.result_string}") + return False + except Exception as e: + print(f"Error in create_task: {e}") + return False + + +# Begins execution of a cluster. +# Returns the cluster execution ID. +# Use the function [get_cluster_execution_response] to fetch the response of the execution +# in a blocking manner. 
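+#
+# Example usage (a sketch; assumes the `client`, `package_id` and `cluster_id` set up
+# via the functions above, with an illustrative input string):
+#
+#   execution_id = execute_cluster(client, package_id, cluster_id, "plan a weekend trip")
+#   if execution_id is not None:
+#       response = get_cluster_execution_response(client, execution_id, max_wait_time_s=300)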
+def execute_cluster( + client, + package_id, + cluster_id, + input, + gas_budget=GAS_BUDGET, +): + txn = SuiTransaction(client=client) + + try: + result = txn.move_call( + target=f"{package_id}::cluster::execute", + arguments=[ObjectID(cluster_id), SuiString(input)], + ) + except Exception as e: + print(f"Error in execute_cluster: {e}") + traceback.print_exc() + return None + + result = txn.execute(gas_budget=gas_budget) + + if result.is_ok(): + if result.result_data.effects.status.status == "success": + # just because it says "parsed_json" doesn't mean it's actually valid JSON apparently + not_json = result.result_data.events[0].parsed_json + created_event = ast.literal_eval(not_json.replace("\n", "\\n")) + + # There's going to be either field "execution" or "cluster execution" + # because there are two events emitted in the tx. + # We could check for the event name or just try both. + execution_id = created_event.get( + "execution", created_event.get("cluster_execution") + ) + + return execution_id + else: + error_message = result.result_data.effects.status.error + print(f"Execute Cluster Transaction failed: {error_message}") + return None + else: + print(f"Failed to create ClusterExecution: {result.result_string}") + return None + + +# Fetches the response of a cluster execution. +# If the execution is not complete within the specified time, the function returns a timeout message. +def get_cluster_execution_response( + client, execution_id, max_wait_time_s=180, check_interval_s=5 +): + start_time = time.time() + while time.time() - start_time < max_wait_time_s: + try: + # Create a GetObject builder + get_object_builder = GetObject(object_id=ObjectID(execution_id)) + + # Execute the query + result = client.execute(get_object_builder) + + if result.is_ok(): + object_data = result.result_data + if object_data and object_data.content: + fields = object_data.content.fields + status = fields.get("status") + if status == "SUCCESS": + return fields.get("cluster_response") + elif status == "FAILED": + return f"Execution failed: {fields.get('error_message')}" + elif status == "IDLE": + print("Execution has not started yet.") + elif status == "RUNNING": + until_timeout = max_wait_time_s - (time.time() - start_time) + print( + "Execution is still running, waiting... (%.2fs until timeout)" + % until_timeout + ) + else: + return f"Unknown status: {status}" + + time.sleep(check_interval_s) + else: + return f"Failed to get object: {result.result_string}" + + except Exception as e: + return f"Error checking execution status: {e}" + + return "Timeout: Execution did not complete within the specified time." diff --git a/nexus_sdk/src/nexus_sdk/model.py b/nexus_sdk/src/nexus_sdk/model.py new file mode 100644 index 0000000..0cda400 --- /dev/null +++ b/nexus_sdk/src/nexus_sdk/model.py @@ -0,0 +1,63 @@ +from pysui.sui.sui_txn.sync_transaction import SuiTransaction +from pysui.sui.sui_types.scalars import ObjectID, SuiU64, SuiU8, SuiString, SuiBoolean +from pysui.sui.sui_types.collections import SuiArray +import ast + + +# Creates a new on-chain model object. +# Returns the model ID and the model owner capability ID. 
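+#
+# Example usage (a sketch with illustrative values; `node_id` comes from [create_node]):
+#
+#   ids = create_model(
+#       client, package_id, node_id,
+#       name="llama3.1", model_hash=b"<model hash>", url="http://localhost:11434",
+#       token_price=1, capacity=1000, num_params=8_000_000_000,
+#       description="Llama 3.1 8B served by ollama", max_context_length=8192,
+#       is_fine_tuned=False, family="llama", vendor="meta",
+#       is_open_source=True, datasets=[],
+#   )
+#   if ids is not None:
+#       model_id, model_owner_cap_id = ids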
+def create_model( + client, + package_id, + node_id, + name, + model_hash, + url, + token_price, + capacity, + num_params, + description, + max_context_length, + is_fine_tuned, + family, + vendor, + is_open_source, + datasets, +): + txn = SuiTransaction(client=client) + + args = [ + ObjectID(node_id), + SuiString(name), + SuiArray([SuiU8(b) for b in model_hash]), + SuiString(url), + SuiU64(token_price), + SuiU64(capacity), + SuiU64(num_params), + SuiString(description), + SuiU64(max_context_length), + SuiBoolean(is_fine_tuned), + SuiString(family), + SuiString(vendor), + SuiBoolean(is_open_source), + SuiArray([SuiString(dataset) for dataset in datasets]), + ] + + result = txn.move_call( + target=f"{package_id}::model::create", + arguments=args, + ) + result = txn.execute(gas_budget=10000000) + + if result.is_ok(): + effects = result.result_data.effects + if effects.status.status == "success": + # just because it says "parsed_json" doesn't mean it's actually valid JSON apparently + not_json = result.result_data.events[0].parsed_json + created_event = ast.literal_eval(not_json.replace("\n", "\\n")) + + model_id = created_event["model"] + model_owner_cap_id = created_event["owner_cap"] + return model_id, model_owner_cap_id + + return None diff --git a/nexus_sdk/src/nexus_sdk/node.py b/nexus_sdk/src/nexus_sdk/node.py new file mode 100644 index 0000000..8020377 --- /dev/null +++ b/nexus_sdk/src/nexus_sdk/node.py @@ -0,0 +1,21 @@ +from pysui.sui.sui_txn.sync_transaction import SuiTransaction +from pysui.sui.sui_types.scalars import SuiU64 + + +# Creates a new node owned object. +# Returns the node ID. +def create_node(client, package_id, name, node_type, gpu_memory): + txn = SuiTransaction(client=client) + + result = txn.move_call( + target=f"{package_id}::node::create", + arguments=[name, node_type, SuiU64(gpu_memory), "c", []], + ) + result = txn.execute(gas_budget=10000000) + + if result.is_ok() or result._data.succeeded: + node_id = result._data.effects.created[0].reference.object_id + return node_id + else: + print(f"Failed to create node: {result.result_string}") + return None diff --git a/nexus_sdk/src/nexus_sdk/utils.py b/nexus_sdk/src/nexus_sdk/utils.py new file mode 100644 index 0000000..104aef4 --- /dev/null +++ b/nexus_sdk/src/nexus_sdk/utils.py @@ -0,0 +1,17 @@ +from pysui.sui.sui_clients.sync_client import SuiClient +from pysui.sui.sui_config import SuiConfig + + +# Returns Sui client with the given private key. +def get_sui_client( + private_key, + rpc_url="http://localhost:9000", + ws_url="ws://localhost:9000", +): + return SuiClient( + SuiConfig.user_config( + rpc_url=rpc_url, + ws_url=ws_url, + prv_keys=[private_key], + ) + ) diff --git a/offchain/LICENSE b/offchain/LICENSE new file mode 100644 index 0000000..e6ab15f --- /dev/null +++ b/offchain/LICENSE @@ -0,0 +1,38 @@ +Business Source License 1.1 +License text copyright (c) 2020 MariaDB Corporation Ab, All Rights Reserved. +“Business Source License” is a trademark of MariaDB Corporation Ab. +____________________________________________________________________________ + +Business Source License 1.1 +Parameters +Licensor: Talus Labs, Inc. +Licensed Work: Talus AI Agent Framework © 2024 Talus Labs, Inc. +Change Date: December 31, 2027 +Change License: Apache License, Version 2.0 +____________________________________________________________________________ + +Terms +The Licensor hereby grants you the right to copy, modify, create derivative works, redistribute, and make non-production use of the Licensed Work. 
The Licensor may make an Additional Use Grant, above, permitting limited production use. + +Effective on the Change Date, or the fourth anniversary of the first publicly available distribution of a specific version of the Licensed Work under this License, whichever comes first, the Licensor hereby grants you rights under the terms of the Change License, and the rights granted in the paragraph above terminate. + +If your use of the Licensed Work does not comply with the requirements currently in effect as described in this License, you must purchase a commercial license from the Licensor, its affiliated entities, or authorized resellers, or you must refrain from using the Licensed Work. + +All copies of the original and modified Licensed Work, and derivative works of the Licensed Work, are subject to this License. This License applies separately for each version of the Licensed Work and the Change Date may vary for each version of the Licensed Work released by Licensor. + +You must conspicuously display this License on each original or modified copy of the Licensed Work. If you receive the Licensed Work in original or modified form from a third party, the terms and conditions set forth in this License apply to your use of that work. +Any use of the Licensed Work in violation of this License will automatically terminate your rights under this License for the current and all other versions of the Licensed Work. + +This License does not grant you any right in any trademark or logo of Licensor or its affiliates (provided that you may use a trademark or logo of Licensor as expressly required by this License). + +TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE LICENSED WORK IS PROVIDED ON AN “AS IS” BASIS. LICENSOR HEREBY DISCLAIMS ALL WARRANTIES AND CONDITIONS, EXPRESS OR IMPLIED, INCLUDING (WITHOUT LIMITATION) WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, AND TITLE. +_____________________________________________________________________________ +Covenants of Licensor +In consideration of the right to use this License’s text and the “Business Source License” name and trademark, Licensor covenants to MariaDB, and to all other recipients of the licensed work to be provided by Licensor: + +To specify as the Change License the GPL Version 2.0 or any later version, or a license that is compatible with GPL Version 2.0 or a later version, where “compatible” means that software provided under the Change License can be included in a program with software provided under GPL Version 2.0 or a later version. Licensor may specify additional Change Licenses without limitation. + +To either: (a) specify an additional grant of rights to use that does not impose any additional restriction on the right granted in this License, as the Additional Use Grant; or (b) insert the text “None” to specify a Change Date. Not to modify this License in any other way. +____________________________________________________________________________ +Notice +The Business Source License (this document, or the “License”) is not an Open Source license. However, the Licensed Work will eventually be made available under an Open Source License, as stated in this License. diff --git a/offchain/README.md b/offchain/README.md new file mode 100644 index 0000000..654476a --- /dev/null +++ b/offchain/README.md @@ -0,0 +1,7 @@ +# Nexus offchain components + +See [`events`][events] and [`tools`][tools]. 
+ + +[events]: ./events/ +[tools]: ./tools/ \ No newline at end of file diff --git a/offchain/events/.gitignore b/offchain/events/.gitignore new file mode 100644 index 0000000..6d7f792 --- /dev/null +++ b/offchain/events/.gitignore @@ -0,0 +1,150 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# Mac shit +.DS_Store + +# python things +__pycache__ +__pypackages__ + +# Generator obsolete +.openapi-generator + +tmp.py \ No newline at end of file diff --git a/offchain/events/LICENSE b/offchain/events/LICENSE new file mode 100644 index 0000000..e6ab15f --- /dev/null +++ b/offchain/events/LICENSE @@ -0,0 +1,38 @@ +Business Source License 1.1 +License text copyright (c) 2020 MariaDB Corporation Ab, All Rights Reserved. +“Business Source License” is a trademark of MariaDB Corporation Ab. +____________________________________________________________________________ + +Business Source License 1.1 +Parameters +Licensor: Talus Labs, Inc. +Licensed Work: Talus AI Agent Framework © 2024 Talus Labs, Inc. +Change Date: December 31, 2027 +Change License: Apache License, Version 2.0 +____________________________________________________________________________ + +Terms +The Licensor hereby grants you the right to copy, modify, create derivative works, redistribute, and make non-production use of the Licensed Work. The Licensor may make an Additional Use Grant, above, permitting limited production use. 
+ +Effective on the Change Date, or the fourth anniversary of the first publicly available distribution of a specific version of the Licensed Work under this License, whichever comes first, the Licensor hereby grants you rights under the terms of the Change License, and the rights granted in the paragraph above terminate. + +If your use of the Licensed Work does not comply with the requirements currently in effect as described in this License, you must purchase a commercial license from the Licensor, its affiliated entities, or authorized resellers, or you must refrain from using the Licensed Work. + +All copies of the original and modified Licensed Work, and derivative works of the Licensed Work, are subject to this License. This License applies separately for each version of the Licensed Work and the Change Date may vary for each version of the Licensed Work released by Licensor. + +You must conspicuously display this License on each original or modified copy of the Licensed Work. If you receive the Licensed Work in original or modified form from a third party, the terms and conditions set forth in this License apply to your use of that work. +Any use of the Licensed Work in violation of this License will automatically terminate your rights under this License for the current and all other versions of the Licensed Work. + +This License does not grant you any right in any trademark or logo of Licensor or its affiliates (provided that you may use a trademark or logo of Licensor as expressly required by this License). + +TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE LICENSED WORK IS PROVIDED ON AN “AS IS” BASIS. LICENSOR HEREBY DISCLAIMS ALL WARRANTIES AND CONDITIONS, EXPRESS OR IMPLIED, INCLUDING (WITHOUT LIMITATION) WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, AND TITLE. +_____________________________________________________________________________ +Covenants of Licensor +In consideration of the right to use this License’s text and the “Business Source License” name and trademark, Licensor covenants to MariaDB, and to all other recipients of the licensed work to be provided by Licensor: + +To specify as the Change License the GPL Version 2.0 or any later version, or a license that is compatible with GPL Version 2.0 or a later version, where “compatible” means that software provided under the Change License can be included in a program with software provided under GPL Version 2.0 or a later version. Licensor may specify additional Change Licenses without limitation. + +To either: (a) specify an additional grant of rights to use that does not impose any additional restriction on the right granted in this License, as the Additional Use Grant; or (b) insert the text “None” to specify a Change Date. Not to modify this License in any other way. +____________________________________________________________________________ +Notice +The Business Source License (this document, or the “License”) is not an Open Source license. However, the Licensed Work will eventually be made available under an Open Source License, as stated in this License. diff --git a/offchain/events/README.md b/offchain/events/README.md new file mode 100755 index 0000000..3d3a6ed --- /dev/null +++ b/offchain/events/README.md @@ -0,0 +1,32 @@ +# Events + +This directory contains the offchain event listener [sui_event.py][sui_event_py], which receives `RequestForCompletionEvent` events +emitted by agents executing onchain. 
It then calls the required tools and passes their results, together with the defined prompt,
+to inference on the specified model.
+
+Any hosted Talus node would contain the contents of the `events` directory to efficiently handle requests
+to its models and tools.
+
+**Towards hosted inference for Talus**
+In the future, nodes can be modified to run inference using others' compute. [offchain.py][offchain_py] could be modified
+to call any compute host for inference. Currently, [offchain.py][offchain_py] contains `process()`, which calls the
+[main.py][main_py] route `/predict`.
+
+To see available models/tools and define new ones, see the [`tools` README.md][tools_readme].
+
+## How to run this
+
+When you start this service, it expects the following variables, which can be set either as environment variables or as flags:
+
+- `--packageid` (env `PACKAGE_ID`) (required): Package ID to filter events
+- `--privkey` (env `SUI_PRIVATE_KEY`) (required): Sui private key
+- `--modelownercapid` (env `MODEL_OWNER_CAP_ID`) (required): Model owner capability object ID used to submit completions
+- `--rpc` (default: `http://localhost:9000`): RPC URL
+- `--ws` (default: `ws://localhost:9000`): WebSocket URL
+
+
+
+[sui_event_py]: ./sui_event.py
+[offchain_py]: ./offchain.py
+[main_py]: ../tools/server/main.py
+[tools_readme]: ../tools/README.md
diff --git a/offchain/events/pyproject.toml b/offchain/events/pyproject.toml
new file mode 100644
index 0000000..16a63da
--- /dev/null
+++ b/offchain/events/pyproject.toml
@@ -0,0 +1,26 @@
+[build-system]
+requires = ["setuptools", "wheel"]
+build-backend = "setuptools.build_meta"
+
+[tool.setuptools.packages.find]
+where = ["src"]
+
+[project]
+name = "nexus_events"
+version = "0.1.0"
+description = "Nexus offchain event handling"
+authors = [
+    { name="Talus", email="hi@talus.network" }
+]
+dependencies = [
+    "python-dotenv",
+    "pysui==0.52.0",
+    "asyncio",
+    "aiohttp",
+    "pathlib",
+    "pynacl",
+    "psutil",
+    "unidecode"
+]
+
+
diff --git a/offchain/events/src/nexus_events/__init__.py b/offchain/events/src/nexus_events/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/offchain/events/src/nexus_events/offchain.py b/offchain/events/src/nexus_events/offchain.py
new file mode 100644
index 0000000..8b6e180
--- /dev/null
+++ b/offchain/events/src/nexus_events/offchain.py
@@ -0,0 +1,52 @@
+import requests
+import os
+from dotenv import load_dotenv
+
+load_dotenv()
+
+LLM_ASSISTANT_URL = os.getenv("LLM_ASSISTANT_URL", "http://localhost:8080/predict")
+
+
+class OffChain:
+    def process(
+        self, prompt: str, model_name: str, max_tokens: int, temperature: float
+    ) -> str:
+        url = LLM_ASSISTANT_URL
+        headers = {"Content-Type": "application/json"}
+        prompt_data = {
+            "prompt": prompt,
+            "model": model_name,
+            "max_tokens": int(max_tokens),
+            "temperature": temperature,
+        }
+
+        try:
+
+            response = requests.post(url, headers=headers, json=prompt_data)
+            response.raise_for_status()
+            result = response.json()
+
+            completion = result["completion"]
+            return completion
+        except requests.exceptions.RequestException as e:
+            msg = f"Error occurred while calling the API: {e}"
+            if hasattr(e, "response") and e.response is not None:
+                msg += f"\nResponse content: {e.response.text}"
+            print(msg)
+            raise Exception(msg)
+
+
+def main():
+
+    off_chain = OffChain()
+    prompt = "Write python script that prints the numbers 1 to 100"
+    model_name = "tinyllama"
+    max_tokens = 3000
+    temperature = 0.3
+
+    completion = off_chain.process(prompt, model_name,
max_tokens, temperature) + print(completion) + + +if __name__ == "__main__": + main() diff --git a/offchain/events/src/nexus_events/sui_event.py b/offchain/events/src/nexus_events/sui_event.py new file mode 100755 index 0000000..c8359eb --- /dev/null +++ b/offchain/events/src/nexus_events/sui_event.py @@ -0,0 +1,266 @@ +import asyncio +from pysui import SuiConfig +from pysui.sui.sui_clients.sync_client import SuiClient +import aiohttp +import ast +import argparse +import time +from pysui.sui.sui_types.collections import EventID +from pysui.sui.sui_types.event_filter import MoveEventTypeQuery +from typing import Any +import sys +import os +import sys +import os +import signal + +root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) +sys.path.insert(0, root_dir) +from nexus_tools.server.tools.tools import TOOLS, TOOL_ARGS_MAPPING +from pysui.sui.sui_clients.sync_client import SuiClient as SyncClient +from pysui.sui.sui_txn import SyncTransaction +from pysui.sui.sui_types.scalars import ObjectID, SuiU64, SuiU8, SuiString, SuiBoolean +from pysui.sui.sui_txresults.complex_tx import ( + SubscribedEvent, + SubscribedEventParms, + Event, +) +from pysui.sui.sui_types.collections import SuiArray +import hashlib +from nexus_events.offchain import OffChain +import json +import unicodedata +import unidecode +import re +import traceback + +# possible values TALUS_NODE, EXTERNAL_NODE +node_type = os.environ.get("NODE_TYPE", "TALUS_NODE") + +off_chain = OffChain() + + +async def call_use_tool(name, args): + """calls /tool/use endpoint with tool name and args, called by event handler""" + print(f"Calling /tool/use with name: {name}, args: {args}") + url = "http://0.0.0.0:8080/tool/use" + + try: + if name not in TOOLS: + print(f"Tool '{name}' not found in TOOLS dictionary") + print(f"Available tools: {list(TOOLS.keys())}") + return None + + ToolArgsClass = TOOL_ARGS_MAPPING.get(name, None) + if ToolArgsClass is None: + print(f"No ToolArgs class found for tool: {name}") + return None + + tool_args = ToolArgsClass(**dict(zip(ToolArgsClass.__fields__.keys(), args))) + + payload = {"tool_name": name, "args": tool_args.dict()} + + headers = {"Content-Type": "application/json"} + + async with aiohttp.ClientSession() as session: + async with session.post(url, json=payload, headers=headers) as response: + if response.status == 400 or response.status == 422: + error_detail = await response.text() + print(f"Error {response.status}: {error_detail}") + return None + response.raise_for_status() + result = await response.json() + return result + + except Exception as e: + print(f"Error in call_use_tool: {e}") + return None + + +def sanitize_text(text): + text = unidecode.unidecode(text) + text = unicodedata.normalize("NFKD", text) + text = text.replace("\r\n", "\n").replace("\r", "\n") + text = text.replace('"', '"').replace('"', '"') + text = text.replace("…", "...").replace("–", "-").replace("—", "-") + text = re.sub(r"[^\x00-\x7F]+", "", text) + text = "".join(char for char in text if ord(char) < 256) + return text + + +async def prompt_event_handler( + client: SuiClient, package_id: str, model_owner_cap_id: str, event: SubscribedEvent +) -> Any: + """Handler captures the move event type for each received.""" + try: + parsed_json = ast.literal_eval(event.parsed_json) + + model_name = parsed_json["model_name"] + prompt = parsed_json["prompt_contents"] + max_tokens = parsed_json["max_tokens"] + temperature = parsed_json["temperature"] / 100 + cluster_execution_id = parsed_json["cluster_execution"] + 
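+        # The event reports temperature as an integer scaled by 100, and may optionally
+        # reference a tool; when a tool is present, it is invoked via /tool/use below and
+        # its result is prepended to the prompt before inference.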
+ completion = "" + if temperature < 0.0 or temperature > 2.0: + print( + f"Invalid temperature value {temperature}. Setting to default value of 1.0" + ) + temperature = 1 + + if parsed_json["tool"]: + tool_name = parsed_json["tool"]["fields"]["name"] + tool_args = parsed_json["tool"]["fields"]["args"] + print(f"Calling tool '{tool_name}' with args: {tool_args}") + + tool_result = await call_use_tool(tool_name, tool_args) + tool_result = tool_result["result"] + print(f"tool_result: {tool_result}") + + if tool_result: + prompt = ( + "context from" + tool_name + ": " + str(tool_result) + ". " + prompt + ) + else: + print(f"Error calling tool: {tool_name}") + return None + + except Exception as e: + print(f"Error extracting prompt info: {e}") + + print("Waiting for completion...") + completion = off_chain.process(prompt, model_name, max_tokens, temperature) + + try: + # Create the configuration + txn = SyncTransaction(client=client) + + completion_json = json.loads(completion) + completion = completion_json["message"]["content"] + completion_safe = sanitize_text(completion) + + try: + print("Submitting completion ...") + result = txn.move_call( + target=f"{package_id}::cluster::submit_completion_as_model_owner", + arguments=[ + ObjectID(cluster_execution_id), + ObjectID(model_owner_cap_id), + SuiString(completion_safe), + ], + ) + except ValueError as e: + print(f"Error: {e}") + return None + except Exception as e: + print(f"Error in create_completion: {e}") + traceback.print_exc() + return + + result = txn.execute(gas_budget=1000000000) + if result.is_ok(): + print( + f"Completion created in tx '{result.result_data.effects.transaction_digest}'" + ) + return {"func": result.result_data} + else: + print(f"Completion creation transaction failed: {result.result_string}") + return None + except Exception as e: + print(f"Error in create_completion: {e}") + print(f"Error type: {type(e)}") + print(f"Traceback: {traceback.format_exc()}") + return None + + +def main(): + parser = argparse.ArgumentParser( + description="Listen for ToolUsed events on the Sui network" + ) + parser.add_argument("--rpc", default="http://localhost:9000", help="RPC URL") + parser.add_argument("--ws", default="ws://localhost:9000", help="WebSocket URL") + parser.add_argument( + "--packageid", + default=(os.getenv("PACKAGE_ID")), + help="Package ID to filter events (required)", + ) + parser.add_argument( + "--privkey", + default=(os.getenv("SUI_PRIVATE_KEY")), + help="Sui private key (required)", + ) + parser.add_argument( + "--modelownercapid", + default=(os.getenv("MODEL_OWNER_CAP_ID")), + help="Model owner capability object ID (required)", + ) + + args = parser.parse_args() + + package_id = args.packageid + model_owner_cap_id = args.modelownercapid + + config = SuiConfig.user_config( + rpc_url=args.rpc, ws_url=args.ws, prv_keys=[args.privkey] + ) + client = SuiClient(config) + + next_cursor = None + while True: + next_cursor = process_next_event_page( + client, + package_id, + model_owner_cap_id, + cursor=next_cursor, + ) + + +# Fetches the next page of events +# +# Returns a tuple: +# - The first element is the next cursor to use +# - The second element is a boolean indicating whether the first event should be skipped +def process_next_event_page( + client: SuiClient, + package_id: str, + model_owner_cap_id: str, + cursor: EventID, +): + prompt_event_type = f"{package_id}::prompt::RequestForCompletionEvent" + event_filter = MoveEventTypeQuery(prompt_event_type) + + events_result = client.get_events( + query=event_filter, 
descending_order=SuiBoolean(False), cursor=cursor + ) + if events_result.is_err(): + print(f"Cannot read Sui events: {events_result.result_string}") + sys.exit(1) + + events = events_result.result_data.data + # If you needed to debug the events, print some information with this: + for event in events: + print(f"event_id: {event.event_id}, timestamp_ms: {event.timestamp_ms}") + + if not events: + print(f"No new events, waiting...") + time.sleep(3) + return cursor + + print(f"Processing {len(events)} events") + for event in events: + asyncio.run(prompt_event_handler(client, package_id, model_owner_cap_id, event)) + + # Set the cursor to the last event. + # Also next fetch will skip the first event (the last event of this fetch) + # since it will have been already processed. + # We don't use the "next_cursor" property to simplify the code. + + last_event_id = events[-1].event_id + event_seq = last_event_id["eventSeq"] + tx_digest = last_event_id["txDigest"] + next_cursor = EventID(event_seq, tx_digest) + return next_cursor + + +if __name__ == "__main__": + main() diff --git a/offchain/tools/.flake8 b/offchain/tools/.flake8 new file mode 100644 index 0000000..94ade3a --- /dev/null +++ b/offchain/tools/.flake8 @@ -0,0 +1,3 @@ +[flake8] +max-line-length = 200 +exclude = .git,__pycache__,__init__.py,.mypy_cache,.pytest_cache,.venv diff --git a/offchain/tools/.gitignore b/offchain/tools/.gitignore new file mode 100644 index 0000000..2e995a6 --- /dev/null +++ b/offchain/tools/.gitignore @@ -0,0 +1,148 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# Mac shit +.DS_Store + +# python things +__pycache__ +__pypackages__ + +# Generator obsolete +.openapi-generator diff --git a/offchain/tools/LICENSE b/offchain/tools/LICENSE new file mode 100644 index 0000000..e6ab15f --- /dev/null +++ b/offchain/tools/LICENSE @@ -0,0 +1,38 @@ +Business Source License 1.1 +License text copyright (c) 2020 MariaDB Corporation Ab, All Rights Reserved. +“Business Source License” is a trademark of MariaDB Corporation Ab. +____________________________________________________________________________ + +Business Source License 1.1 +Parameters +Licensor: Talus Labs, Inc. +Licensed Work: Talus AI Agent Framework © 2024 Talus Labs, Inc. +Change Date: December 31, 2027 +Change License: Apache License, Version 2.0 +____________________________________________________________________________ + +Terms +The Licensor hereby grants you the right to copy, modify, create derivative works, redistribute, and make non-production use of the Licensed Work. The Licensor may make an Additional Use Grant, above, permitting limited production use. + +Effective on the Change Date, or the fourth anniversary of the first publicly available distribution of a specific version of the Licensed Work under this License, whichever comes first, the Licensor hereby grants you rights under the terms of the Change License, and the rights granted in the paragraph above terminate. + +If your use of the Licensed Work does not comply with the requirements currently in effect as described in this License, you must purchase a commercial license from the Licensor, its affiliated entities, or authorized resellers, or you must refrain from using the Licensed Work. + +All copies of the original and modified Licensed Work, and derivative works of the Licensed Work, are subject to this License. This License applies separately for each version of the Licensed Work and the Change Date may vary for each version of the Licensed Work released by Licensor. + +You must conspicuously display this License on each original or modified copy of the Licensed Work. If you receive the Licensed Work in original or modified form from a third party, the terms and conditions set forth in this License apply to your use of that work. +Any use of the Licensed Work in violation of this License will automatically terminate your rights under this License for the current and all other versions of the Licensed Work. + +This License does not grant you any right in any trademark or logo of Licensor or its affiliates (provided that you may use a trademark or logo of Licensor as expressly required by this License). + +TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE LICENSED WORK IS PROVIDED ON AN “AS IS” BASIS. LICENSOR HEREBY DISCLAIMS ALL WARRANTIES AND CONDITIONS, EXPRESS OR IMPLIED, INCLUDING (WITHOUT LIMITATION) WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, AND TITLE. 
+_____________________________________________________________________________ +Covenants of Licensor +In consideration of the right to use this License’s text and the “Business Source License” name and trademark, Licensor covenants to MariaDB, and to all other recipients of the licensed work to be provided by Licensor: + +To specify as the Change License the GPL Version 2.0 or any later version, or a license that is compatible with GPL Version 2.0 or a later version, where “compatible” means that software provided under the Change License can be included in a program with software provided under GPL Version 2.0 or a later version. Licensor may specify additional Change Licenses without limitation. + +To either: (a) specify an additional grant of rights to use that does not impose any additional restriction on the right granted in this License, as the Additional Use Grant; or (b) insert the text “None” to specify a Change Date. Not to modify this License in any other way. +____________________________________________________________________________ +Notice +The Business Source License (this document, or the “License”) is not an Open Source license. However, the Licensed Work will eventually be made available under an Open Source License, as stated in this License. diff --git a/offchain/tools/Modelfile b/offchain/tools/Modelfile new file mode 100644 index 0000000..1bc2aa7 --- /dev/null +++ b/offchain/tools/Modelfile @@ -0,0 +1,13 @@ +FROM tinyllama +TEMPLATE """{{- if .System }}System: {{.System}} + +{{end}}Human: {{.Prompt}} + +Assistant: """ +SYSTEM "You are a helpful AI assistant named Mistral." +PARAMETER temperature 1.0 +PARAMETER top_k 40 +PARAMETER top_p 0.95 +PARAMETER stop "" +PARAMETER stop "Human:" +PARAMETER stop "Assistant:" \ No newline at end of file diff --git a/offchain/tools/README.md b/offchain/tools/README.md new file mode 100644 index 0000000..0626c89 --- /dev/null +++ b/offchain/tools/README.md @@ -0,0 +1,63 @@ +# Tools + +This directory contains offchain tools for LLM inference and other functionalities. + +## Model Inference + +Model inference currently relies on ollama through the [server/main.py][main_py] route `/predict`, which runs inference +of the defined ollama models. + +## Tools + +Available tools are defined in [server/tools/tools.py][tools_py]. Current supported tools are listed +below and any desired tools can be added by following the instructions in **Adding Tools**. +Tools are executed through the [server/main.py][main_py] route `/tool/use`. + +_Note_: to use the OpenAI, Gemini, Scenexplain, or Tavily tools, equivalent api keys must be set in the `.env` and can be obtained here: + +- [OpenAI Key](https://openai.com/index/openai-api/) +- [Scenex Key](https://scenex.jina.ai/api) +- [Tavily Key](https://app.tavily.com) + +The above tools can also be deleted if not desired for simplicity. + +### Adding Tools + +In [server/tools/tools.py][tools_py], each tool has a defined argument structure which inherits from `pydantic` `BaseModel`, +and a `ToolCallBody` which consists of their name and the argument substructure. +`TOOL_ARGS_MAPPING` is a dictionary of available tools and their args, and `TOOLS` is a dictionary of available tools +and their actual executables, wrapped by the `create_clusterai_tool` function which allows for any lambda +function to be defined as a tool. This setup was intended towards support of definition of tools from onchain. + +### Supported Tools + +1. `search`: Web search using DuckDuckGo. +2. 
`wikipedia`: Query Wikipedia for information. +3. `arxiv`: Search academic papers on arXiv. +4. `pubmed`: Search medical and life sciences literature. +5. `scene_explain`: Explain the contents of an image. +6. `shell`: Execute shell commands. +7. `tavily_search`: Perform searches using Tavily. +8. `python_repl`: Execute Python code. +9. `read_file`: Read the contents of a file. +10. `list_directory`: List the contents of a directory. +11. `gpt4_vision`: Analyze images using GPT-4 Vision. +12. `dalle3`: Generate images based on text prompts. +13. `openai_embeddings`: Create text embeddings using OpenAI's API. +14. `browser`: Scrape and summarize website content. +15. `instagram_search`: Search for Instagram-specific content. + +Note: Each tool accepts specific arguments as defined in the `TOOL_ARGS_MAPPING` in the `tools.py` file. The AI model can use these tools by specifying the tool name and providing the required arguments. + +## Tests + +To run the tests: + +```bash +pip3 install pytest +PYTHONPATH=src pytest tests +``` + + +[main_py]: ./server/main.py +[tools_py]: ./server/tools/tools.py \ No newline at end of file diff --git a/offchain/tools/pyproject.toml b/offchain/tools/pyproject.toml new file mode 100644 index 0000000..48d56bf --- /dev/null +++ b/offchain/tools/pyproject.toml @@ -0,0 +1,67 @@ +[build-system] +requires = ["setuptools", "wheel"] +build-backend = "setuptools.build_meta" + +[tool.setuptools.packages.find] +where = ["src"] + +[project] +name = "nexus_tools" +version = "0.1.0" +description = "Nexus offchain tool execution" +authors = [ + { name="Talus", email="hi@talus.network" } +] +dependencies = [ + "aiofiles", + "aniso8601", + "async-exit-stack", + "async-generator", + "certifi", + "chardet", + "click", + "dnspython", + "email-validator", + "fastapi", + "graphene", + "graphql-core", + "graphql-relay", + "h11", + "httptools", + "httpx", + "itsdangerous", + "Jinja2", + "MarkupSafe", + "promise", + "pydantic", + "python-dotenv", + "python-multipart", + "requests", + "Rx", + "starlette", + "typing-extensions", + "ujson", + "urllib3", + "uvicorn", + "uvloop", + "watchgod", + "websockets", + "ollama", + "crewai", + "crewai-tools", + "pytest", + "langchain_community", + "langchain_openai", + "duckduckgo-search", + "langchain", + "wikipedia", + "langchain-experimental", + "arxiv", + "xmltodict", + "wolframalpha", + "langchain_experimental", + "google-generativeai", + "unstructured" +] + + diff --git a/offchain/tools/src/nexus_tools/__init__.py b/offchain/tools/src/nexus_tools/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/offchain/tools/src/nexus_tools/server/__init__.py b/offchain/tools/src/nexus_tools/server/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/offchain/tools/src/nexus_tools/server/controllers/__init__.py b/offchain/tools/src/nexus_tools/server/controllers/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/offchain/tools/src/nexus_tools/server/controllers/inference.py b/offchain/tools/src/nexus_tools/server/controllers/inference.py new file mode 100644 index 0000000..0dff310 --- /dev/null +++ b/offchain/tools/src/nexus_tools/server/controllers/inference.py @@ -0,0 +1,20 @@ +import ollama + + +class Inference: + @staticmethod + def prompt(prompt, model, max_tokens=1000, temperature=1.0): + options = {"temperature": temperature, "num_predict": max_tokens} + + response = ollama.chat( + model=model, + options=options, + messages=[ + { + "role": "user", + "content": prompt, + }, + ], + ) + + return 
response
diff --git a/offchain/tools/src/nexus_tools/server/crew/talus_chat_ollama.py b/offchain/tools/src/nexus_tools/server/crew/talus_chat_ollama.py
new file mode 100644
index 0000000..e163caf
--- /dev/null
+++ b/offchain/tools/src/nexus_tools/server/crew/talus_chat_ollama.py
@@ -0,0 +1,71 @@
+import json
+from typing import Any, Iterator, List, Optional
+
+from langchain_core.callbacks import CallbackManagerForLLMRun
+from langchain_core.language_models.chat_models import BaseChatModel
+from langchain_core.messages import BaseMessage, AIMessage
+from langchain_core.outputs import ChatResult, ChatGeneration
+
+from langchain_community.llms.ollama import OllamaEndpointNotFoundError, _OllamaCommon
+
+
+class TalusChatOllama(BaseChatModel, _OllamaCommon):
+    def __init__(
+        self,
+        prompt_contract: Any,
+        completion_contract: Any,
+        **kwargs: Any,
+    ):
+        super().__init__(**kwargs)
+        self.prompt_contract = prompt_contract
+        self.completion_contract = completion_contract
+
+    @property
+    def _llm_type(self) -> str:
+        return "blockchain-ollama-chat"
+
+    def _generate(
+        self,
+        messages: List[BaseMessage],
+        stop: Optional[List[str]] = None,
+        run_manager: Optional[CallbackManagerForLLMRun] = None,
+        **kwargs: Any,
+    ) -> ChatResult:
+        # Call the prompt contract to retrieve the prompt
+        prompt = self.prompt_contract.get_prompt()
+
+        # Convert the prompt to the format expected by Ollama
+        ollama_messages = self._convert_messages_to_ollama_messages([prompt])
+
+        # Call the Ollama API to generate the completion
+        final_chunk = self._chat_stream_with_aggregation(
+            ollama_messages,
+            stop=stop,
+            run_manager=run_manager,
+            verbose=self.verbose, **kwargs,
+        )
+
+        # Extract the generated text from the final chunk
+        generated_text = final_chunk.text
+
+        # Call the completion contract to store the completion
+        self.completion_contract.store_completion(generated_text)
+
+        # Create a ChatGeneration object with the generated text
+        chat_generation = ChatGeneration(
+            message=AIMessage(content=generated_text),
+            generation_info=final_chunk.generation_info,
+        )
+
+        return ChatResult(generations=[chat_generation])
+
+    def _stream(
+        self,
+        messages: List[BaseMessage],
+        stop: Optional[List[str]] = None,
+        run_manager: Optional[CallbackManagerForLLMRun] = None,
+        **kwargs: Any,
+    ) -> Iterator[ChatGeneration]:
+        raise NotImplementedError(
+            "Streaming is not supported for TalusChatOllama."
+ ) diff --git a/offchain/tools/src/nexus_tools/server/crew/talus_ollama.py b/offchain/tools/src/nexus_tools/server/crew/talus_ollama.py new file mode 100644 index 0000000..860d1df --- /dev/null +++ b/offchain/tools/src/nexus_tools/server/crew/talus_ollama.py @@ -0,0 +1,169 @@ +### This class will basically overrides the LLM implementaion for Ollama as we added +### the ability to report usage per agent request, the logic here is to be able to chrage + + +import json +from typing import Any, AsyncIterator, Dict, Iterator, List, Optional, Union, cast + +from langchain_core._api import deprecated +from langchain_core.callbacks import ( + AsyncCallbackManagerForLLMRun, + CallbackManagerForLLMRun, +) +from langchain_core.language_models.chat_models import BaseChatModel +from langchain_core.messages import ( + AIMessage, + AIMessageChunk, + BaseMessage, + ChatMessage, + HumanMessage, + SystemMessage, +) +from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult + +from langchain_community.llms.ollama import OllamaEndpointNotFoundError, _OllamaCommon + + +class TalusOllama(BaseChatModel, _OllamaCommon): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.prompt_tokens = 0 + self.completion_tokens = 0 + + @property + def _llm_type(self) -> str: + return "talus-ollama-chat" + + def _convert_messages_to_ollama_messages( + self, messages: List[BaseMessage] + ) -> List[Dict[str, Union[str, List[str]]]]: + ollama_messages: List = [] + for message in messages: + role = "" + if isinstance(message, HumanMessage): + role = "user" + elif isinstance(message, AIMessage): + role = "assistant" + elif isinstance(message, SystemMessage): + role = "system" + else: + raise ValueError("Received unsupported message type for Ollama.") + + content = "" + images = [] + if isinstance(message.content, str): + content = message.content + else: + for content_part in cast(List[Dict], message.content): + if content_part.get("type") == "text": + content += f"\n{content_part['text']}" + elif content_part.get("type") == "image_url": + if isinstance(content_part.get("image_url"), str): + image_url_components = content_part["image_url"].split(",") + if len(image_url_components) > 1: + images.append(image_url_components[1]) + else: + images.append(image_url_components[0]) + else: + raise ValueError( + "Only string image_url content parts are supported." + ) + else: + raise ValueError( + "Unsupported message content type. " + "Must either have type 'text' or type 'image_url' " + "with a string 'image_url' field." 
+ ) + + ollama_messages.append( + { + "role": role, + "content": content, + "images": images, + } + ) + + return ollama_messages + + def _create_chat_stream( + self, + messages: List[BaseMessage], + stop: Optional[List[str]] = None, + **kwargs: Any, + ) -> Iterator[str]: + payload = { + "model": self.model, + "messages": self._convert_messages_to_ollama_messages(messages), + } + self.prompt_tokens = self._count_tokens(payload) + self.report_prompt_charges() # Report prompt charges before calling LLM + yield from self._create_stream( + payload=payload, stop=stop, api_url=f"{self.base_url}/api/chat", **kwargs + ) + + def _chat_stream_with_aggregation( + self, + messages: List[BaseMessage], + stop: Optional[List[str]] = None, + run_manager: Optional[CallbackManagerForLLMRun] = None, + verbose: bool = False, + **kwargs: Any, + ) -> ChatGenerationChunk: + final_chunk: Optional[ChatGenerationChunk] = None + for stream_resp in self._create_chat_stream(messages, stop, **kwargs): + if stream_resp: + chunk = _chat_stream_response_to_chat_generation_chunk(stream_resp) + if final_chunk is None: + final_chunk = chunk + else: + final_chunk += chunk + if run_manager: + run_manager.on_llm_new_token( + chunk.text, + chunk=chunk, + verbose=verbose, + ) + if final_chunk is None: + raise ValueError("No data received from Ollama stream.") + + self.completion_tokens = self._count_tokens(final_chunk.text) + self.report_completion_charges() # Report completion charges after receiving from LLM + return final_chunk + + def _generate( + self, + messages: List[BaseMessage], + stop: Optional[List[str]] = None, + run_manager: Optional[CallbackManagerForLLMRun] = None, + **kwargs: Any, + ) -> ChatResult: + final_chunk = self._chat_stream_with_aggregation( + messages, + stop=stop, + run_manager=run_manager, + verbose=self.verbose, + **kwargs, + ) + chat_generation = ChatGeneration( + message=AIMessage(content=final_chunk.text), + generation_info=final_chunk.generation_info, + ) + return ChatResult(generations=[chat_generation]) + + def _count_tokens(self, text: Union[str, Dict]) -> int: + # TODO: Implement token counting logic based on your specific requirements + # This is a placeholder implementation + if isinstance(text, str): + return len(text.split()) + elif isinstance(text, Dict): + return sum(len(str(value).split()) for value in text.values()) + else: + raise ValueError("Unsupported text type for token counting.") + + def report_prompt_charges(self) -> None: + # TODO: Implement the logic to report prompt charges to the blockchain + print(f"Reporting prompt charges: {self.prompt_tokens} tokens") + + def report_completion_charges(self) -> None: + # TODO: Implement the logic to report completion charges to the blockchain + print(f"Reporting completion charges: {self.completion_tokens} tokens") diff --git a/offchain/tools/src/nexus_tools/server/main.py b/offchain/tools/src/nexus_tools/server/main.py new file mode 100755 index 0000000..bb2e1ad --- /dev/null +++ b/offchain/tools/src/nexus_tools/server/main.py @@ -0,0 +1,258 @@ +import sys +import logging +from pathlib import Path +import json +from typing import Dict + +from .models.completion import Completion +from .models.error import Error +from .models.prompt import Prompt +from .controllers.inference import Inference +from .models.model import ModelsResponse +from .tools.tools import TOOLS, ToolCallBody + +import ollama +from datetime import datetime +from fastapi import Body, FastAPI, HTTPException +from dotenv import load_dotenv + +from langchain.prompts import 
PromptTemplate +from langchain_experimental.llms.ollama_functions import OllamaFunctions +from langchain.callbacks.manager import CallbackManager +from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler +from langchain_core.runnables import RunnablePassthrough + +from dotenv import load_dotenv # New user: add .env file with oai key + +load_dotenv() +import openai + +# Add the root directory to PYTHONPATH +root_path = Path(__file__).resolve().parent.parent +sys.path.append(str(root_path)) + +logging.basicConfig(level=logging.DEBUG) +logger = logging.getLogger(__name__) + + +app = FastAPI( + title="Talus Model Integration API", + description="Talus Utility for integrating Models trough API", + version="6.0.2", +) + +inference = Inference() + + +@app.post( + "/predict", + responses={ + 200: { + "model": Completion, + "description": "The AI model successfully generated a completion.", + }, + 400: { + "model": Error, + "description": "The request body contains invalid parameters.", + }, + 500: { + "model": Error, + "description": "An unexpected error occurred while the server was processing the request.", + }, + }, + tags=["default"], + summary="Get a completion response from the AI model based on the provided prompt and parameters.", + response_model_by_alias=True, +) +async def predict( + prompt_data: Prompt = Body(..., description="The input data for the AI model.") +) -> Completion: + """ + This endpoint processes the input prompt with specified parameters and returns the AI-generated completion. + """ + print("start... predict") + print(f"prompt_data: {prompt_data}") + + completion = inference.prompt( + prompt=prompt_data.prompt, + model=prompt_data.model, + max_tokens=prompt_data.max_tokens, + temperature=prompt_data.temperature, + ) + print(f"completion: {completion}") + + return Completion(completion=json.dumps(completion), timestamp=datetime.now()) + + +@app.post( + "/tool/use", + responses={ + 200: {"model": dict, "description": "Successfully used the specified tool."}, + 400: { + "model": Error, + "description": "The request body contains invalid parameters or unsupported tool.", + }, + 500: { + "model": Error, + "description": "An unexpected error occurred while processing the request.", + }, + }, + tags=["default"], + summary="Use a specified tool to process the provided query.", + response_model_by_alias=True, +) +async def use_tool(tool_call_body: ToolCallBody) -> Dict[str, str]: + """ + This endpoint processes the input query using the specified tool. 
+ Supported tools are in TOOLS + """ + + print(f"use tool called with: {tool_call_body}") + if tool_call_body.tool_name not in TOOLS: + raise HTTPException( + status_code=400, detail=f"Unknown tool: {tool_call_body.tool_name}" + ) + + try: + tool = TOOLS[tool_call_body.tool_name] + result = tool._run(**tool_call_body.args.dict()) + print(f"tool result: {result}") + return {"result": result} + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except openai.OpenAIError as e: + raise HTTPException(status_code=500, detail=f"OpenAI API error: {str(e)}") + except Exception as e: + raise HTTPException( + status_code=500, detail="An error occurred while using the tool" + ) + + +# === The below is still in development === + + +def complete_json(incomplete_json): + try: + # Try to parse as is + return json.loads(incomplete_json) + except json.JSONDecodeError: + # If it fails, attempt to complete the JSON + if incomplete_json.strip().endswith("}"): + # If it ends with '}', assume it's just missing the final '}' + return json.loads(incomplete_json + "}") + elif '"tool_input": {' in incomplete_json: + # If it has an incomplete tool_input, try to complete it + return json.loads(incomplete_json + "}}") + else: + # If we can't easily complete it, raise an error + raise ValueError(f"Unable to complete JSON: {incomplete_json}") + + +@app.post("/prompt_tools", response_model=Completion) +async def prompt_tools(prompt_data: Prompt = Body(...)): + def wrap_clusterai_tool(tool): + def wrapped(**kwargs): + return tool._run(**kwargs) + + return wrapped + + wrapped_tools = {name: wrap_clusterai_tool(tool) for name, tool in TOOLS.items()} + + try: + print(f"Received prompt: {prompt_data.prompt}") + print(f"Selected tools: {prompt_data.tools}") + + llm = OllamaFunctions( + model=prompt_data.model, + temperature=prompt_data.temperature, + callback_manager=CallbackManager([StreamingStdOutCallbackHandler()]), + format="json", + ) + print("OllamaFunctions initialized") + + selected_tools = [] + for tool_name in prompt_data.tools: + if tool_name in TOOLS: + tool = TOOLS[tool_name] + selected_tools.append( + { + "name": tool.name, + "description": tool.description, + "parameters": { + "type": "object", + "properties": { + "query": {"type": "string"}, + }, + "required": ["query"], + }, + } + ) + print(f"Added tool: {tool.name}") + else: + raise ValueError(f"Unknown tool: {tool_name}") + + llm_with_tools = llm.bind_tools(selected_tools) + print("Tools bound to LLM") + + prompt_template = PromptTemplate( + input_variables=["input"], + template="Answer the following question, using the provided tools if necessary. 
Always use a tool before answering: {input}", + ) + print("Prompt template created") + + chain = {"input": RunnablePassthrough()} | prompt_template | llm_with_tools + print("RunnableSequence created") + + print("Invoking the chain") + result = chain.invoke(prompt_data.prompt) + print(f"Chain result: {result}") + + # Process the result + if isinstance(result, str): + try: + result_json = complete_json(result) + if "tool" in result_json: + tool_name = result_json["tool"] + tool_input = result_json["tool_input"] + print(f"Executing tool: {tool_name} with input: {tool_input}") + tool_result = wrapped_tools[tool_name](**tool_input) + print(f"Tool result: {tool_result}") + final_result = f"Tool {tool_name} returned: {tool_result}" + else: + final_result = json.dumps(result_json) + except Exception as e: + print(f"Error processing result: {e}") + final_result = f"Error processing result: {result}" + else: + final_result = str(result) + + if not final_result: + print("Chain returned an empty result") + return Completion( + completion="The model did not generate any output. Please try again.", + timestamp=datetime.now(), + ) + + return Completion(completion=final_result, timestamp=datetime.now()) + + except Exception as e: + print(f"An error occurred in prompt_tools: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + +def execute_tool_call(tool_call): + tool_name = tool_call["name"] + args = tool_call["args"] + + if tool_name not in TOOLS: + raise ValueError(f"Unknown tool: {tool_name}") + + tool = TOOLS[tool_name] + return tool._run(**args) + + +@app.get("/models", response_model=ModelsResponse) +async def get_models() -> ModelsResponse: + models_res = ollama.list() + print(models_res["models"]) + return ModelsResponse(models=models_res["models"]) diff --git a/offchain/tools/src/nexus_tools/server/models/__init__.py b/offchain/tools/src/nexus_tools/server/models/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/offchain/tools/src/nexus_tools/server/models/agents.py b/offchain/tools/src/nexus_tools/server/models/agents.py new file mode 100644 index 0000000..4ae35ea --- /dev/null +++ b/offchain/tools/src/nexus_tools/server/models/agents.py @@ -0,0 +1,30 @@ +# models.py +from pydantic import BaseModel, Field +from typing import List + + +class ToolModel(BaseModel): + name: str + description: str + + +class AgentModel(BaseModel): + role: str + goal: str + backstory: str + tools: List[ToolModel] + + +class TaskModel(BaseModel): + description: str + expected_output: str + agent_role: str + + +class CreateAgentRequest(BaseModel): + company_description: str + company_domain: str + hiring_needs: str + specific_benefits: str + agents: List[AgentModel] + tasks: List[TaskModel] diff --git a/offchain/tools/src/nexus_tools/server/models/completion.py b/offchain/tools/src/nexus_tools/server/models/completion.py new file mode 100644 index 0000000..ee29199 --- /dev/null +++ b/offchain/tools/src/nexus_tools/server/models/completion.py @@ -0,0 +1,30 @@ +# coding: utf-8 + +from __future__ import annotations +from datetime import datetime +from typing import List, Optional +from pydantic import BaseModel, Field + + +class ToolCall(BaseModel): + name: str + args: dict + + +class Completion(BaseModel): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + + Completion - a model defined in OpenAPI + + completion: The completion of this Completion. + timestamp: The timestamp of this Completion. 
+ """ + + completion: str = Field(alias="completion") + timestamp: datetime = Field(alias="timestamp") + tool_calls: Optional[List[ToolCall]] = None + + +Completion.update_forward_refs() diff --git a/offchain/tools/src/nexus_tools/server/models/error.py b/offchain/tools/src/nexus_tools/server/models/error.py new file mode 100644 index 0000000..b0e81e7 --- /dev/null +++ b/offchain/tools/src/nexus_tools/server/models/error.py @@ -0,0 +1,23 @@ +# coding: utf-8 + +from __future__ import annotations +from typing import Optional +from pydantic import BaseModel, Field + + +class Error(BaseModel): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + + Error - a model defined in OpenAPI + + message: The message of this Error [Optional]. + code: The code of this Error [Optional]. + """ + + message: Optional[str] = Field(alias="message", default=None) + code: Optional[int] = Field(alias="code", default=None) + + +Error.update_forward_refs() diff --git a/offchain/tools/src/nexus_tools/server/models/extra_models.py b/offchain/tools/src/nexus_tools/server/models/extra_models.py new file mode 100644 index 0000000..4c64cf1 --- /dev/null +++ b/offchain/tools/src/nexus_tools/server/models/extra_models.py @@ -0,0 +1,7 @@ +from pydantic import BaseModel + + +class TokenModel(BaseModel): + """Defines a token model.""" + + sub: str diff --git a/offchain/tools/src/nexus_tools/server/models/model.py b/offchain/tools/src/nexus_tools/server/models/model.py new file mode 100644 index 0000000..f07fa9d --- /dev/null +++ b/offchain/tools/src/nexus_tools/server/models/model.py @@ -0,0 +1,23 @@ +from typing import List, Optional +from pydantic import BaseModel +from datetime import datetime + + +class ModelDetail(BaseModel): + format: str + family: str + families: Optional[List[str]] = None + parameter_size: str + quantization_level: str + + +class Model(BaseModel): + name: str + modified_at: datetime + size: int + digest: str + details: ModelDetail + + +class ModelsResponse(BaseModel): + models: List[Model] diff --git a/offchain/tools/src/nexus_tools/server/models/prompt.py b/offchain/tools/src/nexus_tools/server/models/prompt.py new file mode 100644 index 0000000..c5207f7 --- /dev/null +++ b/offchain/tools/src/nexus_tools/server/models/prompt.py @@ -0,0 +1,42 @@ +# coding: utf-8 + +from __future__ import annotations + +from pydantic import BaseModel, Field, validator +from typing import List, Optional + + +class Prompt(BaseModel): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + + Prompt - a model defined in OpenAPI + + prompt: The text of this Prompt. + model: The model of this Prompt. + max_tokens: The max_tokens of this Prompt. + temperature: The temperature of this Prompt. 
+ """ + + prompt: str = Field(alias="prompt") + model: str = Field(alias="model", default="llama3:70b") + max_tokens: int = Field(alias="max_tokens", default=1000) + temperature: float = Field(alias="temperature", default=1.0) + tools: Optional[List[str]] = Field( + default=None, description="List of tool names to be used" + ) + + @validator("temperature") + def temperature_max(cls, value): + assert value <= 1 + assert value >= 0 + return value + + @validator("temperature") + def temperature_min(cls, value): + assert value >= 0 + return value + + +Prompt.update_forward_refs() diff --git a/offchain/tools/src/nexus_tools/server/security_api.py b/offchain/tools/src/nexus_tools/server/security_api.py new file mode 100644 index 0000000..41e6ec2 --- /dev/null +++ b/offchain/tools/src/nexus_tools/server/security_api.py @@ -0,0 +1,19 @@ +# coding: utf-8 + +from typing import List + +from fastapi import Depends, Security +from fastapi.openapi.models import OAuthFlowImplicit, OAuthFlows +from fastapi.security import ( + HTTPAuthorizationCredentials, + HTTPBasic, + HTTPBasicCredentials, + HTTPBearer, + OAuth2, + OAuth2AuthorizationCodeBearer, + OAuth2PasswordBearer, + SecurityScopes, +) +from fastapi.security.api_key import APIKeyCookie, APIKeyHeader, APIKeyQuery + +from openapi_server.models.extra_models import TokenModel diff --git a/offchain/tools/src/nexus_tools/server/tools/__init__.py b/offchain/tools/src/nexus_tools/server/tools/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/offchain/tools/src/nexus_tools/server/tools/tools.py b/offchain/tools/src/nexus_tools/server/tools/tools.py new file mode 100644 index 0000000..cda83f5 --- /dev/null +++ b/offchain/tools/src/nexus_tools/server/tools/tools.py @@ -0,0 +1,323 @@ +import sys +import json +from pathlib import Path +import requests +import os +from pydantic import BaseModel, Field + +root_path = Path(__file__).resolve().parent.parent +sys.path.append(str(root_path)) +from pydantic import BaseModel, Field +from typing import Any, Union, Callable + +from langchain_community.tools import DuckDuckGoSearchRun, WikipediaQueryRun +from langchain_community.utilities import WikipediaAPIWrapper +from langchain_community.tools import ( + ArxivQueryRun, + PubmedQueryRun, + SceneXplainTool, + ShellTool, +) +from langchain_community.tools.tavily_search import TavilySearchResults +from langchain_experimental.utilities import PythonREPL +from langchain_community.tools import ( + ReadFileTool as LangchainReadFileTool, + ListDirectoryTool as LangchainListDirectoryTool, +) +import google.generativeai as genai +from crewai_tools import BaseTool +from openai import OpenAI +from unstructured.partition.html import partition_html + +from dotenv import load_dotenv + +load_dotenv() +import os + +# Load API keys +OPENAI_API_KEY = os.getenv("OPENAI_API_KEY") +SCENEX_API_KEY = os.getenv("SCENEX_API_KEY") +TAVILY_API_KEY = os.getenv("TAVILY_API_KEY") +GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY") +genai.configure(api_key=GOOGLE_API_KEY) + + +class GeminiToolArgs(BaseModel): + prompt: str = Field(..., description="The prompt for the Gemini model") + model: str = Field(default="gemini-pro", description="The Gemini model to use") + + +class SearchToolArgs(BaseModel): + query: str = Field(..., description="The search query to be used") + num_results: str = Field(..., description="Number of results to return") + + +class WikipediaToolArgs(BaseModel): + query: str = Field(..., description="The Wikipedia query to be used") + + +class ArxivToolArgs(BaseModel): + 
query: str = Field(..., description="The Arxiv query to be used") + + +class PubmedToolArgs(BaseModel): + query: str = Field(..., description="The Pubmed query to be used") + + +class SceneExplainToolArgs(BaseModel): + image_url: str = Field(..., description="The URL of the image to be explained") + + +class ShellToolArgs(BaseModel): + command: str = Field(..., description="The shell command to be executed") + + +class TavilySearchToolArgs(BaseModel): + query: str = Field(..., description="The Tavily search query to be used") + + +class PythonREPLToolArgs(BaseModel): + code: str = Field(..., description="The Python code to be executed") + + +class ReadFileToolArgs(BaseModel): + file_path: str = Field(..., description="The path of the file to be read") + + +class ListDirectoryToolArgs(BaseModel): + directory_path: str = Field( + ..., description="The path of the directory to be listed" + ) + + +class GPT4VisionToolArgs(BaseModel): + image_url: str = Field(..., description="The URL of the image to analyze") + prompt: str = Field(..., description="The prompt for image analysis") + + +class DALLE3ToolArgs(BaseModel): + prompt: str = Field(..., description="The prompt for image generation") + + +class OpenAIEmbeddingsToolArgs(BaseModel): + text: str = Field(..., description="The text to create embeddings for") + + +ToolArgs = Union[ + SearchToolArgs, + WikipediaToolArgs, + ArxivToolArgs, + PubmedToolArgs, + SceneExplainToolArgs, + ShellToolArgs, + TavilySearchToolArgs, + PythonREPLToolArgs, + ReadFileToolArgs, + ListDirectoryToolArgs, + GPT4VisionToolArgs, + DALLE3ToolArgs, + OpenAIEmbeddingsToolArgs, +] + + +class ToolCallBody(BaseModel): + tool_name: str = Field(..., description="Name of the tool to call") + args: ToolArgs + + +def create_clusterai_tool( + tool_name: str, tool_description: str, my_lambda_function: Callable[..., Any] +) -> BaseTool: + class CustomTool(BaseTool): + name: str = tool_name + description: str = tool_description + function: Callable[..., Any] = my_lambda_function + + def __init__(self): + super().__init__() + + def _run(self, **kwargs: Any) -> Any: + return self.function(**kwargs) + + return CustomTool() + + +class BrowserTools: + @staticmethod + def scrape_and_summarize_website(url: str) -> str: + browserless_api_key = os.getenv("BROWSERLESS_API_KEY") + if not browserless_api_key: + return "Error: BROWSERLESS_API_KEY is not set in the environment variables." + + browserless_url = ( + f"https://chrome.browserless.io/content?token={browserless_api_key}" + ) + payload = json.dumps({"url": url}) + headers = {"cache-control": "no-cache", "content-type": "application/json"} + + try: + response = requests.post(browserless_url, headers=headers, data=payload) + response.raise_for_status() + elements = partition_html(text=response.text) + content = "\n\n".join([str(el) for el in elements]) + + # Simple summarization (you may want to implement a more sophisticated summarization method) + summary = content[:1000] + "..." 
if len(content) > 1000 else content + return f"Summary of {url}:\n\n{summary}" + except requests.RequestException as e: + return f"Error scraping website: {str(e)}" + + +class InstagramSearchTools: + @staticmethod + def search_instagram(query: str) -> str: + instagram_query = f"site:instagram.com {query}" + search_tool = DuckDuckGoSearchRun() + results = search_tool.run(instagram_query) + return f"Instagram search results for '{query}':\n\n{results}" + + +class BrowserToolArgs(BaseModel): + url: str = Field(..., description="The URL of the website to scrape and summarize") + + +class InstagramSearchToolArgs(BaseModel): + query: str = Field(..., description="The Instagram-specific search query") + + +TOOL_ARGS_MAPPING = { + "gemini": GeminiToolArgs, + "search": SearchToolArgs, + "wikipedia": WikipediaToolArgs, + "arxiv": ArxivToolArgs, + "pubmed": PubmedToolArgs, + "scene_explain": SceneExplainToolArgs, + "shell": ShellToolArgs, + "tavily_search": TavilySearchToolArgs, + "python_repl": PythonREPLToolArgs, + "read_file": ReadFileToolArgs, + "list_directory": ListDirectoryToolArgs, + "gpt4_vision": GPT4VisionToolArgs, + "dalle3": DALLE3ToolArgs, + "openai_embeddings": OpenAIEmbeddingsToolArgs, + "browser": BrowserToolArgs, + "instagram_search": InstagramSearchToolArgs, +} + +TOOLS = { + "gemini": create_clusterai_tool( + "gemini", + "Useful for generating text using Google's Gemini AI model.", + lambda prompt, model="gemini-pro": genai.GenerativeModel(model) + .generate_content(prompt) + .text, + ), + "search": create_clusterai_tool( + "search", + "Useful for searching the web for current information.", + lambda query, num_results: DuckDuckGoSearchRun().run( + f"{query} num_results={num_results}" + ), + ), + "wikipedia": create_clusterai_tool( + "wikipedia", + "Useful for querying Wikipedia for general knowledge.", + lambda query: WikipediaQueryRun(api_wrapper=WikipediaAPIWrapper()).run(query), + ), + "arxiv": create_clusterai_tool( + "arxiv", + "Useful for searching academic papers on arXiv.", + lambda query: ArxivQueryRun().run(query), + ), + "pubmed": create_clusterai_tool( + "pubmed", + "Useful for searching medical and life sciences literature.", + lambda query: PubmedQueryRun().run(query), + ), + "scene_explain": create_clusterai_tool( + "scene_explain", + "Useful for explaining the contents of an image.", + lambda image_url: SceneXplainTool(api_key=SCENEX_API_KEY).run(image_url), + ), + "shell": create_clusterai_tool( + "shell", + "Useful for running shell commands.", + lambda command: ShellTool().run(command), + ), + "tavily_search": create_clusterai_tool( + "tavily_search", + "Useful for performing searches using Tavily.", + lambda query: json.dumps( + TavilySearchResults(api_key=TAVILY_API_KEY).run(query) + ), + ), + "python_repl": create_clusterai_tool( + "python_repl", + "Useful for executing Python code.", + lambda code: PythonREPL().run(code), + ), + "read_file": create_clusterai_tool( + "read_file", + "Useful for reading the contents of a file.", + lambda file_path: LangchainReadFileTool().run(file_path), + ), + "list_directory": create_clusterai_tool( + "list_directory", + "Useful for listing the contents of a directory.", + lambda directory_path: LangchainListDirectoryTool().run(directory_path), + ), + "gpt4_vision": create_clusterai_tool( + "gpt4_vision", + "Useful for analyzing images using GPT-4 Vision.", + lambda image_url, prompt: OpenAI(api_key=OPENAI_API_KEY) + .chat.completions.create( + model="gpt-4o", + messages=[ + { + "role": "user", + "content": [ + {"type": 
"text", "text": prompt}, + {"type": "image_url", "image_url": {"url": image_url}}, + ], + } + ], + max_tokens=300, + ) + .choices[0] + .message.content, + ), + "dalle3": create_clusterai_tool( + "dalle3", + "Useful for generating images based on text prompts.", + lambda prompt: OpenAI(api_key=OPENAI_API_KEY) + .images.generate( + model="dall-e-3", + prompt=prompt, + size="1024x1024", + quality="standard", + n=1, + ) + .data[0] + .url, + ), + "openai_embeddings": create_clusterai_tool( + "openai_embeddings", + "Useful for creating text embeddings using OpenAI's API.", + lambda text: json.dumps( + OpenAI(api_key=OPENAI_API_KEY) + .embeddings.create(model="text-embedding-ada-002", input=text) + .data[0] + .embedding + ), + ), + "browser": create_clusterai_tool( + "browser", + "Useful for browsing websites and summarizing their content.", + lambda url: BrowserTool().run(url), + ), + "instagram_search": create_clusterai_tool( + "instagram_search", + "Useful for searching Instagram for images and videos.", + lambda query: InstagramSearchTools.search_instagram(query), + ), +} diff --git a/offchain/tools/tests/__init__.py b/offchain/tools/tests/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/offchain/tools/tests/conftest.py b/offchain/tools/tests/conftest.py new file mode 100644 index 0000000..cbde552 --- /dev/null +++ b/offchain/tools/tests/conftest.py @@ -0,0 +1,17 @@ +import pytest +from fastapi import FastAPI +from fastapi.testclient import TestClient + +from openapi_server.main import app as application + + +@pytest.fixture +def app() -> FastAPI: + application.dependency_overrides = {} + + return application + + +@pytest.fixture +def client(app) -> TestClient: + return TestClient(app) diff --git a/offchain/tools/tests/test_agent.py b/offchain/tools/tests/test_agent.py new file mode 100644 index 0000000..313dbc3 --- /dev/null +++ b/offchain/tools/tests/test_agent.py @@ -0,0 +1,102 @@ +# test_agent.py + +from fastapi.testclient import TestClient +from unittest.mock import patch, MagicMock +import pytest +from src.server.main import app +from src.server.models import CreateAgentRequest + +client = TestClient(app) + + +@pytest.mark.unit +def test_create_agents_and_tasks(): + request_company_descriptiondata = { + "desciption": "A tech company specializing in AI solutions.", + "company_domain": "ai-tech.com", + "hiring_needs": "Senior AI Engineer", + "specific_benefits": "Remote work, flexible hours, stock options", + "agents": [ + { + "role": "Research Analyst", + "goal": "Analyze the company website and provided description to extract insights on culture, values, and specific needs.", + "backstory": "Expert in analyzing company cultures and identifying key values and needs from various sources, including websites and brief descriptions.", + "tools": [ + { + "name": "WebsiteSearchTool", + "description": "Tool for searching websites", + }, + { + "name": "SeperDevTool", + "description": "Development tool for data separation", + }, + ], + }, + { + "role": "Job Description Writer", + "goal": "Use insights from the Research Analyst to create a detailed, engaging, and enticing job posting.", + "backstory": "Skilled in crafting compelling job descriptions that resonate with the company's values and attract the right candidates.", + "tools": [ + { + "name": "WebsiteSearchTool", + "description": "Tool for searching websites", + }, + { + "name": "SeperDevTool", + "description": "Development tool for data separation", + }, + {"name": "FileReadTool", "description": "Tool for reading 
files"}, + ], + }, + { + "role": "Review and Editing Specialist", + "goal": "Review the job posting for clarity, engagement, grammatical accuracy, and alignment with company values and refine it to ensure perfection.", + "backstory": "A meticulous editor with an eye for detail, ensuring every piece of content is clear, engaging, and grammatically perfect.", + "tools": [ + { + "name": "WebsiteSearchTool", + "description": "Tool for searching websites", + }, + { + "name": "SeperDevTool", + "description": "Development tool for data separation", + }, + {"name": "FileReadTool", "description": "Tool for reading files"}, + ], + }, + ], + "tasks": [ + { + "description": "Analyze the provided company website and the hiring manager's company's domain ai-tech.com, description: \"A tech company specializing in AI solutions.\". Focus on understanding the company's culture, values, and mission. Identify unique selling points and specific projects or achievements highlighted on the site. Compile a report summarizing these insights, specifically how they can be leveraged in a job posting to attract the right candidates.", + "expected_output": "A comprehensive report detailing the company's culture, values, and mission, along with specific selling points relevant to the job role. Suggestions on incorporating these insights into the job posting should be included.", + "agent_role": "Research Analyst", + }, + { + "description": 'Draft a job posting for the role described by the hiring manager: "Senior AI Engineer". Use the insights on "A tech company specializing in AI solutions." to start with a compelling introduction, followed by a detailed role description, responsibilities, and required skills and qualifications. Ensure the tone aligns with the company\'s culture and incorporate any unique benefits or opportunities offered by the company. Specific benefits: "Remote work, flexible hours, stock options"', + "expected_output": "A detailed, engaging job posting that includes an introduction, role description, responsibilities, requirements, and unique company benefits. The tone should resonate with the company's culture and values, aimed at attracting the right candidates.", + "agent_role": "Job Description Writer", + }, + { + "description": "Review the draft job posting for the role: \"Senior AI Engineer\". Check for clarity, engagement, grammatical accuracy, and alignment with the company's culture and values. Edit and refine the content, ensuring it speaks directly to the desired candidates and accurately reflects the role's unique benefits and opportunities. Provide feedback for any necessary revisions.", + "expected_output": "A polished, error-free job posting that is clear, engaging, and perfectly aligned with the company's culture and values. Feedback on potential improvements and final approval for publishing. 
Formatted in markdown.", + "agent_role": "Review and Editing Specialist", + }, + ], + } + + response = client.post("/agent", json=request_data) + assert response.status_code == 200 + data = response.json() + assert data["message"] == "Job Posting Creation Process Completed" + assert "result" in data + + +@pytest.mark.unit +def test_run_agent_process(): + # Assuming you have an endpoint to run the agent process + response = client.post("/agent/run") + assert response.status_code == 200 + data = response.json() + assert "message" in data + assert "result" in data + assert data["message"] == "Agent process started successfully" diff --git a/offchain/tools/tests/test_default_api.py b/offchain/tools/tests/test_default_api.py new file mode 100644 index 0000000..8691291 --- /dev/null +++ b/offchain/tools/tests/test_default_api.py @@ -0,0 +1,32 @@ +# coding: utf-8 + +from fastapi.testclient import TestClient + + +from openapi_server.models.completion import Completion +from openapi_server.models.error import Error +from openapi_server.models.prompt import Prompt + + +def test_predict_post(client: TestClient): + """Test case for predict_post + + Get a completion response from the AI model based on the provided prompt and parameters. + """ + prompt = { + "max_tokens": 1024, + "temperature": 1.0, + "model": "llama2-code", + "text": "What is the capital of France?", + } + + headers = {} + response = client.request( + "POST", + "/predict", + headers=headers, + json=prompt, + ) + + # uncomment below to assert the status code of the HTTP response + # assert response.status_code == 200 diff --git a/offchain/tools/tests/test_ollama.py b/offchain/tools/tests/test_ollama.py new file mode 100644 index 0000000..5cca407 --- /dev/null +++ b/offchain/tools/tests/test_ollama.py @@ -0,0 +1,58 @@ +import argparse +import ollama + + +class Inference: + @staticmethod + def prompt(prompt, model, max_tokens=1000, temperature=1.0): + options = {"temperature": temperature, "num_predict": max_tokens} + + response = ollama.chat( + model=model, + options=options, + messages=[ + { + "role": "user", + "content": prompt, + }, + ], + ) + + return response + + +def main(): + parser = argparse.ArgumentParser(description="Test Ollama chat with Mistral model") + parser.add_argument("prompt", help="The prompt to send to the model") + parser.add_argument( + "--max-tokens", + type=int, + default=1000, + help="Maximum number of tokens to generate", + ) + parser.add_argument( + "--temperature", type=float, default=1.0, help="Temperature for text generation" + ) + + args = parser.parse_args() + + model = "mistral-latest" + + print(f"Testing Ollama chat with model: {model}") + print(f"Prompt: {args.prompt}") + print(f"Max tokens: {args.max_tokens}") + print(f"Temperature: {args.temperature}") + print("\nGenerating response...\n") + + try: + response = Inference.prompt( + args.prompt, model, args.max_tokens, args.temperature + ) + print("Response:") + print(response["message"]["content"]) + except Exception as e: + print(f"An error occurred: {e}") + + +if __name__ == "__main__": + main() diff --git a/offchain/tools/tests/test_tool.py b/offchain/tools/tests/test_tool.py new file mode 100755 index 0000000..530faae --- /dev/null +++ b/offchain/tools/tests/test_tool.py @@ -0,0 +1,187 @@ +""" +tests for /tool/use route in main.py +To run, execute "pytest tests/test_tool.py" from `tools` directory +""" + +from fastapi.testclient import TestClient +from ..server.main import app +import pytest +import os + +client = TestClient(app) + + +def 
test_gpt4_vision_tool(): + response = client.post( + "/tool/use", + json={ + "tool_name": "gpt4_vision", + "args": { + "image_url": "https://i.imgur.com/Rr1jAAn.jpeg", + "prompt": "Describe this image", + }, + }, + ) + print(f"Response content: {response.json()}") + assert response.status_code == 200 + assert "result" in response.json() + + +def test_dalle3_tool(): + response = client.post( + "/tool/use", + json={"tool_name": "dalle3", "args": {"prompt": "A futuristic city"}}, + ) + print(f"Response content: {response.json()}") + assert response.status_code == 200 + assert "result" in response.json() + + +def test_openai_embeddings_tool(): + response = client.post( + "/tool/use", + json={"tool_name": "openai_embeddings", "args": {"text": "Test embedding"}}, + ) + print(f"Response content: {response.json()}") + assert response.status_code == 200 + assert "result" in response.json() + + +def test_search_tool(): + response = client.post( + "/tool/use", + json={ + "tool_name": "search", + "args": {"query": "FastAPI tutorial", "num_results": "5"}, + }, + ) + print(f"Response content: {response.json()}") + assert response.status_code == 200 + assert "result" in response.json() + + +def test_wikipedia_tool(): + response = client.post( + "/tool/use", + json={ + "tool_name": "wikipedia", + "args": {"query": "Python programming language"}, + }, + ) + print(f"Response content: {response.json()}") + assert response.status_code == 200 + assert "result" in response.json() + + +def test_arxiv_tool(): + response = client.post( + "/tool/use", json={"tool_name": "arxiv", "args": {"query": "quantum computing"}} + ) + print(f"Response content: {response.json()}") + assert response.status_code == 200 + assert "result" in response.json() + + +def test_pubmed_tool(): + response = client.post( + "/tool/use", json={"tool_name": "pubmed", "args": {"query": "COVID-19 vaccine"}} + ) + print(f"Response content: {response.json()}") + assert response.status_code == 200 + assert "result" in response.json() + + +def test_scene_explain_tool(): + response = client.post( + "/tool/use", + json={ + "tool_name": "scene_explain", + "args": {"image_url": "https://example.com/image.jpg"}, + }, + ) + print(f"Response content: {response.json()}") + assert response.status_code == 200 + assert "result" in response.json() + + +def test_shell_tool(): + response = client.post( + "/tool/use", + json={"tool_name": "shell", "args": {"command": "echo 'Hello, World!'"}}, + ) + print(f"Response content: {response.json()}") + assert response.status_code == 200 + assert "result" in response.json() + + +def test_tavily_search_tool(): + response = client.post( + "/tool/use", + json={ + "tool_name": "tavily_search", + "args": {"query": "latest AI breakthroughs"}, + }, + ) + print(f"Response content: {response.json()}") + assert response.status_code == 200 + assert "result" in response.json() + + +def test_python_repl_tool(): + response = client.post( + "/tool/use", + json={"tool_name": "python_repl", "args": {"code": "print('Hello, World!')"}}, + ) + print(f"Response content: {response.json()}") + assert response.status_code == 200 + assert "result" in response.json() + + +def test_read_file_tool(): + # Create a temporary file for testing + with open("test_file.txt", "w") as f: + f.write("Test content") + + response = client.post( + "/tool/use", + json={"tool_name": "read_file", "args": {"file_path": "test_file.txt"}}, + ) + print(f"Response content: {response.json()}") + assert response.status_code == 200 + assert "result" in response.json() + + # 
Clean up the temporary file + os.remove("test_file.txt") + + +def test_list_directory_tool(): + response = client.post( + "/tool/use", + json={"tool_name": "list_directory", "args": {"directory_path": "."}}, + ) + print(f"Response content: {response.json()}") + assert response.status_code == 200 + assert "result" in response.json() + + +def test_invalid_tool(): + response = client.post( + "/tool/use", + json={ + "tool_name": "invalid_tool", + "args": {"query": "test query", "num_results": "5"}, + }, + ) + print(f"Response status: {response.status_code}") + print(f"Response content: {response.json()}") + print(f"Response headers: {response.headers}") + assert ( + response.status_code == 400 + ), f"Expected 400, got {response.status_code}. Response: {response.json()}" + + +def test_invalid_args(): + response = client.post( + "/tool/use", json={"tool_name": "search", "args": {"invalid_arg": "value"}} + ) + assert response.status_code == 422 # Validation error diff --git a/onchain/LICENSE b/onchain/LICENSE new file mode 100644 index 0000000..e6ab15f --- /dev/null +++ b/onchain/LICENSE @@ -0,0 +1,38 @@ +Business Source License 1.1 +License text copyright (c) 2020 MariaDB Corporation Ab, All Rights Reserved. +“Business Source License” is a trademark of MariaDB Corporation Ab. +____________________________________________________________________________ + +Business Source License 1.1 +Parameters +Licensor: Talus Labs, Inc. +Licensed Work: Talus AI Agent Framework © 2024 Talus Labs, Inc. +Change Date: December 31, 2027 +Change License: Apache License, Version 2.0 +____________________________________________________________________________ + +Terms +The Licensor hereby grants you the right to copy, modify, create derivative works, redistribute, and make non-production use of the Licensed Work. The Licensor may make an Additional Use Grant, above, permitting limited production use. + +Effective on the Change Date, or the fourth anniversary of the first publicly available distribution of a specific version of the Licensed Work under this License, whichever comes first, the Licensor hereby grants you rights under the terms of the Change License, and the rights granted in the paragraph above terminate. + +If your use of the Licensed Work does not comply with the requirements currently in effect as described in this License, you must purchase a commercial license from the Licensor, its affiliated entities, or authorized resellers, or you must refrain from using the Licensed Work. + +All copies of the original and modified Licensed Work, and derivative works of the Licensed Work, are subject to this License. This License applies separately for each version of the Licensed Work and the Change Date may vary for each version of the Licensed Work released by Licensor. + +You must conspicuously display this License on each original or modified copy of the Licensed Work. If you receive the Licensed Work in original or modified form from a third party, the terms and conditions set forth in this License apply to your use of that work. +Any use of the Licensed Work in violation of this License will automatically terminate your rights under this License for the current and all other versions of the Licensed Work. + +This License does not grant you any right in any trademark or logo of Licensor or its affiliates (provided that you may use a trademark or logo of Licensor as expressly required by this License). + +TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE LICENSED WORK IS PROVIDED ON AN “AS IS” BASIS. 
LICENSOR HEREBY DISCLAIMS ALL WARRANTIES AND CONDITIONS, EXPRESS OR IMPLIED, INCLUDING (WITHOUT LIMITATION) WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, AND TITLE. +_____________________________________________________________________________ +Covenants of Licensor +In consideration of the right to use this License’s text and the “Business Source License” name and trademark, Licensor covenants to MariaDB, and to all other recipients of the licensed work to be provided by Licensor: + +To specify as the Change License the GPL Version 2.0 or any later version, or a license that is compatible with GPL Version 2.0 or a later version, where “compatible” means that software provided under the Change License can be included in a program with software provided under GPL Version 2.0 or a later version. Licensor may specify additional Change Licenses without limitation. + +To either: (a) specify an additional grant of rights to use that does not impose any additional restriction on the right granted in this License, as the Additional Use Grant; or (b) insert the text “None” to specify a Change Date. Not to modify this License in any other way. +____________________________________________________________________________ +Notice +The Business Source License (this document, or the “License”) is not an Open Source license. However, the Licensed Work will eventually be made available under an Open Source License, as stated in this License. diff --git a/onchain/Move.toml b/onchain/Move.toml new file mode 100755 index 0000000..5062e91 --- /dev/null +++ b/onchain/Move.toml @@ -0,0 +1,39 @@ +[package] +name = "talus" +edition = "2024.beta" +authors = ["moshe beeri (moshe.beeri@gmail.com)"] +# license = "" # e.g., "MIT", "GPL", "Apache 2.0" + +[dependencies] +Sui = { git = "https://github.com/MystenLabs/sui.git", subdir = "crates/sui-framework/packages/sui-framework", rev = "devnet-v1.27.0" } +# Sui = { local = "../../../../../suibase/workdirs/localnet/sui-repo/crates/sui-framework/packages/sui-framework" } +# Sui = { local = "../../../sui/crates/sui-framework/packages/sui-framework" } +# Sui = { git = "https://github.com/MystenLabs/sui.git", subdir = "crates/sui-framework/packages/sui-framework", rev = "framework/devnet" } + +# For remote import, use the `{ git = "...", subdir = "...", rev = "..." }`. +# Revision can be a branch, a tag, and a commit hash. +# MyRemotePackage = { git = "https://some.remote/host.git", subdir = "remote/path", rev = "main" } + +# For local dependencies use `local = path`. Path is relative to the package root +# Local = { local = "../path/to" } + +# To resolve a version conflict and force a specific version for dependency +# override use `override = true` +# Override = { local = "../conflicting/version", override = true } + +[addresses] +talus = "0x0" + +# Named addresses will be accessible in Move as `@name`. They're also exported: +# for example, `std = "0x1"` is exported by the Standard Library. +# alice = "0xA11CE" + +[dev-dependencies] +# The dev-dependencies section allows overriding dependencies for `--test` and +# `--dev` modes. You can introduce test-only dependencies here. +# Local = { local = "../path/to/dev-build" } + +[dev-addresses] +# The dev-addresses section allows overwriting named addresses for the `--test` +# and `--dev` modes. 
+# alice = "0xB0B" diff --git a/onchain/README.md b/onchain/README.md new file mode 100644 index 0000000..d71506d --- /dev/null +++ b/onchain/README.md @@ -0,0 +1,118 @@ +# Nexus smart contracts + +This is the onchain part of Nexus. + +## General structure + +The Nexus Move contracts are organized into several modules, each responsible for a specific +aspect of the agentic framework: + +1. [`node`][node]: Represents computational units that can run models inferences. +2. [`model`][model]: Defines LLM models that can be run on nodes. +3. [`agent`][model]: Represents intelligent agents that use models to perform tasks. +4. [`cluster`][cluster]: Manages groups of agents working together. +5. [`task`][task]: Defines unit of work within a cluster. +6. [`tool`][tool]: Represents utilities that agents can use to complete tasks. +7. [`prompt`][prompt]: Handles the creation and management of prompts for LLMs. + +For a technical audience interested in building a client for Nexus or using it as a Move +dependency, the following section provides details about the design. + +## Design + +### `Node` to `Model` to `Agent` + +Invoking machine learning models requires hardware. +Nexus describes a state machine that tells the hardware what to do, but the execution of the machine learning models happens on `Node`s off-chain. +Creating a `Node` object is the first step when interacting with Nexus. +Each computing unit is represented by this _owned_ object, meaning whichever wallet owns the `Node` object has exclusive rights to permit other entities to use it. +See the [`talus::node` module](./sources/node.move) to understand how to create a node and what information is shared with the network. + +Once we have defined the computing unit, we need to represent the machine learning model that powers LLM inference. +At the moment, only a `Node` owner can create a new shared `Model` object. +Since it's a shared object, it means it can be referenced in anyone's transaction. +However, upon creation of `Model` the transaction sender receives an owned `ModelOwnerCap` object. +This is a common Move pattern to handle permissions. +The shared `Model` object is a wrapper around `ModelInfo` that contains the model's metadata. +See the [`talus::model` module](./sources/model.move) to understand how to create a model and what information is shared with the network. +With `ModelInfo` one can create agents as is shown in the next step. +There are two paths to get access to the `ModelInfo`: + +1. The model owner can get it from the `Model` object by showing the `ModelOwnerCap`. +2. The model owner can issue `ModelInferencePromise` and transfer it to another wallet. + Such wallet can then use the `ModelInferencePromise` to get the `ModelInfo`. + +These access patterns enable the model owner to control who can use the model. +Note the name `ModelInferencePromise`. +At the moment, we don't have any punishment system for slashing inference providers that don't deliver the result. +Hence, for now, the model owner only makes a promise to whoever wants to use the model that the inference will be done. + +Finally, we have the `Agent` object which is a wrapper around `AgentBlueprint` object similarly to `Model` and `ModelInfo`. +Upon creation of an `Agent` object, the transaction sender receives an owned `AgentOwnerCap` object. +See the [`talus::agent` module](./sources/agent.move) to understand how to create an agent and what information is shared with the network. 
+ +An agent uses an LLM (the `Model`) for a specific, narrower set of goals. +One node can run multiple models, and one model can be used by multiple agents. +Two agents with different roles can still use the same model. + +### Cluster + +Agents can be combined into a `Cluster` object. +A `Cluster` also defines tasks to be performed by those agents. +(The simplest cluster that's runnable has one agent performing one task.) +When a `Cluster` is created, the creator receives a `ClusterOwnerCap` object. +With this object they can add tasks to the cluster. +They can also add agents to the cluster, either ones they created themselves (provided they have access to the `ModelInfo` via `ModelOwnerCap` or `ModelInferencePromise`) or agents created by others. + +However, agent owners have control over their agents. +To add someone else's agent, the cluster owner needs to hold an `AgentRosterPromise` for that agent. +They can only obtain it from the agent owner. +This is the same pattern we saw with the `ModelInferencePromise`. + +An example of a cluster is given in the [`talus::cluster_tests` module](./sources/tests/cluster_tests.move). + +Once the `Cluster` has been defined, users can submit a prompt that will be fed into the LLM of the agent owning the first task in the cluster. +This process creates a shared `ClusterExecution` object, which copies the `Cluster` blueprint and tracks the state of a particular user execution. +Multiple users can submit their prompts to the same cluster, each with their own `ClusterExecution` object. +See the [`talus::cluster::execute` entry function](./sources/cluster.move). + +Creating a new `ClusterExecution` emits a `RequestForCompletionEvent` event. +Nodes listen to these events and filter them based on the IDs of the models they run. +Once the node that runs the LLM inference for the first agent has finished its off-chain computation, it submits the result to the particular `ClusterExecution` object. +It submits the result via either + +- `submit_completion_as_node_owner`, +- `submit_completion_as_model_owner`, or +- `submit_completion_as_cluster_owner`. + +The specific function depends on the off-chain node implementation and only differs in the way permissions are checked. + +All LLM output is stored on-chain. +If there is more than one task, the process repeats. +The completion submission emits a `RequestForCompletionEvent`, which leads to a (possibly different) node again feeding the string in the `RequestForCompletionEvent.prompt_contents` property to the LLM and submitting the result via one of the aforementioned functions.
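To make this event-driven flow concrete, here is a minimal, hypothetical Python sketch of the node-side loop. Only the event name (`RequestForCompletionEvent`), its `prompt_contents` property, and the `submit_completion_as_node_owner` entry function come from the design above; the Sui client plumbing and the `model_id`/`execution_id` field names are placeholders, not part of this package.

```python
from typing import Dict, Iterable


def poll_request_events() -> Iterable[Dict]:
    """Placeholder for a Sui event subscription filtered on
    RequestForCompletionEvent; yields decoded event payloads."""
    return []


def run_inference(prompt: str) -> str:
    """Placeholder for the actual LLM call, e.g. the Ollama-backed
    inference service in offchain/tools."""
    return f"completion for: {prompt}"


def submit_completion_as_node_owner(execution_id: str, completion: str) -> None:
    """Placeholder for a programmable transaction that calls the Move entry
    function of the same name on the ClusterExecution object."""
    print(f"submitting completion for {execution_id}")


MY_MODEL_IDS = {"0xMODEL"}  # IDs of the Model objects this node serves (illustrative)

for event in poll_request_events():
    # Nodes filter events by the IDs of the models they run.
    if event.get("model_id") not in MY_MODEL_IDS:
        continue
    # Feed the on-chain prompt contents to the LLM...
    reply = run_inference(event["prompt_contents"])
    # ...and submit the completion back to the ClusterExecution object.
    submit_completion_as_node_owner(event["execution_id"], reply)
```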
+ + + +[gdoc-next-steps]: https://docs.google.com/document/d/1pWrayUt3zI1YQqnzR6MqLDYwz-x7i845WAGv9im0fis +[gdoc-user-stories]: https://docs.google.com/document/d/1zf-NdrW6bSCmmVWuKvM8rqG1s2KwxlcPfrjwwHxzXzU +[git-main]: https://github.com/Talus-Network/ai/tree/f64e92638 + +[node]: ./sources/node.move +[model]: ./sources/model.move +[agent]: ./sources/agent.move +[cluster]: ./sources/cluster.move +[task]: ./sources/task.move +[tool]: ./sources/tool.move +[prompt]: ./sources/prompt.move diff --git a/onchain/Suibase.toml b/onchain/Suibase.toml new file mode 100644 index 0000000..f053653 --- /dev/null +++ b/onchain/Suibase.toml @@ -0,0 +1,5 @@ +[meta] +creation_timestamp = "1720027248390943 2024-07-03 13:20:48.390943 -04:00" + +[packages] +talus = { uuid = "NH5F3LHUSTK3T3U6DVMWI2CJEA", uuid_custom = false } diff --git a/onchain/sources/agent.move b/onchain/sources/agent.move new file mode 100644 index 0000000..0e63ba1 --- /dev/null +++ b/onchain/sources/agent.move @@ -0,0 +1,305 @@ +module talus::agent { + //! An agent is a specialized entity designed to perform specific roles + //! within the [`talus::Cluster::Cluster`] setup. + //! + //! From a node we create models. + //! From a model we create agents. + //! From agents we create a clusters. + //! + //! There are two ways of going about creating an agent: + //! 1. Cluster owner can create a blueprint directly and add that blueprint to their cluster. + //! Useful if the cluster owner runs the agent's inference node off-chain. + //! 2. A model provider can create a shared [`Agent`] object and let other clusters use it. + //! + //! # Using a shared agent + //! A cluster owner can add an agent to the cluster setup by redeeming a [`AgentRosterPromise`]. + //! This object can be created by the agent owner. + //! It can be sent to the cluster owner, or it can be sold to them as an NFT. + + use std::string::{Self, String}; + use sui::event; + use sui::transfer::{transfer, share_object}; + use talus::model::ModelInfo; + + // === Errors === + + const EAgentMismatch: u64 = 1; + const ENotAgentOwner: u64 = 2; + + // === Data models === + + /// Other clusters can copy [`AgentBlueprint`] from this object to include this + /// agent in their setup. + /// They need the corresponding [`AgentRosterPromise`] to do that. + public struct Agent has key, store { + id: UID, + /// We mustn't give references the blueprint because at the moment having + /// access to [`AgentBlueprint`] means being able to request completions + /// from the agent owner. + blueprint: AgentBlueprint, + } + + /// When [`Agent`] is created, the sender of the transaction becomes the owner. + /// This is typically an owned object. + /// The owner of this object can submit transactions with the agent's completions. + /// + /// The cap can be cloned with [`clone_owner_cap`]. + /// This is useful if the agent owner runs multiple machines but wants to + /// keep their private keys separate, or if they want to emit [`AgentRosterPromise`] + /// from a Move package. + public struct AgentOwnerCap has key, store { + id: UID, + agent: ID, + } + + /// Defines the kind of work this agent typically does. + /// If the agent is an LLM, some of this information will be used for + /// pre-prompt. + /// + /// TBD: If we allow mutation of this data, we also need to add a version. + public struct AgentBlueprint has store, copy, drop { + /// Agents are identified by their name. + /// This implies that agent name must be unique within single [`talus::cluster::Cluster`]. 
+ name: AgentName, + /// If this blueprint was created from another agent, this field will + /// contain the ID of the originating agent. + /// + /// If this blueprint was created by a cluster owner without referencing + /// existing agent, then this field will be `None`. + /// + /// This decides who can submit completions on behalf of the agent. + originated_from_agent: Option, + /// We must't give references to the model because at the moment having + /// access to [`ModelInfo`] means being able to request completions + /// from the model owner. + /// But model owner must have a say over who can request completions. + model: ModelInfo, + role: String, + goal: String, + backstory: String, + } + + /// Agent's state specific to [`talus::cluster::ClusterExecution`]. + /// Our first implementation is sequential execution, we don't support + /// hierarchy of agents yet. + /// Therefore, no agent is a manager. + public struct AgentState has store { + name: AgentName, + last_task_response: String, + } + + /// Agent name serves as an identifier for an agent. + public struct AgentName has store, copy, drop { + inner: String, + } + + /// A cluster owner can exchange this object for an agent in their setup. + /// The owner of an agent promises to submit completions on behalf of the + /// agent when the agent is added to the cluster. + /// + /// This object can be treated as an NFT. + public struct AgentRosterPromise has key, store { + id: UID, + agent: ID, + } + + // === Events === + + public struct AgentCreatedEvent has copy, drop { + agent: ID, + owner_cap: ID, + } + + public struct AgentRosterPromiseIssuedEvent has copy, drop { + promise: ID, + agent: ID, + } + + // === Constructors === + + /// Returns a new instance of an [`AgentBlueprint`]. + /// The blueprint is not associated with any specific agent and it's up to + /// the caller to ensure that there are off-chain services that can run the + /// agent. + /// + /// Does NOT emit any event. + public fun new( + name: AgentName, + role: String, + goal: String, + backstory: String, + model: ModelInfo, + ): AgentBlueprint { + AgentBlueprint { + name, + role, + goal, + backstory, + model, + originated_from_agent: option::none(), + } + } + + /// Creates a new [`Agent`] from blueprint. + /// Typically, the [`AgentOwnerCap`] is transferred as an owned object to + /// the agent owner and the [`Agent`] is shared. + public fun create_from_blueprint( + blueprint: AgentBlueprint, + ctx: &mut TxContext, + ): (AgentOwnerCap, Agent) { + let agent = Agent { + id: object::new(ctx), + blueprint, + }; + + let owner_cap = AgentOwnerCap { + id: object::new(ctx), + agent: object::id(&agent), + }; + + event::emit(AgentCreatedEvent { + agent: object::id(&agent), + owner_cap: object::id(&owner_cap), + }); + + (owner_cap, agent) + } + + #[allow(lint(share_owned, self_transfer))] + /// Similar to [`create_from_blueprint`] but tailored towards calls from + /// programmable txs. + /// The [`AgentOwnerCap`] is transferred to the tx sender as an owned object + /// and [`Agent`] is shared. 
+ public fun create_and_share( + name: String, // AgentName as string for convenience + role: String, + goal: String, + backstory: String, + model: ModelInfo, + ctx: &mut TxContext, + ) { + let blueprint = new( + into_name(name), + role, + goal, + backstory, + model, + ); + + let (owner_cap, agent) = create_from_blueprint(blueprint, ctx); + + transfer(owner_cap, ctx.sender()); + share_object(agent); + } + + /// The agent's owner can issue a promise to any cluster owner that ends up + /// owning the [`AgentRosterPromise`] that the agent will participate in + /// their cluster. + public fun issue_roster_promise( + owner_cap: &AgentOwnerCap, ctx: &mut TxContext, + ): AgentRosterPromise { + let promise = AgentRosterPromise { + id: object::new(ctx), + agent: owner_cap.agent, + }; + + event::emit(AgentRosterPromiseIssuedEvent { + promise: object::id(&promise), + agent: owner_cap.agent, + }); + + promise + } + + /// Create a new instance of a [`AgentName`] from given string. + /// Name serves as an identifier. + public fun into_name(s: String): AgentName { + AgentName { inner: s } + } + + /// Creates another owner cap for the same agent. + public fun clone_owner_cap( + self: &AgentOwnerCap, ctx: &mut TxContext, + ): AgentOwnerCap { + AgentOwnerCap { + id: object::new(ctx), + agent: self.agent, + } + } + + /// Returns new empty state for the agent. + public fun new_state(agent_name: AgentName): AgentState { + AgentState { + name: agent_name, + last_task_response: string::utf8(b""), + } + } + + // === Destructors === + + public fun destroy_owner_cap(self: AgentOwnerCap) { + let AgentOwnerCap { id, .. } = self; + object::delete(id); + } + + // === Package protected === + + /// Returns a blueprint for the agent. + /// When the agent is added to the cluster we emit an event that notifies the + /// agent's owner. + public(package) fun redeem_roster_promise( + agent: &Agent, + promise: AgentRosterPromise, + ): AgentBlueprint { + assert!(object::id(agent) == promise.agent, EAgentMismatch); + + let mut blueprint = agent.blueprint; + blueprint.originated_from_agent = option::some(object::id(agent)); + + let AgentRosterPromise { id, .. } = promise; + id.delete(); + + blueprint + } + + // === Accessors === + + /// Only the owner can get the blueprint on chain as this is what we create + /// clusters from. + public fun get_blueprint( + self: &Agent, owner_cap: &AgentOwnerCap, + ): AgentBlueprint { + assert_owner(self, owner_cap); + let mut blueprint = self.blueprint; + blueprint.originated_from_agent = option::some(object::id(self)); + + blueprint + } + + public fun get_backstory(self: &AgentBlueprint): String { self.backstory } + public fun get_goal(self: &AgentBlueprint): String { self.goal } + public fun get_model_id(self: &AgentBlueprint): ID { self.model.get_id() } + public fun get_name(self: &AgentBlueprint): AgentName { self.name } + public fun get_node_id(self: &AgentBlueprint): ID { self.model.get_node_id() } + public fun get_originated_from_agent(self: &AgentBlueprint): Option { self.originated_from_agent } + public fun get_role(self: &AgentBlueprint): String { self.role } + + public fun get_owner_cap_agent(self: &AgentOwnerCap): ID { self.agent } + + public fun get_roster_promised_agent(self: &AgentRosterPromise): ID { self.agent } + + // === Package protected === + + /// See AgentBlueprint.model for why this is package protected. 
+ public(package) fun get_model_info(self: &AgentBlueprint): ModelInfo { self.model } + + public(package) fun set_last_task_response(self: &mut AgentState, response: String) { + self.last_task_response = response; + } + + // === Helpers === + + fun assert_owner(agent: &Agent, owner_cap: &AgentOwnerCap) { + assert!(owner_cap.agent == object::id(agent), ENotAgentOwner); + } +} diff --git a/onchain/sources/cluster.move b/onchain/sources/cluster.move new file mode 100644 index 0000000..e77ed9a --- /dev/null +++ b/onchain/sources/cluster.move @@ -0,0 +1,678 @@ +module talus::cluster { + //! A [`Cluster`] is a group of agents working together to achieve a common goal. + //! A concrete goal they are working towards is called [`ClusterExecution`]. + //! + //! First, a new [`Cluster`] is created with a blueprint of agents and tasks. + //! Then whenever a user submits new prompt, a new [`ClusterExecution`] is created + //! and the current version of the blueprint is copied. + //! The off-chain services then work towards the completion of the goal. + //! + //! From a node we create models. + //! From a model we create agents. + //! From agents we create a clusters. + + use std::hash; + use std::string::{Self, String, utf8}; + use sui::event; + use sui::table_vec::{Self, TableVec}; + use sui::table::{Self, Table}; + use sui::vec_map::{Self, VecMap}; + use talus::agent::{Self, AgentName, AgentBlueprint, AgentState, Agent, AgentRosterPromise}; + use talus::consts::{status_idle, status_running, status_success}; + use talus::model::{Self, ModelOwnerCap, Model}; + use talus::node::Node; + use talus::tool; + use talus::prompt; + use talus::task::{Self, TaskName, TaskState, TaskBlueprint}; + + // === Errors === + + const ENoTasksToExecute: u64 = 1; + const ECurrentTaskNotIdle: u64 = 2; + const ETaskNotFound: u64 = 3; + const ENotClusterOwner: u64 = 4; + const ENodeMismatch: u64 = 5; + const EModelMismatch: u64 = 6; + + // === Consts === + + /// For the first iteration of Nexus, we arbitrarily chose to include this + /// many latest messages in the context for the next prompt. + const PastNMessagesToIncludeInContext: u64 = 5; + + // === Data models === + + /// An object that holds a cluster definition. + public struct Cluster has key, store { + id: UID, + blueprint: ClusterBlueprint, + } + + /// Usually an owned object that permissions operations on the [`Cluster`]. + /// + /// The cap can be cloned with [`clone_owner_cap`]. + /// This is useful if the cluster owner runs multiple machines but wants to + /// keep their private keys separate. + public struct ClusterOwnerCap has key, store { + id: UID, + cluster: ID, + } + + /// Blueprint for execution. + /// + /// TBD: We use [`VecMap`] for agents. + /// This allows trivial copies of the data. + /// However, we would preferably use [`Table`] instead and referenced + /// the agents by their name in the [`ClusterExecution`]. + /// We need to first figure out a good strategy for versioning to enable + /// editing of Cluster blueprints. + /// [`VecMap`] is a good in between step towards that goal. + /// As for tasks, those need to be ordered. + /// The same scenario applies to tasks, except we want [`TableVec`] + /// TBD: We should keep a version in the blueprint struct and bump it on + /// every update. 
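+    ///
+    /// A rough sketch of how a blueprint is typically populated through the
+    /// entry functions further below; every identifier here is a placeholder:
+    /// ```
+    /// cluster::create(name, description, ctx);
+    /// // ...and then, holding the resulting ClusterOwnerCap:
+    /// cluster::add_agent_entry(
+    ///     &mut cluster, &owner_cap, &model, &model_owner_cap,
+    ///     agent_name, role, goal, backstory,
+    /// );
+    /// cluster::add_task_entry(
+    ///     &mut cluster, &owner_cap, task_name, agent_name,
+    ///     description, expected_output, prompt, context,
+    /// );
+    /// ```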
+ public struct ClusterBlueprint has store, copy, drop { + name: String, + description: String, + tasks: vector, + agents: VecMap, + } + + // This is what the user is paying for + public struct ClusterExecution has key, store { + id: UID, + from_cluster: ID, + blueprint: ClusterBlueprint, + running_user: address, + created_at_epoch: u64, + tasks: Table, + agents: Table, + /// With what prompt was the execution started. + cluster_user_message: String, + /// The final response of the cluster execution. + /// Empty until status is `StatusSuccess`. + cluster_response: String, + current_task: TaskName, + /// Enumeration of + /// - `StatusIdle` + /// - `StatusRunning` + /// - `StatusSuccess` + /// + /// We use string constants to be more friendly to explorers. + /// See [`talus::consts`]. + status: String, + memory: Memory, + } + + /// Right now memory consists of past messages. + /// We simply use past [`PastNMessagesToIncludeInContext`] message to build + /// the context for next prompt execution. + public struct Memory has store { + messages: TableVec, + } + + public struct Message has store, drop { + role: String, + content: String, + name: Option, + } + + // === Events === + + /// A new [`Cluster`] has been created. + /// It can be populated with agents and tasks later. + public struct ClusterCreatedEvent has copy, drop { + cluster: ID, + owner_cap: ID, + } + + /// A new [`ClusterExecution`] has been created. + public struct ClusterExecutionCreatedEvent has copy, drop { + cluster: ID, + execution: ID, + } + + public struct ClusterResponseEvent has copy, drop { + cluster: ID, + cluster_name: String, + // should not be a string in order to be able to support + // different types of responses images music etc. + // TBD: Do we really want to store the response in the event? With large + // responses this will be expensive. + response: vector, + } + + public struct AgentAddedToClusterEvent has copy, drop { + cluster: ID, + agent_name: AgentName, + /// Only present if the agent's blueprint was copied from existing agent + /// and not created from scratch. + /// See [`talus::agent::redeem_roster_promise`]. + /// + /// The agent's owner can the filter this event by this value to know + /// which of their agents were added to a cluster. + agent: Option, + /// Which model is the agent using. + model: ID, + /// On which HW is the agent running. + node: ID, + } + + // === Constructors === + + /// Create an empty [`Cluster`] shared object. + /// The tx sender gets an owned object [`ClusterOwnerCap`] that allows them to + /// modify the cluster. + public entry fun create( + name: String, + description: String, + ctx: &mut TxContext + ) { + let cluster = Cluster { + id: object::new(ctx), + blueprint: ClusterBlueprint { + name, + description, + agents: vec_map::empty(), + tasks: vector::empty(), + } + }; + + let owner_cap = ClusterOwnerCap { + id: object::new(ctx), + cluster: object::id(&cluster), + }; + + event::emit(ClusterCreatedEvent { + cluster: object::id(&cluster), + owner_cap: object::id(&owner_cap), + }); + + transfer::share_object(cluster); + transfer::transfer(owner_cap, ctx.sender()); + } + + /// Creates another owner cap for the same cluster. + public fun clone_owner_cap( + self: &ClusterOwnerCap, ctx: &mut TxContext, + ): ClusterOwnerCap { + ClusterOwnerCap { + id: object::new(ctx), + cluster: self.cluster, + } + } + + /// From given cluster blueprint, create a new [`ClusterExecution`] shared object. 
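+    ///
+    /// A minimal sketch of the expected call, assuming the shared [`Cluster`]
+    /// has already been populated with at least one agent and one task:
+    /// ```
+    /// cluster::execute(
+    ///     &cluster,
+    ///     string::utf8(b"Create a poem about nature in a romantic style"),
+    ///     ctx,
+    /// );
+    /// ```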
+ public entry fun execute( + cluster: &Cluster, + user_input: String, + ctx: &mut TxContext, + ) { + if (cluster.blueprint.tasks.is_empty() ) { + // The cluster was not yet configured to perform any tasks. + std::debug::print(&utf8(b"No tasks to execute")); + abort ENoTasksToExecute + }; + + // Populate tasks + let mut task_states = table::new(ctx); + let mut i = 0; + while (i < cluster.blueprint.tasks.length()) { + let task = cluster.blueprint.tasks.borrow(i); + let task_name = task.get_name(); + table::add( + &mut task_states, + task_name, + task::new_state(task_name, task.get_agent_name()), + ); + + i = i + 1; + }; + + + // Populate agents + let mut agent_states = table::new(ctx); + let agent_names = cluster.blueprint.agents.keys(); + let mut i = 0; + while (i < agent_names.length()) { + let agent_name = *agent_names.borrow(i); + table::add( + &mut agent_states, + agent_name, + agent::new_state(agent_name), + ); + + i = i + 1; + }; + + let mut execution = ClusterExecution { + id: object::new(ctx), + from_cluster: object::id(cluster), + blueprint: cluster.blueprint, + running_user: ctx.sender(), + created_at_epoch: ctx.epoch(), + cluster_user_message: user_input, + tasks: task_states, + agents: agent_states, + cluster_response: string::utf8(b""), + // we already checked earlier that there are tasks in the blueprint + current_task: task::get_name(cluster.blueprint.tasks.borrow(0)), + status: status_idle(), + memory: Memory { + messages: table_vec::empty(ctx), + }, + }; + + // Add initial user message to memory + add_message( + &mut execution.memory, + string::utf8(b"user"), + user_input, + option::none(), + ); + + // we already checked earlier that there are tasks in the blueprint + schedule_current_task_for_execution(&mut execution); + + event::emit(ClusterExecutionCreatedEvent { + cluster: object::id(cluster), + execution: object::id(&execution), + }); + + transfer::share_object(execution) + } + + // === State management === + + /// Each agent's model has a node ID associated with it. + /// This entry function allows the node owner to submit the completion of + /// the prompt to the chain. + public entry fun submit_completion_as_node_owner( + execution: &mut ClusterExecution, + node: &Node, + completion: String, + ) { + let current_task_state = execution.tasks.borrow_mut( + execution.current_task, + ); + let agent_name = current_task_state.get_state_agent_name(); + let agent = execution.blueprint.agents.get(&agent_name); + assert!(agent.get_node_id() == object::id(node), ENodeMismatch); + + submit_completion(execution, completion); + } + + /// Each agent has a model ID associated with it. + /// This entry function allows the model owner to submit the completion of + /// the prompt to the chain. + public entry fun submit_completion_as_model_owner( + execution: &mut ClusterExecution, + owner_cap: &ModelOwnerCap, + completion: String, + ) { + let current_task_state = execution.tasks.borrow_mut( + execution.current_task, + ); + let agent_name = current_task_state.get_state_agent_name(); + let agent = execution.blueprint.agents.get(&agent_name); + assert!(agent.get_model_id() == owner_cap.get_model_id(), EModelMismatch); + + submit_completion(execution, completion); + } + + /// Cluster owner can submit completion on behalf of any agent. 
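+    ///
+    /// Illustrative sketch; `execution` and `owner_cap` are placeholders and
+    /// `completion` stands for whatever text the off-chain inference produced
+    /// for the current task:
+    /// ```
+    /// cluster::submit_completion_as_cluster_owner(
+    ///     &mut execution,
+    ///     &owner_cap,
+    ///     completion,
+    /// );
+    /// ```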
+ public entry fun submit_completion_as_cluster_owner( + execution: &mut ClusterExecution, + owner_cap: &ClusterOwnerCap, + completion: String, + ) { + execution.assert_execution_owner(owner_cap); + submit_completion(execution, completion); + } + + /// Exchanges a roster promise for an agent blueprint and adds it to the cluster. + public entry fun redeem_roster_promise( + cluster: &mut Cluster, + owner_cap: &ClusterOwnerCap, + agent: &Agent, + roster_promise: AgentRosterPromise, + ) { + let blueprint = agent.redeem_roster_promise(roster_promise); + + // SAFETY: checks for permissions in the function + cluster.add_agent(owner_cap, blueprint); + } + + public fun add_agent( + self: &mut Cluster, + owner_cap: &ClusterOwnerCap, + agent: AgentBlueprint, + ) { + self.assert_owner(owner_cap); + + event::emit(AgentAddedToClusterEvent { + cluster: object::id(self), + agent_name: agent.get_name(), + agent: agent.get_originated_from_agent(), + model: agent.get_model_id(), + node: agent.get_node_id(), + }); + + self.blueprint.agents.insert(agent.get_name(), agent); + } + + public entry fun add_agent_entry( + self: &mut Cluster, + owner_cap: &ClusterOwnerCap, + model: &Model, + model_owner_cap: &ModelOwnerCap, + agent_name: String, + role: String, + goal: String, + backstory: String, + ) { + let agent_name_obj = agent::into_name(agent_name); + let model_info = model::get_info(model, model_owner_cap); + let agent = agent::new(agent_name_obj, role, goal, backstory, model_info); + + add_agent(self, owner_cap, agent); + } + + public fun add_task( + self: &mut Cluster, + owner_cap: &ClusterOwnerCap, + task: TaskBlueprint, + ) { + self.assert_owner(owner_cap); + + self.blueprint.tasks.push_back(task); + } + + /// Adds a task to the cluster tailored for off-chain clients. + public entry fun add_task_entry( + cluster: &mut Cluster, + owner_cap: &ClusterOwnerCap, + task_name: String, // converted to TaskName + agent_name: String, // converted to AgentName + description: String, + expected_output: String, + prompt: String, + context: String, + ) { + let task = task::new( + task::into_name(task_name), + agent::into_name(agent_name), + description, + expected_output, + prompt, + context, + ); + + // SAFETY: checks for permissions in the function + add_task(cluster, owner_cap, task); + } + + /// When using [add_task_entry] we create a task without a tool. + /// One can attach a tool to a task using this function. + /// You must ensure that the agent will know about this tool and + /// how to use it. + /// This will be improved upon in second iteration. + public entry fun attach_tool_to_task_entry( + cluster: &mut Cluster, + owner_cap: &ClusterOwnerCap, + task_name: String, + tool_name: String, + args: vector, + ) { + cluster.assert_owner(owner_cap); + + let (_, task) = find_task_mut( + &mut cluster.blueprint, task::into_name(task_name), + ); + task.attach_tool(tool::new(tool_name, args)); + } + + // === Destructors === + + public fun destroy_owner_cap(self: ClusterOwnerCap) { + let ClusterOwnerCap { id, .. 
} = self; + object::delete(id); + } + + // === Accessors === + + public fun get_cluster_blueprint(self: &Cluster): &ClusterBlueprint { &self.blueprint } + public fun get_execution_blueprint(self: &ClusterExecution): &ClusterBlueprint { &self.blueprint } + public fun get_execution_response_bytes(self: &ClusterExecution): vector { *string::bytes(&self.cluster_response) } + public fun get_execution_response(self: &ClusterExecution): String { self.cluster_response } + public fun get_execution_status(self: &ClusterExecution): String { self.status } + public fun get_execution_task_statuses(self: &ClusterExecution): &Table { &self.tasks } + public fun get_tasks(self: &ClusterBlueprint): &vector { &self.tasks } + public fun is_execution_idle(self: &ClusterExecution): bool { self.status == status_idle() } + public fun is_execution_running(self: &ClusterExecution): bool { self.status == status_running() } + public fun is_execution_successful(self: &ClusterExecution): bool { self.status == status_success() } + + // === Helpers === + + fun assert_owner(self: &Cluster, owner_cap: &ClusterOwnerCap) { + assert!(owner_cap.cluster == object::id(self), ENotClusterOwner); + } + + fun assert_execution_owner(self: &ClusterExecution, owner_cap: &ClusterOwnerCap) { + assert!(owner_cap.cluster == self.from_cluster, ENotClusterOwner); + } + + /// An off-chain node that runs the inference submits the completion of the + /// prompt to the chain. + /// + /// Before calling this function, the caller must verify that the tx sender + /// is permitted to submit the completion on the current agent's behalf. + fun submit_completion( + execution: &mut ClusterExecution, + completion: String, + ) { + // update task state to success and store the completion + let current_task_state = execution.tasks.borrow_mut( + execution.current_task, + ); + current_task_state.set_state_status(status_success()); + current_task_state.set_state_response(completion); + + // update the agent's last task response + let agent_state = execution.agents.borrow_mut( + current_task_state.get_state_agent_name(), + ); + agent_state.set_last_task_response(completion); + + add_message( + &mut execution.memory, + string::utf8(b"assistant"), + completion, + option::none(), + ); + + // find the next task + let (current_task_index, _) = current_task(execution); + let next_task_index = current_task_index + 1; + + if (next_task_index < execution.blueprint.tasks.length()) { + let next_task = execution.blueprint.tasks.borrow(next_task_index); + execution.current_task = next_task.get_name(); + execution.status = status_running(); + schedule_current_task_for_execution(execution); + } else { + execution.status = status_success(); + finalize_execution(execution, completion); + } + } + + /// This will set the current task to running state and emit an event that + /// wakes up off-chain service which will execute the task. 
+ fun schedule_current_task_for_execution( + execution: &mut ClusterExecution, + ) { + let curr_task_name = execution.current_task; + let task_state = execution.tasks.borrow_mut(curr_task_name); + + // the task must yet to be scheduled + assert!(task::get_state_status(task_state) == status_idle(), ECurrentTaskNotIdle); + + // useful for the first task when execution is in idle state + execution.status = status_running(); + + let (task_index, task) = current_task(execution); + let tool = task.get_tool(); + let agent = execution.blueprint.agents.get(&task.get_agent_name()); + + let task_prompt = task.get_prompt(); + let context = build_context(execution, task_index); + let agent_model = agent.get_model_info(); + + let execution_id = object::id(execution); + let task_state = execution.tasks.borrow_mut(curr_task_name); + task_state.set_state_status(status_running()); + + let mut final_prompt = string::utf8(b""); + final_prompt.append(context); + final_prompt.append(string::utf8(b"\n\nTask: ")); + final_prompt.append(task_prompt); + + // Off-chain node that runs the model's inference will pick up the event + // and submit completion with [`submit_completion`]. + prompt::emit_request_for_completion( + &agent_model, + string::utf8(b""), + final_prompt, + hash::sha3_256(*string::bytes(&final_prompt)), + 1000, // TODO: Get max tokens from the Agent + 70, // TODO: Get temperature from the Agent + vector::empty(), + execution_id, + tool, + ); + } + + /// The justification for building the context on-chain as opposed to + /// fetching the execution state off-chain and building it there is that it + /// simplifies the inference node's logic, in particular avoids a GET call + /// to the APIs to fetch the object. + fun build_context( + execution: &ClusterExecution, + task_index: u64, + ): String { + let mut context = string::utf8(b""); + + // Add memory context + let memory_context = get_context( + &execution.memory, PastNMessagesToIncludeInContext, + ); + string::append(&mut context, memory_context); + + // Add previous task's context if it exists + if (task_index > 0) { + let prev_task_name = execution + .blueprint + .tasks + .borrow(task_index - 1) + .get_name(); + let prev_task_state = table::borrow(&execution.tasks, prev_task_name); + let input_context = task::get_state_input_context(prev_task_state); + if (!string::is_empty(&input_context)) { + string::append(&mut context, string::utf8(b"\nPrevious Task Context: ")); + string::append(&mut context, input_context); + }; + }; + + context + } + + fun add_message( + memory: &mut Memory, + role: String, + content: String, + name: Option, + ) { + memory.messages.push_back(Message { role, content, name }); + } + + fun get_context(memory: &Memory, max_messages: u64): String { + let mut context = string::utf8(b""); + let len = memory.messages.length(); + let start = if (len > max_messages) { len - max_messages } else { 0 }; + + let mut i = start; + while (i < len) { + let message = memory.messages.borrow(i); + string::append(&mut context, message.role); + string::append(&mut context, string::utf8(b": ")); + string::append(&mut context, message.content); + string::append(&mut context, string::utf8(b"\n")); + i = i + 1; + }; + + context + } + + fun finalize_execution( + execution: &mut ClusterExecution, + completion: String, + ) { + // Get the last task's response (completion) + let final_response = completion; + + // Convert the final_response to a vector + let response_bytes = *string::bytes(&final_response); + + // Update the execution object + 
execution.cluster_response = final_response; + execution.status = status_success(); + + // Emit the response to the user + event::emit(ClusterResponseEvent { + cluster: object::id(execution), + cluster_name: execution.blueprint.name, + response: response_bytes, + }); + } + + /// Returns the index of the task within the blueprint's task vector and + /// reference to the task itself. + fun current_task(self: &ClusterExecution): (u64, &TaskBlueprint) { + find_task(&self.blueprint, self.current_task) + } + + /// Returns the index of the task within the blueprint's task vector and + /// reference to the task itself. + fun find_task(blueprint: &ClusterBlueprint, needle: TaskName): (u64, &TaskBlueprint) { + let mut i = 0; + while (i < vector::length(&blueprint.tasks)) { + let task = vector::borrow(&blueprint.tasks, i); + if (task::get_name(task) == needle) { + return (i, task) + }; + + i = i + 1; + }; + + std::debug::print(&utf8(b"Task not found")); + std::debug::print(&needle); + abort ETaskNotFound + } + + /// Same as `find_task` but returns a mutable reference to the task. + fun find_task_mut(blueprint: &mut ClusterBlueprint, needle: TaskName): (u64, &mut TaskBlueprint) { + let mut i = 0; + while (i < vector::length(&blueprint.tasks)) { + let task = vector::borrow_mut(&mut blueprint.tasks, i); + if (task::get_name(task) == needle) { + return (i, task) + }; + + i = i + 1; + }; + + std::debug::print(&utf8(b"Task not found")); + std::debug::print(&needle); + abort ETaskNotFound + } +} diff --git a/onchain/sources/consts.move b/onchain/sources/consts.move new file mode 100644 index 0000000..51461ac --- /dev/null +++ b/onchain/sources/consts.move @@ -0,0 +1,14 @@ +module talus::consts { + use std::string::{String, utf8}; + + // === Statuses === + + const StatusIdle: vector = b"IDLE"; + public fun status_idle(): String { utf8(StatusIdle) } + + const StatusRunning: vector = b"RUNNING"; + public fun status_running(): String { utf8(StatusRunning) } + + const StatusSuccess: vector = b"SUCCESS"; + public fun status_success(): String { utf8(StatusSuccess) } +} diff --git a/onchain/sources/model.move b/onchain/sources/model.move new file mode 100644 index 0000000..e730cf1 --- /dev/null +++ b/onchain/sources/model.move @@ -0,0 +1,296 @@ +module talus::model { + //! Models represent software that runs on a [`Node`]. + //! The on-chain definition is descriptive so that agent owners and cluster + //! owners have exact idea of what will power their apps. + //! + //! From a node we create models. + //! From a model we create agents. + //! From agents we create a clusters. + + use std::string::String; + use sui::event; + use sui::transfer::{share_object, transfer}; + use talus::node::Node; + + #[test_only] + use std::string::utf8; + + // === Errors === + + const EModelHashCannotBeEmpty: u64 = 1; + const EModelMismatch: u64 = 2; + const ENotModelOwner: u64 = 3; + + // === Data models === + + /// Shared object that is used to create agents. + /// + /// TBD: Discuss how can we pause or disable a model gracefully. + /// The problem is that we copy model information to agents and from + /// agents to clusters. This complicates updates. Especially it + /// complicates the pausing/resuming, because we have to pause an agent + /// and therefore all clusters that use it. + public struct Model has key, store { + id: UID, + info: ModelInfo, + } + + /// Allows updating of the model, halting and resuming it and can issue + /// an inference promise for agents. 
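+    ///
+    /// A sketch of how a model owner might hand out inference capacity
+    /// (`agent_creator` is a placeholder address; the promise is later
+    /// exchanged via [`redeem_inference_promise`]):
+    /// ```
+    /// let promise = model::issue_inference_promise(&owner_cap, ctx);
+    /// sui::transfer::public_transfer(promise, agent_creator);
+    /// ```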
+ public struct ModelOwnerCap has key, store { + id: UID, + model: ID, + } + + /// Model parameters that define what software is running on a [`Node`]. + public struct ModelInfo has store, drop, copy { + /// The ID of the [`Model`] that is associated with this info. + id: ID, + /// The ID of the [`Node`] that runs this model. + node: ID, + capacity: u64, + datasets: vector, + description: String, + family: String, + is_fine_tuned: bool, + is_open_source: bool, + max_context_length: u64, + model_hash: vector, + name: String, + num_params: u64, + token_price: u64, + url: String, + vendor: String, + } + + /// A model owner can issue an inference promise that can be used to create + /// agents. + /// Since right now we don't have a way to punish model owners for not + /// delivering the promised inference, as of this version of the protocol + /// this is a "promise". + /// + /// The model owner uses this object to manage how many agents can be + /// created from this model. + /// + /// This object can be treated as an NFT. + public struct ModelInferencePromise has key, store { + id: UID, + model: ID, + } + + // === Events === + + public struct ModelCreatedEvent has copy, drop { + by: address, + model: ID, + name: String, + node: ID, + owner_cap: ID, + } + + public struct ModelInferencePromiseIssuedEvent has copy, drop { + model: ID, + promise: ID, + } + + // === Constructors === + + /// Creates a new shared [`Model`] object. + public entry fun create( + node: &Node, + name: String, + model_hash: vector, + url: String, + token_price: u64, + capacity: u64, + num_params: u64, + description: String, + max_context_length: u64, + is_fine_tuned: bool, + family: String, + vendor: String, + is_open_source: bool, + datasets: vector, + ctx: &mut TxContext, + ) { + assert!(!vector::is_empty(&model_hash), EModelHashCannotBeEmpty); + + let model_uid = object::new(ctx); + + let info = ModelInfo { + id: object::uid_to_inner(&model_uid), + name, + model_hash, + node: object::id(node), + url, + token_price, + capacity, + num_params, + description, + max_context_length, + is_fine_tuned, + family, + vendor, + is_open_source, + datasets, + }; + + let model = Model { + id: model_uid, + info, + }; + + let owner_cap = ModelOwnerCap { + id: object::new(ctx), + model: object::id(&model), + }; + + event::emit(ModelCreatedEvent { + by: ctx.sender(), + model: object::id(&model), + name: info.name, + node: info.node, + owner_cap: object::id(&owner_cap), + }); + + transfer(owner_cap, ctx.sender()); + share_object(model); + } + + /// Whoever holds this object can create agents. + public fun issue_inference_promise( + owner_cap: &ModelOwnerCap, ctx: &mut TxContext, + ): ModelInferencePromise { + let promise = ModelInferencePromise { + id: object::new(ctx), + model: owner_cap.model, + }; + + event::emit(ModelInferencePromiseIssuedEvent { + model: owner_cap.model, + promise: object::id(&promise), + }); + + promise + } + + /// Creates another owner cap for the same model. + public fun clone_owner_cap( + owner_cap: &ModelOwnerCap, ctx: &mut TxContext, + ): ModelOwnerCap { + ModelOwnerCap { + id: object::new(ctx), + model: owner_cap.model, + } + } + + /// We can create agents with the [`ModelInfo`] object. + public fun redeem_inference_promise( + model: &Model, + promise: ModelInferencePromise, + ): ModelInfo { + assert!(object::id(model) == promise.model, EModelMismatch); + + let ModelInferencePromise { id, .. 
} = promise; + object::delete(id); + + model.info + } + + // === Destructors === + + public fun destroy_owner_cap(self: ModelOwnerCap) { + let ModelOwnerCap { id, .. } = self; + object::delete(id); + } + + // === Accessors === + + /// Only the owner can get the info on chain as this is what we create + /// agents from. + public fun get_info(self: &Model, owner_cap: &ModelOwnerCap): ModelInfo { + assert_owner(self, owner_cap); + self.info + } + + public fun get_name(self: &ModelInfo): String { self.name } + public fun get_id(self: &ModelInfo): ID { self.id } + public fun get_node_id(self: &ModelInfo): ID { self.node } + + public fun get_model_id(self: &ModelOwnerCap): ID { self.model } + + // === Helpers === + + fun assert_owner(model: &Model, owner_cap: &ModelOwnerCap) { + assert!(owner_cap.model == object::id(model), ENotModelOwner); + } + + // === Tests === + + #[test_only] + public fun new_info_for_testing( + id: ID, + name: String, + model_hash: vector, + node: ID, + url: String, + token_price: u64, + capacity: u64, + num_params: u64, + description: String, + max_context_length: u64, + is_fine_tuned: bool, + family: String, + vendor: String, + is_open_source: bool, + datasets: vector, + ): ModelInfo { + ModelInfo { + id, + name, + model_hash, + node, + url, + token_price, + capacity, + num_params, + description, + max_context_length, + is_fine_tuned, + family, + vendor, + is_open_source, + datasets, + } + } + + #[test_only] + /// Creates a new [`ModelInfo`] object with mock data + public fun new_mock_info_for_testing(ctx: &mut TxContext): ModelInfo { + let mock_node_uid = object::new(ctx); + let mock_node_id = object::uid_to_inner(&mock_node_uid); + object::delete(mock_node_uid); + + let mock_model_uid = object::new(ctx); + let mock_model_id = object::uid_to_inner(&mock_model_uid); + object::delete(mock_model_uid); + + new_info_for_testing( + mock_model_id, + utf8(b"Test Model"), + b"model_hash", + mock_node_id, + utf8(b"http://example.com"), + 100, + 1000, + 1000000, + utf8(b"Test Description"), + 16, + false, + utf8(b"Test Family"), + utf8(b"Test Vendor"), + false, + vector::empty(), + ) + } +} diff --git a/onchain/sources/node.move b/onchain/sources/node.move new file mode 100644 index 0000000..2a4de05 --- /dev/null +++ b/onchain/sources/node.move @@ -0,0 +1,62 @@ +module talus::node { + //! A [`Node`] is a computational unit that can run one or more models. + //! + //! From a node we create models. + //! From a model we create agents. + //! From agents we create a clusters. + + use std::string::String; + use sui::event; + use sui::transfer::transfer; + + // === Data models === + + /// Meant as an owned object. + /// By having ownership of this object you can create new models that are + /// bound to this node. + /// + /// TODO: In future this should have the same ownership pattern as models + /// and agents. 
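+    ///
+    /// Illustrative registration call; the values mirror the unit test and are
+    /// placeholders only:
+    /// ```
+    /// node::create(
+    ///     string::utf8(b"Test Node"),
+    ///     string::utf8(b"GPU"),
+    ///     16,
+    ///     vector::empty(),
+    ///     vector::empty(),
+    ///     ctx,
+    /// );
+    /// ```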
+ public struct Node has key, store { + id: UID, + name: String, + node_type: String, + gpu_memory: u64, + image_hash: vector, + external_arguments: vector, + } + + // === Events === + + public struct NodeCreatedEvent has copy, drop { + node: ID, + name: String, + } + + // === Constructors === + + public entry fun create( + name: String, + node_type: String, + gpu_memory: u64, + image_hash: vector, + external_arguments: vector, + ctx: &mut TxContext, + ) { + let node = Node { + id: object::new(ctx), + name, + node_type, + gpu_memory, + image_hash, + external_arguments, + }; + + event::emit(NodeCreatedEvent { + node: object::id(&node), + name: node.name, + }); + + transfer(node, ctx.sender()); + } +} diff --git a/onchain/sources/prompt.move b/onchain/sources/prompt.move new file mode 100644 index 0000000..37c294c --- /dev/null +++ b/onchain/sources/prompt.move @@ -0,0 +1,72 @@ +module talus::prompt { + //! A prompt represents a request for a model to generate a response. + //! + //! The [`RequestForCompletionEvent`] is emitted every time cluster execution + //! is in need of a completion. + //! The off-chain node that runs the model's inference listens to this event + //! and submits the completion back to the chain. + + use std::string::{Self, String}; + use sui::event; + use talus::model::{Self, ModelInfo}; + use talus::tool::Tool; + + // === Errors === + + const EPromptCannotBeEmpty: u64 = 1; + const ETemperatureMustBeBetweenHundredAndZero: u64 = 2; + + // === Events === + + public struct RequestForCompletionEvent has copy, drop { + cluster_execution: ID, + node: ID, + model: ID, + external_provider: String, + model_name: String, + prompt_contents: String, + prompt_hash: vector, + max_tokens: u64, + /// A value between 0 and 100. + temperature: u8, + extra_arguments: vector, + tool: Option, + } + + // === Package protected === + + /// Emits an event that's listened to by the off-chain node that runs the + /// model. + /// + /// This is called within the context of the cluster execution hence package + /// protected. + public(package) fun emit_request_for_completion( + model: &ModelInfo, + external_provider: String, + prompt_contents: String, + prompt_hash: vector, + max_tokens: u64, + temperature: u8, // 0-200 + extra_arguments: vector, + cluster_execution: ID, + tool: Option, + ) { + assert!(temperature <= 200, ETemperatureMustBeBetweenHundredAndZero); + assert!(temperature >= 0, ETemperatureMustBeBetweenHundredAndZero); + assert!(string::length(&prompt_contents) > 0, EPromptCannotBeEmpty); + + event::emit(RequestForCompletionEvent { + node: model::get_node_id(model), + model: model::get_id(model), + cluster_execution, + model_name: model::get_name(model), + external_provider: external_provider, + prompt_contents, + prompt_hash, + max_tokens, + temperature, + extra_arguments, + tool, + }); + } +} diff --git a/onchain/sources/task.move b/onchain/sources/task.move new file mode 100644 index 0000000..60b1e61 --- /dev/null +++ b/onchain/sources/task.move @@ -0,0 +1,169 @@ +module talus::task { + //! A task represents units of work within the Cluster's execution. + //! It's always bound to a specific agent that is supposed to work on it. + + use std::string::{Self, String}; + use talus::agent::AgentName; + use talus::consts::{status_idle, status_running, status_success}; + use talus::tool::Tool; + + // === Data models === + + /// Defines specifically what's the agent supposed to do. + public struct TaskBlueprint has store, copy, drop { + /// Tasks are identified by their name. 
+ /// This implies that task name must be unique within a single + /// [`talus::cluster::Cluster`]. + name: TaskName, + /// Which agent is responsible for running this task to completion. + /// This agent must exist within the same [`talus::cluster::Cluster`] as this + /// task. + agent: AgentName, + description: String, + expected_output: String, + prompt: String, + context: String, + /// If provided then the node will execute this tool and use the result + /// to run an inference using the prompt. + /// The LLM output is then uploaded as the response for this task. + tool: Option, + } + + /// Puts a task into a concrete situation. + public struct TaskState has store { + /// You can find the information about this task by searching the Cluster's + /// tasks by name. + name: TaskName, + agent_name: AgentName, + /// TBD: This is used to build context but it's never changed from its + /// initial value of empty string. + input_context: String, + /// Enumeration of + /// - `StatusIdle` + /// - `StatusRunning` + /// - `StatusSuccess` + /// + /// We use string constants to be more friendly to explorers. + status: String, + prompt: Option, + response: String, + } + + /// Task name serves as an identifier for a task. + public struct TaskName has store, copy, drop { + inner: String, + } + + // === Constructors === + + /// Returns a new instance of a [`TaskBlueprint`]. + public fun new( + name: TaskName, + agent: AgentName, + description: String, + expected_output: String, + prompt: String, + context: String, + ): TaskBlueprint { + TaskBlueprint { + name, + description, + expected_output, + agent, + prompt, + context, + tool: option::none(), + } + } + + /// Returns a new instance of a [`TaskBlueprint`] + /// with a tool attached. + public fun new_with_tool( + name: TaskName, + agent: AgentName, + description: String, + expected_output: String, + prompt: String, + context: String, + tool: Tool, + ): TaskBlueprint { + TaskBlueprint { + name, + description, + expected_output, + agent, + prompt, + context, + tool: option::some(tool), + } + } + + public fun new_state( + name: TaskName, + agent_name: AgentName, + ): TaskState { + TaskState { + name, + agent_name, + input_context: string::utf8(b""), + status: status_idle(), + prompt: option::none(), + response: string::utf8(b""), + } + } + + /// Create a new instance of a [`TaskName`] from given string. + /// Name serves as an identifier. + public fun into_name(s: String): TaskName { + TaskName { inner: s } + } + + /// Convert a [`TaskName`] into a string. 
+ public fun into_string(name: TaskName): String { + name.inner + } + + // === State management === + + public fun attach_tool(self: &mut TaskBlueprint, tool: Tool) { + self.tool = option::some(tool); + } + + // === Accessors === + + public fun get_agent_name(self: &TaskBlueprint): AgentName { self.agent } + public fun get_context(self: &TaskBlueprint): String { self.context } + public fun get_description(self: &TaskBlueprint): String { self.description } + public fun get_expected_output(self: &TaskBlueprint): String { self.expected_output } + public fun get_name(self: &TaskBlueprint): TaskName { self.name } + public fun get_prompt(self: &TaskBlueprint): String { self.prompt } + public fun get_tool(self: &TaskBlueprint): Option { self.tool } + + public fun get_state_agent_name(self: &TaskState): AgentName { self.agent_name } + public fun get_state_input_context(self: &TaskState): String { self.input_context } + public fun get_state_output_bytes(self: &TaskState): vector { *string::bytes(&self.response) } + public fun get_state_status(self: &TaskState): String { self.status } + public fun is_idle(self: &TaskState): bool { self.status == status_idle() } + public fun is_running(self: &TaskState): bool { self.status == status_running() } + public fun is_successful(self: &TaskState): bool { self.status == status_success() } + + // === Package protected === + + public(package) fun set_state_status(self: &mut TaskState, status: String) { self.status = status; } + public(package) fun set_state_response(self: &mut TaskState, response: String) { self.response = response; } + public(package) fun set_state_prompt(self: &mut TaskState, prompt: ID) { self.prompt = option::some(prompt); } + + // === Tests === + + #[test_only] + public fun create_test_state(agent: AgentName): TaskState { + TaskState { + agent_name: agent, + name: into_name(string::utf8(b"Write Talus Poem")), + input_context: string::utf8(b"Talus is a decentralized network focusing on AI and blockchain"), + status: status_idle(), + prompt: option::none(), + response: string::utf8(b""), + } + } +} diff --git a/onchain/sources/tests/cluster_tests.move b/onchain/sources/tests/cluster_tests.move new file mode 100644 index 0000000..ad55b4a --- /dev/null +++ b/onchain/sources/tests/cluster_tests.move @@ -0,0 +1,195 @@ +#[test_only] +module talus::cluster_tests { + use std::string; + use sui::table; + use sui::test_scenario::{Self, Scenario, ctx}; + use sui::test_utils::print; + use talus::agent::{Self, AgentBlueprint, AgentName}; + use talus::cluster::{Self, Cluster, ClusterExecution, ClusterOwnerCap}; + use talus::model::{Self, ModelInfo}; + use talus::task::{Self, TaskBlueprint, TaskName}; + + #[test] + /// The goal of this test is to verify that a cluster can be set up and + /// that two tasks can be executed successfully, proving that the state + /// machine works. + /// + /// 1. We setup a cluster with two tasks: analyze poem request and create poem. + /// 2. We simulate the execution of the cluster with a valid input and verify + /// that the cluster execution is in the correct state. + /// 3. We simulate the completion of the first task (from the POV of an agent) + /// and verify that the cluster execution is in the correct state. + /// 4. We simulate the completion of the second task (from the POV of an agent) + /// and verify that the cluster execution is successful. + fun test_poem_creation_cluster() { + let owner = @0x1; + let mut scenario = test_scenario::begin(owner); + + // + // 1. 
+ // + setup_poem_creation_cluster(&mut scenario); + + // + // 2. + // + test_scenario::next_tx(&mut scenario, owner); + { + print(b"Creating and executing cluster with valid input"); + let cluster = test_scenario::take_shared(&scenario); + let input = string::utf8(b"Create a poem about nature in a romantic style"); + cluster::execute(&cluster, input, ctx(&mut scenario)); + test_scenario::return_shared(cluster); + }; + test_scenario::next_tx(&mut scenario, owner); + { + let execution = test_scenario::take_shared(&scenario); + verify_initial_state(&execution); + test_scenario::return_shared(execution); + }; + + // + // 3. + // + test_scenario::next_tx(&mut scenario, owner); + { + let mut execution = test_scenario::take_shared(&scenario); + let owner_cap = test_scenario::take_from_address(&scenario, owner); + let analysis_result = string::utf8(b"The user has requested a romantic poem about nature. Both style (romantic) and subject (nature) are present."); + cluster::submit_completion_as_cluster_owner(&mut execution, &owner_cap, analysis_result); + verify_analysis_state(&execution); + test_scenario::return_shared(execution); + test_scenario::return_to_address(owner, owner_cap); + }; + + // + // 4. + // + test_scenario::next_tx(&mut scenario, owner); + { + let mut execution = test_scenario::take_shared(&scenario); + let owner_cap = test_scenario::take_from_address(&scenario, owner); + let poem = string::utf8(b"Gentle breeze whispers through leaves,\nNature's love song in the air,\nMoonlit meadows, stars above,\nA romantic scene beyond compare."); + cluster::submit_completion_as_cluster_owner(&mut execution, &owner_cap, poem); + verify_final_state(&execution); + test_scenario::return_shared(execution); + test_scenario::return_to_address(owner, owner_cap); + }; + + test_scenario::end(scenario); + } + + fun verify_initial_state(execution: &ClusterExecution) { + assert!(cluster::is_execution_running(execution), 0); + let tasks = cluster::get_execution_task_statuses(execution); + assert!(table::length(tasks) == 2, 1); + + let task1 = table::borrow(tasks, task1_name()); + assert!(task::is_running(task1), 2); + let task2 = table::borrow(tasks, task2_name()); + assert!(task::is_idle(task2), 3); + } + + fun verify_analysis_state(execution: &ClusterExecution) { + assert!(cluster::is_execution_running(execution), 0); + let tasks = cluster::get_execution_task_statuses(execution); + + let task1 = table::borrow(tasks, task1_name()); + assert!(task::is_successful(task1), 1); + let task2 = table::borrow(tasks, task2_name()); + assert!(task::is_running(task2), 2); + } + + fun verify_final_state(execution: &ClusterExecution) { + assert!(cluster::is_execution_successful(execution), 0); + let tasks = cluster::get_execution_task_statuses(execution); + + let task1 = table::borrow(tasks, task1_name()); + assert!(task::is_successful(task1), 2); + let task2 = table::borrow(tasks, task2_name()); + assert!(task::is_successful(task2) , 3); + + let response = cluster::get_execution_response(execution); + assert!(string::index_of(&response, &string::utf8(b"Gentle breeze")) == 0, 4); + } + + fun setup_poem_creation_cluster(scenario: &mut Scenario) { + let ctx = ctx(scenario); + + let model = model::new_mock_info_for_testing(ctx); + let manager_agent = create_manager_agent(&model); + let poet_agent = create_poet_agent(&model); + let poet_agent_name = agent::get_name(&poet_agent); + + let task1 = create_task1(poet_agent_name); + let task2 = create_task2(poet_agent_name); + + cluster::create( + string::utf8(b"Poem Creation 
Cluster"), + string::utf8(b"A cluster for creating custom poems"), + ctx + ); + + test_scenario::next_tx(scenario, @0x1); + { + let mut cluster = test_scenario::take_shared(scenario); + let cap = test_scenario::take_from_address(scenario, @0x1); + cluster::add_agent(&mut cluster, &cap, manager_agent); + cluster::add_agent(&mut cluster, &cap, poet_agent); + cluster::add_task(&mut cluster, &cap, task1); + cluster::add_task(&mut cluster, &cap, task2); + test_scenario::return_shared(cluster); + test_scenario::return_to_address(@0x1, cap); + }; + } + + fun create_manager_agent(model: &ModelInfo): AgentBlueprint { + agent::new( + agent::into_name(string::utf8(b"Manager")), + string::utf8(b"Poem Creation Manager"), + string::utf8(b"Manage the poem creation process"), + string::utf8(b"An AI trained to oversee poem creation"), + *model, + ) + } + + fun create_poet_agent(model: &ModelInfo): AgentBlueprint { + agent::new( + agent::into_name(string::utf8(b"Poet")), + string::utf8(b"AI Poet"), + string::utf8(b"Create beautiful poems"), + string::utf8(b"An AI trained to create poetic masterpieces"), + *model, + ) + } + + fun task1_name(): TaskName { + task::into_name(string::utf8(b"Analyze Poem Request")) + } + + fun create_task1(agent: AgentName): TaskBlueprint { + task::new( + task1_name(), + agent, + string::utf8(b"Analyze the user's request for poem creation"), + string::utf8(b"A structured analysis of the poem request"), + string::utf8(b"Analyze the user's input for poem style and subject. If either is missing, prepare an error message."), + string::utf8(b""), + ) + } + + fun task2_name(): TaskName { + task::into_name(string::utf8(b"Create Poem")) + } + + fun create_task2(agent: AgentName): TaskBlueprint { + task::new( + task2_name(), + agent, + string::utf8(b"Create a poem based on the analyzed request"), + string::utf8(b"A poem matching the user's requirements"), + string::utf8(b"Create a poem based on the provided style and subject. 
Be creative and inspiring."), + string::utf8(b""), + ) + } +} diff --git a/onchain/sources/tests/node_tests.move b/onchain/sources/tests/node_tests.move new file mode 100644 index 0000000..ecb48cf --- /dev/null +++ b/onchain/sources/tests/node_tests.move @@ -0,0 +1,36 @@ +#[test_only] +module talus::node_tests { + use sui::test_scenario; + use talus::node::{Self, Node}; + use std::string; + + #[test] + fun test_create_node() { + let mut scenario = test_scenario::begin(@0x1); + let ctx = test_scenario::ctx(&mut scenario); + + // Create a node + node::create( + string::utf8(b"Test Node"), + string::utf8(b"GPU"), + 16, + vector::empty(), + vector::empty(), + ctx + ); + + // Move to the next transaction + test_scenario::next_tx(&mut scenario, @0x1); + + // Check if the node was created and owned + assert!(test_scenario::has_most_recent_for_sender(&scenario), 0); + + // Get the created node + let node = test_scenario::take_from_sender(&scenario); + + // Return the node to the scenario + test_scenario::return_to_sender(&scenario, node); + + test_scenario::end(scenario); + } +} diff --git a/onchain/sources/tests/prompt_tests.move b/onchain/sources/tests/prompt_tests.move new file mode 100644 index 0000000..fc7ed43 --- /dev/null +++ b/onchain/sources/tests/prompt_tests.move @@ -0,0 +1,97 @@ +#[test_only] +module talus::prompt_tests { + use std::string; + use sui::test_scenario::{Self, ctx}; + use talus::model; + use talus::prompt; + + #[test] + fun test_emit_request_for_completion() { + let owner = @0x1; + let mut scenario = test_scenario::begin(owner); + + // Create a mock Model + let model = model::new_mock_info_for_testing(ctx(&mut scenario)); + + test_scenario::next_tx(&mut scenario, owner); + { + let mock_execution_id = object::new(ctx(&mut scenario)); + + prompt::emit_request_for_completion( + &model, + string::utf8(b"Test Provider"), + string::utf8(b"Test prompt"), + b"test_hash", + 100, + 50, + vector::empty(), + object::uid_to_inner(&mock_execution_id), + option::none(), // no tool + ); + + object::delete(mock_execution_id); + }; + + test_scenario::end(scenario); + } + + #[test] + #[expected_failure(abort_code = prompt::ETemperatureMustBeBetweenHundredAndZero)] + fun test_invalid_temperature() { + let owner = @0x1; + let mut scenario = test_scenario::begin(owner); + + // Create a mock Model + let model = model::new_mock_info_for_testing(ctx(&mut scenario)); + + test_scenario::next_tx(&mut scenario, owner); + { + let mock_execution_id = object::new(ctx(&mut scenario)); + prompt::emit_request_for_completion( + &model, + string::utf8(b"Test Provider"), + string::utf8(b"Test prompt"), + b"test_hash", + 100, + 201, // Invalid temperature + vector::empty(), + object::uid_to_inner(&mock_execution_id), + option::none(), // no tool + ); + + object::delete(mock_execution_id); + }; + + test_scenario::end(scenario); + } + + #[test] + #[expected_failure(abort_code = prompt::EPromptCannotBeEmpty)] + fun test_empty_prompt() { + let owner = @0x1; + let mut scenario = test_scenario::begin(owner); + + // Create a mock Model + let model = model::new_mock_info_for_testing(ctx(&mut scenario)); + + test_scenario::next_tx(&mut scenario, owner); + { + let mock_execution_id = object::new(ctx(&mut scenario)); + prompt::emit_request_for_completion( + &model, + string::utf8(b"Test Provider"), + string::utf8(b""), // Empty prompt + b"test_hash", + 100, + 50, + vector::empty(), + object::uid_to_inner(&mock_execution_id), + option::none(), // no tool + ); + + object::delete(mock_execution_id); + }; + + 
test_scenario::end(scenario);
+    }
+}
diff --git a/onchain/sources/tool.move b/onchain/sources/tool.move
new file mode 100755
index 0000000..6ba96e7
--- /dev/null
+++ b/onchain/sources/tool.move
@@ -0,0 +1,31 @@
+module talus::tool {
+    //! A tool is a utility or resource that an agent can use to complete tasks.
+    //! A tool is optionally stored on a [`talus::task::TaskBlueprint`] and, if provided,
+    //! the agent will use the result of the tool to submit a response.
+    //! A tool can have side-effects.
+    //!
+    //! An example of a tool would be a wiki search or a smart contract invocation.
+
+    use std::string::String;
+
+    // === Data models ===
+
+    /// A tool's name serves as its identifier.
+    public struct Tool has store, copy, drop {
+        name: String,
+        /// At the moment a tool can be parametrized only up front, when creating a
+        /// cluster.
+        args: vector<String>,
+    }
+
+    // === Constructors ===
+
+    public fun new(name: String, args: vector<String>): Tool {
+        Tool { name, args }
+    }
+
+    // === Accessors ===
+
+    public fun get_name(self: &Tool): String { self.name }
+    public fun get_args(self: &Tool): vector<String> { self.args }
+}
diff --git a/rustfmt.toml b/rustfmt.toml
new file mode 100644
index 0000000..aa5026e
--- /dev/null
+++ b/rustfmt.toml
@@ -0,0 +1,18 @@
+edition = "2021"
+
+reorder_imports = true
+use_try_shorthand = true
+remove_nested_parens = true
+reorder_modules = true
+use_field_init_shorthand = true
+max_width = 80
+
+imports_granularity = "One"
+group_imports = "One"
+imports_layout = "HorizontalVertical"
+enum_discrim_align_threshold = 40
+hex_literal_case = "Lower"
+newline_style = "Unix"
+normalize_comments = true
+normalize_doc_attributes = true
+reorder_impl_items = true