From 135c60a572081f8ccf8327b9b2f5acf0b665f5c6 Mon Sep 17 00:00:00 2001
From: Iulia Feroli
Date: Tue, 26 Mar 2024 15:23:34 +0100
Subject: [PATCH] Add inference doc code examples (#2488)

* Add inference examples

* switching from `model_id` to `inference_id` as per discussion.
---
 .../00fea15cbca83be9d5f1a024ff2ec708.asciidoc | 18 ++++++++++++++++++
 .../10c3fe2265bb34964bd1005f9da66773.asciidoc | 18 ++++++++++++++++++
 .../13ecdf99114098c76b050397d9c3d4e6.asciidoc | 13 +++++++++++++
 .../3541d4a85e27b2c3896a7a7ee98b4b37.asciidoc |  2 +-
 .../46884e00674f82e7a7bc8b418d1777de.asciidoc | 20 ++++++++++++++++++++
 .../4e3414fc712b16311f9e433dd366f49d.asciidoc | 10 ++++++++++
 .../4ea91a3ff42de540bb4c9ef268d607a6.asciidoc | 15 +++++++++++++++
 .../73be1f93d789264e5b972ddb5991bc66.asciidoc |  2 +-
 .../87733deeea4b441b595d19a0f97346f0.asciidoc |  2 +-
 .../8e286a205a1f84f888a6d99f2620c80e.asciidoc |  2 +-
 .../9d79645ab3a9da3f63c54a1516214a5a.asciidoc |  2 +-
 .../a4a3c3cd09efa75168dab90105afb2e9.asciidoc | 10 ++++++++++
 .../b7a9f60b3646efe3834ca8381f8aa560.asciidoc |  2 +-
 .../c0f4cbcb104747f38acdbc9a30bd13bf.asciidoc | 15 +++++++++++++++
 .../eee6110831c08b9c1b3f56b24656e95b.asciidoc | 17 +++++++++++++++++
 utils/generate-examples.py                    |  4 ++++
 16 files changed, 146 insertions(+), 6 deletions(-)
 create mode 100644 docs/examples/00fea15cbca83be9d5f1a024ff2ec708.asciidoc
 create mode 100644 docs/examples/10c3fe2265bb34964bd1005f9da66773.asciidoc
 create mode 100644 docs/examples/13ecdf99114098c76b050397d9c3d4e6.asciidoc
 create mode 100644 docs/examples/46884e00674f82e7a7bc8b418d1777de.asciidoc
 create mode 100644 docs/examples/4e3414fc712b16311f9e433dd366f49d.asciidoc
 create mode 100644 docs/examples/4ea91a3ff42de540bb4c9ef268d607a6.asciidoc
 create mode 100644 docs/examples/a4a3c3cd09efa75168dab90105afb2e9.asciidoc
 create mode 100644 docs/examples/c0f4cbcb104747f38acdbc9a30bd13bf.asciidoc
 create mode 100644 docs/examples/eee6110831c08b9c1b3f56b24656e95b.asciidoc

diff --git a/docs/examples/00fea15cbca83be9d5f1a024ff2ec708.asciidoc b/docs/examples/00fea15cbca83be9d5f1a024ff2ec708.asciidoc
new file mode 100644
index 000000000..e92d8e12d
--- /dev/null
+++ b/docs/examples/00fea15cbca83be9d5f1a024ff2ec708.asciidoc
@@ -0,0 +1,18 @@
+// inference/put-inference.asciidoc:276
+
+[source, python]
+----
+resp = client.inference.put_model(
+    task_type="text_embedding",
+    inference_id="my-e5-model",
+    body={
+        "service": "elasticsearch",
+        "service_settings": {
+            "num_allocations": 1,
+            "num_threads": 1,
+            "model_id": ".multilingual-e5-small",
+        },
+    },
+)
+print(resp)
+----
\ No newline at end of file
diff --git a/docs/examples/10c3fe2265bb34964bd1005f9da66773.asciidoc b/docs/examples/10c3fe2265bb34964bd1005f9da66773.asciidoc
new file mode 100644
index 000000000..01acd4ce7
--- /dev/null
+++ b/docs/examples/10c3fe2265bb34964bd1005f9da66773.asciidoc
@@ -0,0 +1,18 @@
+// inference/put-inference.asciidoc:371
+
+[source, python]
+----
+resp = client.inference.put_model(
+    task_type="text_embedding",
+    inference_id="my-msmarco-minilm-model",
+    body={
+        "service": "elasticsearch",
+        "service_settings": {
+            "num_allocations": 1,
+            "num_threads": 1,
+            "model_id": "msmarco-MiniLM-L12-cos-v5",
+        },
+    },
+)
+print(resp)
+----
\ No newline at end of file
diff --git a/docs/examples/13ecdf99114098c76b050397d9c3d4e6.asciidoc b/docs/examples/13ecdf99114098c76b050397d9c3d4e6.asciidoc
new file mode 100644
index 000000000..5aeeb40da
--- /dev/null
+++ b/docs/examples/13ecdf99114098c76b050397d9c3d4e6.asciidoc
@@ -0,0 +1,13 @@
+// inference/post-inference.asciidoc:72
+
+[source, python]
+----
+resp = client.inference.inference(
+    task_type="sparse_embedding",
+    inference_id="my-elser-model",
+    body={
+        "input": "The sky above the port was the color of television tuned to a dead channel."
+    },
+)
+print(resp)
+----
\ No newline at end of file
diff --git a/docs/examples/3541d4a85e27b2c3896a7a7ee98b4b37.asciidoc b/docs/examples/3541d4a85e27b2c3896a7a7ee98b4b37.asciidoc
index e189b1def..2c5244e74 100644
--- a/docs/examples/3541d4a85e27b2c3896a7a7ee98b4b37.asciidoc
+++ b/docs/examples/3541d4a85e27b2c3896a7a7ee98b4b37.asciidoc
@@ -1,4 +1,4 @@
-// health/health.asciidoc:470
+// health/health.asciidoc:478
 
 [source, python]
 ----
diff --git a/docs/examples/46884e00674f82e7a7bc8b418d1777de.asciidoc b/docs/examples/46884e00674f82e7a7bc8b418d1777de.asciidoc
new file mode 100644
index 000000000..ffa7943d2
--- /dev/null
+++ b/docs/examples/46884e00674f82e7a7bc8b418d1777de.asciidoc
@@ -0,0 +1,20 @@
+// inference/put-inference.asciidoc:252
+
+[source, python]
+----
+
+resp = client.inference.put_model(
+    task_type="text_embedding",
+    inference_id="cohere_embeddings",
+    body={
+        "service": "cohere",
+        "service_settings": {
+            "api_key": "api_key",
+            "model_id": "embed-english-v3.0",
+            "embedding_type": "int8",
+        },
+        "task_settings": {},
+    },
+)
+print(resp)
+----
\ No newline at end of file
diff --git a/docs/examples/4e3414fc712b16311f9e433dd366f49d.asciidoc b/docs/examples/4e3414fc712b16311f9e433dd366f49d.asciidoc
new file mode 100644
index 000000000..b963b5e43
--- /dev/null
+++ b/docs/examples/4e3414fc712b16311f9e433dd366f49d.asciidoc
@@ -0,0 +1,10 @@
+// inference/delete-inference.asciidoc:51
+
+[source, python]
+----
+resp = client.inference.delete_model(
+    task_type="sparse_embedding",
+    inference_id="my-elser-model",
+)
+print(resp)
+----
\ No newline at end of file
diff --git a/docs/examples/4ea91a3ff42de540bb4c9ef268d607a6.asciidoc b/docs/examples/4ea91a3ff42de540bb4c9ef268d607a6.asciidoc
new file mode 100644
index 000000000..9e9df674a
--- /dev/null
+++ b/docs/examples/4ea91a3ff42de540bb4c9ef268d607a6.asciidoc
@@ -0,0 +1,15 @@
+// inference/put-inference.asciidoc:301
+
+[source, python]
+----
+resp = client.inference.put_model(
+    task_type="sparse_embedding",
+    inference_id="my-elser-model",
+    body={
+        "service": "elser",
+        "service_settings": {"num_allocations": 1, "num_threads": 1},
+        "task_settings": {},
+    },
+)
+print(resp)
+----
\ No newline at end of file
diff --git a/docs/examples/73be1f93d789264e5b972ddb5991bc66.asciidoc b/docs/examples/73be1f93d789264e5b972ddb5991bc66.asciidoc
index d405994df..23c119b2a 100644
--- a/docs/examples/73be1f93d789264e5b972ddb5991bc66.asciidoc
+++ b/docs/examples/73be1f93d789264e5b972ddb5991bc66.asciidoc
@@ -1,4 +1,4 @@
-// setup/logging-config.asciidoc:156
+// setup/logging-config.asciidoc:158
 
 [source, python]
 ----
diff --git a/docs/examples/87733deeea4b441b595d19a0f97346f0.asciidoc b/docs/examples/87733deeea4b441b595d19a0f97346f0.asciidoc
index 7dce86873..63e252c69 100644
--- a/docs/examples/87733deeea4b441b595d19a0f97346f0.asciidoc
+++ b/docs/examples/87733deeea4b441b595d19a0f97346f0.asciidoc
@@ -1,4 +1,4 @@
-// health/health.asciidoc:463
+// health/health.asciidoc:471
 
 [source, python]
 ----
diff --git a/docs/examples/8e286a205a1f84f888a6d99f2620c80e.asciidoc b/docs/examples/8e286a205a1f84f888a6d99f2620c80e.asciidoc
index a3b8bd6e5..0d10bc179 100644
--- a/docs/examples/8e286a205a1f84f888a6d99f2620c80e.asciidoc
+++ b/docs/examples/8e286a205a1f84f888a6d99f2620c80e.asciidoc
@@ -1,4 +1,4 @@
-// setup/logging-config.asciidoc:233
+// setup/logging-config.asciidoc:235
 
 [source, python]
 ----
diff --git a/docs/examples/9d79645ab3a9da3f63c54a1516214a5a.asciidoc b/docs/examples/9d79645ab3a9da3f63c54a1516214a5a.asciidoc
index 974117af4..68c134d0a 100644
--- a/docs/examples/9d79645ab3a9da3f63c54a1516214a5a.asciidoc
+++ b/docs/examples/9d79645ab3a9da3f63c54a1516214a5a.asciidoc
@@ -1,4 +1,4 @@
-// health/health.asciidoc:455
+// health/health.asciidoc:463
 
 [source, python]
 ----
diff --git a/docs/examples/a4a3c3cd09efa75168dab90105afb2e9.asciidoc b/docs/examples/a4a3c3cd09efa75168dab90105afb2e9.asciidoc
new file mode 100644
index 000000000..e77b1eeae
--- /dev/null
+++ b/docs/examples/a4a3c3cd09efa75168dab90105afb2e9.asciidoc
@@ -0,0 +1,10 @@
+// inference/get-inference.asciidoc:68
+
+[source, python]
+----
+resp = client.inference.get_model(
+    task_type="sparse_embedding",
+    inference_id="my-elser-model",
+)
+print(resp)
+----
\ No newline at end of file
diff --git a/docs/examples/b7a9f60b3646efe3834ca8381f8aa560.asciidoc b/docs/examples/b7a9f60b3646efe3834ca8381f8aa560.asciidoc
index ca29f3a8f..7077a812f 100644
--- a/docs/examples/b7a9f60b3646efe3834ca8381f8aa560.asciidoc
+++ b/docs/examples/b7a9f60b3646efe3834ca8381f8aa560.asciidoc
@@ -1,4 +1,4 @@
-// setup/logging-config.asciidoc:169
+// setup/logging-config.asciidoc:171
 
 [source, python]
 ----
diff --git a/docs/examples/c0f4cbcb104747f38acdbc9a30bd13bf.asciidoc b/docs/examples/c0f4cbcb104747f38acdbc9a30bd13bf.asciidoc
new file mode 100644
index 000000000..1630cbc9d
--- /dev/null
+++ b/docs/examples/c0f4cbcb104747f38acdbc9a30bd13bf.asciidoc
@@ -0,0 +1,15 @@
+// inference/put-inference.asciidoc:396
+
+[source, python]
+----
+resp = client.inference.put_model(
+    task_type="text_embedding",
+    inference_id="my_openai_embedding_model",
+    body={
+        "service": "openai",
+        "service_settings": {"api_key": "api_key"},
+        "task_settings": {"model": "text-embedding-ada-002"},
+    },
+)
+print(resp)
+----
\ No newline at end of file
diff --git a/docs/examples/eee6110831c08b9c1b3f56b24656e95b.asciidoc b/docs/examples/eee6110831c08b9c1b3f56b24656e95b.asciidoc
new file mode 100644
index 000000000..8757d6710
--- /dev/null
+++ b/docs/examples/eee6110831c08b9c1b3f56b24656e95b.asciidoc
@@ -0,0 +1,17 @@
+// inference/put-inference.asciidoc:341
+
+[source, python]
+----
+resp = client.inference.put_model(
+    task_type="text_embedding",
+    inference_id="hugging-face-embeddings",
+    body={
+        "service": "hugging_face",
+        "service_settings": {
+            "api_key": "",
+            "url": "",
+        },
+    },
+)
+print(resp)
+----
\ No newline at end of file
diff --git a/utils/generate-examples.py b/utils/generate-examples.py
index fedfd1c4d..ce1262509 100644
--- a/utils/generate-examples.py
+++ b/utils/generate-examples.py
@@ -106,6 +106,10 @@
     "cluster/update-settings.asciidoc",
     "health/health.asciidoc",
     "cluster/reroute.asciidoc",
+    "inference/get-inference.asciidoc",
+    "inference/delete-inference.asciidoc",
+    "inference/post-inference.asciidoc",
+    "inference/put-inference.asciidoc",
 ]
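
The generated examples above document the four inference endpoints one at a time (create, get, run, delete). As a rough end-to-end sketch of how they fit together with the Python client — not part of the patch, and assuming a reachable cluster at `http://localhost:9200` with ELSER available — the calls can be chained like this:

[source, python]
----
from elasticsearch import Elasticsearch

# Assumed connection details; adjust host and auth for your deployment.
client = Elasticsearch("http://localhost:9200")

# Create an ELSER sparse-embedding endpoint (as in 4ea91a3f...asciidoc).
client.inference.put_model(
    task_type="sparse_embedding",
    inference_id="my-elser-model",
    body={
        "service": "elser",
        "service_settings": {"num_allocations": 1, "num_threads": 1},
        "task_settings": {},
    },
)

# Inspect the endpoint (a4a3c3cd...asciidoc), then run inference against it
# (13ecdf99...asciidoc).
print(client.inference.get_model(
    task_type="sparse_embedding",
    inference_id="my-elser-model",
))
resp = client.inference.inference(
    task_type="sparse_embedding",
    inference_id="my-elser-model",
    body={
        "input": "The sky above the port was the color of television tuned to a dead channel."
    },
)
print(resp)

# Clean up (4e3414fc...asciidoc).
client.inference.delete_model(
    task_type="sparse_embedding",
    inference_id="my-elser-model",
)
----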