From d969fa6437713ad011c41da006eae8f831ab79cd Mon Sep 17 00:00:00 2001 From: Elastic Machine Date: Mon, 2 Sep 2024 06:03:36 +0000 Subject: [PATCH] Auto-generated API code --- .../00fea15cbca83be9d5f1a024ff2ec708.asciidoc | 8 +- .../04412d11783dac25b5fd2ec5407078a3.asciidoc | 11 +-- .../04de2e3a9c00c2056b07bf9cf9e63a99.asciidoc | 8 +- .../0ad8edd10542ec2c4d5d8700d7e2ba97.asciidoc | 8 +- .../0ade87c8cb0e3c188d2e3dce279d5cc2.asciidoc | 43 +++++---- .../13ecdf99114098c76b050397d9c3d4e6.asciidoc | 12 ++- .../13fe12cdb73bc89f07a83f1e6b127511.asciidoc | 23 +++++ .../19d60e4890cc57151d596326484d9076.asciidoc | 6 +- .../1a56df055b94466ca76818e0858752c6.asciidoc | 8 +- .../1a9e03ce0355872a7db27fedc783fbec.asciidoc | 8 +- .../1b60ad542abb511cbd926ac8c55b609c.asciidoc | 8 +- .../1dadb7efe27b6c0c231eb6535e413bd9.asciidoc | 8 +- .../1e26353d546d733634187b8c3a7837a7.asciidoc | 8 +- .../20179a8889e949d6a8ee5fbf2ba35c96.asciidoc | 22 +++++ .../21cd01cb90d3ea1acd0ab22d7edd2c88.asciidoc | 8 +- .../342ddf9121aeddd82fea2464665e25da.asciidoc | 13 ++- .../398389933901b572a06a752bc780af7c.asciidoc | 8 +- .../3b6718257421b5419bf4cd6a7303c57e.asciidoc | 6 +- .../3c0d0c38e1c819a35a68cdba5ae8ccc4.asciidoc | 20 +++++ .../41175d304e660da2931764f9a4418fd3.asciidoc | 17 ++-- .../430705509f8367aef92be413f702520b.asciidoc | 9 +- .../4c9350ed09b28f00e297ebe73c3b95a2.asciidoc | 8 +- .../4e3414fc712b16311f9e433dd366f49d.asciidoc | 6 +- .../52f4c5eb08d39f98e2e2f5527ece9731.asciidoc | 20 +++++ .../533087d787b48878a0bf3fa8d0851b64.asciidoc | 6 +- .../54c12d5099d7b715c15f5bbf65b386a1.asciidoc | 22 +++++ .../59d736a4d064ed2013c7ead8e32e0998.asciidoc | 8 +- .../640621cea39cdeeb76fbc95bff31a18d.asciidoc | 27 +++--- .../6606d46685d10377b996b5f20f1229b5.asciidoc | 9 +- .../6ddd4e657efbf45def430a6419825796.asciidoc | 8 +- .../7429b16221fe741fd31b0584786dd0b0.asciidoc | 16 ++-- .../7594a9a85c8511701e281974cbc253e1.asciidoc | 8 +- .../7752b677825523bfb0c38ad9325a6d47.asciidoc | 9 +- 
.../77b90f6787195767b6da60d8532714b4.asciidoc | 8 +- .../7c63a1d2fbec5283e913ff39fafd0604.asciidoc | 21 +++++ .../82844ef45e11c0eece100d3109db3182.asciidoc | 8 +- .../82eff1d681a5d0d1538ef011bb32ab9a.asciidoc | 18 ++++ .../840f8c863c30b04abcf2dd66b846f157.asciidoc | 8 +- .../8566f5ecf4ae14802ba63c8cc7c629f8.asciidoc | 8 +- .../8619bd17bbfe33490b1f277007f654db.asciidoc | 8 +- .../8e89fee0be6a436c4e3d7c152659c47e.asciidoc | 31 +++---- .../9326e323f7ffde678fa04d2d1de3d3bc.asciidoc | 22 +++++ .../981b331db1404b39c1a612a135e4e76d.asciidoc | 5 +- .../9868ce609f4450702934fcbf4c340bf1.asciidoc | 21 +++++ .../986f892bfa4dfdf1da8455fdf84a4b0c.asciidoc | 20 +++++ .../99803d7b111b862c0c82e9908e549b16.asciidoc | 8 +- .../9a203aae3e1412d919546276fb52a5ca.asciidoc | 8 +- .../9c021836acf7c0370e289f611325868d.asciidoc | 19 ++-- .../9d396afad93782699d7a929578c85284.asciidoc | 20 +++++ .../9f16fca9813304e398ee052aa857dbcd.asciidoc | 8 +- .../a225fc8c134cb21a85bc6025dac9368b.asciidoc | 8 +- .../a4a3c3cd09efa75168dab90105afb2e9.asciidoc | 6 +- .../a4ee2214d621bcfaf768c46d21325958.asciidoc | 8 +- .../a594f05459d9eecc8050c73fc8da336f.asciidoc | 8 +- .../a69c7c3412af73758f629e76263063b5.asciidoc | 18 ++++ .../a960b43e720b4934edb74ab4b085ca77.asciidoc | 5 +- .../aa676d54a59dee87ecd28bcc1edce59b.asciidoc | 20 +++++ .../b16700002af3aa70639f3e88c733bf35.asciidoc | 12 +++ .../b45a8c6fc746e9c90fd181e69a605fad.asciidoc | 10 +-- .../bdb671866e2f0195f8dfbdb7f20bf591.asciidoc | 8 +- .../c00c9412609832ebceb9e786dd9542df.asciidoc | 11 +-- .../c0ddfb2e6315f5bcf0d3ef414b5bbed3.asciidoc | 11 +-- .../c18100d62ed31bc9e05f62900156e6a8.asciidoc | 8 +- .../c21eb4bc30087188241cbba6b6b89999.asciidoc | 9 +- .../ce13afc0c976c5e1f424b58e0c97fd64.asciidoc | 17 ++-- .../cedb56a71cc743d80263ce352bb21720.asciidoc | 8 +- .../d03139a851888db53f8b7affd85eb495.asciidoc | 5 +- .../d6a21afa4a94b9baa734eac430940bcf.asciidoc | 10 +-- .../d7b61bfb6adb22986a43388b823894cc.asciidoc | 8 +- 
.../e9fc47015922d51c2b05e502ce9c622e.asciidoc | 8 +- .../eb54506fbc71a7d250e86b22d0600114.asciidoc | 8 +- .../eb96d7dd5f3116a50f7a86b729f1a934.asciidoc | 15 ++-- .../ecfd0d94dd14ef05dfa861f22544b388.asciidoc | 9 +- .../eee6110831c08b9c1b3f56b24656e95b.asciidoc | 8 +- .../f1b24217b1d9ba6ea5e4fa6e6f412022.asciidoc | 12 ++- .../f2a5f77f929cc7b893b80f4bd5b1a192.asciidoc | 5 +- .../f57ce7de0946e9416ddb9150e95f4b74.asciidoc | 8 +- .../fd620f09dbce62c6f0f603a366623607.asciidoc | 23 +++-- docs/reference.asciidoc | 48 +++++++--- src/api/api/ingest.ts | 60 ++++++++----- src/api/types.ts | 84 ++++++++++++++++-- src/api/typesWithBodyKey.ts | 87 +++++++++++++++++-- 82 files changed, 797 insertions(+), 418 deletions(-) create mode 100644 docs/doc_examples/13fe12cdb73bc89f07a83f1e6b127511.asciidoc create mode 100644 docs/doc_examples/20179a8889e949d6a8ee5fbf2ba35c96.asciidoc create mode 100644 docs/doc_examples/3c0d0c38e1c819a35a68cdba5ae8ccc4.asciidoc create mode 100644 docs/doc_examples/52f4c5eb08d39f98e2e2f5527ece9731.asciidoc create mode 100644 docs/doc_examples/54c12d5099d7b715c15f5bbf65b386a1.asciidoc create mode 100644 docs/doc_examples/7c63a1d2fbec5283e913ff39fafd0604.asciidoc create mode 100644 docs/doc_examples/82eff1d681a5d0d1538ef011bb32ab9a.asciidoc create mode 100644 docs/doc_examples/9326e323f7ffde678fa04d2d1de3d3bc.asciidoc create mode 100644 docs/doc_examples/9868ce609f4450702934fcbf4c340bf1.asciidoc create mode 100644 docs/doc_examples/986f892bfa4dfdf1da8455fdf84a4b0c.asciidoc create mode 100644 docs/doc_examples/9d396afad93782699d7a929578c85284.asciidoc create mode 100644 docs/doc_examples/a69c7c3412af73758f629e76263063b5.asciidoc create mode 100644 docs/doc_examples/aa676d54a59dee87ecd28bcc1edce59b.asciidoc create mode 100644 docs/doc_examples/b16700002af3aa70639f3e88c733bf35.asciidoc diff --git a/docs/doc_examples/00fea15cbca83be9d5f1a024ff2ec708.asciidoc b/docs/doc_examples/00fea15cbca83be9d5f1a024ff2ec708.asciidoc index d2c53fbf1..09675e02d 100644 --- 
a/docs/doc_examples/00fea15cbca83be9d5f1a024ff2ec708.asciidoc +++ b/docs/doc_examples/00fea15cbca83be9d5f1a024ff2ec708.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/text_embedding/my-e5-model", - body: { +const response = await client.inference.put({ + task_type: "text_embedding", + inference_id: "my-e5-model", + inference_config: { service: "elasticsearch", service_settings: { num_allocations: 1, diff --git a/docs/doc_examples/04412d11783dac25b5fd2ec5407078a3.asciidoc b/docs/doc_examples/04412d11783dac25b5fd2ec5407078a3.asciidoc index dbf3b4c90..fbefc580b 100644 --- a/docs/doc_examples/04412d11783dac25b5fd2ec5407078a3.asciidoc +++ b/docs/doc_examples/04412d11783dac25b5fd2ec5407078a3.asciidoc @@ -3,13 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_connector/my-connector/_api_key_id", - body: { - api_key_id: "my-api-key-id", - api_key_secret_id: "my-connector-secret-id", - }, +const response = await client.connector.updateApiKeyId({ + connector_id: "my-connector", + api_key_id: "my-api-key-id", + api_key_secret_id: "my-connector-secret-id", }); console.log(response); ---- diff --git a/docs/doc_examples/04de2e3a9c00c2056b07bf9cf9e63a99.asciidoc b/docs/doc_examples/04de2e3a9c00c2056b07bf9cf9e63a99.asciidoc index d78024183..00ef08a92 100644 --- a/docs/doc_examples/04de2e3a9c00c2056b07bf9cf9e63a99.asciidoc +++ b/docs/doc_examples/04de2e3a9c00c2056b07bf9cf9e63a99.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/text_embedding/google_vertex_ai_embeddings", - body: { +const response = await client.inference.put({ + task_type: "text_embedding", + inference_id: "google_vertex_ai_embeddings", + inference_config: { service: "googlevertexai", service_settings: { service_account_json: "", diff --git 
a/docs/doc_examples/0ad8edd10542ec2c4d5d8700d7e2ba97.asciidoc b/docs/doc_examples/0ad8edd10542ec2c4d5d8700d7e2ba97.asciidoc index 0fb258f3f..5c948b3d2 100644 --- a/docs/doc_examples/0ad8edd10542ec2c4d5d8700d7e2ba97.asciidoc +++ b/docs/doc_examples/0ad8edd10542ec2c4d5d8700d7e2ba97.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/text_embedding/amazon_bedrock_embeddings", - body: { +const response = await client.inference.put({ + task_type: "text_embedding", + inference_id: "amazon_bedrock_embeddings", + inference_config: { service: "amazonbedrock", service_settings: { access_key: "", diff --git a/docs/doc_examples/0ade87c8cb0e3c188d2e3dce279d5cc2.asciidoc b/docs/doc_examples/0ade87c8cb0e3c188d2e3dce279d5cc2.asciidoc index 5df1863c2..2c93643b2 100644 --- a/docs/doc_examples/0ade87c8cb0e3c188d2e3dce279d5cc2.asciidoc +++ b/docs/doc_examples/0ade87c8cb0e3c188d2e3dce279d5cc2.asciidoc @@ -3,29 +3,26 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_connector/my-g-drive-connector/_filtering", - body: { - rules: [ - { - field: "file_extension", - id: "exclude-txt-files", - order: 0, - policy: "exclude", - rule: "equals", - value: "txt", - }, - { - field: "_", - id: "DEFAULT", - order: 1, - policy: "include", - rule: "regex", - value: ".*", - }, - ], - }, +const response = await client.connector.updateFiltering({ + connector_id: "my-g-drive-connector", + rules: [ + { + field: "file_extension", + id: "exclude-txt-files", + order: 0, + policy: "exclude", + rule: "equals", + value: "txt", + }, + { + field: "_", + id: "DEFAULT", + order: 1, + policy: "include", + rule: "regex", + value: ".*", + }, + ], }); console.log(response); ---- diff --git a/docs/doc_examples/13ecdf99114098c76b050397d9c3d4e6.asciidoc b/docs/doc_examples/13ecdf99114098c76b050397d9c3d4e6.asciidoc index 52c6688ac..01baab9cf 100644 --- 
a/docs/doc_examples/13ecdf99114098c76b050397d9c3d4e6.asciidoc +++ b/docs/doc_examples/13ecdf99114098c76b050397d9c3d4e6.asciidoc @@ -3,13 +3,11 @@ [source, js] ---- -const response = await client.transport.request({ - method: "POST", - path: "/_inference/sparse_embedding/my-elser-model", - body: { - input: - "The sky above the port was the color of television tuned to a dead channel.", - }, +const response = await client.inference.inference({ + task_type: "sparse_embedding", + inference_id: "my-elser-model", + input: + "The sky above the port was the color of television tuned to a dead channel.", }); console.log(response); ---- diff --git a/docs/doc_examples/13fe12cdb73bc89f07a83f1e6b127511.asciidoc b/docs/doc_examples/13fe12cdb73bc89f07a83f1e6b127511.asciidoc new file mode 100644 index 000000000..672620810 --- /dev/null +++ b/docs/doc_examples/13fe12cdb73bc89f07a83f1e6b127511.asciidoc @@ -0,0 +1,23 @@ +// This file is autogenerated, DO NOT EDIT +// Use `node scripts/generate-docs-examples.js` to generate the docs examples + +[source, js] +---- +const response = await client.indices.create({ + index: "google-vertex-ai-embeddings", + mappings: { + properties: { + content_embedding: { + type: "dense_vector", + dims: 768, + element_type: "float", + similarity: "dot_product", + }, + content: { + type: "text", + }, + }, + }, +}); +console.log(response); +---- diff --git a/docs/doc_examples/19d60e4890cc57151d596326484d9076.asciidoc b/docs/doc_examples/19d60e4890cc57151d596326484d9076.asciidoc index c5e05aa73..6f918e3b2 100644 --- a/docs/doc_examples/19d60e4890cc57151d596326484d9076.asciidoc +++ b/docs/doc_examples/19d60e4890cc57151d596326484d9076.asciidoc @@ -3,9 +3,9 @@ [source, js] ---- -const response = await client.ingest.deleteGeoipDatabase({ - id: "my-database-id", - body: null, +const response = await client.transport.request({ + method: "DELETE", + path: "/_ingest/geoip/database/my-database-id", }); console.log(response); ---- diff --git 
a/docs/doc_examples/1a56df055b94466ca76818e0858752c6.asciidoc b/docs/doc_examples/1a56df055b94466ca76818e0858752c6.asciidoc index dfb2fc9c9..46718769b 100644 --- a/docs/doc_examples/1a56df055b94466ca76818e0858752c6.asciidoc +++ b/docs/doc_examples/1a56df055b94466ca76818e0858752c6.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/text_embedding/openai_embeddings", - body: { +const response = await client.inference.put({ + task_type: "text_embedding", + inference_id: "openai_embeddings", + inference_config: { service: "openai", service_settings: { api_key: "", diff --git a/docs/doc_examples/1a9e03ce0355872a7db27fedc783fbec.asciidoc b/docs/doc_examples/1a9e03ce0355872a7db27fedc783fbec.asciidoc index f29d19695..06c636ead 100644 --- a/docs/doc_examples/1a9e03ce0355872a7db27fedc783fbec.asciidoc +++ b/docs/doc_examples/1a9e03ce0355872a7db27fedc783fbec.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/rerank/google_vertex_ai_rerank", - body: { +const response = await client.inference.put({ + task_type: "rerank", + inference_id: "google_vertex_ai_rerank", + inference_config: { service: "googlevertexai", service_settings: { service_account_json: "", diff --git a/docs/doc_examples/1b60ad542abb511cbd926ac8c55b609c.asciidoc b/docs/doc_examples/1b60ad542abb511cbd926ac8c55b609c.asciidoc index 0c7d4b6f1..160884d3b 100644 --- a/docs/doc_examples/1b60ad542abb511cbd926ac8c55b609c.asciidoc +++ b/docs/doc_examples/1b60ad542abb511cbd926ac8c55b609c.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/sparse_embedding/my-elser-model", - body: { +const response = await client.inference.put({ + task_type: "sparse_embedding", + inference_id: "my-elser-model", + inference_config: { service: "elser", service_settings: { adaptive_allocations: { diff 
--git a/docs/doc_examples/1dadb7efe27b6c0c231eb6535e413bd9.asciidoc b/docs/doc_examples/1dadb7efe27b6c0c231eb6535e413bd9.asciidoc index 9319faa39..a38e95486 100644 --- a/docs/doc_examples/1dadb7efe27b6c0c231eb6535e413bd9.asciidoc +++ b/docs/doc_examples/1dadb7efe27b6c0c231eb6535e413bd9.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/text_embedding/azure_ai_studio_embeddings", - body: { +const response = await client.inference.put({ + task_type: "text_embedding", + inference_id: "azure_ai_studio_embeddings", + inference_config: { service: "azureaistudio", service_settings: { api_key: "", diff --git a/docs/doc_examples/1e26353d546d733634187b8c3a7837a7.asciidoc b/docs/doc_examples/1e26353d546d733634187b8c3a7837a7.asciidoc index 2f307bbf3..6e8f3033b 100644 --- a/docs/doc_examples/1e26353d546d733634187b8c3a7837a7.asciidoc +++ b/docs/doc_examples/1e26353d546d733634187b8c3a7837a7.asciidoc @@ -3,12 +3,8 @@ [source, js] ---- -const response = await client.transport.request({ - method: "GET", - path: "/_connector", - querystring: { - service_type: "sharepoint_online", - }, +const response = await client.connector.list({ + service_type: "sharepoint_online", }); console.log(response); ---- diff --git a/docs/doc_examples/20179a8889e949d6a8ee5fbf2ba35c96.asciidoc b/docs/doc_examples/20179a8889e949d6a8ee5fbf2ba35c96.asciidoc new file mode 100644 index 000000000..3a746ae88 --- /dev/null +++ b/docs/doc_examples/20179a8889e949d6a8ee5fbf2ba35c96.asciidoc @@ -0,0 +1,22 @@ +// This file is autogenerated, DO NOT EDIT +// Use `node scripts/generate-docs-examples.js` to generate the docs examples + +[source, js] +---- +const response = await client.search({ + index: "google-vertex-ai-embeddings", + knn: { + field: "content_embedding", + query_vector_builder: { + text_embedding: { + model_id: "google_vertex_ai_embeddings", + model_text: "Calculate fuel cost", + }, + }, + k: 10, + num_candidates: 100, + }, 
+ _source: ["id", "content"], +}); +console.log(response); +---- diff --git a/docs/doc_examples/21cd01cb90d3ea1acd0ab22d7edd2c88.asciidoc b/docs/doc_examples/21cd01cb90d3ea1acd0ab22d7edd2c88.asciidoc index 2cf11a621..295b4ed6c 100644 --- a/docs/doc_examples/21cd01cb90d3ea1acd0ab22d7edd2c88.asciidoc +++ b/docs/doc_examples/21cd01cb90d3ea1acd0ab22d7edd2c88.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/text_embedding/azure_ai_studio_embeddings", - body: { +const response = await client.inference.put({ + task_type: "text_embedding", + inference_id: "azure_ai_studio_embeddings", + inference_config: { service: "azureaistudio", service_settings: { api_key: "", diff --git a/docs/doc_examples/342ddf9121aeddd82fea2464665e25da.asciidoc b/docs/doc_examples/342ddf9121aeddd82fea2464665e25da.asciidoc index 00d2c0234..0b2b04c09 100644 --- a/docs/doc_examples/342ddf9121aeddd82fea2464665e25da.asciidoc +++ b/docs/doc_examples/342ddf9121aeddd82fea2464665e25da.asciidoc @@ -3,14 +3,11 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_connector/my-connector", - body: { - index_name: "search-google-drive", - name: "My Connector", - service_type: "google_drive", - }, +const response = await client.connector.put({ + connector_id: "my-connector", + index_name: "search-google-drive", + name: "My Connector", + service_type: "google_drive", }); console.log(response); ---- diff --git a/docs/doc_examples/398389933901b572a06a752bc780af7c.asciidoc b/docs/doc_examples/398389933901b572a06a752bc780af7c.asciidoc index 029b478da..cfbe8ea75 100644 --- a/docs/doc_examples/398389933901b572a06a752bc780af7c.asciidoc +++ b/docs/doc_examples/398389933901b572a06a752bc780af7c.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/completion/anthropic_completion", - body: { +const response = await 
client.inference.put({ + task_type: "completion", + inference_id: "anthropic_completion", + inference_config: { service: "anthropic", service_settings: { api_key: "", diff --git a/docs/doc_examples/3b6718257421b5419bf4cd6a7303c57e.asciidoc b/docs/doc_examples/3b6718257421b5419bf4cd6a7303c57e.asciidoc index a00dcffd3..7ad8dcc20 100644 --- a/docs/doc_examples/3b6718257421b5419bf4cd6a7303c57e.asciidoc +++ b/docs/doc_examples/3b6718257421b5419bf4cd6a7303c57e.asciidoc @@ -3,9 +3,9 @@ [source, js] ---- -const response = await client.ingest.getGeoipDatabase({ - id: "my-database-id", - body: null, +const response = await client.transport.request({ + method: "GET", + path: "/_ingest/geoip/database/my-database-id", }); console.log(response); ---- diff --git a/docs/doc_examples/3c0d0c38e1c819a35a68cdba5ae8ccc4.asciidoc b/docs/doc_examples/3c0d0c38e1c819a35a68cdba5ae8ccc4.asciidoc new file mode 100644 index 000000000..d2b00a583 --- /dev/null +++ b/docs/doc_examples/3c0d0c38e1c819a35a68cdba5ae8ccc4.asciidoc @@ -0,0 +1,20 @@ +// This file is autogenerated, DO NOT EDIT +// Use `node scripts/generate-docs-examples.js` to generate the docs examples + +[source, js] +---- +const response = await client.inference.put({ + task_type: "text_embedding", + inference_id: "alibabacloud_ai_search_embeddings", + inference_config: { + service: "alibabacloud-ai-search", + service_settings: { + api_key: "", + service_id: "", + host: "", + workspace: "", + }, + }, +}); +console.log(response); +---- diff --git a/docs/doc_examples/41175d304e660da2931764f9a4418fd3.asciidoc b/docs/doc_examples/41175d304e660da2931764f9a4418fd3.asciidoc index 10c2ec536..81079be09 100644 --- a/docs/doc_examples/41175d304e660da2931764f9a4418fd3.asciidoc +++ b/docs/doc_examples/41175d304e660da2931764f9a4418fd3.asciidoc @@ -3,16 +3,13 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_connector/my-connector/_pipeline", - body: { - pipeline: { - extract_binary_content: true, - 
name: "my-connector-pipeline", - reduce_whitespace: true, - run_ml_inference: true, - }, +const response = await client.connector.updatePipeline({ + connector_id: "my-connector", + pipeline: { + extract_binary_content: true, + name: "my-connector-pipeline", + reduce_whitespace: true, + run_ml_inference: true, }, }); console.log(response); diff --git a/docs/doc_examples/430705509f8367aef92be413f702520b.asciidoc b/docs/doc_examples/430705509f8367aef92be413f702520b.asciidoc index 3fe855a63..c95b05eac 100644 --- a/docs/doc_examples/430705509f8367aef92be413f702520b.asciidoc +++ b/docs/doc_examples/430705509f8367aef92be413f702520b.asciidoc @@ -3,12 +3,9 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_connector/my-connector/_status", - body: { - status: "needs_configuration", - }, +const response = await client.connector.updateStatus({ + connector_id: "my-connector", + status: "needs_configuration", }); console.log(response); ---- diff --git a/docs/doc_examples/4c9350ed09b28f00e297ebe73c3b95a2.asciidoc b/docs/doc_examples/4c9350ed09b28f00e297ebe73c3b95a2.asciidoc index af5ca2ccc..2c8c7983b 100644 --- a/docs/doc_examples/4c9350ed09b28f00e297ebe73c3b95a2.asciidoc +++ b/docs/doc_examples/4c9350ed09b28f00e297ebe73c3b95a2.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/text_embedding/my-msmarco-minilm-model", - body: { +const response = await client.inference.put({ + task_type: "text_embedding", + inference_id: "my-msmarco-minilm-model", + inference_config: { service: "elasticsearch", service_settings: { num_allocations: 1, diff --git a/docs/doc_examples/4e3414fc712b16311f9e433dd366f49d.asciidoc b/docs/doc_examples/4e3414fc712b16311f9e433dd366f49d.asciidoc index 64930ca4f..e9274320d 100644 --- a/docs/doc_examples/4e3414fc712b16311f9e433dd366f49d.asciidoc +++ b/docs/doc_examples/4e3414fc712b16311f9e433dd366f49d.asciidoc @@ -3,9 +3,9 @@ 
[source, js] ---- -const response = await client.transport.request({ - method: "DELETE", - path: "/_inference/sparse_embedding/my-elser-model", +const response = await client.inference.delete({ + task_type: "sparse_embedding", + inference_id: "my-elser-model", }); console.log(response); ---- diff --git a/docs/doc_examples/52f4c5eb08d39f98e2e2f5527ece9731.asciidoc b/docs/doc_examples/52f4c5eb08d39f98e2e2f5527ece9731.asciidoc new file mode 100644 index 000000000..a6490dd78 --- /dev/null +++ b/docs/doc_examples/52f4c5eb08d39f98e2e2f5527ece9731.asciidoc @@ -0,0 +1,20 @@ +// This file is autogenerated, DO NOT EDIT +// Use `node scripts/generate-docs-examples.js` to generate the docs examples + +[source, js] +---- +const response = await client.inference.put({ + task_type: "sparse_embedding", + inference_id: "alibabacloud_ai_search_sparse", + inference_config: { + service: "alibabacloud-ai-search", + service_settings: { + api_key: "", + service_id: "ops-text-sparse-embedding-001", + host: "default-j01.platform-cn-shanghai.opensearch.aliyuncs.com", + workspace: "default", + }, + }, +}); +console.log(response); +---- diff --git a/docs/doc_examples/533087d787b48878a0bf3fa8d0851b64.asciidoc b/docs/doc_examples/533087d787b48878a0bf3fa8d0851b64.asciidoc index 9dfe27bbc..65425b66a 100644 --- a/docs/doc_examples/533087d787b48878a0bf3fa8d0851b64.asciidoc +++ b/docs/doc_examples/533087d787b48878a0bf3fa8d0851b64.asciidoc @@ -3,9 +3,9 @@ [source, js] ---- -const response = await client.ingest.deleteGeoipDatabase({ - id: "example-database-id", - body: null, +const response = await client.transport.request({ + method: "DELETE", + path: "/_ingest/geoip/database/example-database-id", }); console.log(response); ---- diff --git a/docs/doc_examples/54c12d5099d7b715c15f5bbf65b386a1.asciidoc b/docs/doc_examples/54c12d5099d7b715c15f5bbf65b386a1.asciidoc new file mode 100644 index 000000000..ea16a6a2c --- /dev/null +++ b/docs/doc_examples/54c12d5099d7b715c15f5bbf65b386a1.asciidoc @@ -0,0 +1,22 
@@ +// This file is autogenerated, DO NOT EDIT +// Use `node scripts/generate-docs-examples.js` to generate the docs examples + +[source, js] +---- +const response = await client.indices.create({ + index: "alibabacloud-ai-search-embeddings", + mappings: { + properties: { + content_embedding: { + type: "dense_vector", + dims: 1024, + element_type: "float", + }, + content: { + type: "text", + }, + }, + }, +}); +console.log(response); +---- diff --git a/docs/doc_examples/59d736a4d064ed2013c7ead8e32e0998.asciidoc b/docs/doc_examples/59d736a4d064ed2013c7ead8e32e0998.asciidoc index f1aea891e..5ea918642 100644 --- a/docs/doc_examples/59d736a4d064ed2013c7ead8e32e0998.asciidoc +++ b/docs/doc_examples/59d736a4d064ed2013c7ead8e32e0998.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/completion/openai-completion", - body: { +const response = await client.inference.put({ + task_type: "completion", + inference_id: "openai-completion", + inference_config: { service: "openai", service_settings: { api_key: "", diff --git a/docs/doc_examples/640621cea39cdeeb76fbc95bff31a18d.asciidoc b/docs/doc_examples/640621cea39cdeeb76fbc95bff31a18d.asciidoc index a9ee715a4..feecc4a39 100644 --- a/docs/doc_examples/640621cea39cdeeb76fbc95bff31a18d.asciidoc +++ b/docs/doc_examples/640621cea39cdeeb76fbc95bff31a18d.asciidoc @@ -3,21 +3,18 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_connector/my-connector/_last_sync", - body: { - last_access_control_sync_error: "Houston, we have a problem!", - last_access_control_sync_scheduled_at: "2023-11-09T15:13:08.231Z", - last_access_control_sync_status: "pending", - last_deleted_document_count: 42, - last_incremental_sync_scheduled_at: "2023-11-09T15:13:08.231Z", - last_indexed_document_count: 42, - last_sync_error: "Houston, we have a problem!", - last_sync_scheduled_at: "2024-11-09T15:13:08.231Z", - last_sync_status: 
"completed", - last_synced: "2024-11-09T15:13:08.231Z", - }, +const response = await client.connector.lastSync({ + connector_id: "my-connector", + last_access_control_sync_error: "Houston, we have a problem!", + last_access_control_sync_scheduled_at: "2023-11-09T15:13:08.231Z", + last_access_control_sync_status: "pending", + last_deleted_document_count: 42, + last_incremental_sync_scheduled_at: "2023-11-09T15:13:08.231Z", + last_indexed_document_count: 42, + last_sync_error: "Houston, we have a problem!", + last_sync_scheduled_at: "2024-11-09T15:13:08.231Z", + last_sync_status: "completed", + last_synced: "2024-11-09T15:13:08.231Z", }); console.log(response); ---- diff --git a/docs/doc_examples/6606d46685d10377b996b5f20f1229b5.asciidoc b/docs/doc_examples/6606d46685d10377b996b5f20f1229b5.asciidoc index 4a0655e33..5cbb57477 100644 --- a/docs/doc_examples/6606d46685d10377b996b5f20f1229b5.asciidoc +++ b/docs/doc_examples/6606d46685d10377b996b5f20f1229b5.asciidoc @@ -3,12 +3,9 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_connector/my-connector/_index_name", - body: { - index_name: "data-from-my-google-drive", - }, +const response = await client.connector.updateIndexName({ + connector_id: "my-connector", + index_name: "data-from-my-google-drive", }); console.log(response); ---- diff --git a/docs/doc_examples/6ddd4e657efbf45def430a6419825796.asciidoc b/docs/doc_examples/6ddd4e657efbf45def430a6419825796.asciidoc index cb9376459..5bd1b226c 100644 --- a/docs/doc_examples/6ddd4e657efbf45def430a6419825796.asciidoc +++ b/docs/doc_examples/6ddd4e657efbf45def430a6419825796.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/completion/azure_ai_studio_completion", - body: { +const response = await client.inference.put({ + task_type: "completion", + inference_id: "azure_ai_studio_completion", + inference_config: { service: "azureaistudio", 
service_settings: { api_key: "", diff --git a/docs/doc_examples/7429b16221fe741fd31b0584786dd0b0.asciidoc b/docs/doc_examples/7429b16221fe741fd31b0584786dd0b0.asciidoc index fe849a80d..8f897c69c 100644 --- a/docs/doc_examples/7429b16221fe741fd31b0584786dd0b0.asciidoc +++ b/docs/doc_examples/7429b16221fe741fd31b0584786dd0b0.asciidoc @@ -3,15 +3,13 @@ [source, js] ---- -const response = await client.transport.request({ - method: "POST", - path: "/_inference/text_embedding/my-cohere-endpoint", - body: { - input: - "The sky above the port was the color of television tuned to a dead channel.", - task_settings: { - input_type: "ingest", - }, +const response = await client.inference.inference({ + task_type: "text_embedding", + inference_id: "my-cohere-endpoint", + input: + "The sky above the port was the color of television tuned to a dead channel.", + task_settings: { + input_type: "ingest", }, }); console.log(response); diff --git a/docs/doc_examples/7594a9a85c8511701e281974cbc253e1.asciidoc b/docs/doc_examples/7594a9a85c8511701e281974cbc253e1.asciidoc index e98728bf2..3c4dca864 100644 --- a/docs/doc_examples/7594a9a85c8511701e281974cbc253e1.asciidoc +++ b/docs/doc_examples/7594a9a85c8511701e281974cbc253e1.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/text_embedding/amazon_bedrock_embeddings", - body: { +const response = await client.inference.put({ + task_type: "text_embedding", + inference_id: "amazon_bedrock_embeddings", + inference_config: { service: "amazonbedrock", service_settings: { access_key: "", diff --git a/docs/doc_examples/7752b677825523bfb0c38ad9325a6d47.asciidoc b/docs/doc_examples/7752b677825523bfb0c38ad9325a6d47.asciidoc index 03b51a131..f6a5082a7 100644 --- a/docs/doc_examples/7752b677825523bfb0c38ad9325a6d47.asciidoc +++ b/docs/doc_examples/7752b677825523bfb0c38ad9325a6d47.asciidoc @@ -3,12 +3,9 @@ [source, js] ---- -const response = await 
client.transport.request({ - method: "DELETE", - path: "/_connector/another-connector", - querystring: { - delete_sync_jobs: "true", - }, +const response = await client.connector.delete({ + connector_id: "another-connector", + delete_sync_jobs: "true", }); console.log(response); ---- diff --git a/docs/doc_examples/77b90f6787195767b6da60d8532714b4.asciidoc b/docs/doc_examples/77b90f6787195767b6da60d8532714b4.asciidoc index 7ab2d290f..08570d5c6 100644 --- a/docs/doc_examples/77b90f6787195767b6da60d8532714b4.asciidoc +++ b/docs/doc_examples/77b90f6787195767b6da60d8532714b4.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/text_embedding/azure_openai_embeddings", - body: { +const response = await client.inference.put({ + task_type: "text_embedding", + inference_id: "azure_openai_embeddings", + inference_config: { service: "azureopenai", service_settings: { api_key: "", diff --git a/docs/doc_examples/7c63a1d2fbec5283e913ff39fafd0604.asciidoc b/docs/doc_examples/7c63a1d2fbec5283e913ff39fafd0604.asciidoc new file mode 100644 index 000000000..d83687e37 --- /dev/null +++ b/docs/doc_examples/7c63a1d2fbec5283e913ff39fafd0604.asciidoc @@ -0,0 +1,21 @@ +// This file is autogenerated, DO NOT EDIT +// Use `node scripts/generate-docs-examples.js` to generate the docs examples + +[source, js] +---- +const response = await client.ingest.putPipeline({ + id: "google_vertex_ai_embeddings", + processors: [ + { + inference: { + model_id: "google_vertex_ai_embeddings", + input_output: { + input_field: "content", + output_field: "content_embedding", + }, + }, + }, + ], +}); +console.log(response); +---- diff --git a/docs/doc_examples/82844ef45e11c0eece100d3109db3182.asciidoc b/docs/doc_examples/82844ef45e11c0eece100d3109db3182.asciidoc index f3f816377..07d04a9b1 100644 --- a/docs/doc_examples/82844ef45e11c0eece100d3109db3182.asciidoc +++ b/docs/doc_examples/82844ef45e11c0eece100d3109db3182.asciidoc @@ -3,10 
+3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/completion/amazon_bedrock_completion", - body: { +const response = await client.inference.put({ + task_type: "completion", + inference_id: "amazon_bedrock_completion", + inference_config: { service: "amazonbedrock", service_settings: { access_key: "", diff --git a/docs/doc_examples/82eff1d681a5d0d1538ef011bb32ab9a.asciidoc b/docs/doc_examples/82eff1d681a5d0d1538ef011bb32ab9a.asciidoc new file mode 100644 index 000000000..c7b1a1209 --- /dev/null +++ b/docs/doc_examples/82eff1d681a5d0d1538ef011bb32ab9a.asciidoc @@ -0,0 +1,18 @@ +// This file is autogenerated, DO NOT EDIT +// Use `node scripts/generate-docs-examples.js` to generate the docs examples + +[source, js] +---- +const response = await client.reindex({ + wait_for_completion: "false", + source: { + index: "test-data", + size: 50, + }, + dest: { + index: "alibabacloud-ai-search-embeddings", + pipeline: "alibabacloud_ai_search_embeddings", + }, +}); +console.log(response); +---- diff --git a/docs/doc_examples/840f8c863c30b04abcf2dd66b846f157.asciidoc b/docs/doc_examples/840f8c863c30b04abcf2dd66b846f157.asciidoc index b38af10e2..e16ea6560 100644 --- a/docs/doc_examples/840f8c863c30b04abcf2dd66b846f157.asciidoc +++ b/docs/doc_examples/840f8c863c30b04abcf2dd66b846f157.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/text_embedding/my-e5-model", - body: { +const response = await client.inference.put({ + task_type: "text_embedding", + inference_id: "my-e5-model", + inference_config: { service: "elasticsearch", service_settings: { adaptive_allocations: { diff --git a/docs/doc_examples/8566f5ecf4ae14802ba63c8cc7c629f8.asciidoc b/docs/doc_examples/8566f5ecf4ae14802ba63c8cc7c629f8.asciidoc index f6ab408a7..dad0bf4d5 100644 --- a/docs/doc_examples/8566f5ecf4ae14802ba63c8cc7c629f8.asciidoc +++ 
b/docs/doc_examples/8566f5ecf4ae14802ba63c8cc7c629f8.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/text_embedding/mistral_embeddings", - body: { +const response = await client.inference.put({ + task_type: "text_embedding", + inference_id: "mistral_embeddings", + inference_config: { service: "mistral", service_settings: { api_key: "", diff --git a/docs/doc_examples/8619bd17bbfe33490b1f277007f654db.asciidoc b/docs/doc_examples/8619bd17bbfe33490b1f277007f654db.asciidoc index 64b3d669e..48f850d0c 100644 --- a/docs/doc_examples/8619bd17bbfe33490b1f277007f654db.asciidoc +++ b/docs/doc_examples/8619bd17bbfe33490b1f277007f654db.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/rerank/cohere-rerank", - body: { +const response = await client.inference.put({ + task_type: "rerank", + inference_id: "cohere-rerank", + inference_config: { service: "cohere", service_settings: { api_key: "", diff --git a/docs/doc_examples/8e89fee0be6a436c4e3d7c152659c47e.asciidoc b/docs/doc_examples/8e89fee0be6a436c4e3d7c152659c47e.asciidoc index ddab55399..fb4577692 100644 --- a/docs/doc_examples/8e89fee0be6a436c4e3d7c152659c47e.asciidoc +++ b/docs/doc_examples/8e89fee0be6a436c4e3d7c152659c47e.asciidoc @@ -3,23 +3,20 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_connector/my-connector/_scheduling", - body: { - scheduling: { - access_control: { - enabled: true, - interval: "0 10 0 * * ?", - }, - full: { - enabled: true, - interval: "0 20 0 * * ?", - }, - incremental: { - enabled: false, - interval: "0 30 0 * * ?", - }, +const response = await client.connector.updateScheduling({ + connector_id: "my-connector", + scheduling: { + access_control: { + enabled: true, + interval: "0 10 0 * * ?", + }, + full: { + enabled: true, + interval: "0 20 0 * * ?", + }, + incremental: { + 
enabled: false, + interval: "0 30 0 * * ?", }, }, }); diff --git a/docs/doc_examples/9326e323f7ffde678fa04d2d1de3d3bc.asciidoc b/docs/doc_examples/9326e323f7ffde678fa04d2d1de3d3bc.asciidoc new file mode 100644 index 000000000..8ca628047 --- /dev/null +++ b/docs/doc_examples/9326e323f7ffde678fa04d2d1de3d3bc.asciidoc @@ -0,0 +1,22 @@ +// This file is autogenerated, DO NOT EDIT +// Use `node scripts/generate-docs-examples.js` to generate the docs examples + +[source, js] +---- +const response = await client.search({ + index: "alibabacloud-ai-search-embeddings", + knn: { + field: "content_embedding", + query_vector_builder: { + text_embedding: { + model_id: "alibabacloud_ai_search_embeddings", + model_text: "Calculate fuel cost", + }, + }, + k: 10, + num_candidates: 100, + }, + _source: ["id", "content"], +}); +console.log(response); +---- diff --git a/docs/doc_examples/981b331db1404b39c1a612a135e4e76d.asciidoc b/docs/doc_examples/981b331db1404b39c1a612a135e4e76d.asciidoc index b3393e3ec..0d3cdbbe3 100644 --- a/docs/doc_examples/981b331db1404b39c1a612a135e4e76d.asciidoc +++ b/docs/doc_examples/981b331db1404b39c1a612a135e4e76d.asciidoc @@ -3,8 +3,9 @@ [source, js] ---- -const response = await client.ingest.putGeoipDatabase({ - id: "my-database-id", +const response = await client.transport.request({ + method: "PUT", + path: "/_ingest/geoip/database/my-database-id", body: { name: "GeoIP2-Domain", maxmind: { diff --git a/docs/doc_examples/9868ce609f4450702934fcbf4c340bf1.asciidoc b/docs/doc_examples/9868ce609f4450702934fcbf4c340bf1.asciidoc new file mode 100644 index 000000000..efbc3834d --- /dev/null +++ b/docs/doc_examples/9868ce609f4450702934fcbf4c340bf1.asciidoc @@ -0,0 +1,21 @@ +// This file is autogenerated, DO NOT EDIT +// Use `node scripts/generate-docs-examples.js` to generate the docs examples + +[source, js] +---- +const response = await client.ingest.putPipeline({ + id: "alibabacloud_ai_search_embeddings", + processors: [ + { + inference: { + model_id: 
"alibabacloud_ai_search_embeddings", + input_output: { + input_field: "content", + output_field: "content_embedding", + }, + }, + }, + ], +}); +console.log(response); +---- diff --git a/docs/doc_examples/986f892bfa4dfdf1da8455fdf84a4b0c.asciidoc b/docs/doc_examples/986f892bfa4dfdf1da8455fdf84a4b0c.asciidoc new file mode 100644 index 000000000..570db554b --- /dev/null +++ b/docs/doc_examples/986f892bfa4dfdf1da8455fdf84a4b0c.asciidoc @@ -0,0 +1,20 @@ +// This file is autogenerated, DO NOT EDIT +// Use `node scripts/generate-docs-examples.js` to generate the docs examples + +[source, js] +---- +const response = await client.inference.put({ + task_type: "text_embedding", + inference_id: "alibabacloud_ai_search_embeddings", + inference_config: { + service: "alibabacloud-ai-search", + service_settings: { + api_key: "", + service_id: "ops-text-embedding-001", + host: "default-j01.platform-cn-shanghai.opensearch.aliyuncs.com", + workspace: "default", + }, + }, +}); +console.log(response); +---- diff --git a/docs/doc_examples/99803d7b111b862c0c82e9908e549b16.asciidoc b/docs/doc_examples/99803d7b111b862c0c82e9908e549b16.asciidoc index 3989008d1..620c04c62 100644 --- a/docs/doc_examples/99803d7b111b862c0c82e9908e549b16.asciidoc +++ b/docs/doc_examples/99803d7b111b862c0c82e9908e549b16.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/text_embedding/mistral-embeddings-test", - body: { +const response = await client.inference.put({ + task_type: "text_embedding", + inference_id: "mistral-embeddings-test", + inference_config: { service: "mistral", service_settings: { api_key: "", diff --git a/docs/doc_examples/9a203aae3e1412d919546276fb52a5ca.asciidoc b/docs/doc_examples/9a203aae3e1412d919546276fb52a5ca.asciidoc index 411649c25..e1311ca66 100644 --- a/docs/doc_examples/9a203aae3e1412d919546276fb52a5ca.asciidoc +++ b/docs/doc_examples/9a203aae3e1412d919546276fb52a5ca.asciidoc @@ -3,10 +3,10 @@ 
[source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/text_embedding/cohere-embeddings", - body: { +const response = await client.inference.put({ + task_type: "text_embedding", + inference_id: "cohere-embeddings", + inference_config: { service: "cohere", service_settings: { api_key: "", diff --git a/docs/doc_examples/9c021836acf7c0370e289f611325868d.asciidoc b/docs/doc_examples/9c021836acf7c0370e289f611325868d.asciidoc index f50a76d1b..ed4557890 100644 --- a/docs/doc_examples/9c021836acf7c0370e289f611325868d.asciidoc +++ b/docs/doc_examples/9c021836acf7c0370e289f611325868d.asciidoc @@ -3,17 +3,14 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_connector/my-spo-connector/_configuration", - body: { - values: { - tenant_id: "my-tenant-id", - tenant_name: "my-sharepoint-site", - client_id: "foo", - secret_value: "bar", - site_collections: "*", - }, +const response = await client.connector.updateConfiguration({ + connector_id: "my-spo-connector", + values: { + tenant_id: "my-tenant-id", + tenant_name: "my-sharepoint-site", + client_id: "foo", + secret_value: "bar", + site_collections: "*", }, }); console.log(response); diff --git a/docs/doc_examples/9d396afad93782699d7a929578c85284.asciidoc b/docs/doc_examples/9d396afad93782699d7a929578c85284.asciidoc new file mode 100644 index 000000000..2f0a85e90 --- /dev/null +++ b/docs/doc_examples/9d396afad93782699d7a929578c85284.asciidoc @@ -0,0 +1,20 @@ +// This file is autogenerated, DO NOT EDIT +// Use `node scripts/generate-docs-examples.js` to generate the docs examples + +[source, js] +---- +const response = await client.inference.put({ + task_type: "text_embedding", + inference_id: "google_vertex_ai_embeddings", + inference_config: { + service: "googlevertexai", + service_settings: { + service_account_json: "", + model_id: "text-embedding-004", + location: "", + project_id: "", + }, + }, +}); +console.log(response); 
+---- diff --git a/docs/doc_examples/9f16fca9813304e398ee052aa857dbcd.asciidoc b/docs/doc_examples/9f16fca9813304e398ee052aa857dbcd.asciidoc index 0da3acf87..6be472e3b 100644 --- a/docs/doc_examples/9f16fca9813304e398ee052aa857dbcd.asciidoc +++ b/docs/doc_examples/9f16fca9813304e398ee052aa857dbcd.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/text_embedding/openai-embeddings", - body: { +const response = await client.inference.put({ + task_type: "text_embedding", + inference_id: "openai-embeddings", + inference_config: { service: "openai", service_settings: { api_key: "", diff --git a/docs/doc_examples/a225fc8c134cb21a85bc6025dac9368b.asciidoc b/docs/doc_examples/a225fc8c134cb21a85bc6025dac9368b.asciidoc index 8cfc3b071..da9071e2c 100644 --- a/docs/doc_examples/a225fc8c134cb21a85bc6025dac9368b.asciidoc +++ b/docs/doc_examples/a225fc8c134cb21a85bc6025dac9368b.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/sparse_embedding/elser_embeddings", - body: { +const response = await client.inference.put({ + task_type: "sparse_embedding", + inference_id: "elser_embeddings", + inference_config: { service: "elser", service_settings: { num_allocations: 1, diff --git a/docs/doc_examples/a4a3c3cd09efa75168dab90105afb2e9.asciidoc b/docs/doc_examples/a4a3c3cd09efa75168dab90105afb2e9.asciidoc index a340791bd..b03688330 100644 --- a/docs/doc_examples/a4a3c3cd09efa75168dab90105afb2e9.asciidoc +++ b/docs/doc_examples/a4a3c3cd09efa75168dab90105afb2e9.asciidoc @@ -3,9 +3,9 @@ [source, js] ---- -const response = await client.transport.request({ - method: "GET", - path: "/_inference/sparse_embedding/my-elser-model", +const response = await client.inference.get({ + task_type: "sparse_embedding", + inference_id: "my-elser-model", }); console.log(response); ---- diff --git 
a/docs/doc_examples/a4ee2214d621bcfaf768c46d21325958.asciidoc b/docs/doc_examples/a4ee2214d621bcfaf768c46d21325958.asciidoc index 7179d8152..1b51b6936 100644 --- a/docs/doc_examples/a4ee2214d621bcfaf768c46d21325958.asciidoc +++ b/docs/doc_examples/a4ee2214d621bcfaf768c46d21325958.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/text_embedding/hugging_face_embeddings", - body: { +const response = await client.inference.put({ + task_type: "text_embedding", + inference_id: "hugging_face_embeddings", + inference_config: { service: "hugging_face", service_settings: { api_key: "", diff --git a/docs/doc_examples/a594f05459d9eecc8050c73fc8da336f.asciidoc b/docs/doc_examples/a594f05459d9eecc8050c73fc8da336f.asciidoc index 7ab2d290f..08570d5c6 100644 --- a/docs/doc_examples/a594f05459d9eecc8050c73fc8da336f.asciidoc +++ b/docs/doc_examples/a594f05459d9eecc8050c73fc8da336f.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/text_embedding/azure_openai_embeddings", - body: { +const response = await client.inference.put({ + task_type: "text_embedding", + inference_id: "azure_openai_embeddings", + inference_config: { service: "azureopenai", service_settings: { api_key: "", diff --git a/docs/doc_examples/a69c7c3412af73758f629e76263063b5.asciidoc b/docs/doc_examples/a69c7c3412af73758f629e76263063b5.asciidoc new file mode 100644 index 000000000..2312ca864 --- /dev/null +++ b/docs/doc_examples/a69c7c3412af73758f629e76263063b5.asciidoc @@ -0,0 +1,18 @@ +// This file is autogenerated, DO NOT EDIT +// Use `node scripts/generate-docs-examples.js` to generate the docs examples + +[source, js] +---- +const response = await client.reindex({ + wait_for_completion: "false", + source: { + index: "test-data", + size: 50, + }, + dest: { + index: "google-vertex-ai-embeddings", + pipeline: "google_vertex_ai_embeddings", + }, +}); 
+console.log(response); +---- diff --git a/docs/doc_examples/a960b43e720b4934edb74ab4b085ca77.asciidoc b/docs/doc_examples/a960b43e720b4934edb74ab4b085ca77.asciidoc index 758df0a4d..8e7ed5f3d 100644 --- a/docs/doc_examples/a960b43e720b4934edb74ab4b085ca77.asciidoc +++ b/docs/doc_examples/a960b43e720b4934edb74ab4b085ca77.asciidoc @@ -3,9 +3,6 @@ [source, js] ---- -const response = await client.transport.request({ - method: "GET", - path: "/_connector", -}); +const response = await client.connector.list(); console.log(response); ---- diff --git a/docs/doc_examples/aa676d54a59dee87ecd28bcc1edce59b.asciidoc b/docs/doc_examples/aa676d54a59dee87ecd28bcc1edce59b.asciidoc new file mode 100644 index 000000000..a9bcf22e7 --- /dev/null +++ b/docs/doc_examples/aa676d54a59dee87ecd28bcc1edce59b.asciidoc @@ -0,0 +1,20 @@ +// This file is autogenerated, DO NOT EDIT +// Use `node scripts/generate-docs-examples.js` to generate the docs examples + +[source, js] +---- +const response = await client.inference.put({ + task_type: "rerank", + inference_id: "alibabacloud_ai_search_rerank", + inference_config: { + service: "alibabacloud-ai-search", + service_settings: { + api_key: "", + service_id: "ops-bge-reranker-larger", + host: "default-j01.platform-cn-shanghai.opensearch.aliyuncs.com", + workspace: "default", + }, + }, +}); +console.log(response); +---- diff --git a/docs/doc_examples/b16700002af3aa70639f3e88c733bf35.asciidoc b/docs/doc_examples/b16700002af3aa70639f3e88c733bf35.asciidoc new file mode 100644 index 000000000..87a17c886 --- /dev/null +++ b/docs/doc_examples/b16700002af3aa70639f3e88c733bf35.asciidoc @@ -0,0 +1,12 @@ +// This file is autogenerated, DO NOT EDIT +// Use `node scripts/generate-docs-examples.js` to generate the docs examples + +[source, js] +---- +const response = await client.openPointInTime({ + index: "my-index-000001", + keep_alive: "1m", + allow_partial_search_results: "true", +}); +console.log(response); +---- diff --git 
a/docs/doc_examples/b45a8c6fc746e9c90fd181e69a605fad.asciidoc b/docs/doc_examples/b45a8c6fc746e9c90fd181e69a605fad.asciidoc index d51f2451f..876b182d2 100644 --- a/docs/doc_examples/b45a8c6fc746e9c90fd181e69a605fad.asciidoc +++ b/docs/doc_examples/b45a8c6fc746e9c90fd181e69a605fad.asciidoc @@ -3,12 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "POST", - path: "/_inference/completion/openai_chat_completions", - body: { - input: "What is Elastic?", - }, +const response = await client.inference.inference({ + task_type: "completion", + inference_id: "openai_chat_completions", + input: "What is Elastic?", }); console.log(response); ---- diff --git a/docs/doc_examples/bdb671866e2f0195f8dfbdb7f20bf591.asciidoc b/docs/doc_examples/bdb671866e2f0195f8dfbdb7f20bf591.asciidoc index b53c375b8..f758ada37 100644 --- a/docs/doc_examples/bdb671866e2f0195f8dfbdb7f20bf591.asciidoc +++ b/docs/doc_examples/bdb671866e2f0195f8dfbdb7f20bf591.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/sparse_embedding/my-elser-endpoint", - body: { +const response = await client.inference.put({ + task_type: "sparse_embedding", + inference_id: "my-elser-endpoint", + inference_config: { service: "elser", service_settings: { num_allocations: 1, diff --git a/docs/doc_examples/c00c9412609832ebceb9e786dd9542df.asciidoc b/docs/doc_examples/c00c9412609832ebceb9e786dd9542df.asciidoc index c5c37e0b5..f2b49ad8b 100644 --- a/docs/doc_examples/c00c9412609832ebceb9e786dd9542df.asciidoc +++ b/docs/doc_examples/c00c9412609832ebceb9e786dd9542df.asciidoc @@ -3,13 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_connector/my-connector/_name", - body: { - name: "Custom connector", - description: "This is my customized connector", - }, +const response = await client.connector.updateName({ + connector_id: "my-connector", + name: "Custom 
connector", + description: "This is my customized connector", }); console.log(response); ---- diff --git a/docs/doc_examples/c0ddfb2e6315f5bcf0d3ef414b5bbed3.asciidoc b/docs/doc_examples/c0ddfb2e6315f5bcf0d3ef414b5bbed3.asciidoc index ad3b4d462..c0190ee1c 100644 --- a/docs/doc_examples/c0ddfb2e6315f5bcf0d3ef414b5bbed3.asciidoc +++ b/docs/doc_examples/c0ddfb2e6315f5bcf0d3ef414b5bbed3.asciidoc @@ -3,13 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_connector/my-spo-connector/_configuration", - body: { - values: { - secret_value: "foo-bar", - }, +const response = await client.connector.updateConfiguration({ + connector_id: "my-spo-connector", + values: { + secret_value: "foo-bar", }, }); console.log(response); diff --git a/docs/doc_examples/c18100d62ed31bc9e05f62900156e6a8.asciidoc b/docs/doc_examples/c18100d62ed31bc9e05f62900156e6a8.asciidoc index 3c99976f3..54f13ca9d 100644 --- a/docs/doc_examples/c18100d62ed31bc9e05f62900156e6a8.asciidoc +++ b/docs/doc_examples/c18100d62ed31bc9e05f62900156e6a8.asciidoc @@ -3,12 +3,8 @@ [source, js] ---- -const response = await client.transport.request({ - method: "GET", - path: "/_connector", - querystring: { - index_name: "search-google-drive", - }, +const response = await client.connector.list({ + index_name: "search-google-drive", }); console.log(response); ---- diff --git a/docs/doc_examples/c21eb4bc30087188241cbba6b6b89999.asciidoc b/docs/doc_examples/c21eb4bc30087188241cbba6b6b89999.asciidoc index 8c6ac6d07..c33aa6f65 100644 --- a/docs/doc_examples/c21eb4bc30087188241cbba6b6b89999.asciidoc +++ b/docs/doc_examples/c21eb4bc30087188241cbba6b6b89999.asciidoc @@ -3,12 +3,9 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_connector/my-connector/_service_type", - body: { - service_type: "sharepoint_online", - }, +const response = await client.connector.updateServiceType({ + connector_id: "my-connector", + service_type: 
"sharepoint_online", }); console.log(response); ---- diff --git a/docs/doc_examples/ce13afc0c976c5e1f424b58e0c97fd64.asciidoc b/docs/doc_examples/ce13afc0c976c5e1f424b58e0c97fd64.asciidoc index f33bbe266..e06ff8a73 100644 --- a/docs/doc_examples/ce13afc0c976c5e1f424b58e0c97fd64.asciidoc +++ b/docs/doc_examples/ce13afc0c976c5e1f424b58e0c97fd64.asciidoc @@ -3,16 +3,13 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_connector/my-connector", - body: { - index_name: "search-google-drive", - name: "My Connector", - description: "My Connector to sync data to Elastic index from Google Drive", - service_type: "google_drive", - language: "english", - }, +const response = await client.connector.put({ + connector_id: "my-connector", + index_name: "search-google-drive", + name: "My Connector", + description: "My Connector to sync data to Elastic index from Google Drive", + service_type: "google_drive", + language: "english", }); console.log(response); ---- diff --git a/docs/doc_examples/cedb56a71cc743d80263ce352bb21720.asciidoc b/docs/doc_examples/cedb56a71cc743d80263ce352bb21720.asciidoc index 1b8303420..c36f080a5 100644 --- a/docs/doc_examples/cedb56a71cc743d80263ce352bb21720.asciidoc +++ b/docs/doc_examples/cedb56a71cc743d80263ce352bb21720.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/sparse_embedding/my-elser-model", - body: { +const response = await client.inference.put({ + task_type: "sparse_embedding", + inference_id: "my-elser-model", + inference_config: { service: "elser", service_settings: { num_allocations: 1, diff --git a/docs/doc_examples/d03139a851888db53f8b7affd85eb495.asciidoc b/docs/doc_examples/d03139a851888db53f8b7affd85eb495.asciidoc index 6c02dd8e5..78a5a38bc 100644 --- a/docs/doc_examples/d03139a851888db53f8b7affd85eb495.asciidoc +++ b/docs/doc_examples/d03139a851888db53f8b7affd85eb495.asciidoc @@ -3,9 +3,8 @@ [source, 
js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_connector/my-connector/_check_in", +const response = await client.connector.checkIn({ + connector_id: "my-connector", }); console.log(response); ---- diff --git a/docs/doc_examples/d6a21afa4a94b9baa734eac430940bcf.asciidoc b/docs/doc_examples/d6a21afa4a94b9baa734eac430940bcf.asciidoc index 328c10112..17ba11227 100644 --- a/docs/doc_examples/d6a21afa4a94b9baa734eac430940bcf.asciidoc +++ b/docs/doc_examples/d6a21afa4a94b9baa734eac430940bcf.asciidoc @@ -3,13 +3,9 @@ [source, js] ---- -const response = await client.transport.request({ - method: "GET", - path: "/_connector", - querystring: { - from: "0", - size: "2", - }, +const response = await client.connector.list({ + from: 0, + size: 2, }); console.log(response); ---- diff --git a/docs/doc_examples/d7b61bfb6adb22986a43388b823894cc.asciidoc b/docs/doc_examples/d7b61bfb6adb22986a43388b823894cc.asciidoc index 2a932f6a2..3e0ce9910 100644 --- a/docs/doc_examples/d7b61bfb6adb22986a43388b823894cc.asciidoc +++ b/docs/doc_examples/d7b61bfb6adb22986a43388b823894cc.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/text_embedding/cohere_embeddings", - body: { +const response = await client.inference.put({ + task_type: "text_embedding", + inference_id: "cohere_embeddings", + inference_config: { service: "cohere", service_settings: { api_key: "", diff --git a/docs/doc_examples/e9fc47015922d51c2b05e502ce9c622e.asciidoc b/docs/doc_examples/e9fc47015922d51c2b05e502ce9c622e.asciidoc index 3638bed6c..7ec14029d 100644 --- a/docs/doc_examples/e9fc47015922d51c2b05e502ce9c622e.asciidoc +++ b/docs/doc_examples/e9fc47015922d51c2b05e502ce9c622e.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/completion/google_ai_studio_completion", - body: { +const response = await client.inference.put({ 
+ task_type: "completion", + inference_id: "google_ai_studio_completion", + inference_config: { service: "googleaistudio", service_settings: { api_key: "", diff --git a/docs/doc_examples/eb54506fbc71a7d250e86b22d0600114.asciidoc b/docs/doc_examples/eb54506fbc71a7d250e86b22d0600114.asciidoc index 87276bbd2..027c4ff88 100644 --- a/docs/doc_examples/eb54506fbc71a7d250e86b22d0600114.asciidoc +++ b/docs/doc_examples/eb54506fbc71a7d250e86b22d0600114.asciidoc @@ -3,12 +3,8 @@ [source, js] ---- -const response = await client.transport.request({ - method: "GET", - path: "/_connector", - querystring: { - service_type: "sharepoint_online,google_drive", - }, +const response = await client.connector.list({ + service_type: "sharepoint_online,google_drive", }); console.log(response); ---- diff --git a/docs/doc_examples/eb96d7dd5f3116a50f7a86b729f1a934.asciidoc b/docs/doc_examples/eb96d7dd5f3116a50f7a86b729f1a934.asciidoc index 236d76185..9f2187174 100644 --- a/docs/doc_examples/eb96d7dd5f3116a50f7a86b729f1a934.asciidoc +++ b/docs/doc_examples/eb96d7dd5f3116a50f7a86b729f1a934.asciidoc @@ -3,15 +3,12 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_connector/my-connector/_scheduling", - body: { - scheduling: { - full: { - enabled: true, - interval: "0 10 0 * * ?", - }, +const response = await client.connector.updateScheduling({ + connector_id: "my-connector", + scheduling: { + full: { + enabled: true, + interval: "0 10 0 * * ?", }, }, }); diff --git a/docs/doc_examples/ecfd0d94dd14ef05dfa861f22544b388.asciidoc b/docs/doc_examples/ecfd0d94dd14ef05dfa861f22544b388.asciidoc index 2c69648f6..3622004ac 100644 --- a/docs/doc_examples/ecfd0d94dd14ef05dfa861f22544b388.asciidoc +++ b/docs/doc_examples/ecfd0d94dd14ef05dfa861f22544b388.asciidoc @@ -3,12 +3,9 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_connector/my-connector/_error", - body: { - error: "Houston, we have a problem!", - }, 
+const response = await client.connector.updateError({ + connector_id: "my-connector", + error: "Houston, we have a problem!", }); console.log(response); ---- diff --git a/docs/doc_examples/eee6110831c08b9c1b3f56b24656e95b.asciidoc b/docs/doc_examples/eee6110831c08b9c1b3f56b24656e95b.asciidoc index 2c82d4a70..d414b928c 100644 --- a/docs/doc_examples/eee6110831c08b9c1b3f56b24656e95b.asciidoc +++ b/docs/doc_examples/eee6110831c08b9c1b3f56b24656e95b.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/text_embedding/hugging-face-embeddings", - body: { +const response = await client.inference.put({ + task_type: "text_embedding", + inference_id: "hugging-face-embeddings", + inference_config: { service: "hugging_face", service_settings: { api_key: "", diff --git a/docs/doc_examples/f1b24217b1d9ba6ea5e4fa6e6f412022.asciidoc b/docs/doc_examples/f1b24217b1d9ba6ea5e4fa6e6f412022.asciidoc index a73c47467..023d009ab 100644 --- a/docs/doc_examples/f1b24217b1d9ba6ea5e4fa6e6f412022.asciidoc +++ b/docs/doc_examples/f1b24217b1d9ba6ea5e4fa6e6f412022.asciidoc @@ -3,13 +3,11 @@ [source, js] ---- -const response = await client.transport.request({ - method: "POST", - path: "/_inference/rerank/cohere_rerank", - body: { - input: ["luke", "like", "leia", "chewy", "r2d2", "star", "wars"], - query: "star wars main character", - }, +const response = await client.inference.inference({ + task_type: "rerank", + inference_id: "cohere_rerank", + input: ["luke", "like", "leia", "chewy", "r2d2", "star", "wars"], + query: "star wars main character", }); console.log(response); ---- diff --git a/docs/doc_examples/f2a5f77f929cc7b893b80f4bd5b1a192.asciidoc b/docs/doc_examples/f2a5f77f929cc7b893b80f4bd5b1a192.asciidoc index a65ee1fe2..ab808a3c4 100644 --- a/docs/doc_examples/f2a5f77f929cc7b893b80f4bd5b1a192.asciidoc +++ b/docs/doc_examples/f2a5f77f929cc7b893b80f4bd5b1a192.asciidoc @@ -3,9 +3,8 @@ [source, js] ---- -const 
response = await client.transport.request({ - method: "GET", - path: "/_connector/my-connector", +const response = await client.connector.get({ + connector_id: "my-connector", }); console.log(response); ---- diff --git a/docs/doc_examples/f57ce7de0946e9416ddb9150e95f4b74.asciidoc b/docs/doc_examples/f57ce7de0946e9416ddb9150e95f4b74.asciidoc index 1fce2d137..1b2520793 100644 --- a/docs/doc_examples/f57ce7de0946e9416ddb9150e95f4b74.asciidoc +++ b/docs/doc_examples/f57ce7de0946e9416ddb9150e95f4b74.asciidoc @@ -3,10 +3,10 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_inference/completion/azure_openai_completion", - body: { +const response = await client.inference.put({ + task_type: "completion", + inference_id: "azure_openai_completion", + inference_config: { service: "azureopenai", service_settings: { api_key: "", diff --git a/docs/doc_examples/fd620f09dbce62c6f0f603a366623607.asciidoc b/docs/doc_examples/fd620f09dbce62c6f0f603a366623607.asciidoc index 1131a4d95..5fe2c6fd7 100644 --- a/docs/doc_examples/fd620f09dbce62c6f0f603a366623607.asciidoc +++ b/docs/doc_examples/fd620f09dbce62c6f0f603a366623607.asciidoc @@ -3,19 +3,16 @@ [source, js] ---- -const response = await client.transport.request({ - method: "PUT", - path: "/_connector/my-sql-connector/_filtering", - body: { - advanced_snippet: { - value: [ - { - tables: ["users", "orders"], - query: - "SELECT users.id AS id, orders.order_id AS order_id FROM users JOIN orders ON users.id = orders.user_id", - }, - ], - }, +const response = await client.connector.updateFiltering({ + connector_id: "my-sql-connector", + advanced_snippet: { + value: [ + { + tables: ["users", "orders"], + query: + "SELECT users.id AS id, orders.order_id AS order_id FROM users JOIN orders ON users.id = orders.user_id", + }, + ], }, }); console.log(response); diff --git a/docs/reference.asciidoc b/docs/reference.asciidoc index 722b880da..1fdae1564 100644 --- a/docs/reference.asciidoc +++ 
b/docs/reference.asciidoc @@ -5667,12 +5667,20 @@ client.inference.put({ inference_id }) === ingest [discrete] ==== delete_geoip_database -Deletes a geoip database configuration +Deletes a geoip database configuration. [source,ts] ---- -client.ingest.deleteGeoipDatabase() +client.ingest.deleteGeoipDatabase({ id }) ---- +[discrete] +==== Arguments + +* *Request (object):* +** *`id` (string | string[])*: A list of geoip database configurations to delete +** *`master_timeout` (Optional, string | -1 | 0)*: Period to wait for a connection to the master node. +If no response is received before the timeout expires, the request fails and returns an error. +** *`timeout` (Optional, string | -1 | 0)*: Period to wait for a response. If no response is received before the timeout expires, the request fails and returns an error. [discrete] ==== delete_pipeline @@ -5708,12 +5716,21 @@ client.ingest.geoIpStats() [discrete] ==== get_geoip_database -Returns geoip database configuration. +Returns information about one or more geoip database configurations. [source,ts] ---- -client.ingest.getGeoipDatabase() +client.ingest.getGeoipDatabase({ ... }) ---- +[discrete] +==== Arguments + +* *Request (object):* +** *`id` (Optional, string | string[])*: List of database configuration IDs to retrieve. +Wildcard (`*`) expressions are supported. +To get all database configurations, omit this parameter or use `*`. +** *`master_timeout` (Optional, string | -1 | 0)*: Period to wait for a connection to the master node. +If no response is received before the timeout expires, the request fails and returns an error. [discrete] ==== get_pipeline @@ -5752,12 +5769,23 @@ client.ingest.processorGrok() [discrete] ==== put_geoip_database -Puts the configuration for a geoip database to be downloaded +Creates or updates a geoip database configuration.
[source,ts] ---- -client.ingest.putGeoipDatabase() +client.ingest.putGeoipDatabase({ id, name, maxmind }) ---- +[discrete] +==== Arguments + +* *Request (object):* +** *`id` (string)*: ID of the database configuration to create or update. +** *`name` (string)*: The provider-assigned name of the IP geolocation database to download. +** *`maxmind` ({ account_id })*: The configuration necessary to identify which IP geolocation provider to use to download the database, as well as any provider-specific configuration necessary for such downloading. +At present, the only supported provider is maxmind, and the maxmind provider requires that an account_id (string) is configured. +** *`master_timeout` (Optional, string | -1 | 0)*: Period to wait for a connection to the master node. +If no response is received before the timeout expires, the request fails and returns an error. +** *`timeout` (Optional, string | -1 | 0)*: Period to wait for a response. If no response is received before the timeout expires, the request fails and returns an error. [discrete] ==== put_pipeline @@ -5777,8 +5805,8 @@ client.ingest.putPipeline({ id }) ** *`id` (string)*: ID of the ingest pipeline to create or update. ** *`_meta` (Optional, Record)*: Optional metadata about the ingest pipeline. May have any contents. This map is not automatically generated by Elasticsearch. ** *`description` (Optional, string)*: Description of the ingest pipeline. -** *`on_failure` (Optional, { append, attachment, bytes, circle, convert, csv, date, date_index_name, dissect, dot_expander, drop, enrich, fail, foreach, geoip, grok, gsub, inference, join, json, kv, lowercase, pipeline, remove, rename, reroute, script, set, set_security_user, sort, split, trim, uppercase, urldecode, user_agent }[])*: Processors to run immediately after a processor failure. Each processor supports a processor-level `on_failure` value. 
If a processor without an `on_failure` value fails, Elasticsearch uses this pipeline-level parameter as a fallback. The processors in this parameter run sequentially in the order specified. Elasticsearch will not attempt to run the pipeline's remaining processors. -** *`processors` (Optional, { append, attachment, bytes, circle, convert, csv, date, date_index_name, dissect, dot_expander, drop, enrich, fail, foreach, geoip, grok, gsub, inference, join, json, kv, lowercase, pipeline, remove, rename, reroute, script, set, set_security_user, sort, split, trim, uppercase, urldecode, user_agent }[])*: Processors used to perform transformations on documents before indexing. Processors run sequentially in the order specified. +** *`on_failure` (Optional, { append, attachment, bytes, circle, convert, csv, date, date_index_name, dissect, dot_expander, drop, enrich, fail, foreach, geoip, grok, gsub, html_strip, inference, join, json, kv, lowercase, pipeline, remove, rename, reroute, script, set, set_security_user, sort, split, trim, uppercase, urldecode, uri_parts, user_agent }[])*: Processors to run immediately after a processor failure. Each processor supports a processor-level `on_failure` value. If a processor without an `on_failure` value fails, Elasticsearch uses this pipeline-level parameter as a fallback. The processors in this parameter run sequentially in the order specified. Elasticsearch will not attempt to run the pipeline's remaining processors. +** *`processors` (Optional, { append, attachment, bytes, circle, convert, csv, date, date_index_name, dissect, dot_expander, drop, enrich, fail, foreach, geoip, grok, gsub, html_strip, inference, join, json, kv, lowercase, pipeline, remove, rename, reroute, script, set, set_security_user, sort, split, trim, uppercase, urldecode, uri_parts, user_agent }[])*: Processors used to perform transformations on documents before indexing. Processors run sequentially in the order specified. 
** *`version` (Optional, number)*: Version number used by external systems to track ingest pipelines. This parameter is intended for external systems only. Elasticsearch does not use or validate pipeline version numbers. ** *`master_timeout` (Optional, string | -1 | 0)*: Period to wait for a connection to the master node. If no response is received before the timeout expires, the request fails and returns an error. ** *`timeout` (Optional, string | -1 | 0)*: Period to wait for a response. If no response is received before the timeout expires, the request fails and returns an error. @@ -5791,16 +5819,16 @@ Executes an ingest pipeline against a set of provided documents. {ref}/simulate-pipeline-api.html[Endpoint documentation] [source,ts] ---- -client.ingest.simulate({ ... }) +client.ingest.simulate({ docs }) ---- [discrete] ==== Arguments * *Request (object):* +** *`docs` ({ _id, _index, _source }[])*: Sample documents to test in the pipeline. ** *`id` (Optional, string)*: Pipeline to test. If you don’t specify a `pipeline` in the request body, this parameter is required. -** *`docs` (Optional, { _id, _index, _source }[])*: Sample documents to test in the pipeline. ** *`pipeline` (Optional, { description, on_failure, processors, version, _meta })*: Pipeline to test. If you don’t specify the `pipeline` request path parameter, this parameter is required. If you specify both this and the request path parameter, the API only uses the request path parameter. diff --git a/src/api/api/ingest.ts b/src/api/api/ingest.ts index b332c6279..122680b7d 100644 --- a/src/api/api/ingest.ts +++ b/src/api/api/ingest.ts @@ -45,22 +45,22 @@ export default class Ingest { } /** - * Deletes a geoip database configuration + * Deletes a geoip database configuration. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/TODO.html | Elasticsearch API documentation} */ - async deleteGeoipDatabase (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithOutMeta): Promise - async deleteGeoipDatabase (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithMeta): Promise> - async deleteGeoipDatabase (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise - async deleteGeoipDatabase (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise { + async deleteGeoipDatabase (this: That, params: T.IngestDeleteGeoipDatabaseRequest | TB.IngestDeleteGeoipDatabaseRequest, options?: TransportRequestOptionsWithOutMeta): Promise + async deleteGeoipDatabase (this: That, params: T.IngestDeleteGeoipDatabaseRequest | TB.IngestDeleteGeoipDatabaseRequest, options?: TransportRequestOptionsWithMeta): Promise> + async deleteGeoipDatabase (this: That, params: T.IngestDeleteGeoipDatabaseRequest | TB.IngestDeleteGeoipDatabaseRequest, options?: TransportRequestOptions): Promise + async deleteGeoipDatabase (this: That, params: T.IngestDeleteGeoipDatabaseRequest | TB.IngestDeleteGeoipDatabaseRequest, options?: TransportRequestOptions): Promise { const acceptedPath: string[] = ['id'] const querystring: Record = {} const body = undefined - params = params ?? {} for (const key in params) { if (acceptedPath.includes(key)) { continue } else if (key !== 'body') { + // @ts-expect-error querystring[key] = params[key] } } @@ -139,13 +139,13 @@ export default class Ingest { } /** - * Returns geoip database configuration. + * Returns information about one or more geoip database configurations. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/TODO.html | Elasticsearch API documentation} */ - async getGeoipDatabase (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithOutMeta): Promise - async getGeoipDatabase (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithMeta): Promise> - async getGeoipDatabase (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise - async getGeoipDatabase (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise { + async getGeoipDatabase (this: That, params?: T.IngestGetGeoipDatabaseRequest | TB.IngestGetGeoipDatabaseRequest, options?: TransportRequestOptionsWithOutMeta): Promise + async getGeoipDatabase (this: That, params?: T.IngestGetGeoipDatabaseRequest | TB.IngestGetGeoipDatabaseRequest, options?: TransportRequestOptionsWithMeta): Promise> + async getGeoipDatabase (this: That, params?: T.IngestGetGeoipDatabaseRequest | TB.IngestGetGeoipDatabaseRequest, options?: TransportRequestOptions): Promise + async getGeoipDatabase (this: That, params?: T.IngestGetGeoipDatabaseRequest | TB.IngestGetGeoipDatabaseRequest, options?: TransportRequestOptions): Promise { const acceptedPath: string[] = ['id'] const querystring: Record = {} const body = undefined @@ -155,6 +155,7 @@ export default class Ingest { if (acceptedPath.includes(key)) { continue } else if (key !== 'body') { + // @ts-expect-error querystring[key] = params[key] } } @@ -248,22 +249,34 @@ export default class Ingest { } /** - * Puts the configuration for a geoip database to be downloaded + * Creates or updates a geoip database configuration.
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/TODO.html | Elasticsearch API documentation} */ - async putGeoipDatabase (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithOutMeta): Promise - async putGeoipDatabase (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithMeta): Promise> - async putGeoipDatabase (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise - async putGeoipDatabase (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise { + async putGeoipDatabase (this: That, params: T.IngestPutGeoipDatabaseRequest | TB.IngestPutGeoipDatabaseRequest, options?: TransportRequestOptionsWithOutMeta): Promise + async putGeoipDatabase (this: That, params: T.IngestPutGeoipDatabaseRequest | TB.IngestPutGeoipDatabaseRequest, options?: TransportRequestOptionsWithMeta): Promise> + async putGeoipDatabase (this: That, params: T.IngestPutGeoipDatabaseRequest | TB.IngestPutGeoipDatabaseRequest, options?: TransportRequestOptions): Promise + async putGeoipDatabase (this: That, params: T.IngestPutGeoipDatabaseRequest | TB.IngestPutGeoipDatabaseRequest, options?: TransportRequestOptions): Promise { const acceptedPath: string[] = ['id'] + const acceptedBody: string[] = ['name', 'maxmind'] const querystring: Record = {} - const body = undefined + // @ts-expect-error + const userBody: any = params?.body + let body: Record | string + if (typeof userBody === 'string') { + body = userBody + } else { + body = userBody != null ? { ...userBody } : undefined + } - params = params ?? {} for (const key in params) { - if (acceptedPath.includes(key)) { + if (acceptedBody.includes(key)) { + body = body ?? 
{} + // @ts-expect-error + body[key] = params[key] + } else if (acceptedPath.includes(key)) { continue } else if (key !== 'body') { + // @ts-expect-error querystring[key] = params[key] } } @@ -327,10 +340,10 @@ export default class Ingest { * Executes an ingest pipeline against a set of provided documents. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/simulate-pipeline-api.html | Elasticsearch API documentation} */ - async simulate (this: That, params?: T.IngestSimulateRequest | TB.IngestSimulateRequest, options?: TransportRequestOptionsWithOutMeta): Promise - async simulate (this: That, params?: T.IngestSimulateRequest | TB.IngestSimulateRequest, options?: TransportRequestOptionsWithMeta): Promise> - async simulate (this: That, params?: T.IngestSimulateRequest | TB.IngestSimulateRequest, options?: TransportRequestOptions): Promise - async simulate (this: That, params?: T.IngestSimulateRequest | TB.IngestSimulateRequest, options?: TransportRequestOptions): Promise { + async simulate (this: That, params: T.IngestSimulateRequest | TB.IngestSimulateRequest, options?: TransportRequestOptionsWithOutMeta): Promise + async simulate (this: That, params: T.IngestSimulateRequest | TB.IngestSimulateRequest, options?: TransportRequestOptionsWithMeta): Promise> + async simulate (this: That, params: T.IngestSimulateRequest | TB.IngestSimulateRequest, options?: TransportRequestOptions): Promise + async simulate (this: That, params: T.IngestSimulateRequest | TB.IngestSimulateRequest, options?: TransportRequestOptions): Promise { const acceptedPath: string[] = ['id'] const acceptedBody: string[] = ['docs', 'pipeline'] const querystring: Record = {} @@ -343,7 +356,6 @@ export default class Ingest { body = userBody != null ? { ...userBody } : undefined } - params = params ?? {} for (const key in params) { if (acceptedBody.includes(key)) { body = body ?? 
{} diff --git a/src/api/types.ts b/src/api/types.ts index 1c0e9d1a0..9c656edef 100644 --- a/src/api/types.ts +++ b/src/api/types.ts @@ -1117,7 +1117,7 @@ export interface RenderSearchTemplateResponse { export interface ScriptsPainlessExecutePainlessContextSetup { document: any index: IndexName - query: QueryDslQueryContainer + query?: QueryDslQueryContainer } export interface ScriptsPainlessExecuteRequest extends RequestBase { @@ -4836,11 +4836,11 @@ export type AnalysisPhoneticRuleType = 'approx' | 'exact' export interface AnalysisPhoneticTokenFilter extends AnalysisTokenFilterBase { type: 'phonetic' encoder: AnalysisPhoneticEncoder - languageset: AnalysisPhoneticLanguage | AnalysisPhoneticLanguage[] + languageset?: AnalysisPhoneticLanguage | AnalysisPhoneticLanguage[] max_code_len?: integer - name_type: AnalysisPhoneticNameType + name_type?: AnalysisPhoneticNameType replace?: boolean - rule_type: AnalysisPhoneticRuleType + rule_type?: AnalysisPhoneticRuleType } export interface AnalysisPorterStemTokenFilter extends AnalysisTokenFilterBase { @@ -12445,6 +12445,11 @@ export interface IngestCsvProcessor extends IngestProcessorBase { trim?: boolean } +export interface IngestDatabaseConfiguration { + name: Name + maxmind: IngestMaxmind +} + export interface IngestDateIndexNameProcessor extends IngestProcessorBase { date_formats: string[] date_rounding: string @@ -12523,6 +12528,12 @@ export interface IngestGsubProcessor extends IngestProcessorBase { target_field?: Field } +export interface IngestHtmlStripProcessor extends IngestProcessorBase { + field: Field + ignore_missing?: boolean + target_field?: Field +} + export interface IngestInferenceConfig { regression?: IngestInferenceConfigRegression classification?: IngestInferenceConfigClassification @@ -12584,6 +12595,10 @@ export interface IngestLowercaseProcessor extends IngestProcessorBase { target_field?: Field } +export interface IngestMaxmind { + account_id: Id +} + export interface IngestPipeline { description?: 
string on_failure?: IngestProcessorContainer[] @@ -12629,6 +12644,7 @@ export interface IngestProcessorContainer { geoip?: IngestGeoIpProcessor grok?: IngestGrokProcessor gsub?: IngestGsubProcessor + html_strip?: IngestHtmlStripProcessor inference?: IngestInferenceProcessor join?: IngestJoinProcessor json?: IngestJsonProcessor @@ -12646,6 +12662,7 @@ export interface IngestProcessorContainer { trim?: IngestTrimProcessor uppercase?: IngestUppercaseProcessor urldecode?: IngestUrlDecodeProcessor + uri_parts?: IngestUriPartsProcessor user_agent?: IngestUserAgentProcessor } @@ -12716,6 +12733,14 @@ export interface IngestUppercaseProcessor extends IngestProcessorBase { target_field?: Field } +export interface IngestUriPartsProcessor extends IngestProcessorBase { + field: Field + ignore_missing?: boolean + keep_original?: boolean + remove_if_successful?: boolean + target_field?: Field +} + export interface IngestUrlDecodeProcessor extends IngestProcessorBase { field: Field ignore_missing?: boolean @@ -12732,6 +12757,14 @@ export interface IngestUserAgentProcessor extends IngestProcessorBase { export type IngestUserAgentProperty = 'NAME' | 'MAJOR' | 'MINOR' | 'PATCH' | 'OS' | 'OS_NAME' | 'OS_MAJOR' | 'OS_MINOR' | 'DEVICE' | 'BUILD' +export interface IngestDeleteGeoipDatabaseRequest extends RequestBase { + id: Ids + master_timeout?: Duration + timeout?: Duration +} + +export type IngestDeleteGeoipDatabaseResponse = AcknowledgedResponseBase + export interface IngestDeletePipelineRequest extends RequestBase { id: Id master_timeout?: Duration @@ -12744,8 +12777,9 @@ export interface IngestGeoIpStatsGeoIpDownloadStatistics { successful_downloads: integer failed_downloads: integer total_download_time: DurationValue - database_count: integer + databases_count: integer skipped_updates: integer + expired_databases: integer } export interface IngestGeoIpStatsGeoIpNodeDatabaseName { @@ -12765,6 +12799,22 @@ export interface IngestGeoIpStatsResponse { nodes: Record } +export 
interface IngestGetGeoipDatabaseDatabaseConfigurationMetadata { + id: Id + version: long + modified_date_millis: EpochTime + database: IngestDatabaseConfiguration +} + +export interface IngestGetGeoipDatabaseRequest extends RequestBase { + id?: Ids + master_timeout?: Duration +} + +export interface IngestGetGeoipDatabaseResponse { + databases: IngestGetGeoipDatabaseDatabaseConfigurationMetadata[] +} + export interface IngestGetPipelineRequest extends RequestBase { id?: Id master_timeout?: Duration @@ -12780,6 +12830,16 @@ export interface IngestProcessorGrokResponse { patterns: Record } +export interface IngestPutGeoipDatabaseRequest extends RequestBase { + id: Id + master_timeout?: Duration + timeout?: Duration + name: Name + maxmind: IngestMaxmind +} + +export type IngestPutGeoipDatabaseResponse = AcknowledgedResponseBase + export interface IngestPutPipelineRequest extends RequestBase { id: Id master_timeout?: Duration @@ -12819,21 +12879,29 @@ export interface IngestSimulateIngest { export interface IngestSimulatePipelineSimulation { doc?: IngestSimulateDocumentSimulation - processor_results?: IngestSimulatePipelineSimulation[] tag?: string processor_type?: string status?: WatcherActionStatusOptions + description?: string + ignored_error?: ErrorCause + error?: ErrorCause } export interface IngestSimulateRequest extends RequestBase { id?: Id verbose?: boolean - docs?: IngestSimulateDocument[] + docs: IngestSimulateDocument[] pipeline?: IngestPipeline } export interface IngestSimulateResponse { - docs: IngestSimulatePipelineSimulation[] + docs: IngestSimulateSimulateDocumentResult[] +} + +export interface IngestSimulateSimulateDocumentResult { + doc?: IngestSimulateDocumentSimulation + error?: ErrorCause + processor_results?: IngestSimulatePipelineSimulation[] } export interface LicenseLicense { diff --git a/src/api/typesWithBodyKey.ts b/src/api/typesWithBodyKey.ts index 79fac3c04..655ba221f 100644 --- a/src/api/typesWithBodyKey.ts +++ 
b/src/api/typesWithBodyKey.ts @@ -1162,7 +1162,7 @@ export interface RenderSearchTemplateResponse { export interface ScriptsPainlessExecutePainlessContextSetup { document: any index: IndexName - query: QueryDslQueryContainer + query?: QueryDslQueryContainer } export interface ScriptsPainlessExecuteRequest extends RequestBase { @@ -4909,11 +4909,11 @@ export type AnalysisPhoneticRuleType = 'approx' | 'exact' export interface AnalysisPhoneticTokenFilter extends AnalysisTokenFilterBase { type: 'phonetic' encoder: AnalysisPhoneticEncoder - languageset: AnalysisPhoneticLanguage | AnalysisPhoneticLanguage[] + languageset?: AnalysisPhoneticLanguage | AnalysisPhoneticLanguage[] max_code_len?: integer - name_type: AnalysisPhoneticNameType + name_type?: AnalysisPhoneticNameType replace?: boolean - rule_type: AnalysisPhoneticRuleType + rule_type?: AnalysisPhoneticRuleType } export interface AnalysisPorterStemTokenFilter extends AnalysisTokenFilterBase { @@ -12670,6 +12670,11 @@ export interface IngestCsvProcessor extends IngestProcessorBase { trim?: boolean } +export interface IngestDatabaseConfiguration { + name: Name + maxmind: IngestMaxmind +} + export interface IngestDateIndexNameProcessor extends IngestProcessorBase { date_formats: string[] date_rounding: string @@ -12748,6 +12753,12 @@ export interface IngestGsubProcessor extends IngestProcessorBase { target_field?: Field } +export interface IngestHtmlStripProcessor extends IngestProcessorBase { + field: Field + ignore_missing?: boolean + target_field?: Field +} + export interface IngestInferenceConfig { regression?: IngestInferenceConfigRegression classification?: IngestInferenceConfigClassification @@ -12809,6 +12820,10 @@ export interface IngestLowercaseProcessor extends IngestProcessorBase { target_field?: Field } +export interface IngestMaxmind { + account_id: Id +} + export interface IngestPipeline { description?: string on_failure?: IngestProcessorContainer[] @@ -12854,6 +12869,7 @@ export interface 
IngestProcessorContainer { geoip?: IngestGeoIpProcessor grok?: IngestGrokProcessor gsub?: IngestGsubProcessor + html_strip?: IngestHtmlStripProcessor inference?: IngestInferenceProcessor join?: IngestJoinProcessor json?: IngestJsonProcessor @@ -12871,6 +12887,7 @@ export interface IngestProcessorContainer { trim?: IngestTrimProcessor uppercase?: IngestUppercaseProcessor urldecode?: IngestUrlDecodeProcessor + uri_parts?: IngestUriPartsProcessor user_agent?: IngestUserAgentProcessor } @@ -12941,6 +12958,14 @@ export interface IngestUppercaseProcessor extends IngestProcessorBase { target_field?: Field } +export interface IngestUriPartsProcessor extends IngestProcessorBase { + field: Field + ignore_missing?: boolean + keep_original?: boolean + remove_if_successful?: boolean + target_field?: Field +} + export interface IngestUrlDecodeProcessor extends IngestProcessorBase { field: Field ignore_missing?: boolean @@ -12957,6 +12982,14 @@ export interface IngestUserAgentProcessor extends IngestProcessorBase { export type IngestUserAgentProperty = 'NAME' | 'MAJOR' | 'MINOR' | 'PATCH' | 'OS' | 'OS_NAME' | 'OS_MAJOR' | 'OS_MINOR' | 'DEVICE' | 'BUILD' +export interface IngestDeleteGeoipDatabaseRequest extends RequestBase { + id: Ids + master_timeout?: Duration + timeout?: Duration +} + +export type IngestDeleteGeoipDatabaseResponse = AcknowledgedResponseBase + export interface IngestDeletePipelineRequest extends RequestBase { id: Id master_timeout?: Duration @@ -12969,8 +13002,9 @@ export interface IngestGeoIpStatsGeoIpDownloadStatistics { successful_downloads: integer failed_downloads: integer total_download_time: DurationValue - database_count: integer + databases_count: integer skipped_updates: integer + expired_databases: integer } export interface IngestGeoIpStatsGeoIpNodeDatabaseName { @@ -12990,6 +13024,22 @@ export interface IngestGeoIpStatsResponse { nodes: Record } +export interface IngestGetGeoipDatabaseDatabaseConfigurationMetadata { + id: Id + version: long + 
modified_date_millis: EpochTime + database: IngestDatabaseConfiguration +} + +export interface IngestGetGeoipDatabaseRequest extends RequestBase { + id?: Ids + master_timeout?: Duration +} + +export interface IngestGetGeoipDatabaseResponse { + databases: IngestGetGeoipDatabaseDatabaseConfigurationMetadata[] +} + export interface IngestGetPipelineRequest extends RequestBase { id?: Id master_timeout?: Duration @@ -13005,6 +13055,19 @@ export interface IngestProcessorGrokResponse { patterns: Record } +export interface IngestPutGeoipDatabaseRequest extends RequestBase { + id: Id + master_timeout?: Duration + timeout?: Duration + /** @deprecated The use of the 'body' key has been deprecated, move the nested keys to the top level object. */ + body?: { + name: Name + maxmind: IngestMaxmind + } +} + +export type IngestPutGeoipDatabaseResponse = AcknowledgedResponseBase + export interface IngestPutPipelineRequest extends RequestBase { id: Id master_timeout?: Duration @@ -13047,10 +13110,12 @@ export interface IngestSimulateIngest { export interface IngestSimulatePipelineSimulation { doc?: IngestSimulateDocumentSimulation - processor_results?: IngestSimulatePipelineSimulation[] tag?: string processor_type?: string status?: WatcherActionStatusOptions + description?: string + ignored_error?: ErrorCause + error?: ErrorCause } export interface IngestSimulateRequest extends RequestBase { @@ -13058,13 +13123,19 @@ export interface IngestSimulateRequest extends RequestBase { verbose?: boolean /** @deprecated The use of the 'body' key has been deprecated, move the nested keys to the top level object. 
*/ body?: { - docs?: IngestSimulateDocument[] + docs: IngestSimulateDocument[] pipeline?: IngestPipeline } } export interface IngestSimulateResponse { - docs: IngestSimulatePipelineSimulation[] + docs: IngestSimulateSimulateDocumentResult[] +} + +export interface IngestSimulateSimulateDocumentResult { + doc?: IngestSimulateDocumentSimulation + error?: ErrorCause + processor_results?: IngestSimulatePipelineSimulation[] } export interface LicenseLicense {